hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
bbd796481500436066b7a9986f82a3f35e610c78 | 116 | # typed: strict
# Sorbet-typed factory for A::AClass instances.
class BClass
  extend T::Sig

  # Builds and returns a fresh A::AClass on every call.
  sig { returns(A::AClass) }
  def get_a
    A::AClass.new
  end
end
| 10.545455 | 26 | 0.62931 |
f7e5428903fbd36bd81df030a699117ef6e012b3 | 426 | # Adding clusterization (http://apidock.com/rails/Enumerable/group_by)
# Adds run-length style clustering (http://apidock.com/rails/Enumerable/group_by)
module Enumerable
  # Groups runs of equal adjacent elements into sub-arrays.
  # >> [2,2,2,3,3,4,2,2,1].cluster
  # => [[2, 2, 2], [3, 3], [4], [2, 2], [1]]
  def cluster
    result = []
    each do |item|
      current_run = result.last
      if current_run && current_run.last == item
        current_run << item
      else
        result << [item]
      end
    end
    result
  end
end
| 23.666667 | 70 | 0.58216 |
b97101ed0ca01e9b9f65a04f118d47a0c842fc66 | 2,167 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Backstage
  # Rails application configuration; loaded once at boot.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    config.time_zone = 'Eastern Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.i18n.enforce_available_locales = true
    # Currently, Active Record suppresses errors raised within after_rollback or after_commit callbacks and only
    # prints them to the logs. In the next version, these errors will no longer be suppressed. Instead, the errors will
    # propagate normally just like in other Active Record callbacks.
    config.active_record.raise_in_transactional_callbacks = true
    # ActiveJob Queue Adapter
    # config.active_job.queue_adapter = :delayed_job
    # Generate Minitest specs (no fixtures) for new scaffolds.
    config.generators do |g|
      g.test_framework :minitest, spec: true, fixture: false
    end
    config.action_mailer.delivery_method = :smtp
    # Setting NO_EMAIL in the environment disables outgoing mail entirely.
    config.action_mailer.perform_deliveries = false if ENV['NO_EMAIL']
    # Paperclip attachments live on S3; credentials come from the environment.
    # '-strip' removes embedded metadata from every generated style.
    config.paperclip_defaults = {
      :storage => :s3,
      :s3_credentials => {
        access_key_id: ENV['AWS_ACCESS_KEY_ID'],
        secret_access_key: ENV['AWS_SECRET_KEY'],
        bucket: ENV['AWS_BUCKET']
      },
      :convert_options => { :all => '-strip' }
    }
    # Where Paperclip records the attachment styles it has generated.
    Paperclip.registered_attachments_styles_path = 'tmp/paperclip_attachments.yml'
  end
end
| 40.886792 | 119 | 0.702353 |
3381f089fee92f2d9ea5c3f5d5289214794d1dba | 1,514 | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "php_serializer/version"
Gem::Specification.new do |spec|
  spec.name = "php_serializer"
  spec.version = PhpSerializer::VERSION
  spec.authors = ["vijayrsv"]
  spec.email = ["[email protected]"]
  # Fix: summary was empty (%q{}), which triggers a "no summary specified"
  # warning from `gem build` and leaves the rubygems.org listing blank.
  spec.summary = %q{Native PHP serializer and unserializer for Ruby.}
  spec.description = %q{Native PHP serializer and unserializer(Note: currently only supports PHP primitive data-types) for Ruby and it is heavily inspired by PHP source code.}
  spec.homepage = "https://github.com/vijayrsv/php_serializer"
  spec.license = "MIT"
  spec.required_ruby_version = ">= 1.9.3"
  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  if spec.respond_to?(:metadata)
    spec.metadata["allowed_push_host"] = "https://rubygems.org"
  else
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end
  # Package every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  # Native extension compiled at install time via extconf.rb.
  spec.extensions = %w[ext/php_serializer/extconf.rb]
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.15"
  spec.add_development_dependency "rake", "~> 10.0"
end
| 39.842105 | 177 | 0.680317 |
ff3f67e28a20bada695bf4329caed17e0af3303e | 5,208 | class ElasticsearchAT68 < Formula
desc "Distributed search & analytics engine"
homepage "https://www.elastic.co/products/elasticsearch"
url "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-6.8.8.tar.gz"
sha256 "aa2d751ec449d70164338049e9596e77d6f6fed8ab47cd604663605031217364"
license "Apache-2.0"
bottle do
cellar :any_skip_relocation
sha256 "151ca735f37ec4456a1dd03e0280d940278d8543c98226f561386b169e7c03ee" => :catalina
sha256 "151ca735f37ec4456a1dd03e0280d940278d8543c98226f561386b169e7c03ee" => :mojave
sha256 "151ca735f37ec4456a1dd03e0280d940278d8543c98226f561386b169e7c03ee" => :high_sierra
end
keg_only :versioned_formula
depends_on :java => "1.8"
# Per-user cluster name (e.g. "elasticsearch_alice") so concurrent local
# users don't accidentally join each other's clusters.
def cluster_name
  format("elasticsearch_%s", ENV["USER"])
end
# Builds (HEAD builds only) and installs Elasticsearch into the keg,
# relocating its config directory to #{etc}/elasticsearch.
def install
  if build.head?
    # Build the package from source
    system "gradle", "clean", ":distribution:tar:assemble"
    # Extract the package to the tar directory
    mkdir "tar"
    cd "tar"
    system "tar", "--strip-components=1", "-xf",
      Dir["../distribution/tar/build/distributions/elasticsearch-*.tar.gz"].first
  end
  # Remove Windows files
  rm_f Dir["bin/*.bat"]
  rm_f Dir["bin/*.exe"]
  # Install everything else into package directory
  libexec.install "bin", "config", "lib", "modules"
  # Point ES_PATH_CONF at the Homebrew etc directory instead of the keg.
  inreplace libexec/"bin/elasticsearch-env",
    "if [ -z \"$ES_PATH_CONF\" ]; then ES_PATH_CONF=\"$ES_HOME\"/config; fi",
    "if [ -z \"$ES_PATH_CONF\" ]; then ES_PATH_CONF=\"#{etc}/elasticsearch\"; fi"
  # Set up Elasticsearch for local development:
  inreplace "#{libexec}/config/elasticsearch.yml" do |s|
    # 1. Give the cluster a unique name
    s.gsub!(/#\s*cluster\.name: .*/, "cluster.name: #{cluster_name}")
    # 2. Configure paths
    s.sub!(%r{#\s*path\.data: /path/to.+$}, "path.data: #{var}/lib/elasticsearch/")
    s.sub!(%r{#\s*path\.logs: /path/to.+$}, "path.logs: #{var}/log/elasticsearch/")
  end
  # Move config files into etc
  (etc/"elasticsearch").install Dir[libexec/"config/*"]
  (libexec/"config").rmtree
  bin.install libexec/"bin/elasticsearch",
    libexec/"bin/elasticsearch-keystore",
    libexec/"bin/elasticsearch-plugin",
    libexec/"bin/elasticsearch-translog"
  # Wrap the installed scripts so they always run under Java 1.8.
  bin.env_script_all_files(libexec/"bin", Language::Java.java_home_env("1.8"))
end
# Creates runtime directories and links shared config/plugin dirs into the
# keg, then initializes the keystore if it does not exist yet.
def post_install
  # Make sure runtime directories exist
  (var/"lib/elasticsearch").mkpath
  (var/"log/elasticsearch").mkpath
  ln_s etc/"elasticsearch", libexec/"config" unless (libexec/"config").exist?
  (var/"elasticsearch/plugins").mkpath
  ln_s var/"elasticsearch/plugins", libexec/"plugins" unless (libexec/"plugins").exist?
  # fix test not being able to create keystore because of sandbox permissions
  system bin/"elasticsearch-keystore", "create" unless (etc/"elasticsearch/elasticsearch.keystore").exist?
end
# Post-install message listing where this keg keeps its runtime files.
def caveats
  <<~EOS
    Data: #{var}/lib/elasticsearch/
    Logs: #{var}/log/elasticsearch/#{cluster_name}.log
    Plugins: #{var}/elasticsearch/plugins/
    Config: #{etc}/elasticsearch/
  EOS
end
plist_options :manual => "elasticsearch"
# launchd job definition: runs elasticsearch with #{var} as the working
# directory, logging stdout/stderr to #{var}/log/elasticsearch.log.
# Started at load (RunAtLoad) but not restarted on exit (KeepAlive false).
def plist
  <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>KeepAlive</key>
    <false/>
    <key>Label</key>
    <string>#{plist_name}</string>
    <key>ProgramArguments</key>
    <array>
    <string>#{opt_bin}/elasticsearch</string>
    </array>
    <key>EnvironmentVariables</key>
    <dict>
    </dict>
    <key>RunAtLoad</key>
    <true/>
    <key>WorkingDirectory</key>
    <string>#{var}</string>
    <key>StandardErrorPath</key>
    <string>#{var}/log/elasticsearch.log</string>
    <key>StandardOutPath</key>
    <string>#{var}/log/elasticsearch.log</string>
    </dict>
    </plist>
  EOS
end
test do
  # The formula must keep tracking the open-source (-oss-) distribution.
  assert_includes(stable.url, "-oss-")
  port = free_port
  system "#{bin}/elasticsearch-plugin", "list"
  pid = testpath/"pid"
  begin
    # Start a daemonized node on a free port and check it answers HTTP.
    system "#{bin}/elasticsearch", "-d", "-p", pid, "-Epath.data=#{testpath}/data", "-Ehttp.port=#{port}"
    sleep 10
    system "curl", "-XGET", "localhost:#{port}/"
  ensure
    Process.kill(9, pid.read.to_i)
  end
  port = free_port
  # Second run: verify ES_PATH_CONF with a custom config dir is honoured.
  (testpath/"config/elasticsearch.yml").write <<~EOS
    path.data: #{testpath}/data
    path.logs: #{testpath}/logs
    node.name: test-es-path-conf
    http.port: #{port}
  EOS
  cp etc/"elasticsearch/jvm.options", "config"
  cp etc/"elasticsearch/log4j2.properties", "config"
  ENV["ES_PATH_CONF"] = testpath/"config"
  pid = testpath/"pid"
  begin
    system "#{bin}/elasticsearch", "-d", "-p", pid
    sleep 10
    system "curl", "-XGET", "localhost:#{port}/"
    # The node name from the custom config must show up in the cluster.
    output = shell_output("curl -s -XGET localhost:#{port}/_cat/nodes")
    assert_match "test-es-path-conf", output
  ensure
    Process.kill(9, pid.read.to_i)
  end
end
end
| 32.962025 | 108 | 0.631144 |
260ee3d86d289b21db69f133d03083fe70a1b11b | 65 | module Serverless
# Namespace for the gem's local-mode code.
module Local
  # Gem release version (semantic versioning).
  VERSION = "0.1.0"
end
end
| 10.833333 | 21 | 0.661538 |
08d887ec5c885a296a310cf7804b2f0f5b610450 | 303 | cask :v1 => 'macintosh-explorer' do
version :latest
sha256 :no_check
url 'http://www.ragesw.com/downloads/ragesw/mac_explorer_alt.dmg'
homepage 'http://www.ragesw.com/products/explorer.html'
license :unknown # todo: improve this machine-generated value
app 'Macintosh Explorer.app'
end
| 27.545455 | 67 | 0.742574 |
f76990b915e89bccd6498843e666f0fc87d56b86 | 21,920 | require 'molinillo'
require 'cocoapods/podfile'
module Pod
class NoSpecFoundError < Informative
def exit_status
@exit_status ||= 31
end
end
# The resolver is responsible of generating a list of specifications grouped
# by target for a given Podfile.
#
class Resolver
require 'cocoapods/resolver/lazy_specification'
require 'cocoapods/resolver/resolver_specification'
# @return [Sandbox] the Sandbox used by the resolver to find external
# dependencies.
#
attr_reader :sandbox
# @return [Podfile] the Podfile used by the resolver.
#
attr_reader :podfile
# @return [Array<Dependency>] the list of dependencies locked to a specific
# version.
#
attr_reader :locked_dependencies
# @return [Array<Source>] The list of the sources which will be used for
# the resolution.
#
attr_reader :sources
# @return [Bool] Whether the resolver has sources repositories up-to-date.
#
attr_reader :specs_updated
alias specs_updated? specs_updated
# Init a new Resolver
#
# @param [Sandbox] sandbox @see sandbox
# @param [Podfile] podfile @see podfile
# @param [Array<Dependency>] locked_dependencies @see locked_dependencies
# @param [Array<Source>, Source] sources @see sources
# @param [Boolean] specs_updated @see specs_updated
# @param [PodfileDependencyCache] podfile_dependency_cache the podfile dependency cache to use
# within this Resolver.
#
def initialize(sandbox, podfile, locked_dependencies, sources, specs_updated,
podfile_dependency_cache: Installer::Analyzer::PodfileDependencyCache.from_podfile(podfile))
@sandbox = sandbox
@podfile = podfile
@locked_dependencies = locked_dependencies
@sources = Array(sources)
@specs_updated = specs_updated
@podfile_dependency_cache = podfile_dependency_cache
@platforms_by_dependency = Hash.new { |h, k| h[k] = [] }
@cached_sets = {}
@podfile_requirements_by_root_name = @podfile_dependency_cache.podfile_dependencies.group_by(&:root_name).each_value { |a| a.map!(&:requirement) }
@search = {}
@validated_platforms = Set.new
end
#-------------------------------------------------------------------------#
public
# @!group Resolution
# Identifies the specifications that should be installed.
#
# @return [Hash{TargetDefinition => Array<ResolverSpecification>}] resolver_specs_by_target
# the resolved specifications that need to be installed grouped by target
# definition.
#
def resolve
dependencies = @podfile_dependency_cache.target_definition_list.flat_map do |target|
@podfile_dependency_cache.target_definition_dependencies(target).each do |dep|
next unless target.platform
@platforms_by_dependency[dep].push(target.platform)
end
end
@platforms_by_dependency.each_value(&:uniq!)
@activated = Molinillo::Resolver.new(self, self).resolve(dependencies, locked_dependencies)
resolver_specs_by_target
rescue Molinillo::ResolverError => e
handle_resolver_error(e)
end
# @return [Hash{Podfile::TargetDefinition => Array<ResolverSpecification>}]
# returns the resolved specifications grouped by target.
#
# @note The returned specifications can be subspecs.
#
def resolver_specs_by_target
@resolver_specs_by_target ||= {}.tap do |resolver_specs_by_target|
@podfile_dependency_cache.target_definition_list.each do |target|
# can't use vertex.root? since that considers _all_ targets
explicit_dependencies = @podfile_dependency_cache.target_definition_dependencies(target).map(&:name).to_set
vertices = valid_dependencies_for_target(target)
resolver_specs_by_target[target] = vertices.
map do |vertex|
payload = vertex.payload
non_library = (!explicit_dependencies.include?(vertex.name) || payload.test_specification? || payload.app_specification?) &&
(vertex.recursive_predecessors & vertices).all? { |v| !explicit_dependencies.include?(v.name) || v.payload.test_specification? }
spec_source = payload.respond_to?(:spec_source) && payload.spec_source
ResolverSpecification.new(payload, non_library, spec_source)
end.
sort_by(&:name)
end
end
end
#-------------------------------------------------------------------------#
public
# @!group Specification Provider
include Molinillo::SpecificationProvider
# Returns (and caches) the specification that satisfy the given dependency.
#
# @return [Array<Specification>] the specifications that satisfy the given
# `dependency`.
#
# @param [Dependency] dependency the dependency that is being searched for.
#
def search_for(dependency)
@search[dependency] ||= begin
locked_requirement = requirement_for_locked_pod_named(dependency.name)
podfile_deps = Array(@podfile_requirements_by_root_name[dependency.root_name])
podfile_deps << locked_requirement if locked_requirement
specifications_for_dependency(dependency, podfile_deps)
end
@search[dependency].dup
end
# Returns the dependencies of `specification`.
#
# @return [Array<Specification>] all dependencies of `specification`.
#
# @param [Specification] specification the specification whose own
# dependencies are being asked for.
#
def dependencies_for(specification)
specification.all_dependencies.map do |dependency|
if dependency.root_name == Specification.root_name(specification.name)
dependency.dup.tap { |d| d.specific_version = specification.version }
else
dependency
end
end
end
# Returns the name for the given `dependency`.
#
# @return [String] the name for the given `dependency`.
#
# @param [Dependency] dependency the dependency whose name is being
# queried.
#
def name_for(dependency)
dependency.name
end
# @return [String] the user-facing name for a {Podfile}.
#
def name_for_explicit_dependency_source
'Podfile'
end
# @return [String] the user-facing name for a {Lockfile}.
#
def name_for_locking_dependency_source
'Podfile.lock'
end
# Determines whether the given `requirement` is satisfied by the given
# `spec`, in the context of the current `activated` dependency graph.
#
# @return [Boolean] whether `requirement` is satisfied by `spec` in the
# context of the current `activated` dependency graph.
#
# @param [Dependency] requirement the dependency in question.
#
# @param [Molinillo::DependencyGraph] activated the current dependency
# graph in the resolution process.
#
# @param [Specification] spec the specification in question.
#
def requirement_satisfied_by?(requirement, activated, spec)
version = spec.version
return false unless requirement.requirement.satisfied_by?(version)
return false unless valid_possibility_version_for_root_name?(requirement, activated, spec)
return false unless spec_is_platform_compatible?(activated, requirement, spec)
true
end
def valid_possibility_version_for_root_name?(requirement, activated, spec)
return true if prerelease_requirement = requirement.prerelease? || requirement.external_source || !spec.version.prerelease?
activated.each do |vertex|
next unless vertex.payload
next unless Specification.root_name(vertex.name) == requirement.root_name
prerelease_requirement ||= vertex.requirements.any? { |r| r.prerelease? || r.external_source }
if vertex.payload.respond_to?(:version)
return true if vertex.payload.version == spec.version
break
end
end
prerelease_requirement
end
private :valid_possibility_version_for_root_name?
# Sort dependencies so that the ones that are easiest to resolve are first.
# Easiest to resolve is (usually) defined by:
# 1) Is this dependency already activated?
# 2) How relaxed are the requirements?
# 3) Are there any conflicts for this dependency?
# 4) How many possibilities are there to satisfy this dependency?
#
# @return [Array<Dependency>] the sorted dependencies.
#
# @param [Array<Dependency>] dependencies the unsorted dependencies.
#
# @param [Molinillo::DependencyGraph] activated the dependency graph of
# currently activated specs.
#
# @param [{String => Array<Conflict>}] conflicts the current conflicts.
#
def sort_dependencies(dependencies, activated, conflicts)
dependencies.sort_by do |dependency|
name = name_for(dependency)
[
activated.vertex_named(name).payload ? 0 : 1,
dependency.external_source ? 0 : 1,
dependency.prerelease? ? 0 : 1,
conflicts[name] ? 0 : 1,
search_for(dependency).count,
]
end
end
#-------------------------------------------------------------------------#
public
# @!group Resolver UI
include Molinillo::UI
# The UI object the resolver should use for displaying user-facing output.
#
# @return [UserInterface] the normal CocoaPods UI object.
#
def output
UI
end
# Called before resolution starts.
#
# Completely silence this, as we show nothing.
#
# @return [Void]
#
def before_resolution
end
# Called after resolution ends.
#
# Completely silence this, as we show nothing.
#
# @return [Void]
#
def after_resolution
end
# Called during resolution to indicate progress.
#
# Completely silence this, as we show nothing.
#
# @return [Void]
#
def indicate_progress
end
#-------------------------------------------------------------------------#
private
# !@ Resolution context
# @return [Hash<String => Set>] A cache that keeps tracks of the sets
# loaded by the resolution process.
#
# @note Sets store the resolved dependencies and return the highest
# available specification found in the sources. This is done
# globally and not per target definition because there can be just
# one Pod installation, so different version of the same Pods for
# target definitions are not allowed.
#
attr_reader :cached_sets
#-------------------------------------------------------------------------#
private
# @!group Private helpers
# Returns available specifications which satisfy requirements of given dependency
# and additional requirements.
#
# @param [Dependency] dependency
# The dependency whose requirements will be satisfied.
#
# @param [Array<Requirement>] additional_requirements
# List of additional requirements which should also be satisfied.
#
# @return [Array<Specification>] List of specifications satisfying given requirements.
#
def specifications_for_dependency(dependency, additional_requirements = [])
requirement = Requirement.new(dependency.requirement.as_list + additional_requirements.flat_map(&:as_list))
find_cached_set(dependency).
all_specifications(warn_for_multiple_pod_sources).
select { |s| requirement.satisfied_by? s.version }.
map { |s| s.subspec_by_name(dependency.name, false, true) }.
compact
end
# @return [Set] Loads or returns a previously initialized set for the Pod
# of the given dependency.
#
# @param [Dependency] dependency
# The dependency for which the set is needed.
#
# @return [Set] the cached set for a given dependency.
#
def find_cached_set(dependency)
name = dependency.root_name
cached_sets[name] ||= begin
if dependency.external_source
spec = sandbox.specification(name)
unless spec
raise StandardError, '[Bug] Unable to find the specification ' \
"for `#{dependency}`."
end
set = Specification::Set::External.new(spec)
else
set = create_set_from_sources(dependency)
end
unless set
raise Molinillo::NoSuchDependencyError.new(dependency) # rubocop:disable Style/RaiseArgs
end
set
end
end
# @return [Requirement, Nil]
# The {Requirement} that locks the dependency with name `name` in
# {#locked_dependencies}.
#
def requirement_for_locked_pod_named(name)
if vertex = locked_dependencies.vertex_named(name)
if dependency = vertex.payload
dependency.requirement
end
end
end
# @return [Set] Creates a set for the Pod of the given dependency from the
# sources. The set will contain all versions from all sources that
# include the Pod.
#
# @param [Dependency] dependency
# The dependency for which the set is needed.
#
def create_set_from_sources(dependency)
aggregate_for_dependency(dependency).search(dependency)
end
# @return [Source::Aggregate] The aggregate of the {#sources}.
#
def aggregate_for_dependency(dependency)
sources_manager = Config.instance.sources_manager
if dependency && dependency.podspec_repo
sources_manager.aggregate_for_dependency(dependency)
elsif (locked_vertex = @locked_dependencies.vertex_named(dependency.name)) && (locked_dependency = locked_vertex.payload) && locked_dependency.podspec_repo
sources_manager.aggregate_for_dependency(locked_dependency)
else
@aggregate ||= Source::Aggregate.new(sources)
end
end
# Ensures that a specification is compatible with the platform of a target.
#
# @raise If the specification is not supported by the target.
#
# @return [void]
#
def validate_platform(spec, target)
return unless target_platform = target.platform
return unless @validated_platforms.add?([spec.object_id, target_platform])
unless spec.available_platforms.any? { |p| target_platform.to_sym == p.to_sym }
raise Informative, "The platform of the target `#{target.name}` " \
"(#{target.platform}) is not compatible with `#{spec}`, which does " \
"not support `#{target.platform.name}`."
end
end
# Handles errors that come out of a {Molinillo::Resolver}.
#
# @return [void]
#
# @param [Molinillo::ResolverError] error
#
def handle_resolver_error(error)
message = error.message
type = Informative
case error
when Molinillo::VersionConflict
message = error.message_with_trees(
:solver_name => 'CocoaPods',
:possibility_type => 'pod',
:version_for_spec => lambda(&:version),
:additional_message_for_conflict => lambda do |o, name, conflict|
local_pod_parent = conflict.requirement_trees.flatten.reverse.find(&:local?)
if local_pod_parent && !specifications_for_dependency(conflict.requirement).empty? && !conflict.possibility && conflict.locked_requirement
# Conflict was caused by a requirement from a local dependency.
# Tell user to use `pod update`.
o << "\nIt seems like you've changed the constraints of dependency `#{name}` " \
"inside your development pod `#{local_pod_parent.name}`.\nYou should run `pod update #{name}` to apply " \
"changes you've made."
elsif (conflict.possibility && conflict.possibility.version.prerelease?) &&
(conflict.requirement && !(
conflict.requirement.prerelease? ||
conflict.requirement.external_source)
)
# Conflict was caused by not specifying an explicit version for the requirement #[name],
# and there is no available stable version satisfying constraints for the requirement.
o << "\nThere are only pre-release versions available satisfying the following requirements:\n"
conflict.requirements.values.flatten.uniq.each do |r|
unless search_for(r).empty?
o << "\n\t'#{name}', '#{r.requirement}'\n"
end
end
o << "\nYou should explicitly specify the version in order to install a pre-release version"
elsif !conflict.existing
conflicts = conflict.requirements.values.flatten.uniq
found_conflicted_specs = conflicts.reject { |c| search_for(c).empty? }
if found_conflicted_specs.empty?
# There are no existing specification inside any of the spec repos with given requirements.
type = NoSpecFoundError
dependencies = conflicts.count == 1 ? 'dependency' : 'dependencies'
o << "\nNone of your spec sources contain a spec satisfying "\
"the #{dependencies}: `#{conflicts.join(', ')}`." \
"\n\nYou have either:"
unless specs_updated?
o << "\n * out-of-date source repos which you can update with `pod repo update` or with `pod install --repo-update`."
end
o << "\n * mistyped the name or version." \
"\n * not added the source repo that hosts the Podspec to your Podfile." \
"\n\nNote: as of CocoaPods 1.0, `pod repo update` does not happen on `pod install` by default."
else
o << "\nSpecs satisfying the `#{conflicts.join(', ')}` dependency were found, " \
'but they required a higher minimum deployment target.'
end
end
end,
)
when Molinillo::NoSuchDependencyError
message += <<-EOS
You have either:
* out-of-date source repos which you can update with `pod repo update` or with `pod install --repo-update`.
* mistyped the name or version.
* not added the source repo that hosts the Podspec to your Podfile.
Note: as of CocoaPods 1.0, `pod repo update` does not happen on `pod install` by default.
EOS
end
raise type.new(message).tap { |e| e.set_backtrace(error.backtrace) }
end
# Returns whether the given spec is platform-compatible with the dependency
# graph, taking into account the dependency that has required the spec.
#
# @param [Molinillo::DependencyGraph] dependency_graph
#
# @param [Dependency] dependency
#
# @param [Specification] spec
#
# @return [Bool]
def spec_is_platform_compatible?(dependency_graph, dependency, spec)
# This is safe since a pod will only be in locked dependencies if we're
# using the same exact version
return true if locked_dependencies.vertex_named(spec.name)
vertex = dependency_graph.vertex_named(dependency.name)
predecessors = vertex.recursive_predecessors.select(&:root?)
predecessors << vertex if vertex.root?
platforms_to_satisfy = predecessors.flat_map(&:explicit_requirements).flat_map { |r| @platforms_by_dependency[r] }.uniq
available_platforms = spec.available_platforms
platforms_to_satisfy.all? do |platform_to_satisfy|
available_platforms.all? do |spec_platform|
next true unless spec_platform.name == platform_to_satisfy.name
platform_to_satisfy.supports?(spec_platform)
end
end
end
# Returns the target-appropriate nodes that are `successors` of `node`,
# rejecting those that are scoped by target platform and have incompatible
# targets.
#
# @return [Array<Molinillo::DependencyGraph::Vertex>]
# An array of target-appropriate nodes whose `payload`s are
# dependencies for `target`.
#
def valid_dependencies_for_target(target)
dependencies = Set.new
@podfile_dependency_cache.target_definition_dependencies(target).each do |dep|
node = @activated.vertex_named(dep.name)
add_valid_dependencies_from_node(node, target, dependencies)
end
dependencies
end
def add_valid_dependencies_from_node(node, target, dependencies)
return unless dependencies.add?(node)
validate_platform(node.payload, target)
node.outgoing_edges.each do |edge|
next unless edge_is_valid_for_target_platform?(edge, target.platform)
add_valid_dependencies_from_node(edge.destination, target, dependencies)
end
end
EdgeAndPlatform = Struct.new(:edge, :target_platform)
private_constant :EdgeAndPlatform
# Whether the given `edge` should be followed to find dependencies for the
# given `target_platform`.
#
# @return [Bool]
#
def edge_is_valid_for_target_platform?(edge, target_platform)
@edge_validity ||= Hash.new do |hash, edge_and_platform|
e = edge_and_platform.edge
platform = edge_and_platform.target_platform
requirement_name = e.requirement.name
hash[edge_and_platform] = e.origin.payload.all_dependencies(platform).any? do |dep|
dep.name == requirement_name
end
end
@edge_validity[EdgeAndPlatform.new(edge, target_platform)]
end
# @return [Boolean] whether to emit a warning when a pod is found in multiple sources
#
def warn_for_multiple_pod_sources
podfile.installation_options.warn_for_multiple_pod_sources
end
end
end
| 37.66323 | 161 | 0.651962 |
2872e52153659adb16dcd921a1b44eae40cd42ce | 821 | module Byebug
#
# Show byebug settings.
#
class ShowCommand < Command
self.allow_in_control = true
def regexp
/^\s* show (?:\s+(?<setting>\w+))? \s*$/x
end
def execute
key = @match[:setting]
return puts(self.class.help) if key.nil?
full_key = Setting.find(key)
return errmsg("Unknown setting :#{key}") unless full_key
puts Setting.settings[full_key.to_sym].to_s
end
class << self
def names
%w(show)
end
def description
<<-EOD.gsub(/^ {8}/, '')
show <setting> <value>
Generic command for showing byebug settings. You can change them with
the "set" command.
EOD
end
def help(subcmds = [])
Setting.help('show', subcmds.first)
end
end
end
end
| 18.659091 | 79 | 0.559074 |
186f0c1bc9a0038579eb769d169ac64f6f2f39f9 | 612 | class Quote
@@quotes_all = []
attr_accessor :quote, :author, :categories
def initialize(quote)
@quote = quote
@@quotes_all << self
end
def self.all
@@quotes_all
end
def self.find_quote(quote)
@@quotes_all.select{|a| a quote == quote}
end
def self.search_quotes(quote)
if @@quotes_all.include?(quote) == false
author = Quote.new(quote)
else
author = Quote.find_quote(quote)
end
end
def get_author(quote)
quote.author.name
end
def get_categories(quote)
quote.categories
end
end
| 17 | 46 | 0.593137 |
9167192434f06f99d8db56918b5f8447cc72e4ca | 259 | module Sensu
module Extension
class Fail < Mutator
def name
'fail'
end
def description
'fails to do anything'
end
def run(event, settings, &block)
block.call('fail', 2)
end
end
end
end
| 14.388889 | 38 | 0.540541 |
abdab3fc71b0a120f6a4a9cb8bbfbb0edb7c0be1 | 1,396 | #
# EDOS/src/REI/Character.rb
# by IceDragon
# dc 06/05/2013
# dm 11/05/2013
# vr 0.0.1
module REI
# Holds the presentation data (face, map sprite, portrait) for a unit.
class CharacterBase
  # The unit this character visualizes (nil until assigned).
  attr_accessor :unit
  # Face graphic: sheet name, cell index, hue shift.
  attr_accessor :face_name, :face_index, :face_hue
  # Map character sprite: sheet name, cell index, hue shift.
  attr_accessor :character_name, :character_index, :character_hue
  # Full portrait: sheet name and hue shift.
  attr_accessor :portrait_name, :portrait_hue

  # Starts with no unit, empty graphic names and zeroed indices/hues.
  def initialize
    @unit = nil
    [:face, :character].each do |part|
      instance_variable_set(:"@#{part}_name", '')
      instance_variable_set(:"@#{part}_index", 0)
      instance_variable_set(:"@#{part}_hue", 0)
    end
    @portrait_name = ''
    @portrait_hue = 0
  end

  # Convenience shortcut to the unit's underlying entity.
  def entity
    unit.entity
  end
end
# Character whose presentation attributes are copied from a data entity.
class Character < CharacterBase
  # Attributes mirrored 1:1 from the data entity in #setup.
  SETUP_FIELDS = [:face_name, :face_index, :face_hue,
                  :character_name, :character_index, :character_hue,
                  :portrait_name, :portrait_hue]

  # Copies every presentation attribute from `data_entity` onto self.
  def setup(data_entity)
    SETUP_FIELDS.each do |field|
      send(:"#{field}=", data_entity.send(field))
    end
  end
end
# Character backed by an actor entity (no specialization yet).
class CharacterActor < Character
  #
end
# Character backed by an enemy entity (no specialization yet).
class CharacterEnemy < Character
  #
end
# Character backed by a trap entity (no specialization yet).
class CharacterTrap < Character
  #
end
# Character backed by an item entity (no specialization yet).
class CharacterItem < Character
  #
end
end
| 18.368421 | 52 | 0.643983 |
01b096ad109e3204bd0f5e9345349f4062237989 | 3,056 | require 'spec_helper_acceptance'
# Ensure time synchronization is in use - Section 2.2.1.1
# NOTE(review): CIS 2.2.1.1 requires *either* ntp or chrony; asserting
# that both packages are installed may be stricter than the benchmark
# intends — confirm against the site hardening policy.
describe package('ntp') do
  it { should be_installed }
end
describe package('chrony') do
  it { should be_installed }
end
# Ensure ntp is configured - Section 2.2.1.2
describe file('/etc/ntp.conf') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
end
describe file('/etc/sysconfig/ntpd') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
end
describe file('/usr/lib/systemd/system/ntpd.service') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
its(:content) { should match /ExecStart=\/usr\/sbin\/ntpd -u ntp:ntp $OPTIONS/ }
end
# Ensure Chrony is configured - Section 2.2.1.3
describe file('/etc/chrony.conf') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
end
describe file('/etc/sysconfig/chronyd') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
its(:content) { should match /OPTIONS="-u chrony"/ }
end
# Ensure X Window System is not installed - Section 2.2.2
describe package('xorg-x11-server-Xorg') do
it { should_not be_installed }
end
# Ensure Avahi Server is not enabled - Section 2.2.3
describe service('avahi-daemon') do
it { should_not be_running }
end
# Ensure CUPS is not enabled - Section 2.2.4
describe service('cups') do
it { should_not be_running }
end
# Ensure DHCP Server is not enabled - Section 2.2.5
describe service('dhcpd') do
  # Fix: was `should_not running` (undefined method at run time) —
  # serverspec's service-state matcher is `be_running`.
  it { should_not be_running }
end
# Ensure LDAP Server is not enabled - Section 2.2.6
describe service('slapd') do
it { should_not be_running }
end
# Ensure NFS and RPC are not enabled - Section 2.2.7
describe service('nfs') do
it { should_not be_running }
end
describe service('nfs-server') do
it { should_not be_running }
end
describe service('rpcbind') do
it { should_not be_running }
end
# Ensure DNS Server is not enabled - Section 2.2.8
describe service('named') do
it { should_not be_running }
end
# Ensure FTP Server is not enabled - Section 2.2.9
describe service('vsftpd') do
  # Fix: this block checked package('vsftpd') with `be_running`, but a
  # package resource has no running state. Section 2.2.9 ("FTP Server
  # is not enabled") is a service check, matching every sibling section
  # in this file.
  it { should_not be_running }
end
# Ensure HTTP Server is not enabled - Section 2.2.10
describe service('httpd') do
it { should_not be_running }
end
# Ensure IMAP and POP3 Server are not enabled - Section 2.2.11
describe service('dovecot') do
it { should_not be_running }
end
# Ensure Samba is not enabled - Section 2.2.12
describe service('smb') do
it { should_not be_running }
end
# Ensure HTTP Proxy Server is not enabled - Section 2.2.13
describe service('squid') do
it { should_not be_running }
end
# Ensure SNMP Server is not enabled - Section 2.2.14
describe service('snmpd') do
  # Fix: the example was left as a bare `it` (permanently pending and
  # asserting nothing); assert the service is stopped like the other
  # CIS sections in this file.
  it { should_not be_running }
end
| 24.845528 | 84 | 0.692736 |
87f2dbbcc8ff4cfd6568142e6f8cac59466733f2 | 112 | puts "Enter Voltage: "
u = gets.to_f
puts "Enter Resistance: "
r = gets.to_f
# Ohm's law: I = U / R. Fix: the inputs were read with to_i, so the
# division truncated (5 V across 2 ohms reported I = 2 instead of 2.5).
# Float division also avoids ZeroDivisionError when r is 0 (Infinity).
i = u / r
puts "I = #{i.to_s}" | 18.666667 | 26 | 0.589286 |
d5cddc41881154541c0f9ce7434e9a2361b58efb | 84 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
# Load the library entry point (lib/ was prepended to $LOAD_PATH above).
require 'cagnut_toolbox'
| 28 | 58 | 0.75 |
1aa7f6dcba0a8c3b54d9aec60318fa869df32914 | 407 | # coding: utf-8
class ONIX::Series < ONIX::Element
xml_name "Series"
onix_composite :series_identifiers, ONIX::SeriesIdentifier
xml_accessor :title_of_series, :from => "TitleOfSeries"
onix_composite :titles, ONIX::Title
onix_composite :contributors, ONIX::Contributor
xml_accessor :number_within_series, :from => "NumberWithinSeries"
xml_accessor :year_of_annual, :from => "YearOfAnnual"
end
| 33.916667 | 67 | 0.773956 |
5d018f51499222c4dfab4f0d7a73613e4aeb3724 | 915 | # https://github.com/wildbit/postmark-gem
module Mailkick
class Service
class Postmark < Mailkick::Service
REASONS_MAP = {
"SpamNotification" => "spam",
"SpamComplaint" => "spam",
"Unsubscribe" => "unsubscribe",
}
def initialize(options = {})
@client = ::Postmark::ApiClient.new(options[:api_key] || ENV["POSTMARK_API_KEY"])
end
def opt_outs
bounces
end
def bounces
fetch(@client.bounces)
end
def self.discoverable?
!!(defined?(::Postmark) && ENV["POSTMARK_API_KEY"])
end
protected
def fetch(response)
response.map do |record|
{
email: record[:email],
time: ActiveSupport::TimeZone["UTC"].parse(record[:bounced_at]),
reason: REASONS_MAP.fetch(record[:type], "bounce")
}
end
end
end
end
end
| 21.785714 | 89 | 0.555191 |
611c5cc4e9afe7f91e3009cf6d777a6ea1f2e4ad | 384 | require 'spec_helper'
# Verifies that FieldParams.parse normalizes a sparse-fieldset payload:
# string keys become symbols and comma-separated field strings become
# arrays of symbols.
RSpec.describe Graphiti::Util::FieldParams do
  describe '.parse' do
    it 'collects and normalizes the payload' do
      parsed = described_class.parse({
        'authors' => 'first_name,last_name',
        'books' => 'title'
      })
      expect(parsed).to eq({
        authors: [:first_name, :last_name],
        books: [:title]
      })
    end
  end
end
| 22.588235 | 47 | 0.596354 |
4adbf936370dc22cd5d30dabd179b2fe7932c0d6 | 873 | require 'json'
package = JSON.parse(File.read(File.join(__dir__, '..', 'package.json')))
Pod::Spec.new do |s|
s.name = 'EXStoreReview'
s.version = package['version']
s.summary = package['description']
s.description = package['description']
s.license = package['license']
s.author = package['author']
s.homepage = package['homepage']
s.platform = :ios, '11.0'
s.source = { git: 'https://github.com/expo/expo.git' }
s.dependency 'ExpoModulesCore'
if !$ExpoUseSources&.include?(package['name']) && ENV['EXPO_USE_SOURCE'].to_i == 0 && File.exist?("#{s.name}.xcframework") && Gem::Version.new(Pod::VERSION) >= Gem::Version.new('1.10.0')
s.source_files = "#{s.name}/**/*.h"
s.vendored_frameworks = "#{s.name}.xcframework"
else
s.source_files = "#{s.name}/**/*.{h,m}"
end
end
| 34.92 | 188 | 0.594502 |
acda98f4de81dac9e9d00bdd93f3edf8ad2e1d89 | 4,517 | require 'faraday'
class FetchMembersJob < ApplicationJob
HOST = "https://data.parliament.scot"
TIMEOUT = 5
ENDPOINTS = {
constituencies: "/api/Constituencies",
constituency_elections: "/api/MemberElectionConstituencyStatuses",
member_parties: "/api/MemberParties",
members: "/api/Members",
parties: "/api/Parties",
regions: "/api/Regions",
region_elections: "/api/MemberElectionRegionStatuses"
}
NAME_PATTERN = /,\s+/
# The constituencies endpoint currently returns incorrect codes
# for 'Glasgow Provan' and 'Strathkelvin and Bearsden' so we need
# to map those to the correct ONS codes
CONSTITUENCY_FIXES = {
"S16000120" => "S16000147",
"S16000145" => "S16000148"
}
# The regions endpoint currently returns incorrect codes for 'Glasgow'
# so we need to map that to the correct ONS code
REGION_FIXES = {
"S17000010" => "S17000017"
}
# The 'Reform UK' party doesn't appear in the parties endpoint so
# we need to add it to the list manually for now.
ADDITIONAL_PARTIES = {
13 => "Reform UK"
}
GAELIC_PARTIES = {
"No Party Affiliation" => "Gun Cheangal Pàrtaidh",
"Scottish Green Party" => "Pàrtaidh Uaine na h-Alba",
"Independent" => "Neo-eisimeileach",
"Scottish National Party" => "Pàrtaidh Nàiseanta na h-Alba",
"Scottish Conservative and Unionist Party" => "Pàrtaidh Tòraidheach na h-Alba",
"Scottish Liberal Democrats" => "Pàrtaidh Libearal Deamocratach na h-Alba",
"Scottish Labour" => "Pàrtaidh Làbarach na h-Alba",
"Reform UK" => "Reform UK"
}
rescue_from StandardError do |exception|
Appsignal.send_exception exception
end
def perform
Member.transaction do
Member.update_all(region_id: nil, constituency_id: nil)
members.each do |attrs|
person_id = attrs["PersonID"]
retried = false
begin
Member.for(person_id) do |member|
last, first = attrs["ParliamentaryName"].split(NAME_PATTERN)
party = parties[member_parties[person_id]]
member.name_en = "#{first} #{last} MSP"
member.name_gd = "#{first} #{last} BPA"
member.party_en = party
member.party_gd = GAELIC_PARTIES[party]
if region_id = region_elections[person_id]
member.region_id = regions.fetch(region_id)
elsif constituency_id = constituency_elections[person_id]
member.constituency_id = constituencies.fetch(constituency_id)
end
member.save!
end
rescue ActiveRecord::RecordNotUnique => e
if retried
raise e
else
retried = true
retry
end
end
end
end
end
private
def members
@members ||= get(:members).select { |member| member["IsCurrent"] }
end
def constituencies
@constituencies ||= build_map(:constituencies, "ID", "ConstituencyCode").transform_values(&method(:fix_constituencies))
end
def fix_constituencies(code)
CONSTITUENCY_FIXES[code] || code
end
def constituency_elections
@constituency_elections ||= build_map(:constituency_elections, "PersonID", "ConstituencyID")
end
def parties
@parties ||= build_map(:parties, "ID", "PreferredName").merge(ADDITIONAL_PARTIES)
end
def member_parties
@member_parties ||= build_map(:member_parties, "PersonID", "PartyID")
end
def regions
@regions ||= build_map(:regions, "ID", "RegionCode", "EndDate").transform_values(&method(:fix_regions))
end
def fix_regions(code)
REGION_FIXES[code] || code
end
def region_elections
@region_elections ||= build_map(:region_elections, "PersonID", "RegionID")
end
def faraday
Faraday.new(HOST) do |f|
f.response :follow_redirects
f.response :raise_error
f.response :json
f.adapter :net_http_persistent
end
end
def request(entity)
faraday.get(ENDPOINTS[entity]) do |request|
request.options[:timeout] = TIMEOUT
request.options[:open_timeout] = TIMEOUT
end
end
def get(entity)
response = request(entity)
if response.success?
response.body
else
[]
end
rescue Faraday::Error => e
Appsignal.send_exception(e)
return []
end
def build_map(entity, id, name, ends_at = "ValidUntilDate")
get(entity).inject({}) do |objects, object|
if object[ends_at].nil?
objects[object[id]] = object[name]
end
objects
end
end
end
| 26.727811 | 123 | 0.654859 |
ac8f20feff051652ac0d25a82e366dcacfaa8d85 | 20,021 | $:.unshift File.expand_path('../vendor', __FILE__)
require 'thor'
require 'thor/actions'
require 'rubygems/config_file'
# Work around a RubyGems bug
Gem.configuration
module Bundler
class CLI < Thor
include Thor::Actions
def initialize(*)
super
use_shell = options["no-color"] ? Thor::Shell::Basic.new : shell
Bundler.ui = UI::Shell.new(use_shell)
Gem::DefaultUserInteraction.ui = UI::RGProxy.new(Bundler.ui)
end
check_unknown_options! unless ARGV.include?("exec") || ARGV.include?("config")
default_task :install
class_option "no-color", :type => :boolean, :banner => "Disable colorization in output"
def help(cli = nil)
case cli
when "gemfile" then command = "gemfile.5"
when nil then command = "bundle"
else command = "bundle-#{cli}"
end
manpages = %w(
bundle
bundle-config
bundle-exec
bundle-install
bundle-package
bundle-update
gemfile.5)
if manpages.include?(command)
root = File.expand_path("../man", __FILE__)
if have_groff?
groff = "groff -Wall -mtty-char -mandoc -Tascii"
pager = ENV['MANPAGER'] || ENV['PAGER'] || 'more'
Kernel.exec "#{groff} #{root}/#{command} | #{pager}"
else
puts File.read("#{root}/#{command}.txt")
end
else
super
end
end
desc "init", "Generates a Gemfile into the current working directory"
long_desc <<-D
Init generates a default Gemfile in the current working directory. When adding a
Gemfile to a gem with a gemspec, the --gemspec option will automatically add each
dependency listed in the gemspec file to the newly created Gemfile.
D
method_option "gemspec", :type => :string, :banner => "Use the specified .gemspec to create the Gemfile"
def init
opts = options.dup
if File.exist?("Gemfile")
Bundler.ui.error "Gemfile already exists at #{Dir.pwd}/Gemfile"
exit 1
end
if opts[:gemspec]
gemspec = File.expand_path(opts[:gemspec])
unless File.exist?(gemspec)
Bundler.ui.error "Gem specification #{gemspec} doesn't exist"
exit 1
end
spec = Gem::Specification.load(gemspec)
puts "Writing new Gemfile to #{Dir.pwd}/Gemfile"
File.open('Gemfile', 'wb') do |file|
file << "# Generated from #{gemspec}\n"
file << spec.to_gemfile
end
else
puts "Writing new Gemfile to #{Dir.pwd}/Gemfile"
FileUtils.cp(File.expand_path('../templates/Gemfile', __FILE__), 'Gemfile')
end
end
desc "check", "Checks if the dependencies listed in Gemfile are satisfied by currently installed gems"
long_desc <<-D
Check searches the local machine for each of the gems requested in the Gemfile. If
all gems are found, Bundler prints a success message and exits with a status of 0.
If not, the first missing gem is listed and Bundler exits status 1.
D
method_option "gemfile", :type => :string, :banner =>
"Use the specified gemfile instead of Gemfile"
def check
ENV['BUNDLE_GEMFILE'] = File.expand_path(options[:gemfile]) if options[:gemfile]
begin
not_installed = Bundler.definition.missing_specs
rescue GemNotFound, VersionConflict
Bundler.ui.error "Your Gemfile's dependencies could not be satisfied"
Bundler.ui.warn "Install missing gems with `bundle install`"
exit 1
end
if not_installed.any?
Bundler.ui.error "The following gems are missing"
not_installed.each { |s| Bundler.ui.error " * #{s.name} (#{s.version})" }
Bundler.ui.warn "Install missing gems with `bundle install`"
exit 1
else
Bundler.load.lock
Bundler.ui.info "The Gemfile's dependencies are satisfied"
end
end
desc "install", "Install the current environment to the system"
long_desc <<-D
Install will install all of the gems in the current bundle, making them available
for use. In a freshly checked out repository, this command will give you the same
gem versions as the last person who updated the Gemfile and ran `bundle update`.
Passing [DIR] to install (e.g. vendor) will cause the unpacked gems to be installed
into the [DIR] directory rather than into system gems.
If the bundle has already been installed, bundler will tell you so and then exit.
D
method_option "without", :type => :array, :banner =>
"Exclude gems that are part of the specified named group."
method_option "disable-shared-gems", :type => :boolean, :banner =>
"This option is deprecated. Please do not use it."
method_option "gemfile", :type => :string, :banner =>
"Use the specified gemfile instead of Gemfile"
method_option "no-prune", :type => :boolean, :banner =>
"Don't remove stale gems from the cache."
method_option "no-cache", :type => :boolean, :banner =>
"Don't update the existing gem cache."
method_option "quiet", :type => :boolean, :banner =>
"Only output warnings and errors."
method_option "local", :type => :boolean, :banner =>
"Do not attempt to fetch gems remotely and use the gem cache instead"
method_option "binstubs", :type => :string, :lazy_default => "bin", :banner =>
"Generate bin stubs for bundled gems to ./bin"
method_option "path", :type => :string, :banner =>
"Specify a different path than the system default ($BUNDLE_PATH or $GEM_HOME). Bundler will remember this value for future installs on this machine"
method_option "system", :type => :boolean, :banner =>
"Install to the system location ($BUNDLE_PATH or $GEM_HOME) even if the bundle was previously installed somewhere else for this application"
method_option "frozen", :type => :boolean, :banner =>
"Do not allow the Gemfile.lock to be updated after this install"
method_option "deployment", :type => :boolean, :banner =>
"Install using defaults tuned for deployment environments"
method_option "production", :type => :boolean, :banner =>
"Deprecated, please use --deployment instead"
def install(path = nil)
opts = options.dup
opts[:without] ||= []
opts[:without].map! { |g| g.to_sym }
ENV['BUNDLE_GEMFILE'] = File.expand_path(opts[:gemfile]) if opts[:gemfile]
if opts[:production]
opts[:deployment] = true
Bundler.ui.warn "The --production option is deprecated, and will be removed in " \
"the final release of Bundler 1.0. Please use --deployment instead."
end
if (path || opts[:path] || opts[:deployment]) && opts[:system]
Bundler.ui.error "You have specified both a path to install your gems to, \n" \
"as well as --system. Please choose."
exit 1
end
if path && opts[:path]
Bundler.ui.error "You have specified a path via `bundle install #{path}` as well as\n" \
"by `bundle install --path #{options[:path]}`. These options are\n" \
"equivalent, so please use one or the other."
exit 1
end
if opts["disable-shared-gems"]
Bundler.ui.error "The disable-shared-gem option is no longer available.\n\n" \
"Instead, use `bundle install` to install to your system,\n" \
"or `bundle install --path path/to/gems` to install to an isolated\n" \
"location. Bundler will resolve relative paths relative to\n" \
"your `Gemfile`."
exit 1
end
if opts[:deployment] || opts[:frozen]
Bundler.settings[:frozen] = '1'
unless Bundler.default_lockfile.exist?
flag = opts[:deployment] ? '--deployment' : '--frozen'
raise ProductionError, "The #{flag} flag requires a Gemfile.lock. Please make " \
"sure you have checked your Gemfile.lock into version control " \
"before deploying."
end
if Bundler.root.join("vendor/cache").exist?
opts[:local] = true
end
end
# Can't use Bundler.settings for this because settings needs gemfile.dirname
Bundler.settings[:path] = nil if opts[:system]
Bundler.settings[:path] = "vendor/bundle" if opts[:deployment]
Bundler.settings[:path] = path if path
Bundler.settings[:path] = opts[:path] if opts[:path]
Bundler.settings[:bin] = opts["binstubs"] if opts[:binstubs]
Bundler.settings[:disable_shared_gems] = '1' if Bundler.settings[:path]
Bundler.settings.without = opts[:without] unless opts[:without].empty?
Bundler.ui.be_quiet! if opts[:quiet]
Installer.install(Bundler.root, Bundler.definition, opts)
Bundler.load.cache if Bundler.root.join("vendor/cache").exist?
Bundler.ui.confirm "Your bundle is complete! " +
"Use `bundle show [gemname]` to see where a bundled gem is installed."
Bundler.ui.confirm "\nYour bundle was installed to `#{Bundler.settings[:path]}`" if Bundler.settings[:path]
if path
Bundler.ui.warn "\nIf you meant to install it to your system, please remove the\n" \
"`#{path}` directory and run `bundle install --system`"
end
rescue GemNotFound => e
if Bundler.definition.no_sources?
Bundler.ui.warn "Your Gemfile doesn't have any sources. You can add one with a line like 'source :gemcutter'"
end
raise e
end
desc "update", "update the current environment"
long_desc <<-D
Update will install the newest versions of the gems listed in the Gemfile. Use
update when you have changed the Gemfile, or if you want to get the newest
possible versions of the gems in the bundle.
D
method_option "source", :type => :array, :banner => "Update a specific source (and all gems associated with it)"
def update(*gems)
sources = Array(options[:source])
if gems.empty? && sources.empty?
# We're doing a full update
Bundler.definition(true)
else
Bundler.definition(:gems => gems, :sources => sources)
end
Installer.install Bundler.root, Bundler.definition, "update" => true
Bundler.load.cache if Bundler.root.join("vendor/cache").exist?
Bundler.ui.confirm "Your bundle is updated! " +
"Use `bundle show [gemname]` to see where a bundled gem is installed."
end
desc "lock", "Locks the bundle to the current set of dependencies, including all child dependencies."
def lock
Bundler.ui.warn "Lock is deprecated. Your bundle is now locked whenever you run `bundle install`."
end
desc "unlock", "Unlock the bundle. This allows gem versions to be changed."
def unlock
Bundler.ui.warn "Unlock is deprecated. To update to newer gem versions, use `bundle update`."
end
desc "show [GEM]", "Shows all gems that are part of the bundle, or the path to a given gem"
long_desc <<-D
Show lists the names and versions of all gems that are required by your Gemfile.
Calling show with [GEM] will list the exact location of that gem on your machine.
D
def show(gem_name = nil)
Bundler.load.lock
if gem_name
Bundler.ui.info locate_gem(gem_name)
else
Bundler.ui.info "Gems included by the bundle:"
Bundler.load.specs.sort_by { |s| s.name }.each do |s|
Bundler.ui.info " * #{s.name} (#{s.version}#{s.git_version})"
end
end
end
map %w(list) => "show"
desc "cache", "Cache all the gems to vendor/cache", :hide => true
method_option "no-prune", :type => :boolean, :banner => "Don't remove stale gems from the cache."
def cache
Bundler.definition.resolve_with_cache!
Bundler.load.cache
Bundler.settings[:no_prune] = true if options[:no_prune]
Bundler.load.lock
rescue GemNotFound => e
Bundler.ui.error(e.message)
Bundler.ui.warn "Run `bundle install` to install missing gems."
exit 128
end
desc "package", "Locks and then caches all of the gems into vendor/cache"
method_option "no-prune", :type => :boolean, :banner => "Don't remove stale gems from the cache."
long_desc <<-D
The package command will copy the .gem files for every gem in the bundle into the
directory ./vendor/cache. If you then check that directory into your source
control repository, others who check out your source will be able to install the
bundle without having to download any additional gems.
D
def package
install
# TODO: move cache contents here now that all bundles are locked
Bundler.load.cache
end
map %w(pack) => :package
desc "exec", "Run the command in context of the bundle"
long_desc <<-D
Exec runs a command, providing it access to the gems in the bundle. While using
bundle exec you can require and call the bundled gems as if they were installed
into the systemwide Rubygems repository.
D
def exec(*)
ARGV.delete("exec")
Bundler.setup
begin
# Run
Kernel.exec(*ARGV)
rescue Errno::EACCES
Bundler.ui.error "bundler: not executable: #{ARGV.first}"
exit 126
rescue Errno::ENOENT
Bundler.ui.error "bundler: command not found: #{ARGV.first}"
Bundler.ui.warn "Install missing gem binaries with `bundle install`"
exit 127
end
end
desc "config NAME [VALUE]", "retrieve or set a configuration value"
long_desc <<-D
Retrieves or sets a configuration value. If only parameter is provided, retrieve the value. If two parameters are provided, replace the
existing value with the newly provided one.
By default, setting a configuration value sets it for all projects
on the machine.
If a global setting is superceded by local configuration, this command
will show the current value, as well as any superceded values and
where they were specified.
D
def config(name = nil, *args)
values = ARGV.dup
values.shift # remove config
values.shift # remove the name
unless name
Bundler.ui.confirm "Settings are listed in order of priority. The top value will be used.\n"
Bundler.settings.all.each do |setting|
Bundler.ui.confirm "#{setting}"
with_padding do
Bundler.settings.pretty_values_for(setting).each do |line|
Bundler.ui.info line
end
end
Bundler.ui.confirm ""
end
return
end
if values.empty?
Bundler.ui.confirm "Settings for `#{name}` in order of priority. The top value will be used"
with_padding do
Bundler.settings.pretty_values_for(name).each { |line| Bundler.ui.info line }
end
else
locations = Bundler.settings.locations(name)
if local = locations[:local]
Bundler.ui.info "Your application has set #{name} to #{local.inspect}. This will override the " \
"system value you are currently setting"
end
if global = locations[:global]
Bundler.ui.info "You are replacing the current system value of #{name}, which is currently #{global}"
end
if env = locations[:env]
Bundler.ui.info "You have set a bundler environment variable for #{env}. This will take precedence " \
"over the system value you are setting"
end
Bundler.settings.set_global(name, values.join(" "))
end
end
desc "open GEM", "Opens the source directory of the given bundled gem"
def open(name)
editor = [ENV['BUNDLER_EDITOR'], ENV['VISUAL'], ENV['EDITOR']].find{|e| !e.nil? && !e.empty? }
if editor
command = "#{editor} #{locate_gem(name)}"
success = system(command)
Bundler.ui.info "Could not run '#{command}'" unless success
else
Bundler.ui.info("To open a bundled gem, set $EDITOR or $BUNDLER_EDITOR")
end
end
desc "console [GROUP]", "Opens an IRB session with the bundle pre-loaded"
def console(group = nil)
require 'bundler/setup'
group ? Bundler.require(:default, group) : Bundler.require
ARGV.clear
require 'irb'
IRB.start
end
desc "version", "Prints the bundler's version information"
def version
Bundler.ui.info "Bundler version #{Bundler::VERSION}"
end
map %w(-v --version) => :version
desc 'viz', "Generates a visual dependency graph"
long_desc <<-D
Viz generates a PNG file of the current Gemfile as a dependency graph.
Viz requires the ruby-graphviz gem (and its dependencies).
The associated gems must also be installed via 'bundle install'.
D
method_option :file, :type => :string, :default => 'gem_graph.png', :aliases => '-f', :banner => "The name to use for the generated png file."
method_option :version, :type => :boolean, :default => false, :aliases => '-v', :banner => "Set to show each gem version."
method_option :requirements, :type => :boolean, :default => false, :aliases => '-r', :banner => "Set to show the version of each required dependency."
def viz
output_file = File.expand_path(options[:file])
graph = Graph.new( Bundler.load )
begin
graph.viz(output_file, options[:version], options[:requirements])
Bundler.ui.info output_file
rescue LoadError => e
Bundler.ui.error e.inspect
Bundler.ui.warn "Make sure you have the graphviz ruby gem. You can install it with:"
Bundler.ui.warn "`gem install ruby-graphviz`"
rescue StandardError => e
if e.message =~ /GraphViz not installed or dot not in PATH/
Bundler.ui.error e.message
Bundler.ui.warn "The ruby graphviz gem requires GraphViz to be installed"
else
raise
end
end
end
desc "gem GEM", "Creates a skeleton for creating a rubygem"
def gem(name)
target = File.join(Dir.pwd, name)
constant_name = name.split('_').map{|p| p.capitalize}.join
constant_name = constant_name.split('-').map{|q| q.capitalize}.join('::') if constant_name =~ /-/
constant_array = constant_name.split('::')
FileUtils.mkdir_p(File.join(target, 'lib', name))
opts = {:name => name, :constant_name => constant_name, :constant_array => constant_array}
template(File.join('newgem', 'Gemfile.tt'), File.join(target, 'Gemfile'), opts)
template(File.join('newgem', 'Rakefile.tt'), File.join(target, 'Rakefile'), opts)
template(File.join('newgem', 'gitignore.tt'), File.join(target, '.gitignore'), opts)
template(File.join('newgem', 'newgem.gemspec.tt'), File.join(target, "#{name}.gemspec"), opts)
template(File.join('newgem', 'lib', 'newgem.rb.tt'), File.join(target, 'lib', "#{name}.rb"), opts)
template(File.join('newgem', 'lib', 'newgem', 'version.rb.tt'), File.join(target, 'lib', name, 'version.rb'), opts)
Bundler.ui.info "Initializating git repo in #{target}"
Dir.chdir(target) { `git init`; `git add .` }
end
def self.source_root
File.expand_path(File.join(File.dirname(__FILE__), 'templates'))
end
private
def have_groff?
`which groff 2>#{NULL}`
$? == 0
end
def locate_gem(name)
spec = Bundler.load.specs.find{|s| s.name == name }
raise GemNotFound, "Could not find gem '#{name}' in the current bundle." unless spec
if spec.name == 'bundler'
return File.expand_path('../../../', __FILE__)
end
spec.full_gem_path
end
end
end
| 40.52834 | 154 | 0.631637 |
7a4856285045f75e118c45ce1f1d01d25487a527 | 5,299 | # encoding: UTF-8
module Texas
module Template
module Helper
#
# Basic helper methods for finding files and template handling
#
module Base
def default_search_paths
[
__path__,
path_with_templates_basename,
build_path,
build.root
].compact.uniq
end
# Returns a subdir with the current template's basename
#
# Example:
# # Given the following contents directory:
# #
# # contents/
# # section-1/
# # subsection-1-1.tex.erb
# # contents.tex.erb
# # section-1.tex-erb
# # section-2.tex-erb
# #
#
# # section-1.tex.erb
#
# <%= path_with_templates_basename %>
# # => "section-1"
#
# # section-2.tex.erb
#
# <%= path_with_templates_basename %>
# # => nil
#
def path_with_templates_basename
subdir = Template.basename @output_filename
File.directory?(subdir) ? subdir : nil
end
# Searches for the given file in +possible_paths+, also checking for +possible_exts+ as extensions
#
# Example:
# find_template_file(["figures", "some-chart"], [:pdf, :png], ["", "tmp", "tmp/build"])
# # => will check
# figures/some-chart.pdf
# figures/some-chart.png
# tmp/figures/some-chart.pdf
# tmp/figures/some-chart.png
# tmp/build/figures/some-chart.pdf
# tmp/build/figures/some-chart.png
#
def find_template_file(parts, possible_exts = [], possible_paths = default_search_paths)
possible_paths.each do |base|
(possible_exts + [""]).each do |ext|
filename = filename_for_find(parts, base, ext)
return filename if File.exist?(filename) && !File.directory?(filename)
end
end
nil
end
# Searches for the given file and raises an error if it is not found anywhere
#
def find_template_file!(parts, possible_exts = [], possible_paths = default_search_paths)
if filename = find_template_file(parts, possible_exts, possible_paths)
filename
else
raise TemplateNotFound.new(self, "File doesn't exists anywhere: #{parts.size > 1 ? parts : parts.first}")
end
end
def filename_for_find(parts, base, ext = nil)
path = [parts].flatten.map(&:to_s).map(&:dup)
path.unshift base.to_s
path.last << ".#{ext}" unless ext.empty?
File.join(*path)
end
# Renders a partial with the given locals.
#
# Example:
# <%= partial :some_partial, :some_value => 42 %>
#
def partial(name, locals = {})
render("_#{name}", locals)
end
# Renders one or more templates with the given locals.
#
# Example:
# <%= render :template => "some_template" %>
#
# # or by shorthand:
#
# <%= render :some_template %>
#
# # or render multiple templates with a single call:
#
# <%= render %w(some_template some_other_template) %>
#
def render(options, locals = {})
if [String, Symbol].include?(options.class)
options = {:templates => [options]}
end
if name = options[:template]
options[:templates] = [name]
end
if glob = options[:glob]
options[:templates] = templates_by_glob(glob)
end
options[:locals] = locals unless locals.empty?
render_as_array(options).join(options[:join].to_s)
end
def render_as_array(options)
options[:templates].map do |name|
template_file = find_template_file!([name], template_extensions)
Texas::Template.create(template_file, build).__run__(options[:locals])
end
end
# Returns all extensions the Template::Runner can handle.
#
# Example:
# template_extensions
# # => ["tex", "tex.erb", "md", "md.erb"]
#
def template_extensions
Texas::Template.known_extensions
end
# Returns all templates in the current template's path matching the given glob
#
# Example:
# # Given the following contents directory:
# #
# # contents/
# # _some_partial.tex.erb
# # contents.tex.erb
# # other_latex.tex
# # other_markdown.md.erb
# # some_template.tex.erb
# #
#
# templates_by_glob("*.tex.erb")
# # => ["_some_partial", "contents", "other_markdown", "some_template"]
#
def templates_by_glob(glob = "*")
files = Dir[File.join(__path__, glob)]
templates = files.map do |f|
Texas::Template.basename(f).gsub(__path__, '')
end
templates.uniq.sort
end
end
end
end
end
| 31.921687 | 117 | 0.519909 |
01ad38e94debc259e234377a41e3f40093a2542a | 77 | json.users @users do |user|
json.partial! 'v1/users/model', user: user
end
| 19.25 | 44 | 0.701299 |
038d1cf2bfd610e28a075e39f494bcf878b023f3 | 1,660 | #
# Be sure to run `pod lib lint SNDataBinding.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'SNDataBinding'
s.version = '0.1.1'
s.summary = 'Swift data binding using storyboard'
s.swift_version = '4.0'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/ahmedAlmasri/SNDataBinding'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'ahmedAlmasri' => '[email protected]' }
s.source = { :git => 'https://github.com/ahmedAlmasri/SNDataBinding.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'SNDataBinding/Classes/**/*'
# s.resource_bundles = {
# 'SNDataBinding' => ['SNDataBinding/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 37.727273 | 110 | 0.643373 |
878f746a9bd020728c247bd980b02dcf49b6a649 | 280 | class AddHabtmReviewersToCollection < ActiveRecord::Migration[6.0]
def change
remove_column :collections, :reviewers, :string
create_join_table :collections, :users, table_name: :reviewers do |t|
t.index [:collection_id, :user_id], unique: true
end
end
end
| 28 | 73 | 0.732143 |
e87ef3d89dabc63b057cbe1b4bf52ae32cae0d33 | 2,271 | class NodeAT10 < Formula
desc "Platform built on V8 to build network applications"
homepage "https://nodejs.org/"
url "https://nodejs.org/dist/v10.15.0/node-v10.15.0.tar.gz"
sha256 "dbe467e3dabb6854fcb0cd96e04082268cb1e313ce97a4b7100b2ed152b0a0ab"
bottle do
cellar :any
sha256 "5a411a2295f6040438ad96a572bcaeb587b168bd5b5414b298965293ec73be34" => :mojave
sha256 "b80b682c346c4bb9b398ed4a085d6d8ec5eac0755ba2a6714b6df448e46a0277" => :high_sierra
sha256 "555b29c365df7c2903e5f98d1802826194bfa48b0607299a3d70cb657578cb50" => :sierra
end
keg_only :versioned_formula
depends_on "pkg-config" => :build
depends_on "python@2" => :build
depends_on "icu4c"
# Per upstream - "Need g++ 4.8 or clang++ 3.4".
fails_with :clang if MacOS.version <= :snow_leopard
fails_with :gcc_4_2
("4.3".."4.7").each do |n|
fails_with :gcc => n
end
def install
system "./configure", "--prefix=#{prefix}", "--with-intl=system-icu"
system "make", "install"
end
def post_install
(lib/"node_modules/npm/npmrc").atomic_write("prefix = #{HOMEBREW_PREFIX}\n")
end
test do
path = testpath/"test.js"
path.write "console.log('hello');"
output = shell_output("#{bin}/node #{path}").strip
assert_equal "hello", output
output = shell_output("#{bin}/node -e 'console.log(new Intl.NumberFormat(\"en-EN\").format(1234.56))'").strip
assert_equal "1,234.56", output
output = shell_output("#{bin}/node -e 'console.log(new Intl.NumberFormat(\"de-DE\").format(1234.56))'").strip
assert_equal "1.234,56", output
# make sure npm can find node
ENV.prepend_path "PATH", opt_bin
ENV.delete "NVM_NODEJS_ORG_MIRROR"
assert_equal which("node"), opt_bin/"node"
assert_predicate bin/"npm", :exist?, "npm must exist"
assert_predicate bin/"npm", :executable?, "npm must be executable"
npm_args = ["-ddd", "--cache=#{HOMEBREW_CACHE}/npm_cache", "--build-from-source"]
system "#{bin}/npm", *npm_args, "install", "npm@latest"
system "#{bin}/npm", *npm_args, "install", "bignum"
assert_predicate bin/"npx", :exist?, "npx must exist"
assert_predicate bin/"npx", :executable?, "npx must be executable"
assert_match "< hello >", shell_output("#{bin}/npx cowsay hello")
end
end
| 36.629032 | 113 | 0.692206 |
4ac45058ed27a5181276ab2888d731550739c5f1 | 1,672 | #
# Be sure to run `pod lib lint ImoDynamicTableView.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = "ImoDynamicTableView"
s.version = "1.1.273"
s.summary = "ImoDynamicTableView is useful for working easy with tableviews and making tables with dynamic content based on cell sources (models)"
s.description = <<-DESC
ImoDynamicTableView is useful for working easy with tableviews and making tables with dynamic content based on cell sources (models), with ImoDynamicTableView class you can make easy your work with tableView content and dinamicaly manipulate them.
DESC
s.homepage = "https://github.com/imodeveloper/ImoDynamicTableView"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "imodeveloper" => "[email protected]" }
s.source = { :git => "https://github.com/imodeveloper/ImoDynamicTableView.git", :tag => "1.1.273" }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.platform = :ios, '7.1'
s.requires_arc = true
s.source_files = 'Pod/Classes/*.*'
s.resources = 'Pod/Resources/*.*'
s.resource_bundles = {
'ImoDynamicTableView' => ['Pod/Assets/*.*']
}
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 42.871795 | 270 | 0.664474 |
91459b014e0143572055d6486c9af9ad67009f6e | 4,783 | require 'test_helper'
module ActiveModel
class Serializer
module Adapter
class PolymorphicTest < ActiveSupport::TestCase
setup do
@employee = Employee.new(id: 42, name: 'Zoop Zoopler', email: '[email protected]')
@picture = @employee.pictures.new(id: 1, title: 'headshot-1.jpg')
@picture.imageable = @employee
end
def serialization(resource, adapter = :attributes)
serializable(resource, adapter: adapter, serializer: PolymorphicBelongsToSerializer).as_json
end
def tag_serialization(adapter = :attributes)
tag = PolyTag.new(id: 1, phrase: 'foo')
tag.object_tags << ObjectTag.new(id: 1, poly_tag_id: 1, taggable: @employee)
tag.object_tags << ObjectTag.new(id: 5, poly_tag_id: 1, taggable: @picture)
serializable(tag, adapter: adapter, serializer: PolymorphicTagSerializer, include: '*.*').as_json
end
def test_attributes_serialization
expected =
{
id: 1,
title: 'headshot-1.jpg',
imageable: {
type: 'employee',
employee: {
id: 42,
name: 'Zoop Zoopler'
}
}
}
assert_equal(expected, serialization(@picture))
end
def test_attributes_serialization_without_polymorphic_association
expected =
{
id: 2,
title: 'headshot-2.jpg',
imageable: nil
}
simple_picture = Picture.new(id: 2, title: 'headshot-2.jpg')
assert_equal(expected, serialization(simple_picture))
end
def test_attributes_serialization_with_polymorphic_has_many
expected =
{
id: 1,
phrase: 'foo',
object_tags: [
{
id: 1,
taggable: {
type: 'employee',
employee: {
id: 42
}
}
},
{
id: 5,
taggable: {
type: 'picture',
picture: {
id: 1
}
}
}
]
}
assert_equal(expected, tag_serialization)
end
def test_json_serialization
expected =
{
picture: {
id: 1,
title: 'headshot-1.jpg',
imageable: {
type: 'employee',
employee: {
id: 42,
name: 'Zoop Zoopler'
}
}
}
}
assert_equal(expected, serialization(@picture, :json))
end
def test_json_serialization_without_polymorphic_association
expected =
{
picture: {
id: 2,
title: 'headshot-2.jpg',
imageable: nil
}
}
simple_picture = Picture.new(id: 2, title: 'headshot-2.jpg')
assert_equal(expected, serialization(simple_picture, :json))
end
def test_json_serialization_with_polymorphic_has_many
expected =
{
poly_tag: {
id: 1,
phrase: 'foo',
object_tags: [
{
id: 1,
taggable: {
type: 'employee',
employee: {
id: 42
}
}
},
{
id: 5,
taggable: {
type: 'picture',
picture: {
id: 1
}
}
}
]
}
}
assert_equal(expected, tag_serialization(:json))
end
def test_json_api_serialization
expected =
{
data: {
id: '1',
type: 'pictures',
attributes: {
title: 'headshot-1.jpg'
},
relationships: {
imageable: {
data: {
id: '42',
type: 'employees'
}
}
}
}
}
assert_equal(expected, serialization(@picture, :json_api))
end
end
end
end
end
| 27.80814 | 107 | 0.400794 |
38cfcac1e6be319bffa9d6dca15895b70103e139 | 1,452 | # frozen_string_literal: true
module RailsWorkflow
# Default error builder. Can be changed in configuration.
# Manages errors building
class ErrorBuilder
attr_accessor :exception, :context
def self.handle(exception, context)
new(exception, context).handle
end
def initialize(exception, context)
@exception = exception
@context = context
end
def handle
create_error(context)
process_parent(target)
end
private
def create_error(context)
error = RailsWorkflow::Error.create(
parent: target,
message: exception.message.first(250),
stack_trace: exception.backtrace.join("<br/>\n")
)
error.create_context(data: context)
end
# Changing custom process or operation classes to default classes.
# If we store error with a custom class and somebody will delete
# or rename this class - we will not be able to load error.
def target
@target ||= begin
parent = context[:parent]
if parent.is_a? RailsWorkflow::Operation
parent.becomes(RailsWorkflow::Operation)
elsif parent.is_a? RailsWorkflow::Process
parent.becomes(RailsWorkflow::Process)
end
end
end
def process_parent(subject)
return if subject.nil?
subject.status = Status::ERROR
subject.save!
process_parent(subject.parent) if subject.parent.present?
end
end
end
| 25.034483 | 70 | 0.666667 |
ff206a4358ddafcd08af515f8f0700af9d710e3e | 56 | Rails.application.routes.draw do
get 'dummy/show'
end
| 14 | 32 | 0.767857 |
1a1a690a58c6eb816d1f5d482e8e50b7a624c31b | 49,709 | module ActiveShipping
class UPS < Carrier
self.retry_safe = true
self.ssl_version = :TLSv1_2
cattr_accessor :default_options
cattr_reader :name
@@name = "UPS"
TEST_URL = 'https://wwwcie.ups.com'
LIVE_URL = 'https://onlinetools.ups.com'
RESOURCES = {
:rates => 'ups.app/xml/Rate',
:track => 'ups.app/xml/Track',
:ship_confirm => 'ups.app/xml/ShipConfirm',
:ship_accept => 'ups.app/xml/ShipAccept',
:delivery_dates => 'ups.app/xml/TimeInTransit',
:void => 'ups.app/xml/Void',
:validate_address => 'ups.app/xml/XAV'
}
PICKUP_CODES = HashWithIndifferentAccess.new(
:daily_pickup => "01",
:customer_counter => "03",
:one_time_pickup => "06",
:on_call_air => "07",
:suggested_retail_rates => "11",
:letter_center => "19",
:air_service_center => "20"
)
CUSTOMER_CLASSIFICATIONS = HashWithIndifferentAccess.new(
:wholesale => "01",
:occasional => "03",
:retail => "04"
)
# these are the defaults described in the UPS API docs,
# but they don't seem to apply them under all circumstances,
# so we need to take matters into our own hands
DEFAULT_CUSTOMER_CLASSIFICATIONS = Hash.new do |hash, key|
hash[key] = case key.to_sym
when :daily_pickup then :wholesale
when :customer_counter then :retail
else
:occasional
end
end
DEFAULT_SERVICES = {
"01" => "UPS Next Day Air",
"02" => "UPS Second Day Air",
"03" => "UPS Ground",
"07" => "UPS Worldwide Express",
"08" => "UPS Worldwide Expedited",
"11" => "UPS Standard",
"12" => "UPS Three-Day Select",
"13" => "UPS Next Day Air Saver",
"14" => "UPS Next Day Air Early A.M.",
"54" => "UPS Worldwide Express Plus",
"59" => "UPS Second Day Air A.M.",
"65" => "UPS Saver",
"82" => "UPS Today Standard",
"83" => "UPS Today Dedicated Courier",
"84" => "UPS Today Intercity",
"85" => "UPS Today Express",
"86" => "UPS Today Express Saver",
"92" => "UPS SurePost (USPS) < 1lb",
"93" => "UPS SurePost (USPS) > 1lb",
"94" => "UPS SurePost (USPS) BPM",
"95" => "UPS SurePost (USPS) Media",
}
CANADA_ORIGIN_SERVICES = {
"01" => "UPS Express",
"02" => "UPS Expedited",
"14" => "UPS Express Early A.M."
}
MEXICO_ORIGIN_SERVICES = {
"07" => "UPS Express",
"08" => "UPS Expedited",
"54" => "UPS Express Plus"
}
EU_ORIGIN_SERVICES = {
"07" => "UPS Express",
"08" => "UPS Expedited"
}
OTHER_NON_US_ORIGIN_SERVICES = {
"07" => "UPS Express"
}
RETURN_SERVICE_CODES = {
"2" => "UPS Print and Mail (PNM)",
"3" => "UPS Return Service 1-Attempt (RS1)",
"5" => "UPS Return Service 3-Attempt (RS3)",
"8" => "UPS Electronic Return Label (ERL)",
"9" => "UPS Print Return Label (PRL)",
"10" => "UPS Exchange Print Return Label",
"11" => "UPS Pack & Collect Service 1-Attempt Box 1",
"12" => "UPS Pack & Collect Service 1-Attempt Box 2",
"13" => "UPS Pack & Collect Service 1-Attempt Box 3",
"14" => "UPS Pack & Collect Service 1-Attempt Box 4",
"15" => "UPS Pack & Collect Service 1-Attempt Box 5",
"16" => "UPS Pack & Collect Service 3-Attempt Box 1",
"17" => "UPS Pack & Collect Service 3-Attempt Box 2",
"18" => "UPS Pack & Collect Service 3-Attempt Box 3",
"19" => "UPS Pack & Collect Service 3-Attempt Box 4",
"20" => "UPS Pack & Collect Service 3-Attempt Box 5",
}
TRACKING_STATUS_CODES = HashWithIndifferentAccess.new(
'I' => :in_transit,
'D' => :delivered,
'X' => :exception,
'P' => :pickup,
'M' => :manifest_pickup
)
# From http://en.wikipedia.org/w/index.php?title=European_Union&oldid=174718707 (Current as of November 30, 2007)
EU_COUNTRY_CODES = %w(GB AT BE BG CY CZ DK EE FI FR DE GR HU IE IT LV LT LU MT NL PL PT RO SK SI ES SE)
US_TERRITORIES_TREATED_AS_COUNTRIES = %w(AS FM GU MH MP PW PR VI)
IMPERIAL_COUNTRIES = %w(US LR MM)
COUNTRY_MAPPING = {
'XK' => 'KV'
}.freeze
DEFAULT_SERVICE_NAME_TO_CODE = Hash[UPS::DEFAULT_SERVICES.to_a.map(&:reverse)]
DEFAULT_SERVICE_NAME_TO_CODE['UPS 2nd Day Air'] = "02"
DEFAULT_SERVICE_NAME_TO_CODE['UPS 3 Day Select'] = "12"
DEFAULT_SERVICE_NAME_TO_CODE['UPS Next Day Air Early'] = "14"
SHIPMENT_DELIVERY_CONFIRMATION_CODES = {
delivery_confirmation_signature_required: 1,
delivery_confirmation_adult_signature_required: 2
}
PACKAGE_DELIVERY_CONFIRMATION_CODES = {
delivery_confirmation: 1,
delivery_confirmation_signature_required: 2,
delivery_confirmation_adult_signature_required: 3,
usps_delivery_confirmation: 4
}
def requirements
[:key, :login, :password]
end
def find_rates(origin, destination, packages, options = {})
origin, destination = upsified_location(origin), upsified_location(destination)
options = @options.merge(options)
packages = Array(packages)
access_request = build_access_request
rate_request = build_rate_request(origin, destination, packages, options)
response = commit(:rates, save_request(access_request + rate_request), options[:test])
parse_rate_response(origin, destination, packages, response, options)
end
# Retrieves tracking information for a previous shipment
#
# @note Override with whatever you need to get a shipping label
#
# @param tracking_number [String] The unique identifier of the shipment to track.
# @param options [Hash] Carrier-specific parameters.
# @option options [Boolean] :mail_innovations Set this to true to track a Mail Innovations Package
# @return [ActiveShipping::TrackingResponse] The response from the carrier. This
# response should a list of shipment tracking events if successful.
def find_tracking_info(tracking_number, options = {})
options = @options.merge(options)
access_request = build_access_request
tracking_request = build_tracking_request(tracking_number, options)
response = commit(:track, save_request(access_request + tracking_request), options[:test])
parse_tracking_response(response, options)
end
def create_shipment(origin, destination, packages, options = {})
options = @options.merge(options)
packages = Array(packages)
access_request = build_access_request
# STEP 1: Confirm. Validation step, important for verifying price.
confirm_request = build_shipment_request(origin, destination, packages, options)
logger.debug(confirm_request) if logger
confirm_response = commit(:ship_confirm, save_request(access_request + confirm_request), (options[:test] || false))
logger.debug(confirm_response) if logger
# ... now, get the digest, it's needed to get the label. In theory,
# one could make decisions based on the price or some such to avoid
# surprises. This also has *no* error handling yet.
xml = parse_ship_confirm(confirm_response)
success = response_success?(xml)
message = response_message(xml)
raise ActiveShipping::ResponseContentError, StandardError.new(message) unless success
digest = response_digest(xml)
# STEP 2: Accept. Use shipment digest in first response to get the actual label.
accept_request = build_accept_request(digest, options)
logger.debug(accept_request) if logger
accept_response = commit(:ship_accept, save_request(access_request + accept_request), (options[:test] || false))
logger.debug(accept_response) if logger
# ...finally, build a map from the response that contains
# the label data and tracking information.
parse_ship_accept(accept_response)
end
def get_delivery_date_estimates(origin, destination, packages, pickup_date=Date.current, options = {})
origin, destination = upsified_location(origin), upsified_location(destination)
options = @options.merge(options)
packages = Array(packages)
access_request = build_access_request
dates_request = build_delivery_dates_request(origin, destination, packages, pickup_date, options)
response = commit(:delivery_dates, save_request(access_request + dates_request), (options[:test] || false))
parse_delivery_dates_response(origin, destination, packages, response, options)
end
def void_shipment(tracking, options={})
options = @options.merge(options)
access_request = build_access_request
void_request = build_void_request(tracking)
response = commit(:void, save_request(access_request + void_request), (options[:test] || false))
parse_void_response(response, options)
end
def maximum_address_field_length
# http://www.ups.com/worldshiphelp/WS12/ENU/AppHelp/CONNECT/Shipment_Data_Field_Descriptions.htm
35
end
# Validates a location with the Street Level Validation service
#
# @param location [Location] The Location to validate
# @return [ActiveShipping::AddressValidationResponse] The response from the validation endpoint. This
# response will determine if the given address is valid or not, its commercial/residential classification,
# and the cleaned-up address and/or potential candidate addresses if the passed location can't be found
def validate_address(location, options = {})
location = upsified_location(location)
options = @options.merge(options)
access_request = build_access_request
address_validation_request = build_address_validation_request(location, options)
response = commit(:validate_address, save_request(access_request + address_validation_request), options[:test])
parse_address_validation_response(location, response, options)
end
protected
def upsified_location(location)
if location.country_code == 'US' && US_TERRITORIES_TREATED_AS_COUNTRIES.include?(location.state)
atts = {:country => location.state}
[:zip, :city, :address1, :address2, :address3, :phone, :fax, :address_type].each do |att|
atts[att] = location.send(att)
end
Location.new(atts)
else
location
end
end
def build_access_request
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.AccessRequest do
xml.AccessLicenseNumber(@options[:key])
xml.UserId(@options[:login])
xml.Password(@options[:password])
end
end
xml_builder.to_xml
end
# Builds an XML node to request UPS shipping rates for the given packages
#
# @param origin [ActiveShipping::Location] Where the shipment will originate from
# @param destination [ActiveShipping::Location] Where the package will go
# @param packages [Array<ActiveShipping::Package>] The list of packages that will
# be in the shipment
# @options options [Hash] rate-specific options
# @return [ActiveShipping::RateResponse] The response from the UPS, which
# includes 0 or more rate estimates for different shipping products
#
# options:
# * service: name of the service
# * pickup_type: symbol for PICKUP_CODES
# * customer_classification: symbol for CUSTOMER_CLASSIFICATIONS
# * shipper: who is sending the package and where it should be returned
# if it is undeliverable.
# * imperial: if truthy, measurements will use the metric system
# * negotiated_rates: if truthy, negotiated rates will be requested from
# UPS. Only valid if shipper account has negotiated rates.
def build_rate_request(origin, destination, packages, options = {})
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.RatingServiceSelectionRequest do
xml.Request do
xml.RequestAction('Rate')
xml.RequestOption((options[:service].nil?) ? 'Shop' : 'Rate')
end
pickup_type = options[:pickup_type] || :daily_pickup
xml.PickupType do
xml.Code(PICKUP_CODES[pickup_type])
# not implemented: PickupType/PickupDetails element
end
cc = options[:customer_classification] || DEFAULT_CUSTOMER_CLASSIFICATIONS[pickup_type]
xml.CustomerClassification do
xml.Code(CUSTOMER_CLASSIFICATIONS[cc])
end
xml.Shipment do
# not implemented: Shipment/Description element
build_location_node(xml, 'Shipper', (options[:shipper] || origin), options)
build_location_node(xml, 'ShipTo', destination, options)
build_location_node(xml, 'ShipFrom', origin, options) if options[:shipper] && options[:shipper] != origin
# not implemented: * Shipment/ShipmentWeight element
# * Shipment/ReferenceNumber element
# * Shipment/Service element
# * Shipment/PickupDate element
# * Shipment/ScheduledDeliveryDate element
# * Shipment/ScheduledDeliveryTime element
# * Shipment/AlternateDeliveryTime element
# * Shipment/DocumentsOnly element
unless options[:service].nil?
xml.Service do
xml.Code(options[:service])
end
end
Array(packages).each do |package|
options[:imperial] ||= IMPERIAL_COUNTRIES.include?(origin.country_code(:alpha2))
build_package_node(xml, package, options)
end
# not implemented: * Shipment/ShipmentServiceOptions element
if options[:negotiated_rates]
xml.RateInformation do
xml.NegotiatedRatesIndicator
end
end
end
end
end
xml_builder.to_xml
end
# Build XML node to request a shipping label for the given packages.
#
# options:
# * origin_account: account number for the shipper
# * customer_context: a "guid like substance" -- according to UPS
# * shipper: who is sending the package and where it should be returned
# if it is undeliverable.
# * ship_from: where the package is picked up.
# * service_code: default to '03'
# * saturday_delivery: any truthy value causes this element to exist
# * optional_processing: 'validate' (blank) or 'nonvalidate' or blank
# * paperless_invoice: set to truthy if using paperless invoice to ship internationally
# * terms_of_shipment: used with paperless invoice to specify who pays duties and taxes
# * reference_numbers: Array of hashes with :value => a reference number value and optionally :code => reference number type
# * prepay: if truthy the shipper will be bill immediatly. Otherwise the shipper is billed when the label is used.
# * negotiated_rates: if truthy negotiated rates will be requested from ups. Only valid if shipper account has negotiated rates.
# * delivery_confirmation: Can be set to any key from SHIPMENT_DELIVERY_CONFIRMATION_CODES. Can also be set on package level via package.options
# * bill_third_party: When truthy, bill an account other than the shipper's. Specified by billing_(account, zip and country)
def build_shipment_request(origin, destination, packages, options={})
packages = Array(packages)
shipper = options[:shipper] || origin
options[:international] = origin.country.name != destination.country.name
options[:imperial] ||= IMPERIAL_COUNTRIES.include?(shipper.country_code(:alpha2))
options[:return] = options[:return_service_code].present?
options[:reason_for_export] ||= ("RETURN" if options[:return])
if allow_package_level_reference_numbers(origin, destination)
if options[:reference_numbers]
packages.each do |package|
package.options[:reference_numbers] = options[:reference_numbers]
end
end
options[:reference_numbers] = []
end
handle_delivery_confirmation_options(origin, destination, packages, options)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.ShipmentConfirmRequest do
xml.Request do
xml.RequestAction('ShipConfirm')
# Required element cotnrols level of address validation.
xml.RequestOption(options[:optional_processing] || 'validate')
# Optional element to identify transactions between client and server.
if options[:customer_context]
xml.TransactionReference do
xml.CustomerContext(options[:customer_context])
end
end
end
xml.Shipment do
xml.Service do
xml.Code(options[:service_code] || '03')
end
build_location_node(xml, 'ShipTo', destination, options)
build_location_node(xml, 'ShipFrom', origin, options)
# Required element. The company whose account is responsible for the label(s).
build_location_node(xml, 'Shipper', shipper, options)
if options[:saturday_delivery]
xml.ShipmentServiceOptions do
xml.SaturdayDelivery
end
end
if options[:negotiated_rates]
xml.RateInformation do
xml.NegotiatedRatesIndicator
end
end
Array(options[:reference_numbers]).each do |reference_num_info|
xml.ReferenceNumber do
xml.Code(reference_num_info[:code] || "")
xml.Value(reference_num_info[:value])
end
end
if options[:prepay]
xml.PaymentInformation do
xml.Prepaid do
build_billing_info_node(xml, options)
end
end
else
xml.ItemizedPaymentInformation do
xml.ShipmentCharge do
# Type '01' means 'Transportation'
# This node specifies who will be billed for transportation.
xml.Type('01')
build_billing_info_node(xml, options)
end
if options[:terms_of_shipment] == 'DDP' && options[:international]
# DDP stands for delivery duty paid and means the shipper will cover duties and taxes
# Otherwise UPS will charge the receiver
xml.ShipmentCharge do
xml.Type('02') # Type '02' means 'Duties and Taxes'
build_billing_info_node(xml, options.merge(bill_to_consignee: true))
end
end
end
end
if options[:international]
unless options[:return]
build_location_node(xml, 'SoldTo', options[:sold_to] || destination, options)
end
if origin.country_code(:alpha2) == 'US' && ['CA', 'PR'].include?(destination.country_code(:alpha2))
# Required for shipments from the US to Puerto Rico or Canada
xml.InvoiceLineTotal do
total_value = packages.inject(0) {|sum, package| sum + (package.value || 0)}
xml.MonetaryValue(total_value)
end
end
contents_description = packages.map {|p| p.options[:description]}.compact.join(',')
unless contents_description.empty?
xml.Description(contents_description)
end
end
if options[:return]
xml.ReturnService do
xml.Code(options[:return_service_code])
end
end
xml.ShipmentServiceOptions do
if delivery_confirmation = options[:delivery_confirmation]
xml.DeliveryConfirmation do
xml.DCISType(SHIPMENT_DELIVERY_CONFIRMATION_CODES[delivery_confirmation])
end
end
if options[:international]
build_international_forms(xml, origin, destination, packages, options)
end
end
# A request may specify multiple packages.
packages.each do |package|
build_package_node(xml, package, options)
end
end
# Supported label formats:
# GIF, EPL, ZPL, STARPL and SPL
label_format = options[:label_format] ? options[:label_format].upcase : 'GIF'
label_size = options[:label_size] ? options[:label_size] : [4, 6]
xml.LabelSpecification do
xml.LabelStockSize do
xml.Height(label_size[0])
xml.Width(label_size[1])
end
xml.LabelPrintMethod do
xml.Code(label_format)
end
# API requires these only if returning a GIF formated label
if label_format == 'GIF'
xml.HTTPUserAgent('Mozilla/4.5')
xml.LabelImageFormat(label_format) do
xml.Code(label_format)
end
end
end
end
end
xml_builder.to_xml
end
def build_delivery_dates_request(origin, destination, packages, pickup_date, options={})
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.TimeInTransitRequest do
xml.Request do
xml.RequestAction('TimeInTransit')
end
build_address_artifact_format_location(xml, 'TransitFrom', origin)
build_address_artifact_format_location(xml, 'TransitTo', destination)
xml.ShipmentWeight do
xml.UnitOfMeasurement do
xml.Code(options[:imperial] ? 'LBS' : 'KGS')
end
value = packages.inject(0) do |sum, package|
sum + (options[:imperial] ? package.lbs.to_f : package.kgs.to_f )
end
xml.Weight([value.round(3), 0.1].max)
end
if packages.any? {|package| package.value.present?}
xml.InvoiceLineTotal do
xml.CurrencyCode('USD')
total_value = packages.inject(0) {|sum, package| sum + package.value.to_i}
xml.MonetaryValue(total_value)
end
end
xml.PickupDate(pickup_date.strftime('%Y%m%d'))
end
end
xml_builder.to_xml
end
def build_void_request(tracking)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.VoidShipmentRequest do
xml.Request do
xml.RequestAction('Void')
end
xml.ShipmentIdentificationNumber(tracking)
end
end
xml_builder.to_xml
end
def build_international_forms(xml, origin, destination, packages, options)
if options[:paperless_invoice]
xml.InternationalForms do
xml.FormType('01') # 01 is "Invoice"
xml.InvoiceDate(options[:invoice_date] || Date.today.strftime('%Y%m%d'))
xml.ReasonForExport(options[:reason_for_export] || 'SALE')
xml.CurrencyCode(options[:currency_code] || 'USD')
if options[:terms_of_shipment]
xml.TermsOfShipment(options[:terms_of_shipment])
end
packages.each do |package|
xml.Product do |xml|
xml.Description(package.options[:description])
xml.CommodityCode(package.options[:commodity_code])
xml.OriginCountryCode(origin.country_code(:alpha2))
xml.Unit do |xml|
xml.Value(package.value / (package.options[:item_count] || 1))
xml.Number((package.options[:item_count] || 1))
xml.UnitOfMeasurement do |xml|
# NMB = number. You can specify units in barrels, boxes, etc. Codes are in the api docs.
xml.Code(package.options[:unit_of_item_count] || 'NMB')
end
end
end
end
end
end
end
def build_accept_request(digest, options = {})
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.ShipmentAcceptRequest do
xml.Request do
xml.RequestAction('ShipAccept')
end
xml.ShipmentDigest(digest)
end
end
xml_builder.to_xml
end
def build_tracking_request(tracking_number, options = {})
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.TrackRequest do
xml.TrackingOption(options[:tracking_option]) if options[:tracking_option]
xml.Request do
xml.RequestAction('Track')
xml.RequestOption('1')
end
xml.TrackingNumber(tracking_number.to_s)
xml.TrackingOption('03') if options[:mail_innovations]
end
end
xml_builder.to_xml
end
def build_location_node(xml, name, location, options = {})
# not implemented: * Shipment/Shipper/Name element
# * Shipment/(ShipTo|ShipFrom)/CompanyName element
# * Shipment/(Shipper|ShipTo|ShipFrom)/AttentionName element
# * Shipment/(Shipper|ShipTo|ShipFrom)/TaxIdentificationNumber element
xml.public_send(name) do
if shipper_name = (location.name || location.company_name || options[:origin_name])
xml.Name(shipper_name)
end
xml.PhoneNumber(location.phone.gsub(/[^\d]/, '')) unless location.phone.blank?
xml.FaxNumber(location.fax.gsub(/[^\d]/, '')) unless location.fax.blank?
if name == 'Shipper' and (origin_account = options[:origin_account] || @options[:origin_account])
xml.ShipperNumber(origin_account)
elsif name == 'ShipTo' and (destination_account = options[:destination_account] || @options[:destination_account])
xml.ShipperAssignedIdentificationNumber(destination_account)
end
if name = (location.company_name || location.name || options[:origin_name])
xml.CompanyName(name)
end
if phone = location.phone
xml.PhoneNumber(phone)
end
if attn = location.name
xml.AttentionName(attn)
end
xml.Address do
xml.AddressLine1(location.address1) unless location.address1.blank?
xml.AddressLine2(location.address2) unless location.address2.blank?
xml.AddressLine3(location.address3) unless location.address3.blank?
xml.City(location.city) unless location.city.blank?
xml.StateProvinceCode(location.province) unless location.province.blank?
# StateProvinceCode required for negotiated rates but not otherwise, for some reason
xml.PostalCode(location.postal_code) unless location.postal_code.blank?
xml.CountryCode(mapped_country_code(location.country_code(:alpha2))) unless location.country_code(:alpha2).blank?
xml.ResidentialAddressIndicator(true) unless location.commercial? # the default should be that UPS returns residential rates for destinations that it doesn't know about
# not implemented: Shipment/(Shipper|ShipTo|ShipFrom)/Address/ResidentialAddressIndicator element
end
end
end
def build_address_artifact_format_location(xml, name, location)
xml.public_send(name) do
xml.AddressArtifactFormat do
xml.PoliticalDivision2(location.city)
xml.PoliticalDivision1(location.province)
xml.CountryCode(mapped_country_code(location.country_code(:alpha2)))
xml.PostcodePrimaryLow(location.postal_code)
xml.ResidentialAddressIndicator(true) unless location.commercial?
end
end
end
# Emits one <Package> element for a rating/shipping request: packaging type,
# dimensions, weight, reference numbers and package-level service options
# (delivery confirmation, dry ice, insured value).
# xml:: a Nokogiri::XML::Builder in progress
# package:: the package being quoted/shipped
# options:: :imperial toggles IN/LBS vs CM/KGS; :return marks a return
#           shipment; :service/:service_code select the UPS service.
# NOTE: element order matters to UPS's schema — do not reorder these calls.
def build_package_node(xml, package, options = {})
xml.Package do
# not implemented: * Shipment/Package/PackagingType element
#return requires description
if options[:return]
contents_description = package.options[:description]
xml.Description(contents_description) if contents_description
end
# '02' = customer-supplied packaging.
xml.PackagingType do
xml.Code('02')
end
xml.Dimensions do
xml.UnitOfMeasurement do
xml.Code(options[:imperial] ? 'IN' : 'CM')
end
[:length, :width, :height].each do |axis|
# Round to 3 decimals; UPS rejects dimensions below 0.1, hence the floor.
value = ((options[:imperial] ? package.inches(axis) : package.cm(axis)).to_f * 1000).round / 1000.0 # 3 decimals
xml.public_send(axis.to_s.capitalize, [value, 0.1].max)
end
end
xml.PackageWeight do
if (options[:service] || options[:service_code]) == DEFAULT_SERVICE_NAME_TO_CODE["UPS SurePost (USPS) < 1lb"]
# SurePost < 1lb uses OZS, not LBS
code = options[:imperial] ? 'OZS' : 'KGS'
weight = options[:imperial] ? package.oz : package.kgs
else
code = options[:imperial] ? 'LBS' : 'KGS'
weight = options[:imperial] ? package.lbs : package.kgs
end
xml.UnitOfMeasurement do
xml.Code(code)
end
# Same 3-decimal rounding and 0.1 minimum as the dimensions above.
value = ((weight).to_f * 1000).round / 1000.0 # 3 decimals
xml.Weight([value, 0.1].max)
end
# Package-level reference numbers (only valid on some lanes; see
# allow_package_level_reference_numbers).
Array(package.options[:reference_numbers]).each do |reference_number_info|
xml.ReferenceNumber do
xml.Code(reference_number_info[:code] || "")
xml.Value(reference_number_info[:value])
end
end
xml.PackageServiceOptions do
if delivery_confirmation = package.options[:delivery_confirmation]
xml.DeliveryConfirmation do
xml.DCISType(PACKAGE_DELIVERY_CONFIRMATION_CODES[delivery_confirmation])
end
end
if dry_ice = package.options[:dry_ice]
xml.DryIce do
# 'CFR' (US domestic regulation set) unless the caller overrides.
xml.RegulationSet(dry_ice[:regulation_set] || 'CFR')
xml.DryIceWeight do
xml.UnitOfMeasurement do
xml.Code(options[:imperial] ? 'LBS' : 'KGS')
end
# Cannot be more than package weight.
# Should be more than 0.0.
# Valid characters are 0-9 and .(Decimal point).
# Limit to 1 digit after the decimal. The maximum length
# of the field is 5 including . and can hold up
# to 1 decimal place.
xml.Weight(dry_ice[:weight])
end
end
end
if package_value = package.options[:insured_value]
xml.InsuredValue do
xml.CurrencyCode(package.options[:currency] || 'USD')
xml.MonetaryValue(package_value.to_f)
end
end
end
# not implemented: * Shipment/Package/LargePackageIndicator element
# * Shipment/Package/AdditionalHandling element
end
end
# Emits billing information for a shipment request. Third-party billing
# (consignee or shipper variant) includes the billing account plus the
# third party's postal code and country; otherwise the shipper is billed
# against the origin account.
def build_billing_info_node(xml, options = {})
  if options[:bill_third_party]
    xml.BillThirdParty do
      # Pick the consignee or shipper variant of the third-party node.
      third_party_node = options[:bill_to_consignee] ? :BillThirdPartyConsignee : :BillThirdPartyShipper
      xml.public_send(third_party_node) do
        xml.AccountNumber(options[:billing_account])
        xml.ThirdParty do
          xml.Address do
            xml.PostalCode(options[:billing_zip])
            xml.CountryCode(mapped_country_code(options[:billing_country]))
          end
        end
      end
    end
  else
    xml.BillShipper do
      xml.AccountNumber(options[:origin_account])
    end
  end
end
# Parses +xml+ with Nokogiri and verifies the document's root element is
# +expected_root_tag+. Raises ActiveShipping::ResponseContentError (wrapping
# the original response body) on a missing/mismatched root or a syntax error.
def build_document(xml, expected_root_tag)
  parsed = Nokogiri.XML(xml)
  root = parsed.root
  if root.nil? || root.name != expected_root_tag
    raise ActiveShipping::ResponseContentError.new(StandardError.new('Invalid document'), xml)
  end
  parsed
rescue Nokogiri::XML::SyntaxError => e
  raise ActiveShipping::ResponseContentError.new(e, xml)
end
# Parses a RatingServiceSelectionResponse into a RateResponse with one
# RateEstimate per RatedShipment element.
# NOTE(review): when success is false, `rate_estimates` is nil (the local is
# only assigned inside the `if`), so RateResponse receives :rates => nil on
# failure — confirm RateResponse tolerates that.
def parse_rate_response(origin, destination, packages, response, options = {})
xml = build_document(response, 'RatingServiceSelectionResponse')
success = response_success?(xml)
message = response_message(xml)
if success
rate_estimates = xml.root.css('> RatedShipment').map do |rated_shipment|
service_code = rated_shipment.at('Service/Code').text
days_to_delivery = rated_shipment.at('GuaranteedDaysToDelivery').text.to_i
# UPS uses 0 to mean "no guarantee"; treat it as unknown.
days_to_delivery = nil if days_to_delivery == 0
warning_messages = rate_warning_messages(rated_shipment)
RateEstimate.new(origin, destination, @@name, service_name_for(origin, service_code),
:total_price => rated_shipment.at('TotalCharges/MonetaryValue').text.to_f,
:insurance_price => rated_shipment.at('ServiceOptionsCharges/MonetaryValue').text.to_f,
:currency => rated_shipment.at('TotalCharges/CurrencyCode').text,
:service_code => service_code,
:packages => packages,
:delivery_range => [timestamp_from_business_day(days_to_delivery)],
:negotiated_rate => rated_shipment.at('NegotiatedRates/NetSummaryCharges/GrandTotal/MonetaryValue').try(:text).to_f,
:messages => warning_messages
)
end
end
RateResponse.new(success, message, Hash.from_xml(response).values.first, :rates => rate_estimates, :xml => response, :request => last_request)
end
# Parses a TrackResponse into a TrackingResponse: overall status, shipment
# events (sorted by time, with a synthesized origin event when needed),
# delivery signature and scheduled/actual delivery dates.
# NOTE(review): `delivered, exception = false` parallel-assigns
# delivered=false and exception=nil, and neither is updated afterwards, so
# the response's :delivered/:exception flags are always falsy — confirm
# whether that is intended or a latent bug.
def parse_tracking_response(response, options = {})
xml = build_document(response, 'TrackResponse')
success = response_success?(xml)
message = response_message(xml)
if success
delivery_signature = nil
exception_event, scheduled_delivery_date, actual_delivery_date = nil
delivered, exception = false
shipment_events = []
first_shipment = xml.root.at('Shipment')
first_package = first_shipment.at('Package')
tracking_number = first_shipment.at_xpath('ShipmentIdentificationNumber | Package/TrackingNumber').text
# Build status hash
status_nodes = first_package.css('Activity > Status > StatusType')
if status_nodes.present?
# Prefer a delivery node
status_node = status_nodes.detect { |x| x.at('Code').text == 'D' }
status_node ||= status_nodes.first
status_code = status_node.at('Code').try(:text)
status_description = status_node.at('Description').try(:text)
status = TRACKING_STATUS_CODES[status_code]
# The description can indicate out-for-delivery even when the code does not.
if status_description =~ /out.*delivery/i
status = :out_for_delivery
end
end
origin, destination = %w(Shipper ShipTo).map do |location|
location_from_address_node(first_shipment.at("#{location}/Address"))
end
# Get scheduled delivery date
unless status == :delivered
scheduled_delivery_date_node = first_shipment.at('ScheduledDeliveryDate')
scheduled_delivery_date_node ||= first_shipment.at('RescheduledDeliveryDate')
if scheduled_delivery_date_node
scheduled_delivery_date = parse_ups_datetime(
:date => scheduled_delivery_date_node,
:time => nil
)
end
end
activities = first_package.css('> Activity')
unless activities.empty?
shipment_events = activities.map do |activity|
description = activity.at('Status/StatusType/Description').try(:text)
type_code = activity.at('Status/StatusType/Code').try(:text)
zoneless_time = parse_ups_datetime(:time => activity.at('Time'), :date => activity.at('Date'))
location = location_from_address_node(activity.at('ActivityLocation/Address'))
ShipmentEvent.new(description, zoneless_time, location, description, type_code)
end
shipment_events = shipment_events.sort_by(&:time)
# UPS will sometimes archive a shipment, stripping all shipment activity except for the delivery
# event (see test/fixtures/xml/delivered_shipment_without_events_tracking_response.xml for an example).
# This adds an origin event to the shipment activity in such cases.
if origin && !(shipment_events.count == 1 && status == :delivered)
first_event = shipment_events[0]
origin_event = ShipmentEvent.new(first_event.name, first_event.time, origin, first_event.message, first_event.type_code)
# Replace the first event when it is already in the origin's area;
# otherwise prepend a synthetic origin event.
if within_same_area?(origin, first_event.location)
shipment_events[0] = origin_event
else
shipment_events.unshift(origin_event)
end
end
# Has the shipment been delivered?
if status == :delivered
delivered_activity = activities.first
delivery_signature = delivered_activity.at('ActivityLocation/SignedForByName').try(:text)
if delivered_activity.at('Status/StatusType/Code').text == 'D'
actual_delivery_date = parse_ups_datetime(:date => delivered_activity.at('Date'), :time => delivered_activity.at('Time'))
end
unless destination
destination = shipment_events[-1].location
end
# Rewrite the final event so its location is the shipment destination.
shipment_events[-1] = ShipmentEvent.new(shipment_events.last.name, shipment_events.last.time, destination, shipment_events.last.message, shipment_events.last.type_code)
end
end
end
TrackingResponse.new(success, message, Hash.from_xml(response).values.first,
:carrier => @@name,
:xml => response,
:request => last_request,
:status => status,
:status_code => status_code,
:status_description => status_description,
:delivery_signature => delivery_signature,
:scheduled_delivery_date => scheduled_delivery_date,
:actual_delivery_date => actual_delivery_date,
:shipment_events => shipment_events,
:delivered => delivered,
:exception => exception,
:exception_event => exception_event,
:origin => origin,
:destination => destination,
:tracking_number => tracking_number)
end
# Parses a TimeInTransitResponse into a DeliveryDateEstimatesResponse with one
# DeliveryDateEstimate per ServiceSummary element.
# origin/destination:: the locations the estimates were requested for
# packages:: the packages quoted (passed through to the response)
# response:: the raw XML response body
def parse_delivery_dates_response(origin, destination, packages, response, options = {})
  xml = build_document(response, 'TimeInTransitResponse')
  success = response_success?(xml)
  message = response_message(xml)
  delivery_estimates = []
  if success
    xml.css('ServiceSummary').each do |service_summary|
      # Translate the Time in Transit Codes to the service codes used elsewhere
      service_name = service_summary.at('Service/Description').text
      service_code = UPS::DEFAULT_SERVICE_NAME_TO_CODE[service_name]
      date = Date.strptime(service_summary.at('EstimatedArrival/Date').text, '%Y-%m-%d')
      business_transit_days = service_summary.at('EstimatedArrival/BusinessTransitDays').text.to_i
      delivery_estimates << DeliveryDateEstimate.new(origin, destination, self.class.class_variable_get(:@@name),
                             service_name,
                             :service_code => service_code,
                             :guaranteed => service_summary.at('Guaranteed/Code').text == 'Y',
                             :date => date,
                             :business_transit_days => business_transit_days)
    end
  end
  # Return the response object directly. Previously it was assigned back to the
  # `response` parameter (`response = DeliveryDateEstimatesResponse.new(...)`),
  # which pointlessly shadowed the argument without changing behavior.
  DeliveryDateEstimatesResponse.new(success, message, Hash.from_xml(response).values.first, :delivery_estimates => delivery_estimates, :xml => response, :request => last_request)
end
# Parses a VoidShipmentResponse. Returns true when the void succeeded,
# otherwise raises ResponseError with the UPS failure message.
def parse_void_response(response, options = {})
  xml = build_document(response, 'VoidShipmentResponse')
  return true if response_success?(xml)
  raise ResponseError.new("Void shipment failed with message: #{response_message(xml)}")
end
# Builds the XML body for a street-level address validation request
# (RequestAction XAV, RequestOption 3 = validation + classification).
# location:: the address to validate
# options:: :customer_context is echoed back in the response for correlation
# Returns the serialized XML string.
def build_address_validation_request(location, options = {})
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.AddressValidationRequest do
xml.Request do
xml.RequestAction('XAV')
xml.RequestOption('3')
if options[:customer_context]
xml.TransactionReference do
xml.CustomerContext(options[:customer_context])
xml.XpciVersion("1.0")
end
end
end
xml.AddressKeyFormat do
xml.AddressLine(location.address1)
# A second AddressLine element is only emitted when present.
if location.address2.present?
xml.AddressLine(location.address2)
end
xml.PoliticalDivision2(location.city)
xml.PoliticalDivision1(location.state)
xml.PostcodePrimaryLow(location.postal_code)
xml.CountryCode(mapped_country_code(location.country_code))
end
end
end
xml_builder.to_xml
end
# Parses an AddressValidationResponse into an AddressValidationResponse
# object carrying validity (:valid/:ambiguous/:invalid/:unknown),
# classification (:commercial/:residential/:unknown) and candidate addresses.
# NOTE(review): `classification` is only assigned inside `if success`; on
# failure it is nil (Ruby creates the local at parse time), which appears
# intended. The final `response =` reassignment shadows the parameter
# without effect — candidate cleanup.
def parse_address_validation_response(address, response, options={})
xml = build_document(response, 'AddressValidationResponse')
success = response_success?(xml)
message = response_message(xml)
validity = nil
classification_code = nil
classification_description = nil
addresses = []
if success
if xml.at('AddressClassification/Code').present?
classification_code = xml.at('AddressClassification/Code').text
end
# UPS classification codes: 1 = commercial, 2 = residential.
classification = case classification_code
when "1"
:commercial
when "2"
:residential
else
:unknown
end
# Presence of these indicator elements encodes the validation verdict.
validity = if xml.at("ValidAddressIndicator").present?
:valid
elsif xml.at("AmbiguousAddressIndicator").present?
:ambiguous
elsif xml.at("NoCandidatesIndicator").present?
:invalid
else
:unknown
end
addresses = xml.css('AddressKeyFormat').collect { |node| location_from_address_key_format_node(node) }
end
params = Hash.from_xml(response).values.first
response = AddressValidationResponse.new(success, message, params, :validity => validity, :classification => classification, :candidate_addresses => addresses, :xml => response, :request => last_request)
end
# Converts from a AddressKeyFormat XML node to a Location
def location_from_address_key_format_node(address)
return nil unless address
country = address.at('CountryCode').try(:text)
country = 'US' if country == 'ZZ' # Sometimes returned by SUREPOST in the US
address_lines = address.css('AddressLine')
Location.new(
:country => country,
:postal_code => address.at('PostcodePrimaryLow').try(:text),
:province => address.at('PoliticalDivision1').try(:text),
:city => address.at('PoliticalDivision2').try(:text),
:address1 => address_lines[0].try(:text),
:address2 => address_lines[1].try(:text),
:address3 => address_lines[2].try(:text),
)
end
def location_from_address_node(address)
return nil unless address
country = address.at('CountryCode').try(:text)
country = 'US' if country == 'ZZ' # Sometimes returned by SUREPOST in the US
country = 'XK' if country == 'KV' # ActiveUtils now refers to Kosovo by XK
Location.new(
:country => country,
:postal_code => address.at('PostalCode').try(:text),
:province => address.at('StateProvinceCode').try(:text),
:city => address.at('City').try(:text),
:address1 => address.at('AddressLine1').try(:text),
:address2 => address.at('AddressLine2').try(:text),
:address3 => address.at('AddressLine3').try(:text)
)
end
# Builds a UTC Time from UPS date/time nodes.
# options[:date]:: node whose text is "YYYYMMDD" (required)
# options[:time]:: node whose text is "HHMMSS", or nil for midnight
def parse_ups_datetime(options = {})
  time, date = options[:time].try(:text), options[:date].text
  if time.nil?
    # Bug fix: the original `hour, minute, second = 0` parallel-assigned only
    # `hour`, leaving minute and second nil (it worked by accident because
    # Time.utc treats nil as 0). Assign all three explicitly.
    hour = minute = second = 0
  else
    hour, minute, second = time.scan(/\d{2}/)
  end
  year, month, day = date[0..3], date[4..5], date[6..7]
  Time.utc(year, month, day, hour, minute, second)
end
# True when the UPS response envelope reports status code "1" (success).
def response_success?(document)
  status_code = document.root.at('Response/ResponseStatusCode').text
  status_code == '1'
end
# Human-readable message for a UPS response: "<status>: <error>" with blank
# parts dropped, falling back to a generic message when both are absent.
def response_message(document)
  status_description = document.root.at_xpath('Response/ResponseStatusDescription').try(:text)
  error_description = document.root.at_xpath('Response/Error/ErrorDescription').try(:text)
  combined = [status_description, error_description].select(&:present?).join(": ")
  combined.presence || "UPS could not process the request."
end
# Collects the text of every RatedShipmentWarning element on a rated shipment.
def rate_warning_messages(rate_xml)
  rate_xml.xpath("RatedShipmentWarning").map(&:text)
end
# Extracts the ShipmentDigest token that ShipmentAccept requests echo back.
def response_digest(xml)
  digest_node = xml.root.at('ShipmentDigest')
  digest_node.text
end
# Validates that a ShipmentConfirm reply is well formed and returns the
# parsed Nokogiri document (raises via build_document otherwise).
def parse_ship_confirm(response)
build_document(response, 'ShipmentConfirmResponse')
end
# Parses a ShipmentAcceptResponse into a LabelResponse, building one Label
# per package from its tracking number and Base64-encoded label image.
def parse_ship_accept(response)
xml = build_document(response, 'ShipmentAcceptResponse')
success = response_success?(xml)
message = response_message(xml)
response_info = Hash.from_xml(response).values.first
packages = response_info["ShipmentResults"]["PackageResults"]
# Hash.from_xml collapses a single PackageResults element to a Hash rather
# than a one-element Array; normalize so the map below always works.
packages = [packages] if Hash === packages
labels = packages.map do |package|
Label.new(package["TrackingNumber"], Base64.decode64(package["LabelImage"]["GraphicImage"]))
end
LabelResponse.new(success, message, response_info, {labels: labels})
end
# POSTs the request XML to the UPS endpoint for +action+ (test or live host)
# and returns the body transcoded to UTF-8, treating the raw response bytes
# as ISO-8859-1. NOTE(review): this assumes UPS always responds in
# ISO-8859-1 — confirm against the API's declared encoding.
def commit(action, request, test = false)
response = ssl_post("#{test ? TEST_URL : LIVE_URL}/#{RESOURCES[action]}", request)
response.encode('utf-8', 'iso-8859-1')
end
# Two locations count as "the same area" when their countries match and the
# second location's city is blank or equal to the origin's city. A nil
# location is never in the same area.
def within_same_area?(origin, location)
  return false unless location
  return false unless origin.country_code(:alpha2) == location.country_code(:alpha2)
  location.city.blank? || location.city == origin.city
end
# Resolves the human-readable service name for +code+ based on the shipment
# origin: origin-specific tables first (CA/MX/EU), then the generic non-US
# table for non-US origins, then DEFAULT_SERVICES.
def service_name_for(origin, code)
  origin = origin.country_code(:alpha2)
  name = case origin
         when "CA" then CANADA_ORIGIN_SERVICES[code]
         when "MX" then MEXICO_ORIGIN_SERVICES[code]
         when *EU_COUNTRY_CODES then EU_ORIGIN_SERVICES[code]
         end
  # Bug fix: the guard previously read `unless name == 'US'`, comparing the
  # looked-up service *name* (nil or a service string) to 'US', so it was
  # effectively always true and US-origin shipments could pick up names from
  # the non-US table. The intent is to skip the non-US fallback for US origins.
  name ||= OTHER_NON_US_ORIGIN_SERVICES[code] unless origin == 'US'
  name || DEFAULT_SERVICES[code]
end
# Package-level reference numbers are only permitted on purely domestic
# US->US or PR->PR shipments; every other lane takes shipment-level numbers.
def allow_package_level_reference_numbers(origin, destination)
  country_pair = [origin, destination].map(&:country_code)
  [%w(US US), %w(PR PR)].include?(country_pair)
end
# Routes delivery-confirmation handling to package-level or shipment-level
# logic depending on what the origin/destination lane supports.
def handle_delivery_confirmation_options(origin, destination, packages, options)
  handler = if package_level_delivery_confirmation?(origin, destination)
    :handle_package_level_delivery_confirmation
  else
    :handle_shipment_level_delivery_confirmation
  end
  send(handler, origin, destination, packages, options)
end
# Pushes a shipment-level :delivery_confirmation option down onto each package
# that doesn't already specify one, validates every package's option against
# PACKAGE_DELIVERY_CONFIRMATION_CODES, then removes the shipment-level option.
def handle_package_level_delivery_confirmation(origin, destination, packages, options)
  packages.each do |package|
    # Fall back to the shipment-level option when the package has none of its own.
    package.options[:delivery_confirmation] ||= options[:delivery_confirmation]
    confirmation = package.options[:delivery_confirmation]
    next unless confirmation
    unless PACKAGE_DELIVERY_CONFIRMATION_CODES[confirmation]
      raise "Invalid delivery_confirmation option on package: '#{confirmation}'. Use a key from PACKAGE_DELIVERY_CONFIRMATION_CODES"
    end
  end
  # The shipment-level option now lives on the packages; drop it.
  options.delete(:delivery_confirmation)
end
# Validates shipment-level delivery confirmation: no package may carry its own
# option on shipment-level lanes, and the shipment option (if any) must be a
# key of SHIPMENT_DELIVERY_CONFIRMATION_CODES.
def handle_shipment_level_delivery_confirmation(origin, destination, packages, options)
  has_package_level = packages.any? { |package| package.options[:delivery_confirmation] }
  raise "origin/destination pair does not support package level delivery_confirmation options" if has_package_level
  confirmation = options[:delivery_confirmation]
  return unless confirmation
  unless SHIPMENT_DELIVERY_CONFIRMATION_CODES[confirmation]
    raise "Invalid delivery_confirmation option: '#{confirmation}'. Use a key from SHIPMENT_DELIVERY_CONFIRMATION_CODES"
  end
end
# For certain origin/destination pairs, UPS allows each package in a shipment
# to have its own delivery_confirmation option; otherwise the option must be
# specified for the entire shipment. See Appendix P of the UPS Shipping
# Package XML Developers Guide for the rules this logic is based on.
def package_level_delivery_confirmation?(origin, destination)
  countries = [origin, destination].map(&:country_code)
  same_country = countries.first == countries.last
  us_pr_lane = [%w(US PR), %w(PR US)].include?(countries)
  same_country || us_pr_lane
end
# Translates a country code through COUNTRY_MAPPING, falling back to the
# original code when no (non-blank) mapping exists.
def mapped_country_code(country_code)
  mapped = COUNTRY_MAPPING[country_code]
  mapped.presence || country_code
end
end
end
| 40.912757 | 209 | 0.63395 |
91281854b73c9f3cf04621dbf0abb0c9f50137b1 | 25 | require 'middleman-gdpr'
| 12.5 | 24 | 0.8 |
790be685fcbb98003ccd7923f04b4bfe09379201 | 1,254 | RSpec.describe Api::V2::Measures::MeasureConditionComponentSerializer do
# Subject under test: a component whose duty expression carries a description.
let(:measure_condition_component) do
create(
:measure_condition_component,
duty_expression_id: duty_expression.duty_expression_id,
duty_amount: 10.0,
monetary_unit_code: 'foo',
measurement_unit_code: 'bar',
measurement_unit_qualifier_code: 'a',
)
end
let(:duty_expression) { create(:duty_expression, :with_description) }
# Expected JSON:API-style payload; id is the composite primary key joined
# with '-', and the *_abbreviation attributes are nil for these fixtures.
let(:expected_pattern) do
{
data: {
id: measure_condition_component.pk.join('-'),
type: :measure_condition_component,
attributes: {
duty_expression_id: duty_expression.duty_expression_id,
duty_amount: 10.0,
monetary_unit_code: 'foo',
monetary_unit_abbreviation: nil,
measurement_unit_code: 'bar',
duty_expression_description: duty_expression.description,
duty_expression_abbreviation: nil,
measurement_unit_qualifier_code: 'a',
},
},
}
end
describe '#serializable_hash' do
it 'serializes the correct attributes' do
actual = described_class.new(measure_condition_component).serializable_hash
expect(actual).to include(expected_pattern)
end
end
end
| 30.585366 | 81 | 0.6874 |
e26e1b490a63c5936b256ab7c811063786db8bc9 | 28,786 | # frozen_string_literal: true
require "cases/helper"
require "models/topic" # For booleans
require "models/pirate" # For timestamps
require "models/parrot"
require "models/person" # For optimistic locking
require "models/aircraft"
require "models/numeric_data"
class DirtyTest < ActiveRecord::TestCase
include InTimeZone
# Dummy to force column loads so query counts are clean.
def setup
Person.create first_name: "foo"
end
# Exercises *_changed?/*_was/*_change through the full lifecycle:
# new record -> dirty -> saved -> same-value reassignment.
def test_attribute_changes
# New record - no changes.
pirate = Pirate.new
assert_equal false, pirate.catchphrase_changed?
assert_equal false, pirate.non_validated_parrot_id_changed?
# Change catchphrase.
pirate.catchphrase = "arrr"
assert_predicate pirate, :catchphrase_changed?
assert_nil pirate.catchphrase_was
assert_equal [nil, "arrr"], pirate.catchphrase_change
# Saved - no changes.
pirate.save!
assert_not_predicate pirate, :catchphrase_changed?
assert_nil pirate.catchphrase_change
# Same value - no changes.
pirate.catchphrase = "arrr"
assert_not_predicate pirate, :catchphrase_changed?
assert_nil pirate.catchphrase_change
end
# With time-zone-aware attributes, *_was returns a TimeWithZone and changes
# are detected/cleared correctly.
def test_time_attributes_changes_with_time_zone
in_time_zone "Paris" do
target = Class.new(ActiveRecord::Base)
target.table_name = "pirates"
# New record - no changes.
pirate = target.new
assert_not_predicate pirate, :created_on_changed?
assert_nil pirate.created_on_change
# Saved - no changes.
pirate.catchphrase = "arrrr, time zone!!"
pirate.save!
assert_not_predicate pirate, :created_on_changed?
assert_nil pirate.created_on_change
# Change created_on.
old_created_on = pirate.created_on
pirate.created_on = Time.now - 1.day
assert_predicate pirate, :created_on_changed?
assert_kind_of ActiveSupport::TimeWithZone, pirate.created_on_was
assert_equal old_created_on, pirate.created_on_was
pirate.created_on = old_created_on
assert_not_predicate pirate, :created_on_changed?
end
end
# Assigning an attribute its own current value must not mark it dirty,
# even through time-zone conversion.
def test_setting_time_attributes_with_time_zone_field_to_itself_should_not_be_marked_as_a_change
in_time_zone "Paris" do
target = Class.new(ActiveRecord::Base)
target.table_name = "pirates"
pirate = target.create!
pirate.created_on = pirate.created_on
assert_not_predicate pirate, :created_on_changed?
end
end
# Skipping time-zone conversion for an attribute makes *_was a plain Time.
def test_time_attributes_changes_without_time_zone_by_skip
in_time_zone "Paris" do
target = Class.new(ActiveRecord::Base)
target.table_name = "pirates"
target.skip_time_zone_conversion_for_attributes = [:created_on]
# New record - no changes.
pirate = target.new
assert_not_predicate pirate, :created_on_changed?
assert_nil pirate.created_on_change
# Saved - no changes.
pirate.catchphrase = "arrrr, time zone!!"
pirate.save!
assert_not_predicate pirate, :created_on_changed?
assert_nil pirate.created_on_change
# Change created_on.
old_created_on = pirate.created_on
pirate.created_on = Time.now + 1.day
assert_predicate pirate, :created_on_changed?
# kind_of does not work because
# ActiveSupport::TimeWithZone.name == 'Time'
assert_instance_of Time, pirate.created_on_was
assert_equal old_created_on, pirate.created_on_was
end
end
# With aware attributes disabled globally, the same behavior holds.
def test_time_attributes_changes_without_time_zone
with_timezone_config aware_attributes: false do
target = Class.new(ActiveRecord::Base)
target.table_name = "pirates"
# New record - no changes.
pirate = target.new
assert_not_predicate pirate, :created_on_changed?
assert_nil pirate.created_on_change
# Saved - no changes.
pirate.catchphrase = "arrrr, time zone!!"
pirate.save!
assert_not_predicate pirate, :created_on_changed?
assert_nil pirate.created_on_change
# Change created_on.
old_created_on = pirate.created_on
pirate.created_on = Time.now + 1.day
assert_predicate pirate, :created_on_changed?
# kind_of does not work because
# ActiveSupport::TimeWithZone.name == 'Time'
assert_instance_of Time, pirate.created_on_was
assert_equal old_created_on, pirate.created_on_was
end
end
# Dirty tracking works through alias_attribute: title_* mirrors name_*.
def test_aliased_attribute_changes
# the actual attribute here is name, title is an
# alias setup via alias_attribute
parrot = Parrot.new
assert_not_predicate parrot, :title_changed?
assert_nil parrot.title_change
parrot.name = "Sam"
assert_predicate parrot, :title_changed?
assert_nil parrot.title_was
assert_equal parrot.name_change, parrot.title_change
end
# restore_<attr>! reverts an in-memory change and clears the dirty state.
def test_restore_attribute!
pirate = Pirate.create!(catchphrase: "Yar!")
pirate.catchphrase = "Ahoy!"
pirate.restore_catchphrase!
assert_equal "Yar!", pirate.catchphrase
assert_equal Hash.new, pirate.changes
assert_not_predicate pirate, :catchphrase_changed?
end
# For nullable columns, assigning "" or nil must not mark the attribute
# dirty — type casting coerces both to nil. One test per column type.
def test_nullable_number_not_marked_as_changed_if_new_value_is_blank
pirate = Pirate.new
["", nil].each do |value|
pirate.parrot_id = value
assert_not_predicate pirate, :parrot_id_changed?
assert_nil pirate.parrot_id_change
end
end
def test_nullable_decimal_not_marked_as_changed_if_new_value_is_blank
numeric_data = NumericData.new
["", nil].each do |value|
numeric_data.bank_balance = value
assert_not_predicate numeric_data, :bank_balance_changed?
assert_nil numeric_data.bank_balance_change
end
end
def test_nullable_float_not_marked_as_changed_if_new_value_is_blank
numeric_data = NumericData.new
["", nil].each do |value|
numeric_data.temperature = value
assert_not_predicate numeric_data, :temperature_changed?
assert_nil numeric_data.temperature_change
end
end
def test_nullable_datetime_not_marked_as_changed_if_new_value_is_blank
in_time_zone "Edinburgh" do
target = Class.new(ActiveRecord::Base)
target.table_name = "topics"
topic = target.create
assert_nil topic.written_on
["", nil].each do |value|
topic.written_on = value
assert_nil topic.written_on
assert_not_predicate topic, :written_on_changed?
end
end
end
# Type casting makes 0, "0", "0.0" etc. equivalent for dirty tracking,
# while 0 <-> "" / nil transitions on integer columns ARE real changes.
def test_integer_zero_to_string_zero_not_marked_as_changed
pirate = Pirate.new
pirate.parrot_id = 0
pirate.catchphrase = "arrr"
assert pirate.save!
assert_not_predicate pirate, :changed?
pirate.parrot_id = "0"
assert_not_predicate pirate, :changed?
end
def test_integer_zero_to_integer_zero_not_marked_as_changed
pirate = Pirate.new
pirate.parrot_id = 0
pirate.catchphrase = "arrr"
assert pirate.save!
assert_not_predicate pirate, :changed?
pirate.parrot_id = 0
assert_not_predicate pirate, :changed?
end
def test_float_zero_to_string_zero_not_marked_as_changed
data = NumericData.new temperature: 0.0
data.save!
assert_not_predicate data, :changed?
data.temperature = "0"
assert_empty data.changes
data.temperature = "0.0"
assert_empty data.changes
data.temperature = "0.00"
assert_empty data.changes
end
def test_zero_to_blank_marked_as_changed
pirate = Pirate.new
pirate.catchphrase = "Yarrrr, me hearties"
pirate.parrot_id = 1
pirate.save
# check the change from 1 to ''
pirate = Pirate.find_by_catchphrase("Yarrrr, me hearties")
pirate.parrot_id = ""
assert_predicate pirate, :parrot_id_changed?
assert_equal([1, nil], pirate.parrot_id_change)
pirate.save
# check the change from nil to 0
pirate = Pirate.find_by_catchphrase("Yarrrr, me hearties")
pirate.parrot_id = 0
assert_predicate pirate, :parrot_id_changed?
assert_equal([nil, 0], pirate.parrot_id_change)
pirate.save
# check the change from 0 to ''
pirate = Pirate.find_by_catchphrase("Yarrrr, me hearties")
pirate.parrot_id = ""
assert_predicate pirate, :parrot_id_changed?
assert_equal([0, nil], pirate.parrot_id_change)
end
# Object-level changed?/changed/changes aggregate per-attribute state and
# reset after save.
def test_object_should_be_changed_if_any_attribute_is_changed
pirate = Pirate.new
assert_not_predicate pirate, :changed?
assert_equal [], pirate.changed
assert_equal Hash.new, pirate.changes
pirate.catchphrase = "arrr"
assert_predicate pirate, :changed?
assert_nil pirate.catchphrase_was
assert_equal %w(catchphrase), pirate.changed
assert_equal({ "catchphrase" => [nil, "arrr"] }, pirate.changes)
pirate.save
assert_not_predicate pirate, :changed?
assert_equal [], pirate.changed
assert_equal Hash.new, pirate.changes
end
# attribute_will_change! marks an attribute dirty so in-place mutation
# (String#<<) is tracked.
def test_attribute_will_change!
pirate = Pirate.create!(catchphrase: "arr")
assert_not_predicate pirate, :catchphrase_changed?
assert pirate.catchphrase_will_change!
assert_predicate pirate, :catchphrase_changed?
assert_equal ["arr", "arr"], pirate.catchphrase_change
pirate.catchphrase << " matey!"
assert_predicate pirate, :catchphrase_changed?
assert_equal ["arr", "arr matey!"], pirate.catchphrase_change
end
# attribute_will_change! also works for attributes without a backing column.
def test_virtual_attribute_will_change
parrot = Parrot.create!(name: "Ruby")
parrot.send(:attribute_will_change!, :cancel_save_from_callback)
assert_predicate parrot, :has_changes_to_save?
end
# Assigning an association dirties the underlying foreign-key attribute.
def test_association_assignment_changes_foreign_key
pirate = Pirate.create!(catchphrase: "jarl")
pirate.parrot = Parrot.create!(name: "Lorre")
assert_predicate pirate, :changed?
assert_equal %w(parrot_id), pirate.changed
end
# Dirty comparison happens on type-cast values, so "1" == true for booleans.
def test_attribute_should_be_compared_with_type_cast
topic = Topic.new
assert_predicate topic, :approved?
assert_not_predicate topic, :approved_changed?
# Coming from web form.
params = { topic: { approved: 1 } }
# In the controller.
topic.attributes = params[:topic]
assert_predicate topic, :approved?
assert_not_predicate topic, :approved_changed?
end
# With partial writes enabled, saving a clean record issues no UPDATE (and
# leaves timestamps untouched); saving a dirty one issues exactly one query.
def test_partial_update
pirate = Pirate.new(catchphrase: "foo")
old_updated_on = 1.hour.ago.beginning_of_day
with_partial_writes Pirate, false do
assert_queries(2) { 2.times { pirate.save! } }
Pirate.where(id: pirate.id).update_all(updated_on: old_updated_on)
end
with_partial_writes Pirate, true do
assert_queries(0) { 2.times { pirate.save! } }
assert_equal old_updated_on, pirate.reload.updated_on
assert_queries(1) { pirate.catchphrase = "bar"; pirate.save! }
assert_not_equal old_updated_on, pirate.reload.updated_on
end
end
# Same as above, but verifying lock_version only bumps on a real write.
def test_partial_update_with_optimistic_locking
person = Person.new(first_name: "foo")
with_partial_writes Person, false do
assert_queries(2) { 2.times { person.save! } }
Person.where(id: person.id).update_all(first_name: "baz")
end
old_lock_version = person.lock_version
with_partial_writes Person, true do
assert_queries(0) { 2.times { person.save! } }
assert_equal old_lock_version, person.reload.lock_version
assert_queries(1) { person.first_name = "bar"; person.save! }
assert_not_equal old_lock_version, person.reload.lock_version
end
end
# A failed save (validation error) must not clear the dirty state.
def test_changed_attributes_should_be_preserved_if_save_failure
pirate = Pirate.new
pirate.parrot_id = 1
assert !pirate.save
check_pirate_after_save_failure(pirate)
pirate = Pirate.new
pirate.parrot_id = 1
assert_raise(ActiveRecord::RecordInvalid) { pirate.save! }
check_pirate_after_save_failure(pirate)
end
# reload discards in-memory changes and resets dirty state.
def test_reload_should_clear_changed_attributes
pirate = Pirate.create!(catchphrase: "shiver me timbers")
pirate.catchphrase = "*hic*"
assert_predicate pirate, :changed?
pirate.reload
assert_not_predicate pirate, :changed?
end
# dup'd records track their own dirty state independently of the original.
def test_dup_objects_should_not_copy_dirty_flag_from_creator
pirate = Pirate.create!(catchphrase: "shiver me timbers")
pirate_dup = pirate.dup
pirate_dup.restore_catchphrase!
pirate.catchphrase = "I love Rum"
assert_predicate pirate, :catchphrase_changed?
assert_not_predicate pirate_dup, :catchphrase_changed?
end
# Re-assigning the original value clears the dirty flag...
def test_reverted_changes_are_not_dirty
phrase = "shiver me timbers"
pirate = Pirate.create!(catchphrase: phrase)
pirate.catchphrase = "*hic*"
assert_predicate pirate, :changed?
pirate.catchphrase = phrase
assert_not_predicate pirate, :changed?
end
# ...even after many intermediate assignments...
def test_reverted_changes_are_not_dirty_after_multiple_changes
phrase = "shiver me timbers"
pirate = Pirate.create!(catchphrase: phrase)
10.times do |i|
pirate.catchphrase = "*hic*" * i
assert_predicate pirate, :changed?
end
assert_predicate pirate, :changed?
pirate.catchphrase = phrase
assert_not_predicate pirate, :changed?
end
# ...and when going nil -> value -> nil on another attribute.
def test_reverted_changes_are_not_dirty_going_from_nil_to_value_and_back
pirate = Pirate.create!(catchphrase: "Yar!")
pirate.parrot_id = 1
assert_predicate pirate, :changed?
assert_predicate pirate, :parrot_id_changed?
assert_not_predicate pirate, :catchphrase_changed?
pirate.parrot_id = nil
assert_not_predicate pirate, :changed?
assert_not_predicate pirate, :parrot_id_changed?
assert_not_predicate pirate, :catchphrase_changed?
end
# In-place mutation of a serialized attribute is detected and persisted
# even with partial writes enabled.
def test_save_should_store_serialized_attributes_even_with_partial_writes
with_partial_writes(Topic) do
topic = Topic.create!(content: { a: "a" })
assert_not_predicate topic, :changed?
topic.content[:b] = "b"
assert_predicate topic, :changed?
topic.save!
assert_not_predicate topic, :changed?
assert_equal "b", topic.content[:b]
topic.reload
assert_equal "b", topic.content[:b]
end
end
# Saving a mutated serialized attribute also touches updated_at.
def test_save_always_should_update_timestamps_when_serialized_attributes_are_present
with_partial_writes(Topic) do
topic = Topic.create!(content: { a: "a" })
topic.save!
updated_at = topic.updated_at
travel(1.second) do
topic.content[:hello] = "world"
topic.save!
end
assert_not_equal updated_at, topic.updated_at
assert_equal "world", topic.content[:hello]
end
end
# update_columns on an unloaded serialized attribute must not clobber it.
def test_save_should_not_save_serialized_attribute_with_partial_writes_if_not_present
with_partial_writes(Topic) do
topic = Topic.create!(author_name: "Bill", content: { a: "a" })
topic = Topic.select("id, author_name").find(topic.id)
topic.update_columns author_name: "John"
assert_not_nil topic.reload.content
end
end
# Inspecting changes_to_save must not mutate array-of-hash attributes.
def test_changes_to_save_should_not_mutate_array_of_hashes
topic = Topic.new(author_name: "Bill", content: [{ a: "a" }])
topic.changes_to_save
assert_equal [{ a: "a" }], topic.content
end
def test_previous_changes
# original values should be in previous_changes
pirate = Pirate.new
assert_equal Hash.new, pirate.previous_changes
pirate.catchphrase = "arrr"
pirate.save!
assert_equal 4, pirate.previous_changes.size
assert_equal [nil, "arrr"], pirate.previous_changes["catchphrase"]
assert_equal [nil, pirate.id], pirate.previous_changes["id"]
assert_nil pirate.previous_changes["updated_on"][0]
assert_not_nil pirate.previous_changes["updated_on"][1]
assert_nil pirate.previous_changes["created_on"][0]
assert_not_nil pirate.previous_changes["created_on"][1]
assert !pirate.previous_changes.key?("parrot_id")
# original values should be in previous_changes
pirate = Pirate.new
assert_equal Hash.new, pirate.previous_changes
pirate.catchphrase = "arrr"
pirate.save
assert_equal 4, pirate.previous_changes.size
assert_equal [nil, "arrr"], pirate.previous_changes["catchphrase"]
assert_equal [nil, pirate.id], pirate.previous_changes["id"]
assert_includes pirate.previous_changes, "updated_on"
assert_includes pirate.previous_changes, "created_on"
assert !pirate.previous_changes.key?("parrot_id")
pirate.catchphrase = "Yar!!"
pirate.reload
assert_equal Hash.new, pirate.previous_changes
pirate = Pirate.find_by_catchphrase("arrr")
travel(1.second)
pirate.catchphrase = "Me Maties!"
pirate.save!
assert_equal 2, pirate.previous_changes.size
assert_equal ["arrr", "Me Maties!"], pirate.previous_changes["catchphrase"]
assert_not_nil pirate.previous_changes["updated_on"][0]
assert_not_nil pirate.previous_changes["updated_on"][1]
assert !pirate.previous_changes.key?("parrot_id")
assert !pirate.previous_changes.key?("created_on")
pirate = Pirate.find_by_catchphrase("Me Maties!")
travel(1.second)
pirate.catchphrase = "Thar She Blows!"
pirate.save
assert_equal 2, pirate.previous_changes.size
assert_equal ["Me Maties!", "Thar She Blows!"], pirate.previous_changes["catchphrase"]
assert_not_nil pirate.previous_changes["updated_on"][0]
assert_not_nil pirate.previous_changes["updated_on"][1]
assert !pirate.previous_changes.key?("parrot_id")
assert !pirate.previous_changes.key?("created_on")
travel(1.second)
pirate = Pirate.find_by_catchphrase("Thar She Blows!")
pirate.update(catchphrase: "Ahoy!")
assert_equal 2, pirate.previous_changes.size
assert_equal ["Thar She Blows!", "Ahoy!"], pirate.previous_changes["catchphrase"]
assert_not_nil pirate.previous_changes["updated_on"][0]
assert_not_nil pirate.previous_changes["updated_on"][1]
assert !pirate.previous_changes.key?("parrot_id")
assert !pirate.previous_changes.key?("created_on")
travel(1.second)
pirate = Pirate.find_by_catchphrase("Ahoy!")
pirate.update_attribute(:catchphrase, "Ninjas suck!")
assert_equal 2, pirate.previous_changes.size
assert_equal ["Ahoy!", "Ninjas suck!"], pirate.previous_changes["catchphrase"]
assert_not_nil pirate.previous_changes["updated_on"][0]
assert_not_nil pirate.previous_changes["updated_on"][1]
assert !pirate.previous_changes.key?("parrot_id")
assert !pirate.previous_changes.key?("created_on")
ensure
travel_back
end
# Throwaway model backed by the temporary `testings` table created in the test below.
class Testings < ActiveRecord::Base; end
# A column literally named "field" must not clash with internal attribute machinery.
def test_field_named_field
  ActiveRecord::Base.connection.create_table :testings do |t|
    t.string :field
  end
  assert_nothing_raised do
    Testings.new.attributes
  end
ensure
  # Drop the temporary table and reset the schema cache regardless of outcome.
  ActiveRecord::Base.connection.drop_table :testings rescue nil
  ActiveRecord::Base.clear_cache!
end
def test_datetime_attribute_can_be_updated_with_fractional_seconds
skip "Fractional seconds are not supported" unless subsecond_precision_supported?
in_time_zone "Paris" do
target = Class.new(ActiveRecord::Base)
target.table_name = "topics"
written_on = Time.utc(2012, 12, 1, 12, 0, 0).in_time_zone("Paris")
topic = target.create(written_on: written_on)
topic.written_on += 0.3
assert topic.written_on_changed?, "Fractional second update not detected"
end
end
def test_datetime_attribute_doesnt_change_if_zone_is_modified_in_string
time_in_paris = Time.utc(2014, 1, 1, 12, 0, 0).in_time_zone("Paris")
pirate = Pirate.create!(catchphrase: "rrrr", created_on: time_in_paris)
pirate.created_on = pirate.created_on.in_time_zone("Tokyo").to_s
assert_not_predicate pirate, :created_on_changed?
end
test "partial insert" do
with_partial_writes Person do
jon = nil
assert_sql(/first_name/i) do
jon = Person.create! first_name: "Jon"
end
assert ActiveRecord::SQLCounter.log_all.none? { |sql| sql.include?("followers_count") }
jon.reload
assert_equal "Jon", jon.first_name
assert_equal 0, jon.followers_count
assert_not_nil jon.id
end
end
test "partial insert with empty values" do
with_partial_writes Aircraft do
a = Aircraft.create!
a.reload
assert_not_nil a.id
end
end
test "in place mutation detection" do
pirate = Pirate.create!(catchphrase: "arrrr")
pirate.catchphrase << " matey!"
assert_predicate pirate, :catchphrase_changed?
expected_changes = {
"catchphrase" => ["arrrr", "arrrr matey!"]
}
assert_equal(expected_changes, pirate.changes)
assert_equal("arrrr", pirate.catchphrase_was)
assert pirate.catchphrase_changed?(from: "arrrr")
assert_not pirate.catchphrase_changed?(from: "anything else")
assert_includes pirate.changed_attributes, :catchphrase
pirate.save!
pirate.reload
assert_equal "arrrr matey!", pirate.catchphrase
assert_not_predicate pirate, :changed?
end
test "in place mutation for binary" do
klass = Class.new(ActiveRecord::Base) do
self.table_name = :binaries
serialize :data
end
binary = klass.create!(data: "\\\\foo")
assert_not_predicate binary, :changed?
binary.data = binary.data.dup
assert_not_predicate binary, :changed?
binary = klass.last
assert_not_predicate binary, :changed?
binary.data << "bar"
assert_predicate binary, :changed?
end
test "changes is correct for subclass" do
foo = Class.new(Pirate) do
def catchphrase
super.upcase
end
end
pirate = foo.create!(catchphrase: "arrrr")
new_catchphrase = "arrrr matey!"
pirate.catchphrase = new_catchphrase
assert_predicate pirate, :catchphrase_changed?
expected_changes = {
"catchphrase" => ["arrrr", new_catchphrase]
}
assert_equal new_catchphrase.upcase, pirate.catchphrase
assert_equal expected_changes, pirate.changes
end
test "changes is correct if override attribute reader" do
pirate = Pirate.create!(catchphrase: "arrrr")
def pirate.catchphrase
super.upcase
end
new_catchphrase = "arrrr matey!"
pirate.catchphrase = new_catchphrase
assert_predicate pirate, :catchphrase_changed?
expected_changes = {
"catchphrase" => ["arrrr", new_catchphrase]
}
assert_equal new_catchphrase.upcase, pirate.catchphrase
assert_equal expected_changes, pirate.changes
end
test "attribute_changed? doesn't compute in-place changes for unrelated attributes" do
test_type_class = Class.new(ActiveRecord::Type::Value) do
define_method(:changed_in_place?) do |*|
raise
end
end
klass = Class.new(ActiveRecord::Base) do
self.table_name = "people"
attribute :foo, test_type_class.new
end
model = klass.new(first_name: "Jim")
assert_predicate model, :first_name_changed?
end
test "attribute_will_change! doesn't try to save non-persistable attributes" do
klass = Class.new(ActiveRecord::Base) do
self.table_name = "people"
attribute :non_persisted_attribute, :string
end
record = klass.new(first_name: "Sean")
record.non_persisted_attribute_will_change!
assert_predicate record, :non_persisted_attribute_changed?
assert record.save
end
test "virtual attributes are not written with partial_writes off" do
original_partial_writes = ActiveRecord::Base.partial_writes
begin
ActiveRecord::Base.partial_writes = false
klass = Class.new(ActiveRecord::Base) do
self.table_name = "people"
attribute :non_persisted_attribute, :string
end
record = klass.new(first_name: "Sean")
record.non_persisted_attribute_will_change!
assert record.save
record.non_persisted_attribute_will_change!
assert record.save
ensure
ActiveRecord::Base.partial_writes = original_partial_writes
end
end
test "mutating and then assigning doesn't remove the change" do
pirate = Pirate.create!(catchphrase: "arrrr")
pirate.catchphrase << " matey!"
pirate.catchphrase = "arrrr matey!"
assert pirate.catchphrase_changed?(from: "arrrr", to: "arrrr matey!")
end
test "getters with side effects are allowed" do
klass = Class.new(Pirate) do
def catchphrase
if super.blank?
update_attribute(:catchphrase, "arr") # what could possibly go wrong?
end
super
end
end
pirate = klass.create!(catchphrase: "lol")
pirate.update_attribute(:catchphrase, nil)
assert_equal "arr", pirate.catchphrase
end
test "attributes assigned but not selected are dirty" do
person = Person.select(:id).first
assert_not_predicate person, :changed?
person.first_name = "Sean"
assert_predicate person, :changed?
person.first_name = nil
assert_predicate person, :changed?
end
test "attributes not selected are still missing after save" do
person = Person.select(:id).first
assert_raises(ActiveModel::MissingAttributeError) { person.first_name }
assert person.save # calls forget_attribute_assignments
assert_raises(ActiveModel::MissingAttributeError) { person.first_name }
end
test "saved_change_to_attribute? returns whether a change occurred in the last save" do
person = Person.create!(first_name: "Sean")
assert_predicate person, :saved_change_to_first_name?
assert_not_predicate person, :saved_change_to_gender?
assert person.saved_change_to_first_name?(from: nil, to: "Sean")
assert person.saved_change_to_first_name?(from: nil)
assert person.saved_change_to_first_name?(to: "Sean")
assert_not person.saved_change_to_first_name?(from: "Jim", to: "Sean")
assert_not person.saved_change_to_first_name?(from: "Jim")
assert_not person.saved_change_to_first_name?(to: "Jim")
end
test "saved_change_to_attribute returns the change that occurred in the last save" do
person = Person.create!(first_name: "Sean", gender: "M")
assert_equal [nil, "Sean"], person.saved_change_to_first_name
assert_equal [nil, "M"], person.saved_change_to_gender
person.update(first_name: "Jim")
assert_equal ["Sean", "Jim"], person.saved_change_to_first_name
assert_nil person.saved_change_to_gender
end
test "attribute_before_last_save returns the original value before saving" do
person = Person.create!(first_name: "Sean", gender: "M")
assert_nil person.first_name_before_last_save
assert_nil person.gender_before_last_save
person.first_name = "Jim"
assert_nil person.first_name_before_last_save
assert_nil person.gender_before_last_save
person.save
assert_equal "Sean", person.first_name_before_last_save
assert_equal "M", person.gender_before_last_save
end
test "saved_changes? returns whether the last call to save changed anything" do
person = Person.create!(first_name: "Sean")
assert_predicate person, :saved_changes?
person.save
assert_not_predicate person, :saved_changes?
end
test "saved_changes returns a hash of all the changes that occurred" do
person = Person.create!(first_name: "Sean", gender: "M")
assert_equal [nil, "Sean"], person.saved_changes[:first_name]
assert_equal [nil, "M"], person.saved_changes[:gender]
assert_equal %w(id first_name gender created_at updated_at).sort, person.saved_changes.keys.sort
travel(1.second) do
person.update(first_name: "Jim")
end
assert_equal ["Sean", "Jim"], person.saved_changes[:first_name]
assert_equal %w(first_name lock_version updated_at).sort, person.saved_changes.keys.sort
end
test "changed? in after callbacks returns false" do
klass = Class.new(ActiveRecord::Base) do
self.table_name = "people"
after_save do
raise "changed? should be false" if changed?
raise "has_changes_to_save? should be false" if has_changes_to_save?
raise "saved_changes? should be true" unless saved_changes?
raise "id_in_database should not be nil" if id_in_database.nil?
end
end
person = klass.create!(first_name: "Sean")
assert_not_predicate person, :changed?
end
test "changed? in around callbacks after yield returns false" do
klass = Class.new(ActiveRecord::Base) do
self.table_name = "people"
around_create :check_around
def check_around
yield
raise "changed? should be false" if changed?
raise "has_changes_to_save? should be false" if has_changes_to_save?
raise "saved_changes? should be true" unless saved_changes?
raise "id_in_database should not be nil" if id_in_database.nil?
end
end
person = klass.create!(first_name: "Sean")
assert_not_predicate person, :changed?
end
private
# Temporarily switches +klass+'s partial-writes setting to +on+ for the
# duration of the given block, restoring the previous setting afterwards —
# even if the block raises.
def with_partial_writes(klass, on = true)
  previous = klass.partial_writes?
  klass.partial_writes = on
  yield
ensure
  klass.partial_writes = previous
end
# Shared assertion helper: after a failed save, the pirate must still report
# parrot_id (and only parrot_id) as a pending change, with no stale old value.
def check_pirate_after_save_failure(pirate)
  assert_predicate pirate, :changed?
  assert_predicate pirate, :parrot_id_changed?
  assert_equal %w(parrot_id), pirate.changed
  assert_nil pirate.parrot_id_was
end
end
| 30.986006 | 100 | 0.72365 |
380bf45a6c160aa20fde4a1fdb327919d91f0056 | 171 | module Spree
# Serializes a Spree price record for the API: raw numeric amounts plus their
# display-formatted counterparts and the currency, linked back to the variant.
class PriceSerializer < Spree::BaseSerializer
  attributes :amount, :price, :display_amount, :display_price, :currency
  belongs_to :variant
end
end
| 21.375 | 74 | 0.754386 |
e9f6727f483fe3202e9e468aaae92fdd3fd73c25 | 2,589 | require "rails_helper"
RSpec.describe "POST /builds" do
let(:payload) do
File.read("spec/support/fixtures/pull_request_opened_event.json")
end
let(:parsed_payload) { JSON.parse(payload) }
let(:repo_name) { parsed_payload["repository"]["full_name"] }
let(:repo_id) { parsed_payload["repository"]["id"] }
let(:pr_sha) { parsed_payload["pull_request"]["head"]["sha"] }
let(:pr_number) { parsed_payload["number"] }
context "with violations" do
it "makes a new comment and cleans up resolved one" do
existing_comment_violation = { line: 5, message: "Line is too long." }
new_violation1 = { line: 3, message: "Trailing whitespace detected." }
new_violation2 = { line: 9, message: "Avoid empty else-clauses." }
violations = [new_violation1, existing_comment_violation, new_violation2]
create(:repo, :active, github_id: repo_id, name: repo_name)
stub_review_job(
LintersJob,
violations: violations,
error: "invalid config syntax",
)
post builds_path, params: { payload: payload }
expect(FakeGitHub.review_body).to eq <<~EOS.chomp
Some files could not be reviewed due to errors:
<details>
<summary>invalid config syntax</summary>
<pre>invalid config syntax</pre>
</details>
EOS
expect(FakeGitHub.comments).to match_array [
{
body: new_violation1[:message],
path: "path/to/test_github_file.rb",
position: new_violation1[:line],
pr_number: "1",
repo: "Hello-World",
},
{
body: new_violation2[:message],
path: "path/to/test_github_file.rb",
position: new_violation2[:line],
pr_number: "1",
repo: "Hello-World",
},
]
end
end
context "without violations" do
it "does not make a comment" do
create(:repo, github_id: repo_id, name: repo_name)
post builds_path, params: { payload: payload }
expect(FakeGitHub.comments).to be_empty
end
end
def stub_review_job(klass, violations:, error:)
allow(klass).to receive(:perform) do |attributes|
CompleteFileReview.call(
"commit_sha" => attributes.fetch("commit_sha"),
"filename" => attributes.fetch("filename"),
"linter_name" => attributes.fetch("linter_name"),
"patch" => attributes.fetch("patch"),
"pull_request_number" => attributes.fetch("pull_request_number"),
"violations" => violations.map(&:stringify_keys),
"error" => error,
)
end
end
end
| 33.192308 | 79 | 0.629587 |
11137b578d8d3b30a0f843288666db6498251862 | 1,164 | require 'test_helper'
# Functional tests for StudentsController covering the standard seven REST
# actions against the :one student fixture.
class StudentsControllerTest < ActionController::TestCase
  setup do
    # Fixture record reused by every example below.
    @student = students(:one)
  end
  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:students)
  end
  test "should get new" do
    get :new
    assert_response :success
  end
  test "should create student" do
    # Creating from the fixture's attributes must add exactly one record.
    assert_difference('Student.count') do
      post :create, student: { blood_group: @student.blood_group, dept: @student.dept, name: @student.name }
    end
    assert_redirected_to student_path(assigns(:student))
  end
  test "should show student" do
    get :show, id: @student
    assert_response :success
  end
  test "should get edit" do
    get :edit, id: @student
    assert_response :success
  end
  test "should update student" do
    patch :update, id: @student, student: { blood_group: @student.blood_group, dept: @student.dept, name: @student.name }
    assert_redirected_to student_path(assigns(:student))
  end
  test "should destroy student" do
    # Destroying must remove exactly one record.
    assert_difference('Student.count', -1) do
      delete :destroy, id: @student
    end
    assert_redirected_to students_path
  end
end
| 23.28 | 121 | 0.702749 |
ffe08e8ea12727d8e9ef7fd1c8c9ad8337bcc861 | 1,953 | module Ruby
module HL7
# HL7 OBR (Observation Request) segment: declares the segment's sort weight,
# its permitted child segments, and one accessor per OBR field.
class OBR < Segment
  weight 89 # obr.weight-1
  has_children [:OBX]
  # Fields with an explicit :idx are pinned to that HL7 field position; the
  # remaining fields are assigned sequential positions after the last pinned one.
  add_field :set_id, :idx => 1
  add_field :placer_order_number, :idx => 2
  add_field :filler_order_number, :idx => 3
  add_field :universal_service_id, :idx => 4
  add_field :priority
  add_field :requested_date
  add_field :observation_date
  add_field :observation_end_date
  add_field :collection_volume
  add_field :collector_identifier
  add_field :specimen_action_code
  add_field :danger_code
  add_field :relevant_clinical_info
  add_field :specimen_received_date
  add_field :specimen_source
  add_field :ordering_provider
  add_field :order_callback_phone_number
  add_field :placer_field_1
  add_field :placer_field_2
  # NOTE(review): "filer_field_*" looks like a typo for the HL7 name
  # "filler_field_*", but renaming would break existing callers — left as-is.
  add_field :filer_field_1
  add_field :filer_field_2
  add_field :results_status_change_date
  add_field :charge_to_practice
  add_field :parent_result
  add_field :quantity_timing
  add_field :result_copies_to
  add_field :parent
  add_field :transport_mode
  add_field :reason_for_study
  add_field :principal_result_interpreter
  add_field :assistant_result_interpreter
  add_field :technician
  add_field :transcriptionist
  add_field :scheduled_date
  add_field :number_of_sample_containers
  add_field :transport_logistics_of_sample
  add_field :collectors_comment
  add_field :transport_arrangement_responsibility
  add_field :transport_arranged
  add_field :escort_required
  add_field :planned_patient_transport_comment
  add_field :procedure_code
  add_field :procedure_code_modifier
  add_field :placer_supplemental_service_info
  add_field :filler_supplemental_service_info
  add_field :medically_necessary_dup_procedure_reason #longest method name ever. sry.
  add_field :result_handling
end
end
end
| 33.672414 | 89 | 0.736303 |
214f604c794204547bfc8828245ec6815075c35a | 676 | # Supported extra options:
#
# Build-dependency definition for libvorbis, driven by the StandardCMakeDep
# workflow (clone, update, build, verify installed files).
class Vorbis < StandardCMakeDep
  def initialize(args)
    super('Vorbis', 'vorbis', args)
    self.HandleStandardCMakeOptions
    # Default upstream repository; callers may have set @RepoURL already.
    @RepoURL ||= 'https://github.com/xiph/vorbis.git'
  end
  # Clones the repository; returns true on success (exit status 0).
  def DoClone
    runSystemSafe('git', 'clone', @RepoURL) == 0
  end
  def DoUpdate
    standardGitUpdate
  end
  # Files expected after installation, used to verify the install.
  # NOTE(review): returns nil on platforms other than Windows/Linux (e.g.
  # macOS) — confirm callers tolerate a nil file list.
  def getInstalledFiles
    if OS.windows?
      [
        'lib/vorbis.lib',
        'lib/vorbisenc.lib',
        'lib/vorbisfile.lib',
        'include/vorbis'
      ]
    elsif OS.linux?
      [
        'lib64/libvorbis.a',
        'lib64/libvorbisenc.a',
        'lib64/libvorbisfile.a',
        'include/vorbis'
      ]
    end
  end
end
| 17.333333 | 53 | 0.590237 |
e2cd6049757404a4f24ff48a23a6c8a111f95a7f | 1,757 | #!/usr/bin/env ruby
require 'tk'
require 'tkextlib/vu/charts'
#######################################
Tk.root.geometry('+30+30')
delay = 2000
c = TkCanvas.new.pack
begin
st = Tk::Vu::TkcSticker.new(c, 0, 0, 10, 10)
rescue
Tk.messageBox(:type=>'ok', :title=>"No sticker Item",
:message=>"This build of vu does not include the sticker item")
exit
end
#st.delete
steps = []
steps << proc{
# I used a 75dpi screen for testing, but others should make no difference!
puts 'You\'ll see a small upright rectangle with "He" inside.'
st = Tk::Vu::TkcSticker.new(c, '6m', '10m', '13m', '27m', :text=>'Hello')
}
steps << proc{
puts 'You\'ll see the whole "Hello" drawn rotated 90 degrees.'
st[:orient] = :vertical
}
steps << proc{
puts 'The rectangle shrinks and the text is clipped to "Hell"'
#st.coords('6m', '10m', '13m', '20m')
st.coords('6m', '10m', '13m', '17m')
}
steps << proc{
puts 'Now you\'ll read "ello"'
st[:lefttrunc] = true
}
steps << proc{
puts 'Enlarging the rectangle shows the complete "Hello" again'
st.scale(0, 0, 3, 3)
}
steps << proc{
puts 'This time the text is repeated: "Hello", approx. 5mm space, "Hello"'
st[:space] = '5m'
}
steps << proc{
puts 'A vertical bar appears in the lower right region and text jumps to the left.'
st.configure(:anchor=>:n, :relw=>0.3, :relh=>0.7,
:relx=>0.6, :rely=>0.3, :bar=>'red')
}
steps << proc{
puts 'Paint the backgound.'
st[:fill] = 'yellow'
}
steps << proc{
puts "Let's test stippling."
st[:stipple] = 'gray25'
}
steps << proc{
puts 'Finally a large outline forces a single "Hello" and shrinks the bar.'
st[:width] = '6m'
}
Tk.root.bind('q', proc{exit})
TkTimer.new(delay, 1, *steps).start
Tk.mainloop
| 21.168675 | 85 | 0.611269 |
1cdabf7f6b77c2799896ad65aff5894d6f83a6e4 | 543 | Pod::Spec.new do |s|
# Pod metadata for the prebuilt xcbeautify binary distribution.
s.name = 'xcbeautify'
s.version = '0.10.1'
s.summary = 'A little beautifier tool for xcodebuild'
s.homepage = 'https://github.com/thii/xcbeautify'
# The binary zip is fetched from the GitHub release matching s.version.
s.source = { :http => "#{s.homepage}/releases/download/#{s.version}/xcbeautify-#{s.version}-universal-apple-macosx.zip" }
# NOTE(review): an iOS deployment target of '0.0' looks like a placeholder
# meaning "any version" — confirm this is intentional.
s.ios.deployment_target = '0.0'
s.osx.deployment_target = '10.6'
s.tvos.deployment_target = '9.0'
# Ship every unpacked file from the release archive as-is.
s.preserve_paths = '*'
s.authors = 'Thi Doãn'
s.license = { :type => 'MIT' }
end
| 38.785714 | 131 | 0.594843 |
1a42d32b61d04146e2f607a0a61a1fb3627f9c59 | 991 | class Meteorlog::DSL::Context::MetricFilter
include Meteorlog::DSL::Validator
include Meteorlog::TemplateHelper
attr_reader :result
# Builds a metric-filter definition named +name+ for +log_group_name+ and
# evaluates +block+ in this context to collect the filter's attributes.
def initialize(name, log_group_name, &block)
  # Identifier used in validation error messages raised by the DSL helpers
  # (see Meteorlog::DSL::Validator).
  @error_identifier = "LogGroup `#{log_group_name}` > MetricFilter `#{name}`"
  @result = OpenStruct.new(
    :filter_name => name,
    :metric_transformations => [],
  )
  instance_eval(&block)
end
# DSL: sets the CloudWatch Logs filter pattern. Required, and may be
# declared at most once per metric filter.
def filter_pattern(pattern)
  _call_once(:filter_pattern)
  _required(:filter_pattern, pattern)
  @result.filter_pattern = pattern.to_s
end
# DSL: appends one metric transformation. +attrs+ must be a Hash providing
# :name, :namespace and :value, which are mapped to the CloudWatch
# metric_name / metric_namespace / metric_value fields (stringified).
def metric(attrs)
  metric_attrs = {}
  _expected_type(attrs, Hash)
  {
    :name => :metric_name,
    :namespace => :metric_namespace,
    :value => :metric_value
  }.each do |attr_name, metric_name|
    _required("metric[#{attr_name}]", attrs[attr_name])
    metric_attrs[metric_name] = attrs[attr_name].to_s
  end
  # All three keys must have been supplied.
  _expected_length(metric_attrs, 3)
  @result.metric_transformations << metric_attrs
end
end
| 26.078947 | 79 | 0.685166 |
6a0b36afcb09593fe0177aa524beac083830dbd3 | 981 | require "rails_helper"
# Routing specs: one example per standard RESTful route for /items.
RSpec.describe ItemsController, type: :routing do
  describe "routing" do
    it "routes to #index" do
      expect(:get => "/items").to route_to("items#index")
    end
    it "routes to #new" do
      expect(:get => "/items/new").to route_to("items#new")
    end
    it "routes to #show" do
      expect(:get => "/items/1").to route_to("items#show", :id => "1")
    end
    it "routes to #edit" do
      expect(:get => "/items/1/edit").to route_to("items#edit", :id => "1")
    end
    it "routes to #create" do
      expect(:post => "/items").to route_to("items#create")
    end
    # Updates are accepted via both PUT and PATCH.
    it "routes to #update via PUT" do
      expect(:put => "/items/1").to route_to("items#update", :id => "1")
    end
    it "routes to #update via PATCH" do
      expect(:patch => "/items/1").to route_to("items#update", :id => "1")
    end
    it "routes to #destroy" do
      expect(:delete => "/items/1").to route_to("items#destroy", :id => "1")
    end
  end
end
| 25.153846 | 76 | 0.574924 |
f746479fc8ac7d428f37135669e2cc5afb495583 | 3,808 | require 'spec'
require 'fileutils'
require 'roo'
require 'rasta/extensions/roo_extensions'
require 'rasta/spreadsheet'
require 'rasta/fixture/metrics'
module Rasta
# Loads Ruby fixture files from a directory and resolves classes that were
# defined by those files (and only those files) by short name.
class ClassLoader
  def initialize(path)
    @fixture_path = path
  end
  # Require every *.rb file under the fixture path and remember which class
  # names became defined as a result, so lookups can be restricted to
  # fixture-defined classes.
  def load_test_fixtures
    before_classes = []
    after_classes = []
    # Gather classes currently loaded
    ObjectSpace.each_object(Class) { |x| before_classes << x.name }
    # Load the test fixtures
    fixture_files = File.join(@fixture_path, "**", "*.rb")
    Dir.glob(fixture_files).each {|f| do_require f }
    # Gather classes after loading fixtures
    ObjectSpace.each_object(Class) { |x| after_classes << x.name }
    @loaded_classes = (after_classes - before_classes)
  end
  # Return the Class whose name ends in +classname+ (optionally
  # namespace-qualified), restricted to classes recorded by
  # #load_test_fixtures. Raises LoadError when no such class was loaded.
  def find_class_by_name(classname)
    # Fix: escape the name before interpolating it into the regex. An
    # unescaped name containing metacharacters (e.g. "(Sheet") previously
    # raised RegexpError instead of a clean LoadError. Building the pattern
    # once outside the loop also avoids recompiling it per class.
    pattern = /(^|:)#{Regexp.escape(classname.to_s)}$/
    ObjectSpace.each_object(Class) do |klass|
      next unless @loaded_classes.include?(klass.name)
      return klass if klass.name =~ pattern
    end
    raise LoadError, "Class '#{classname}' not found!"
  end
  def do_require(filename)
    require filename
  end
  private :do_require
end
# Drives a full Rasta run: prepares the results directory, configures RSpec
# formatters, iterates the spreadsheet's sheets and runs the fixture class
# matching each sheet name, then shuts the RSpec reporter down.
class FixtureRunner
  def initialize(opts)
    @options = opts
  end
  def execute
    create_results_directory(@options[:results_path])
    start_rspec
    run_test_fixtures
    stop_rspec
  end
  # Recreate the results directory from scratch so stale reports never leak
  # into a new run.
  def create_results_directory(results_dir)
    FileUtils.rm_r(results_dir) if File.directory?(results_dir)
    FileUtils.mkdir_p(results_dir)
  end
  # Configure the RSpec runner with console, text, HTML and spreadsheet
  # formatters, all writing under the configured results path.
  def start_rspec
    require 'rasta/extensions/rspec_extensions'
    require 'spec/runner/formatter/progress_bar_formatter'
    require 'spec/runner/formatter/html_formatter'
    Spec::Runner.options.backtrace_tweaker = Spec::Runner::NoisyBacktraceTweaker.new
    Spec::Runner.options.parse_format("Formatter::ProgressBarFormatter")
    Spec::Runner.options.parse_format("Formatter::BaseTextFormatter:#{@options[:results_path]}/results.txt")
    Spec::Runner.options.parse_format("Formatter::HtmlFormatter:#{@options[:results_path]}/results.html")
    require 'rasta/formatter/spreadsheet_formatter'
    Spec::Runner.options.parse_format("Formatter::SpreadsheetFormatter:#{@options[:results_path]}/spreadsheet.html")
    Spec::Runner.options.reporter.initialize_spreadsheet
  end
  private :start_rspec
  # For each non-comment sheet ("#"-prefixed sheets are skipped), find the
  # fixture class named after the sheet (text before any "#"), initialize it
  # against the spreadsheet and generate its RSpec examples.
  def run_test_fixtures
    roo = Rasta::Spreadsheet.open(@options[:spreadsheet])
    Spec::Runner.options.reporter.roo = roo
    @loader = ClassLoader.new(@options[:fixture_path])
    @loader.load_test_fixtures
    #html = Rasta::HTML.new
    roo.sheets.each do |sheet|
      next if sheet =~ /^#/ #skip sheets that are only comments
      begin
        roo.default_sheet = sheet
        base_sheet_name = roo.default_sheet.gsub(/#.*/, '')
        classname = @loader.find_class_by_name(base_sheet_name)
        fixture = classname.new
        fixture.initialize_test_fixture(roo, @options)
      rescue ArgumentError => e
        # Fix: the message previously interpolated @classname, an instance
        # variable that is never assigned (always nil). Report the sheet's
        # class name that was actually being loaded.
        raise ArgumentError, "Unable to load class #{base_sheet_name}. Make sure the class includes the Rasta fixture: #{e.inspect + e.backtrace.join("\n")}"
      end
      fixture.generate_rspec_tests
      #html.add_tab(roo)
    end
    #html.write(@options[:results_path] + '/' + File.basename(@options[:spreadsheet]) + '.html')
  end
  private :run_test_fixtures
  # Flush the reporter's final output and clear formatter registration.
  def stop_rspec
    Spec::Runner.options.reporter.original_dump if Spec::Runner.options
    Spec::Runner.options.clear_format_options;
  end
  private :stop_rspec
end
end | 32.827586 | 154 | 0.676471 |
bb3eeb0faebcefa8f315e86a11c8c5fc8c396cd7 | 13,018 | =begin
#Xero Payroll NZ
#This is the Xero Payroll API for orgs in the NZ region.
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'time'
require 'date'
module XeroRuby::PayrollNz
require 'bigdecimal'
class SalaryAndWage
# Xero unique identifier for a salary and wages record
attr_accessor :salary_and_wages_id
# Xero unique identifier for an earnings rate
attr_accessor :earnings_rate_id
# The Number of Units per week for the corresponding salary and wages
attr_accessor :number_of_units_per_week
# The rate of each unit for the corresponding salary and wages
attr_accessor :rate_per_unit
# The Number of Units per day for the corresponding salary and wages
attr_accessor :number_of_units_per_day
# The days per week for the salary.
attr_accessor :days_per_week
# The effective date of the corresponding salary and wages
attr_accessor :effective_from
# The annual salary
attr_accessor :annual_salary
# The current status of the corresponding salary and wages
attr_accessor :status
ACTIVE = "Active".freeze
PENDING = "Pending".freeze
# The type of the payment of the corresponding salary and wages
attr_accessor :payment_type
SALARY = "Salary".freeze
HOURLY = "Hourly".freeze
# Validates that an attribute's value belongs to a fixed set of allowed
# values, coercing the allowed values to the attribute's datatype
# (Integer / Float; anything else is left untouched).
class EnumAttributeValidator
  attr_reader :datatype
  attr_reader :allowable_values
  def initialize(datatype, allowable_values)
    # Fix: @datatype was never assigned, so the attr_reader always
    # returned nil.
    @datatype = datatype
    @allowable_values =
      case datatype.to_s
      when /Integer/i
        allowable_values.map(&:to_i)
      when /Float/i
        allowable_values.map(&:to_f)
      else
        allowable_values
      end
  end
  # nil (and false) are considered valid so that unset attributes pass.
  def valid?(value)
    !value || allowable_values.include?(value)
  end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'salary_and_wages_id' => :'salaryAndWagesID',
:'earnings_rate_id' => :'earningsRateID',
:'number_of_units_per_week' => :'numberOfUnitsPerWeek',
:'rate_per_unit' => :'ratePerUnit',
:'number_of_units_per_day' => :'numberOfUnitsPerDay',
:'days_per_week' => :'daysPerWeek',
:'effective_from' => :'effectiveFrom',
:'annual_salary' => :'annualSalary',
:'status' => :'status',
:'payment_type' => :'paymentType'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'salary_and_wages_id' => :'String',
:'earnings_rate_id' => :'String',
:'number_of_units_per_week' => :'BigDecimal',
:'rate_per_unit' => :'BigDecimal',
:'number_of_units_per_day' => :'BigDecimal',
:'days_per_week' => :'BigDecimal',
:'effective_from' => :'Date',
:'annual_salary' => :'BigDecimal',
:'status' => :'String',
:'payment_type' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `XeroRuby::PayrollNz::SalaryAndWage` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `XeroRuby::PayrollNz::SalaryAndWage`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'salary_and_wages_id')
self.salary_and_wages_id = attributes[:'salary_and_wages_id']
end
if attributes.key?(:'earnings_rate_id')
self.earnings_rate_id = attributes[:'earnings_rate_id']
end
if attributes.key?(:'number_of_units_per_week')
self.number_of_units_per_week = attributes[:'number_of_units_per_week']
end
if attributes.key?(:'rate_per_unit')
self.rate_per_unit = attributes[:'rate_per_unit']
end
if attributes.key?(:'number_of_units_per_day')
self.number_of_units_per_day = attributes[:'number_of_units_per_day']
end
if attributes.key?(:'days_per_week')
self.days_per_week = attributes[:'days_per_week']
end
if attributes.key?(:'effective_from')
self.effective_from = attributes[:'effective_from']
end
if attributes.key?(:'annual_salary')
self.annual_salary = attributes[:'annual_salary']
end
if attributes.key?(:'status')
self.status = attributes[:'status']
end
if attributes.key?(:'payment_type')
self.payment_type = attributes[:'payment_type']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
  # Required attributes paired with their current values; each nil value
  # contributes one message, in this declaration order.
  required = {
    'earnings_rate_id' => @earnings_rate_id,
    'number_of_units_per_week' => @number_of_units_per_week,
    'number_of_units_per_day' => @number_of_units_per_day,
    'effective_from' => @effective_from,
    'annual_salary' => @annual_salary,
    'status' => @status,
    'payment_type' => @payment_type
  }
  required.each_with_object([]) do |(name, value), invalid_properties|
    if value.nil?
      invalid_properties << "invalid value for \"#{name}\", #{name} cannot be nil."
    end
  end
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @earnings_rate_id.nil?
return false if @number_of_units_per_week.nil?
return false if @number_of_units_per_day.nil?
return false if @effective_from.nil?
return false if @annual_salary.nil?
return false if @status.nil?
status_validator = EnumAttributeValidator.new('String', ["Active", "Pending"])
return false unless status_validator.valid?(@status)
return false if @payment_type.nil?
payment_type_validator = EnumAttributeValidator.new('String', ["Salary", "Hourly"])
return false unless payment_type_validator.valid?(@payment_type)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] status Object to be assigned
def status=(status)
validator = EnumAttributeValidator.new('String', ["Active", "Pending"])
unless validator.valid?(status)
fail ArgumentError, "invalid value for \"status\", must be one of #{validator.allowable_values}."
end
@status = status
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] payment_type Object to be assigned
def payment_type=(payment_type)
validator = EnumAttributeValidator.new('String', ["Salary", "Hourly"])
unless validator.valid?(payment_type)
fail ArgumentError, "invalid value for \"payment_type\", must be one of #{validator.allowable_values}."
end
@payment_type = payment_type
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
salary_and_wages_id == o.salary_and_wages_id &&
earnings_rate_id == o.earnings_rate_id &&
number_of_units_per_week == o.number_of_units_per_week &&
rate_per_unit == o.rate_per_unit &&
number_of_units_per_day == o.number_of_units_per_day &&
days_per_week == o.days_per_week &&
effective_from == o.effective_from &&
annual_salary == o.annual_salary &&
status == o.status &&
payment_type == o.payment_type
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[salary_and_wages_id, earnings_rate_id, number_of_units_per_week, rate_per_unit, number_of_units_per_day, days_per_week, effective_from, annual_salary, status, payment_type].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(parse_date(value))
when :Date
Date.parse(parse_date(value))
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BigDecimal
BigDecimal(value.to_s)
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
XeroRuby::PayrollNz.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash(downcase: true)
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
key = downcase ? attr : param
hash[key] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
def parse_date(datestring)
if datestring.include?('Date')
seconds_since_epoch = datestring.scan(/[0-9]+/)[0].to_i / 1000.0
Time.at(seconds_since_epoch).utc.strftime('%Y-%m-%dT%H:%M:%S%z').to_s
else # handle date 'types' for small subset of payroll API's
Time.parse(datestring).strftime('%Y-%m-%dT%H:%M:%S').to_s
end
end
end
end
| 33.638243 | 212 | 0.647872 |
7936645a584b1ca2bc3de4eec0382742e3fc65fc | 2,183 | # frozen_string_literal: true
module Epics
  # Service that applies attribute updates to an epic and fans out the
  # follow-up work: date rollups, usage tracking, todo updates and
  # parent/child epic re-assignment.
  class UpdateService < Epics::BaseService
    # Date columns whose change triggers Epics::UpdateDatesService below.
    EPIC_DATE_FIELDS = %I[
      start_date_fixed
      start_date_is_fixed
      due_date_fixed
      due_date_is_fixed
    ].freeze
    # Applies `params` to +epic+ and returns the (possibly reloaded) epic.
    def execute(epic)
      # start_date and end_date columns are no longer writable by users because those
      # are composite fields managed by the system.
      params.extract!(:start_date, :end_date)
      update_task_event(epic) || update(epic)
      if saved_change_to_epic_dates?(epic)
        Epics::UpdateDatesService.new([epic]).execute
        track_start_date_fixed_events(epic)
        # re-read the dates recomputed by UpdateDatesService
        epic.reset
      end
      assign_parent_epic_for(epic)
      assign_child_epic_for(epic)
      epic
    end
    # Post-update hook (called by the base service): resolves/updates todos
    # and, when the epic just became confidential, schedules removal of
    # non-member todos.
    def handle_changes(epic, options)
      old_associations = options.fetch(:old_associations, {})
      old_mentioned_users = old_associations.fetch(:mentioned_users, [])
      old_labels = old_associations.fetch(:labels, [])
      if has_changes?(epic, old_labels: old_labels)
        todo_service.resolve_todos_for_target(epic, current_user)
      end
      todo_service.update_epic(epic, current_user, old_mentioned_users)
      if epic.previous_changes.include?('confidential') && epic.confidential?
        # don't enqueue immediately to prevent todos removal in case of a mistake
        ::TodosDestroyer::ConfidentialEpicWorker.perform_in(::Todo::WAIT_FOR_DELETE, epic.id)
      end
    end
    # Hook for checkbox/task-list-only edits: refresh todos without the full
    # handle_changes processing.
    def handle_task_changes(epic)
      todo_service.resolve_todos_for_target(epic, current_user)
      todo_service.update_epic(epic, current_user)
    end
    private
    # Emits a usage-ping event when start_date_is_fixed was toggled.
    def track_start_date_fixed_events(epic)
      return unless epic.saved_changes.key?('start_date_is_fixed')
      if epic.start_date_is_fixed?
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_start_date_set_as_fixed_action(author: current_user)
      else
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_start_date_set_as_inherited_action(author: current_user)
      end
    end
    # True when any of the rollup-relevant date columns changed in this save.
    def saved_change_to_epic_dates?(epic)
      (epic.saved_changes.keys.map(&:to_sym) & EPIC_DATE_FIELDS).present?
    end
  end
end
| 30.319444 | 130 | 0.722858 |
035951c892ef1d3e1c13f3175c367351e687ca7e | 248 | require 'rails_helper'
# Verifies that SalesforceService.call authenticates its Restforce client.
describe SalesforceService do
  subject { described_class.call }
  it 'initializes and authenticate salesforce client' do
    # any Restforce client built during the call must authenticate eagerly
    expect_any_instance_of(Restforce::Data::Client).to receive(:authenticate!)
    subject
  end
end
| 22.545455 | 78 | 0.778226 |
ace13c355ec1ea094babaa66a91f256af04c3023 | 4,573 | module Contentful
# All errors raised by the contentful gem are either instances of Contentful::Error
# or inherit from Contentful::Error
class Error < StandardError
attr_reader :response
def initialize(response)
@response = response
super best_available_message
end
# Shortcut for creating specialized error classes
# USAGE rescue Contentful::Error[404]
def self.[](error_status_code)
errors = {
400 => BadRequest,
401 => Unauthorized,
403 => AccessDenied,
404 => NotFound,
429 => RateLimitExceeded,
500 => ServerError,
502 => BadGateway,
503 => ServiceUnavailable
}
errors.key?(error_status_code) ? errors[error_status_code] : Error
end
protected
def default_error_message
"The following error was received: #{@response.raw.body}"
end
def handle_details(details)
details.to_s
end
def additional_info?
false
end
def additional_info
[]
end
def best_available_message
error_message = [
"HTTP status code: #{@response.raw.status}"
]
begin
response_json = @response.load_json
message = response_json.fetch('message', default_error_message)
details = response_json.fetch('details', nil)
request_id = response_json.fetch('requestId', nil)
error_message << "Message: #{message}"
error_message << "Details: #{handle_details(details)}" if details
error_message << "Request ID: #{request_id}" if request_id
rescue
error_message << "Message: #{default_error_message}"
end
error_message << additional_info if additional_info?
error_message.join("\n")
end
end
# 400
class BadRequest < Error
protected
def default_error_message
'The request was malformed or missing a required parameter.'
end
def handle_details(details)
return details if details.is_a?(String)
handle_detail = proc do |detail|
return detail if detail.is_a?(String)
detail.fetch('details', nil)
end
inner_details = details['errors'].map { |detail| handle_detail[detail] }.reject(&:nil?)
inner_details.join("\n\t")
end
end
# 401
class Unauthorized < Error
protected
def default_error_message
'The authorization token was invalid.'
end
end
# 403
class AccessDenied < Error
protected
def default_error_message
'The specified token does not have access to the requested resource.'
end
def handle_details(details)
"\n\tReasons:\n\t\t#{details['reasons'].join("\n\t\t")}"
end
end
# 404
class NotFound < Error
protected
def default_error_message
'The requested resource or endpoint could not be found.'
end
def handle_details(details)
return details if details.is_a?(String)
type = details['type'] || (details['sys'] || {})['type']
message = "The requested #{type} could not be found."
resource_id = details.fetch('id', nil)
message += " ID: #{resource_id}." if resource_id
message
end
end
# 429
class RateLimitExceeded < Error
# Rate Limit Reset Header Key
RATE_LIMIT_RESET_HEADER_KEY = 'x-contentful-ratelimit-reset'
def reset_time?
# rubocop:disable Style/DoubleNegation
!!reset_time
# rubocop:enable Style/DoubleNegation
end
# Time until next available request, in seconds.
def reset_time
@reset_time ||= @response.raw[RATE_LIMIT_RESET_HEADER_KEY]
end
protected
def additional_info?
reset_time?
end
def additional_info
["Time until reset (seconds): #{reset_time}"]
end
def default_error_message
'Rate limit exceeded. Too many requests.'
end
end
# 500
class ServerError < Error
protected
def default_error_message
'Internal server error.'
end
end
# 502
class BadGateway < Error
protected
def default_error_message
'The requested space is hibernated.'
end
end
# 503
class ServiceUnavailable < Error
protected
def default_error_message
'The request was malformed or missing a required parameter.'
end
end
# Raised when response is no valid json
class UnparsableJson < Error
protected
def default_error_message
@response.error_message
end
end
# Raised when response is not parsable as a Contentful::Resource
class UnparsableResource < StandardError; end
end
| 22.307317 | 93 | 0.654931 |
abf0c0e7667eb74d9bbfbd55a2a62f235a2e5c5a | 341 | # == Schema Information
#
# Table name: questionnaires
#
# id :integer not null, primary key
# name :string
# position :integer
# created_at :datetime not null
# updated_at :datetime not null
#
# A questionnaire template, ordered by +position+ (see schema above).
class Questionnaire < ApplicationRecord
  # NOTE(review): neither association declares a dependent: option, so
  # destroying a questionnaire leaves child rows behind — confirm intended.
  has_many :event_instances
  has_many :question_groups
end
| 21.3125 | 53 | 0.659824 |
6ad47be6088231abbe5d3f57007e0ab338eb40d0 | 308 | # frozen_string_literal: true
module CobraCommander
  module Output
    # Renders component names as an alphabetically sorted list.
    class FlatList
      def initialize(components)
        @components = components
      end

      # NOTE(review): despite the name, this returns an Array of sorted names
      # rather than a String (callers appear to rely on `puts` flattening it).
      def to_s
        @components.sort_by(&:name).map(&:name)
      end
    end
  end
end
| 18.117647 | 62 | 0.662338 |
7a7979c056d8a2814406a862b80ef5bc82e49ca4 | 237 | class CreateCrFormatKinds < ActiveRecord::Migration[4.2]
def change
create_table :cr_format_kinds do |t|
t.string :name, null: false
t.string :abbr
t.string :note
t.timestamps null: false
end
end
end
| 19.75 | 56 | 0.662447 |
080f0ede8d65913ed91fa670334ce647184d82c1 | 284 | require 'open-uri'
require 'net/http'
require 'json'
require 'pry'
require_relative "../lib/spell"
require_relative "../lib/cli"
require_relative "../lib/get_spell"
require_relative "../lib/dnd_project.rb"
require_relative "../lib/level_list.rb"
require_relative "../lib/school.rb"
| 21.846154 | 40 | 0.75 |
bb437b0f914d05c1641d82131cb7c4daf546350a | 5,387 | # frozen_string_literal: true
module RuboCop
module Cop
module Style
# Checks for `if` and `unless` statements that would fit on one line if
# written as modifier `if`/`unless`. The cop also checks for modifier
# `if`/`unless` lines that exceed the maximum line length.
#
# The maximum line length is configured in the `Layout/LineLength`
# cop. The tab size is configured in the `IndentationWidth` of the
# `Layout/IndentationStyle` cop.
#
# @example
# # bad
# if condition
# do_stuff(bar)
# end
#
# unless qux.empty?
# Foo.do_something
# end
#
# do_something_with_a_long_name(arg) if long_condition_that_prevents_code_fit_on_single_line
#
# # good
# do_stuff(bar) if condition
# Foo.do_something unless qux.empty?
#
# if long_condition_that_prevents_code_fit_on_single_line
# do_something_with_a_long_name(arg)
# end
#
# if short_condition # a long comment that makes it too long if it were just a single line
# do_something
# end
class IfUnlessModifier < Base
include StatementModifier
include LineLengthHelp
include IgnoredPattern
extend AutoCorrector
MSG_USE_MODIFIER = 'Favor modifier `%<keyword>s` usage when having a ' \
'single-line body. Another good alternative is ' \
'the usage of control flow `&&`/`||`.'
MSG_USE_NORMAL =
'Modifier form of `%<keyword>s` makes the line too long.'
def on_if(node)
msg = if single_line_as_modifier?(node) && !named_capture_in_condition?(node)
MSG_USE_MODIFIER
elsif too_long_due_to_modifier?(node)
MSG_USE_NORMAL
end
return unless msg
add_offense(node.loc.keyword, message: format(msg, keyword: node.keyword)) do |corrector|
autocorrect(corrector, node)
end
end
private
def autocorrect(corrector, node)
replacement = if node.modifier_form?
to_normal_form(node)
else
to_modifier_form(node)
end
corrector.replace(node, replacement)
end
def too_long_due_to_modifier?(node)
node.modifier_form? && too_long_single_line?(node) &&
!another_statement_on_same_line?(node)
end
def ignored_patterns
config.for_cop('Layout/LineLength')['IgnoredPatterns'] || []
end
def too_long_single_line?(node)
return false unless max_line_length
range = node.source_range
return false unless range.first_line == range.last_line
return false unless line_length_enabled_at_line?(range.first_line)
line = range.source_line
return false if line_length(line) <= max_line_length
too_long_line_based_on_config?(range, line)
end
def too_long_line_based_on_config?(range, line)
return false if matches_ignored_pattern?(line)
too_long = too_long_line_based_on_ignore_cop_directives?(range, line)
return too_long unless too_long == :undetermined
too_long_line_based_on_allow_uri?(line)
end
def too_long_line_based_on_ignore_cop_directives?(range, line)
if ignore_cop_directives? && directive_on_source_line?(range.line - 1)
return line_length_without_directive(line) > max_line_length
end
:undetermined
end
def too_long_line_based_on_allow_uri?(line)
if allow_uri?
uri_range = find_excessive_uri_range(line)
return false if uri_range && allowed_uri_position?(line, uri_range)
end
true
end
def line_length_enabled_at_line?(line)
processed_source.comment_config
.cop_enabled_at_line?('Layout/LineLength', line)
end
def named_capture_in_condition?(node)
node.condition.match_with_lvasgn_type?
end
def non_eligible_node?(node)
non_simple_if_unless?(node) ||
node.chained? ||
node.nested_conditional? ||
super
end
def non_simple_if_unless?(node)
node.ternary? || node.elsif? || node.else?
end
def another_statement_on_same_line?(node)
line_no = node.source_range.last_line
# traverse the AST upwards until we find a 'begin' node
# we want to look at the following child and see if it is on the
# same line as this 'if' node
while node && !node.begin_type?
index = node.sibling_index
node = node.parent
end
node && (sibling = node.children[index + 1]) &&
sibling.source_range.first_line == line_no
end
def to_normal_form(node)
indentation = ' ' * node.source_range.column
<<~RUBY.chomp
#{node.keyword} #{node.condition.source}
#{indentation} #{node.body.source}
#{indentation}end
RUBY
end
end
end
end
end
| 32.065476 | 100 | 0.593651 |
7a263f344958a9c35dee73ba00c3bed7df82e6e8 | 2,295 | # encoding:utf-8
module Inspec
  # Represents a single InSpec test (one `describe`/`its` block) and knows
  # how to render itself back to Ruby source.
  class Test
    attr_accessor :qualifier, :matcher, :expectation, :skip, :negated, :variables
    include RubyHelper
    def initialize
      @qualifier = []
      @negated = false
      @variables = []
    end
    # Flip the negation flag (`should` <-> `should_not`).
    def negate!
      @negated = !@negated
    end
    # Render this test as Ruby source; a skipped test renders as a skip block.
    def to_ruby
      return rb_skip if !skip.nil?
      rb_describe
    end
    def to_hash
      { qualifier: qualifier, matcher: matcher, expectation: expectation, skip: skip, negated: negated }
    end
    # The resource under test: first element of the first qualifier, if any.
    def resource
      @resource ||=
        if qualifier.empty? || qualifier[0].empty? || qualifier[0][0].empty?
          nil
        else
          qualifier[0][0]
        end
    end
    # Drop the expectation entirely — distinct from assigning nil, which the
    # `defined?` check in rb_describe would still see as present.
    def remove_expectation
      remove_instance_variable(:@expectation)
    end
    private
    # Splits the qualifier chain into the receiver expression and, when the
    # final call is a plain accessor, the argument for `its(...)`.
    # @return [Array(String, String), nil] `[receiver_source, its_arg_or_nil]`
    def describe_chain
      return nil if @qualifier.empty?
      resource = @qualifier.length > 1 ? @qualifier[0..-2] : [@qualifier[0]]
      res = resource.map { |q| ruby_qualifier(q) }.join('.')
      xres = nil
      if @qualifier.length > 1
        last = @qualifier[-1]
        last_call = last.is_a?(Array) ? last[0].to_s : ''
        if last.length == 1 && last_call !~ /^to_.$/ && !last_call.include?('[') && !last_call.empty?
          # this will go in its()
          xres = last_call
        else
          res += '.' + ruby_qualifier(last) unless last_call.empty?
        end
      end
      [res, xres]
    end
    # Renders `describe <chain> do ... end` with should/should_not, matcher
    # and the (optional) expectation.
    def rb_describe
      vars = variables.map(&:to_ruby).join("\n")
      vars += "\n" unless vars.empty?
      res, xtra = describe_chain
      itsy = xtra.nil? ? 'it' : 'its(' + xtra.to_s.inspect + ')'
      naughty = @negated ? '_not' : ''
      xpect = if !defined?(@expectation)
                ''
              elsif @expectation.class == Regexp
                # without this, xpect values like / \/zones\// will not be parsed properly
                "(#{@expectation.inspect})"
              else
                # BUG FIX: this branch used to be guarded by `elsif xpect != ''`,
                # a self-referencing condition — `xpect` is still nil while its
                # own assignment is evaluated, so the test was always true.
                # A plain `else` expresses the actual behavior.
                ' ' + expectation.inspect
              end
      format("%sdescribe %s do\n  %s { should%s %s%s }\nend",
             vars, res, itsy, naughty, matcher, xpect)
    end
    # Renders a skipped test: `describe <target> do skip <reason> end`.
    def rb_skip
      dc = describe_chain
      obj = dc.nil? ? skip.inspect : dc[0]
      format("describe %s do\n  skip %s\nend", obj, skip.inspect)
    end
  end
end
| 26.079545 | 104 | 0.548584 |
01c8a64e024491ae31ac3a8783dad6544789225d | 1,732 | # This fork contains macOS patches.
# Original project: https://github.com/roysjosh/xbee-comm
class XbeeComm < Formula
  desc "XBee communication libraries and utilities"
  homepage "https://github.com/guyzmo/xbee-comm"
  url "https://github.com/guyzmo/xbee-comm/archive/v1.5.tar.gz"
  sha256 "c474d22feae5d9c05b3ec167b839c8fded512587da0f020ca682d60db174f24a"
  license "GPL-3.0"
  head "https://github.com/guyzmo/xbee-comm.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "9d163cd9e888a337b0bc39bc3af871a0ed0b8efadb75933e4a4273fbccdfd90d" => :big_sur
    sha256 "a4dda0f81a92b04ac242a71d3b233da85abdcc767b8c5ec956e3285565eef994" => :arm64_big_sur
    sha256 "4c4eb5e75f59ac2527ec72d41e5e11ae156243278b7c92186fdccec62435a783" => :catalina
    sha256 "c5358f469073875537f489d59525c3c9022cebbd3fb77f418b4abba96cd24bf4" => :mojave
    sha256 "935948849935f3f11e6cf8992b1c6ad79e92716583c6b5685bf55cc6c4bd2d7a" => :high_sierra
    sha256 "9f6d626176e06f69934f3a3a3c56ddfa6a02be4f49d2e53dbce9d92b17f9eeb0" => :sierra
    sha256 "64b15ac79da143e2b092db702fd22c92ef064093be1c4c81cb60fd5b08f44075" => :el_capitan
    sha256 "805e99d4e700a2e9993f26fbc48cae17c1bf16e6ff9ce63b5c7195358fcb052c" => :yosemite
    sha256 "06cb9c96c880a55763dbb58c1b1a60cba19ec89be9c6995955e235d10b6cb47d" => :mavericks
    sha256 "5384e94b2ac56713f0a17c5a76d1e36d33f20c1730901e909b4fdf552ab9a49c" => :x86_64_linux
  end
  # Build-time only: the tarball ships no generated configure script, so the
  # autotools chain is regenerated in install below.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  def install
    # Regenerate the autotools build system before the usual configure/make.
    system "aclocal"
    system "autoconf"
    system "autoheader"
    system "automake", "-a", "-c"
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end
end
| 43.3 | 95 | 0.779446 |
f7a3e9797be213a1b2850de05771bd5a0c7d3715 | 738 | require 'optparse'
module Grog
  # Parses grog's command-line options.
  class Options
    attr_accessor :number_of_commits_to_show, :show_datetimes

    # Convenience constructor: parse +argv+ and return the populated options.
    def self.parse(argv)
      Options.new(argv)
    end

    # @param argv [Array<String>] raw command-line arguments
    def initialize(argv)
      @opts = OptionParser.new
      @number_of_commits_to_show = 10
      @opts.on("-n=N", "number of commits to show", Integer) { |val| @number_of_commits_to_show = val }
      @show_datetimes = false
      @opts.on("-d", "show datetimes") { @show_datetimes = true }
      @opts.on_tail("-h", "--help", "Show this message") do
        puts @opts
        exit
      end
      @opts.on_tail("--version", "Show the version of the grog gem") do
        # BUG FIX: previously shelled out via `cat VERSION` (non-portable and
        # silently empty on failure) and did not exit, so grog carried on
        # running after printing its version. Now reads the file directly and
        # exits, matching the -h handler.
        # NOTE(review): File.read raises if VERSION is missing, where the old
        # backtick form printed an error and continued — confirm acceptable.
        puts "grog v" + File.read('VERSION')
        exit
      end
      @rest = @opts.parse(argv)
    end
  end
end
| 23.806452 | 103 | 0.611111 |
91e4d1e55b185821923aad8fcfcce1dc27378084 | 1,640 | # typed: false
# frozen_string_literal: true
module Homebrew
  module API
    # Helper functions for using the versions JSON API.
    #
    # @api private
    module Versions
      class << self
        extend T::Sig
        # Formula versions for macOS (or forced-Homebrew-on-Linux setups).
        def formulae
          # The result is cached by Homebrew::API.fetch
          Homebrew::API.fetch "versions-formulae.json"
        end
        # Formula versions for Linux installs.
        def linux
          # The result is cached by Homebrew::API.fetch
          Homebrew::API.fetch "versions-linux.json"
        end
        # Cask versions (macOS only in practice).
        def casks
          # The result is cached by Homebrew::API.fetch
          Homebrew::API.fetch "versions-casks.json"
        end
        sig { params(name: String).returns(T.nilable(PkgVersion)) }
        def latest_formula_version(name)
          # pick the dataset matching the current platform / env overrides
          versions = if OS.mac? ||
                        Homebrew::EnvConfig.force_homebrew_on_linux? ||
                        Homebrew::EnvConfig.force_homebrew_core_repo_on_linux?
            formulae
          else
            linux
          end
          return unless versions.key? name
          version = Version.new(versions[name]["version"])
          revision = versions[name]["revision"]
          PkgVersion.new(version, revision)
        end
        sig { params(token: String).returns(T.nilable(Version)) }
        def latest_cask_version(token)
          return unless casks.key? token
          # prefer an OS-version-specific entry when one exists
          version = if casks[token]["versions"].key? MacOS.version.to_sym.to_s
            casks[token]["versions"][MacOS.version.to_sym.to_s]
          else
            casks[token]["version"]
          end
          Version.new(version)
        end
      end
    end
  end
end
| 27.333333 | 78 | 0.577439 |
ab9277ef43b71a714868aa427b4a05d259d90c91 | 1,249 | module LolSoap
# Turns an XML node into a hash data structure. Works out which elements
# are supposed to be collections based on the type information.
class HashBuilder
attr_reader :node, :type
def initialize(node, type)
@node = node
@type = type
end
def output
if node.first_element_child
children_hash
else
content
end
end
private
# @private
def children_hash
hash = {}
node.element_children.each do |child|
element = type.element(child.name)
output = self.class.new(child, element.type).output
val = hash[child.name]
if output
if val
if val.is_a?(Array)
val << output
else
hash[child.name] = [val, output]
end
else
hash[child.name] = element.singular? ? output : [output]
end
else
hash[child.name] = element.singular? ? nil : []
end
end
hash
end
# @private
def content
node.text.to_s unless nil_value?
end
# @private
def nil_value?
node.attribute_with_ns('nil', 'http://www.w3.org/2001/XMLSchema-instance')
end
end
end
| 21.534483 | 80 | 0.560448 |
39640c978b4ebbae4958175b2d9c895f9f768727 | 2,493 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20181210122644) do
  # Nest thermostats known to the system and their reported capabilities.
  create_table "devices", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
    t.string "device_id"
    t.string "name"
    t.string "name_long"
    t.boolean "is_online"
    t.boolean "can_cool"
    t.boolean "can_heat"
    t.date "last_connection"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
  # Periodic temperature/humidity samples per device.
  create_table "records", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
    t.float "internal_temp", limit: 24
    t.float "external_temp", limit: 24
    t.float "target_temp", limit: 24
    t.float "nest_temp_high", limit: 24
    t.float "nest_temp_low", limit: 24
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.float "humidity", limit: 24
    t.string "name"
    t.string "device_id"
    t.string "time_to_target"
    t.float "external_humidity", limit: 24, default: 0.0
    t.float "is_heating", limit: 24, default: 0.0
  end
  # Devise-backed user accounts.
  create_table "users", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
    t.string "email", default: "", null: false
    t.string "encrypted_password", default: "", null: false
    t.string "reset_password_token"
    t.datetime "reset_password_sent_at"
    t.datetime "remember_created_at"
    t.integer "sign_in_count", default: 0, null: false
    t.datetime "current_sign_in_at"
    t.datetime "last_sign_in_at"
    t.string "current_sign_in_ip"
    t.string "last_sign_in_ip"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["email"], name: "index_users_on_email", unique: true
    t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
  end
end
| 40.868852 | 95 | 0.723225 |
d5804e6be31e0ce7e54fcca0f669e3fe1b0d760e | 13,879 | # frozen_string_literal: true
module RailsBestPractices
module Core
# A Check class that takes charge of checking the sexp.
class Check < CodeAnalyzer::Checker
ALL_FILES = /.*/.freeze
CONTROLLER_FILES = /app\/(controllers|cells)\/.*\.rb$/.freeze
MIGRATION_FILES = /db\/migrate\/.*\.rb$/.freeze
MODEL_FILES = /app\/models\/.*\.rb$/.freeze
MAILER_FILES = /app\/models\/.*mailer\.rb$|app\/mailers\/.*\.rb/.freeze
VIEW_FILES = /app\/(views|cells)\/.*\.(erb|haml|slim|builder|rxml)$/.freeze
PARTIAL_VIEW_FILES = /app\/(views|cells)\/.*\/_.*\.(erb|haml|slim|builder|rxml)$/.freeze
ROUTE_FILES = /config\/routes.*\.rb/.freeze
SCHEMA_FILE = /db\/schema\.rb/.freeze
HELPER_FILES = /app\/helpers\/.*\.rb$/.freeze
DEPLOY_FILES = /config\/deploy.*\.rb/.freeze
CONFIG_FILES = /config\/(application|environment|environments\/.*)\.rb/.freeze
INITIALIZER_FILES = /config\/initializers\/.*\.rb/.freeze
CAPFILE = /Capfile/.freeze
GEMFILE_LOCK = /Gemfile\.lock/.freeze
SKIP_FILES = /db\/schema.rb/.freeze
def initialize(options = {})
options.each do |key, value|
instance_variable_set("@#{key}", value)
end
end
# check if the check will need to parse the node file.
#
# @param [String] the file name of node.
# @return [Boolean] true if the check will need to parse the file.
def parse_file?(node_file)
is_interesting_file?(node_file) && !is_ignored?(node_file)
end
def is_interesting_file?(node_file)
interesting_files.any? do |pattern|
if pattern == ALL_FILES
node_file =~ pattern && node_file !~ SKIP_FILES
else
node_file =~ pattern
end
end
end
def is_ignored?(node_file)
regex_ignored_files.map { |r| !!r.match(node_file) }.inject(:|)
end
def regex_ignored_files
@regex_ignored_files ||= Array(@ignored_files).map { |pattern| Regexp.new(pattern) }
end
# add error if source code violates rails best practice.
#
# @param [String] message, is the string message for violation of the rails best practice
# @param [String] filename, is the filename of source code
# @param [Integer] line_number, is the line number of the source code which is reviewing
def add_error(message, filename = @node.file, line_number = @node.line_number)
errors << RailsBestPractices::Core::Error.new(
filename: filename,
line_number: line_number,
message: message,
type: self.class.to_s,
url: url
)
end
# errors that violate the rails best practices.
def errors
@errors ||= []
end
# default url is empty.
#
# @return [String] the url of rails best practice
def url
self.class.url
end
# method_missing to catch all start and end process for each node type, like
#
# start_def
# end_def
# start_call
# end_call
#
# if there is a "debug" method defined in check, each node will be output.
def method_missing(method_name, *args)
if method_name.to_s =~ /^start_/
p args if respond_to?(:debug)
elsif method_name.to_s =~ /^end_/
# nothing to do
else
super
end
end
class <<self
def url(url = nil)
url ? @url = url : @url
end
def debug?
@debug == true
end
def debug
@debug = true
end
end
# Helper to parse the class name.
module Classable
def self.included(base)
base.class_eval do
interesting_nodes :module, :class
# remember module name
add_callback :start_module do |node|
classable_modules << node.module_name.to_s
end
# end of the module.
add_callback :end_module do |_node|
classable_modules.pop
end
# remember the class name
add_callback :start_class do |node|
base_class_name = node.base_class.is_a?(CodeAnalyzer::Nil) ? nil : node.base_class.to_s
@klass = Core::Klass.new(node.class_name.to_s, base_class_name, classable_modules)
klasses << @klass
end
# end of the class
add_callback :end_class do |_node|
klasses.pop
@klass = nil
end
end
end
# get the current class name.
def current_class_name
@klass.to_s
end
# get the current extend class name.
def current_extend_class_name
@klass.extend_class_name
end
# modules.
def classable_modules
@class_modules ||= []
end
def klasses
@klasses ||= []
end
end
# Helper to parse the module name.
module Moduleable
def self.included(base)
base.class_eval do
interesting_nodes :module
# remember module name
add_callback :start_module do |node|
moduleable_modules << node.module_name.to_s
end
# end of module
add_callback :end_module do |_node|
moduleable_modules.pop
end
end
end
# get the current module name.
def current_module_name
moduleable_modules.join('::')
end
# modules.
def moduleable_modules
@moduleable_modules ||= []
end
end
# Helper to add callbacks to mark the methods are used.
module Callable
def self.included(base)
base.class_eval do
interesting_nodes :call, :fcall, :var_ref, :vcall, :command_call, :command, :alias, :bare_assoc_hash, :method_add_arg
# remembe the message of call node.
add_callback :start_call do |node|
mark_used(node.message)
end
# remembe the message of fcall node.
add_callback :start_fcall do |node|
mark_used(node.message)
end
# remembe name of var_ref node.
add_callback :start_var_ref do |node|
mark_used(node)
end
# remembe name of vcall node.
add_callback :start_vcall do |node|
mark_used(node)
end
# skip start_command callback for these nodes
def skip_command_callback_nodes
[]
end
# remember the message of command node.
# remember the argument of alias_method and alias_method_chain as well.
add_callback :start_command do |node|
case node.message.to_s
when *skip_command_callback_nodes
# nothing
when 'alias_method'
mark_used(node.arguments.all[1])
when 'alias_method_chain'
method, feature = *node.arguments.all.map(&:to_s)
call_method("#{method}_with_#{feature}")
when /^(before|after)_/
node.arguments.all.each { |argument| mark_used(argument) }
else
mark_used(node.message)
last_argument = node.arguments.all.last
if last_argument.present? && last_argument.sexp_type == :bare_assoc_hash
last_argument.hash_values.each { |argument_value| mark_used(argument_value) }
end
end
end
# remembe the message of command call node.
add_callback :start_command_call do |node|
mark_used(node.message)
end
# remember the old method of alias node.
add_callback :start_alias do |node|
mark_used(node.old_method)
end
# remember hash values for hash key "methods".
#
# def to_xml(options = {})
# super options.merge(exclude: :visible, methods: [:is_discussion_conversation])
# end
add_callback :start_bare_assoc_hash do |node|
if node.hash_keys.include? 'methods'
mark_used(node.hash_value('methods'))
end
end
# remember the first argument for try and send method.
add_callback :start_method_add_arg do |node|
case node.message.to_s
when 'try'
mark_used(node.arguments.all.first)
when 'send'
if %i[symbol_literal string_literal].include?(node.arguments.all.first.sexp_type)
mark_used(node.arguments.all.first)
end
else
# nothing
end
end
private
def mark_used(method_node)
return if method_node == :call
if method_node.sexp_type == :bare_assoc_hash
method_node.hash_values.each { |value_node| mark_used(value_node) }
elsif method_node.sexp_type == :array
method_node.array_values.each { |value_node| mark_used(value_node) }
else
method_name = method_node.to_s
end
call_method(method_name)
end
def call_method(method_name, class_name = nil)
class_name ||= respond_to?(:current_class_name) ? current_class_name : current_module_name
if methods.has_method?(class_name, method_name)
methods.get_method(class_name, method_name).mark_used
end
methods.mark_parent_class_method_used(class_name, method_name)
methods.mark_subclasses_method_used(class_name, method_name)
methods.possible_public_used(method_name)
end
end
end
end
# Helper to indicate if the controller is inherited from InheritedResources.
module InheritedResourcesable
def self.included(base)
base.class_eval do
interesting_nodes :class, :var_ref, :vcall
interesting_files CONTROLLER_FILES
# check if the controller is inherit from InheritedResources::Base.
add_callback :start_class do |_node|
if current_extend_class_name == 'InheritedResources::Base'
@inherited_resources = true
end
end
# check if there is a DSL call inherit_resources.
add_callback :start_var_ref do |node|
if node.to_s == 'inherit_resources'
@inherited_resources = true
end
end
# check if there is a DSL call inherit_resources.
add_callback :start_vcall do |node|
if node.to_s == 'inherit_resources'
@inherited_resources = true
end
end
end
end
end
# Helper to check except methods.
module Exceptable
def self.included(base)
base.class_eval do
def except_methods
@except_methods + internal_except_methods
end
# check if the method is in the except methods list.
def excepted?(method)
is_ignored?(method.file) ||
except_methods.any? { |except_method| Exceptable.matches method, except_method }
end
def internal_except_methods
raise NoMethodError, 'no method internal_except_methods'
end
end
end
def self.matches(method, except_method)
class_name, method_name = except_method.split('#')
method_name = '.*' if method_name == '*'
method_expression = Regexp.new method_name
matched = method.method_name =~ method_expression
if matched
class_name = '.*' if class_name == '*'
class_expression = Regexp.new class_name
class_names = Prepares.klasses
.select { |klass| klass.class_name == method.class_name }
.map(&:extend_class_name)
.compact
class_names.unshift method.class_name
matched = class_names.any? { |name| name =~ class_expression }
end
!!matched
end
end
# Helper to parse the access control.
module Accessable
def self.included(base)
base.class_eval do
interesting_nodes :var_ref, :vcall, :class, :module
# remember the current access control for methods.
add_callback :start_var_ref do |node|
if %w[public protected private].include? node.to_s
@access_control = node.to_s
end
end
# remember the current access control for methods.
add_callback :start_vcall do |node|
if %w[public protected private].include? node.to_s
@access_control = node.to_s
end
end
# set access control to "public" by default.
add_callback :start_class do |_node|
@access_control = 'public'
end
# set access control to "public" by default.
add_callback :start_module do |_node|
@access_control = 'public'
end
end
# get the current acces control.
def current_access_control
@access_control
end
end
end
end
end
end
| 32.503513 | 129 | 0.559118 |
18b7e71b9de9ea614bbbf53c66a336f7680c4566 | 252 | require 'test_helper'
# Verifies that the A1agregator integration module builds Notification objects
# from a raw query string.
class A1agregatorModuleTest < Test::Unit::TestCase
  include ActiveMerchant::Billing::Integrations

  def test_notification_method
    notification = A1agregator.notification('name=cody')
    assert_instance_of A1agregator::Notification, notification
  end
end
| 25.2 | 87 | 0.813492 |
18be4a64cc71c4a53b5041c9565a783bb34edd11 | 871 | module Spree
class PaymentMethod::Pagseguro < PaymentMethod
preference :email, :string
preference :token, :string
def payment_profiles_supported?
false
end
def supports?(source)
true
end
def provider_class
Billing::Pagseguro
end
def provider
provider_class.new
end
def source_required?
false
end
def auto_capture?
false
end
def method_type
'pag_seguro'
end
# Indicates whether its possible to void the payment.
def can_void?(payment)
payment.state != 'void'
end
def capture(*args)
ActiveMerchant::Billing::Response.new(true, "", {}, {})
end
def cancel(response); end
def void(*args)
ActiveMerchant::Billing::Response.new(true, "", {}, {})
end
def actions
%w(capture void)
end
end
end
| 15.836364 | 61 | 0.609644 |
7a8cace8cbd6496115b42a31ff71f0de71e4fba3 | 1,142 | class Api::V1::ExercisesController < ApplicationController
before_action :set_exercise, only: [:show, :update, :destroy]
# GET /exercises
def index
@exercises = Exercise.all
render json: ExerciseSerializer.new(@exercises)
end
# GET /exercises/1
def show
render json: @exercise
end
# POST /exercises
def create
@exercise = Exercise.new(exercise_params)
if @exercise.save
render json: @exercise, status: :created, location: @exercise
else
render json: @exercise.errors, status: :unprocessable_entity
end
end
# PATCH/PUT /exercises/1
def update
if @exercise.update(exercise_params)
render json: @exercise
else
render json: @exercise.errors, status: :unprocessable_entity
end
end
# DELETE /exercises/1
def destroy
@exercise.destroy
end
private
# Use callbacks to share common setup or constraints between actions.
def set_exercise
@exercise = Exercise.find(params[:id])
end
# Only allow a list of trusted parameters through.
def exercise_params
params.require(:exercise).permit(:lbs, :reps)
end
end
| 21.961538 | 73 | 0.687391 |
1d3661c4b9742631639a6127404a7c291ba04855 | 152 | module Furnace::AVM2::ABC
class AS3ReturnVoid < FunctionReturnOpcode
instruction 0x47
ast_type :return
consume 0
produce 0
end
end | 15.2 | 44 | 0.717105 |
f8508a758372ea98886228b8616141e1e28bff54 | 523 | Gem::Specification.new do |s|
s.name = 'meta_proto_object'
s.version = '0.0.1'
s.date = '2013-9-26'
s.summary = "Extends Ruby's Object, Array, and Hash Classes with Magic"
s.description = "Extends Ruby's Object, Array, and Hash Classes with Monkey Patching & Metaprogramming"
s.authors = ["Aaron Lifton"]
s.email = '[email protected]'
s.files = ["lib/meta_proto_object.rb"]
s.homepage = 'http://rubygems.org/gems/meta_proto_object'
s.license = 'MIT'
end
| 40.230769 | 105 | 0.638623 |
ed7169ce10662f6714c65f3a22066c7f5c84f98b | 934 | require 'rails_helper'
require_relative '../../../support/api/v1/request'
require_relative '../../../support/api/v1/endpoints/create'
require_relative '../../../support/api/v1/endpoints/destroy'
RSpec.describe Api::V1::UserFollowshipsController, type: :controller do
  # All requests in this spec are authenticated via the shared context.
  include_context "authenticate requests using valid token"
  describe "POST #create" do
    it_behaves_like "a create endpoint", UserFollowship do
      let(:user){ create(:user) }
      let(:followed_user){ create(:followed_user) }
      let(:resource_params){
        {
          user_id: user.id,
          followed_user_id: followed_user.id,
        }
      }
    end
    # Blank ids should trigger presence validations on both associations.
    it_behaves_like "a create endpoint which validates presence", UserFollowship, [:user, :followed_user] do
      let(:resource_params){ {user_id: "", followed_user_id: ""} }
    end
  end
  describe "DELETE #destroy" do
    it_behaves_like "a destroy endpoint", UserFollowship
  end
end
| 31.133333 | 108 | 0.684154 |
219b37875ca5a311cf7dd52bb1f7141dcae3a2f5 | 567 | class SupportPolicy < ApplicationPolicy
  # True for support and Computacenter users (read-level access).
  def readable?
    user.is_support? || user.is_computacenter?
  end
  # True for support users only (write-level access).
  def editable?
    user.is_support?
  end
  # Map Pundit actions onto the two access levels above.
  alias_method :index?, :readable?
  alias_method :show?, :readable?
  alias_method :schools?, :readable?
  alias_method :technical_support?, :editable?
  alias_method :feature_flags?, :editable?
  alias_method :macros?, :readable?
  alias_method :new?, :editable?
  alias_method :create?, :editable?
  alias_method :edit?, :editable?
  alias_method :update?, :editable?
  alias_method :destroy?, :editable?
| 24.652174 | 46 | 0.726631 |
034d2fecbc614227cc4f8e6d5c2dd2d3fe1761f8 | 47 | module ExtensionLister
VERSION = '0.2.5'
end
| 11.75 | 22 | 0.723404 |
1d087b8c6261253907ef64275d25180df8b23997 | 21,784 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Api::V1::KeywordsController, type: :controller do
describe 'GET#index' do
context 'given access token' do
it 'returns ok status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(response).to have_http_status(:ok)
end
it 'returns keywords for valid resource owner' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate(:keyword, user_id: user.id, keyword: 'Lionel Messi')
Fabricate(:keyword, user_id: user.id, keyword: 'Cristiano Ronaldo')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:data].count).to eql 2
end
it 'does NOT return keywords for invalid resource owner' do
user1 = Fabricate(:user)
user2 = Fabricate(:user, username: 'hello', password: 'password', password_confirmation: 'password')
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user2.id, application_id: application.id)
Fabricate(:keyword, user_id: user1.id, keyword: 'Lionel Messi')
Fabricate(:keyword, user_id: user1.id, keyword: 'Cristiano Ronaldo')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:data].count).to eql 0
end
it 'returns ascending ordered keywords' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate(:keyword, user_id: user.id, keyword: 'Eden Hazard')
Fabricate(:keyword, user_id: user.id, keyword: 'Lionel Messi')
Fabricate(:keyword, user_id: user.id, keyword: 'Cristiano Ronaldo')
Fabricate(:keyword, user_id: user.id, keyword: 'Kylian Mbappe')
Fabricate(:keyword, user_id: user.id, keyword: 'Neymar')
Fabricate(:keyword, user_id: user.id, keyword: 'Kevin De Bruyne')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
response_body = JSON.parse(response.body, symbolize_names: true)
expect(response_body[:data].count).to eql 6
expect(response_body[:data][0][:attributes][:keyword]).to eql('Cristiano Ronaldo')
expect(response_body[:data][1][:attributes][:keyword]).to eql('Eden Hazard')
expect(response_body[:data][2][:attributes][:keyword]).to eql('Kevin De Bruyne')
expect(response_body[:data][3][:attributes][:keyword]).to eql('Kylian Mbappe')
expect(response_body[:data][4][:attributes][:keyword]).to eql('Lionel Messi')
expect(response_body[:data][5][:attributes][:keyword]).to eql('Neymar')
end
it 'returns only 25 keywords per page' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate.times(60, :keyword, user_id: user.id, keyword: FFaker::Name.name)
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:data].count).to eql 25
end
it 'returns only keywords that match with search params' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate(:keyword, user_id: user.id, keyword: 'Eden Hazard')
Fabricate(:keyword, user_id: user.id, keyword: 'Lionel Messi')
Fabricate(:keyword, user_id: user.id, keyword: 'Cristiano Ronaldo')
Fabricate(:keyword, user_id: user.id, keyword: 'Kylian Mbappe')
Fabricate(:keyword, user_id: user.id, keyword: 'Neymar')
Fabricate(:keyword, user_id: user.id, keyword: 'Kevin De Bruyne')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index, params: { search: 'ian' }
response_body = JSON.parse(response.body, symbolize_names: true)
expect(response_body[:data].count).to eql 2
expect(response_body[:data][0][:attributes][:keyword]).to eql('Cristiano Ronaldo')
expect(response_body[:data][1][:attributes][:keyword]).to eql('Kylian Mbappe')
end
it 'returns nothing when given page 2 but keywords less than 25' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate(:keyword, user_id: user.id, keyword: 'Eden Hazard')
Fabricate(:keyword, user_id: user.id, keyword: 'Lionel Messi')
Fabricate(:keyword, user_id: user.id, keyword: 'Cristiano Ronaldo')
Fabricate(:keyword, user_id: user.id, keyword: 'Kylian Mbappe')
Fabricate(:keyword, user_id: user.id, keyword: 'Neymar')
Fabricate(:keyword, user_id: user.id, keyword: 'Kevin De Bruyne')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index, params: { page: 2 }
expect(JSON.parse(response.body, symbolize_names: true)[:data].count).to eql 0
end
end
context 'given NO access token' do
it 'returns unauthorized status code' do
get :index
expect(response).to have_http_status(:unauthorized)
end
it 'returns invalid token message' do
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('doorkeeper.errors.messages.invalid_token.unknown'))
end
end
context 'given wrong access token' do
it 'returns unauthorized status code' do
request.headers['Authorization'] = 'xxx'
get :index
expect(response).to have_http_status(:unauthorized)
end
it 'returns invalid token message' do
request.headers['Authorization'] = 'xxx'
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('doorkeeper.errors.messages.invalid_token.unknown'))
end
end
context 'given revoked access token' do
it 'returns unauthorized status code' do
application = Fabricate(:application)
access_token = Fabricate(:access_token, application_id: application.id, revoked_at: DateTime.now)
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(response).to have_http_status(:unauthorized)
end
it 'returns token was revoked message' do
application = Fabricate(:application)
access_token = Fabricate(:access_token, application_id: application.id, revoked_at: DateTime.now)
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('doorkeeper.errors.messages.invalid_token.revoked'))
end
end
context 'given expired access token' do
it 'returns unauthorized status code' do
application = Fabricate(:application)
access_token = Fabricate(:access_token, application_id: application.id, expires_in: 0)
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(response).to have_http_status(:unauthorized)
end
it 'returns token was expired message' do
application = Fabricate(:application)
access_token = Fabricate(:access_token, application_id: application.id, expires_in: 0)
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :index
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('doorkeeper.errors.messages.invalid_token.expired'))
end
end
end
describe 'GET#show' do
context 'given access token' do
context 'given correct keyword id' do
it 'returns ok status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
keyword = Fabricate(:keyword, user_id: user.id, keyword: 'test')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :show, params: { id: keyword.id }
expect(response).to have_http_status(:ok)
end
it 'returns correct keyword' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
keyword = Fabricate(:keyword, user_id: user.id, keyword: 'test')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :show, params: { id: keyword.id }
response_body = JSON.parse(response.body, symbolize_names: true)
expect(response_body[:data][:id]).to eql(keyword.id.to_s)
expect(response_body[:data][:attributes][:keyword]).to eql('test')
end
end
context 'given incorrect keyword id' do
it 'returns not found status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate(:keyword, user_id: user.id, keyword: 'test')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :show, params: { id: 9999 }
expect(response).to have_http_status(:not_found)
end
it 'returns not found message' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
Fabricate(:keyword, user_id: user.id, keyword: 'test')
request.headers['Authorization'] = "Bearer #{access_token.token}"
get :show, params: { id: 9999 }
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to include(I18n.t('keyword.not_found_with_id', id: 9999))
end
end
end
context 'given NO access token' do
it 'returns unauthorized status code' do
user = Fabricate(:user)
keyword = Fabricate(:keyword, user_id: user.id, keyword: 'test')
get :show, params: { id: keyword.id }
expect(response).to have_http_status(:unauthorized)
end
end
end
describe 'POST#create' do
context 'given access token' do
context 'given valid parameters (file)' do
it 'returns ok status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/example.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(response).to have_http_status(:no_content)
end
it 'inserts keywords to database' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/example.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.to change(Keyword, :count).by(6)
end
it 'creates google scraping job' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/example.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.to have_enqueued_job(ScrapingProcessDistributingJob)
end
end
context 'given invalid parameters (file)' do
context 'given NO file' do
it 'returns bad request status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create
expect(response).to have_http_status(:bad_request)
end
it 'returns invalid file type message' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('keyword.file_cannot_be_blank'))
end
it 'does NOT insert keywords to database' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create
end.to change(Keyword, :count).by(0)
end
it 'does NOT create google scraping job' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create
end.not_to have_enqueued_job(ScrapingProcessDistributingJob)
end
end
context 'given invalid file type' do
it 'returns bad request status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/nimble.png')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(response).to have_http_status(:bad_request)
end
it 'returns invalid file type message' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/nimble.png')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('keyword.file_must_be_csv'))
end
it 'does NOT insert keywords to database' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/nimble.png')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.to change(Keyword, :count).by(0)
end
it 'does NOT create google scraping job' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/nimble.png')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.not_to have_enqueued_job(ScrapingProcessDistributingJob)
end
end
context 'given no keyword csv' do
it 'returns bad request status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/no_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(response).to have_http_status(:bad_request)
end
it 'returns invalid csv message' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/no_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('keyword.keyword_range'))
end
it 'does NOT insert keywords to database' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/no_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.to change(Keyword, :count).by(0)
end
it 'does NOT create google scraping job' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/no_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.not_to have_enqueued_job(ScrapingProcessDistributingJob)
end
end
context 'given more than 1,000 keywords csv' do
it 'returns bad request status code' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/more_than_thoudsand_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(response).to have_http_status(:bad_request)
end
it 'returns invalid csv message' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/more_than_thoudsand_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
post :create, params: { file: file }
expect(JSON.parse(response.body, symbolize_names: true)[:errors][0][:detail]).to eql(I18n.t('keyword.keyword_range'))
end
it 'does NOT insert keywords to database' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/more_than_thoudsand_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.to change(Keyword, :count).by(0)
end
it 'does NOT create google scraping job' do
user = Fabricate(:user)
application = Fabricate(:application)
access_token = Fabricate(:access_token, resource_owner_id: user.id, application_id: application.id)
file = fixture_file_upload('files/more_than_thoudsand_keywords.csv', 'text/csv')
request.headers['Authorization'] = "Bearer #{access_token.token}"
expect do
post :create, params: { file: file }
end.not_to have_enqueued_job(ScrapingProcessDistributingJob)
end
end
end
end
end
end
| 40.117864 | 152 | 0.642123 |
1cdeff5a11b1e95578a24f85448149b2e74628ff | 57 | require 'spec_helper'
describe VideosController do
  # No examples yet — placeholder spec for VideosController.
end
| 9.5 | 28 | 0.824561 |
26098100353371973e8d156648f4244b1c8ca60b | 1,773 | # frozen_string_literal: true
require 'forwardable'
require 'fileutils'
require 'digest'
require 'sys/filesystem'
require 'files/version'
module Files
  # delegate class methods
  extend SingleForwardable
  # extend (not include) Sys: constants such as Sys::Filesystem then resolve
  # from the `class << self` scope below via the singleton class's ancestry
  extend Sys
  class << self
    # list of file basenames in specified dir (directories are skipped)
    def filenames dir
      Dir[File.join(dir, '*')]
        .select { |f| File.file?(f) }
        .map { |f| basename(f) }
    end
    # return free space in bytes for the filesystem containing path
    def free_space path
      mount_point = Filesystem.mount_point(path)
      stat = Filesystem.stat(mount_point)
      stat.block_size * stat.blocks_available
    end
    # hex MD5 digest of the file at path
    def md5 path
      Digest::MD5.file(path).hexdigest
    end
    # open in append mode; caller is responsible for closing the handle
    def open path
      File.open(path, 'a')
    end
    # overwrite existing file
    def write path, string
      File.write(path, string, mode: 'w')
    end
    # don't use ruby-fifo gem because of its loading problems
    def write_to_fifo path, string
      # make fifo file writeable (just in case)
      FileUtils.chmod(0o666, path)
      # '+' means non-blocking mode
      File.open(path, 'w+') do |file|
        # don't use File#write - string must be newline terminated
        file.puts(string)
        file.flush
      end
    end
  end
  def_delegator :File, :basename
  # http://stackoverflow.com/questions/6553392
  # use FileUtils.rm instead of File.delete
  def_delegator :File, :exist?
  def_delegator :File, :join
  def_delegator :File, :read
  def_delegator :File, :size
  def_delegator :FileUtils, :mkdir_p
  def_delegator :FileUtils, :mv
  # raises exception if file doesn't exist
  def_delegator :FileUtils, :rm
  # doesn't raise exception if file doesn't exist
  def_delegator :FileUtils, :rm_f
  def_delegator :FileUtils, :touch
end
| 24.625 | 66 | 0.670051 |
d5092bc0ffe68909d9215ee5289436bb1ec5febe | 1,125 | # Rep Recalculator
# Recalculates the reputation score for one user, specified by the ID given in the first command line argument.
def get_setting(name)
begin
return SiteSetting.find_by_name(name).value
rescue
return nil
end
end
qu = get_setting('QuestionUpVoteRep').to_i
qd = get_setting('QuestionDownVoteRep').to_i
au = get_setting('AnswerUpVoteRep').to_i
ad = get_setting('AnswerDownVoteRep').to_i
users = User.all
users.each do |u|
u.reputation = 1
u.questions.each do |p|
upvotes = p.votes.where(:vote_type => 1).count
downvotes = p.votes.where(:vote_type => -1).count
p.user.reputation += qu * upvotes
p.user.reputation += qd * downvotes
puts "Questions: #{upvotes} up, #{downvotes} down => #{qu*upvotes}, #{qd*downvotes} rep"
p.user.save!
end
u.answers.each do |p|
upvotes = p.votes.where(:vote_type => 1).count
downvotes = p.votes.where(:vote_type => -1).count
p.user.reputation += au * upvotes
p.user.reputation += ad * downvotes
puts "Answers: #{upvotes} up, #{downvotes} down => #{au*upvotes}, #{ad*downvotes} rep"
p.user.save!
end
end
| 28.125 | 111 | 0.682667 |
5d07838234045d61bf9c6ea6fb65f3c71ce8ad31 | 649 | module Morpho
module Formatters
module StandardError
class << self
def call(message, backtrace, options, env, original_exception)
if message.is_a?(String)
result = {
message: message,
errors: [
{ field_name: 'base', messages: [ message ] }
]
}
elsif message.is_a?(Hash)
result = message
elsif message.is_a?(Object)
return message.to_json if message.respond_to?(:to_json)
result = message
end
::Grape::Json.dump(result)
end
end
end
end
end
| 24.037037 | 70 | 0.511556 |
91cc6b271a5f9169a8c4ae78b69f71249fc52ace | 1,839 | # frozen_string_literal: true
FactoryBot.define do
  # Event type lookup record. initialize_with + find_or_create_by means
  # repeated builds with the same name/category reuse the existing row
  # instead of creating duplicates.
  factory :event_type, class: "Renalware::Events::Type" do
    initialize_with do
      Renalware::Events::Type.find_or_create_by(
        name: name,
        category: category
      )
    end
    category factory: :event_category
    name { "Test" }
    # Child factories below specialize name/slug/event_class_name for each
    # concrete event type.
    factory :access_clinic_event_type do
      name { "Access--Clinic" }
      hidden { false }
    end
    factory :swab_event_type do
      name { "Swab" }
      event_class_name { "Renalware::Events::Swab" }
      slug { "swabs" }
    end
    factory :biopsy_event_type do
      name { "Renal biopsy" }
      event_class_name { "Renalware::Events::Biopsy" }
    end
    factory :pd_line_change_event_type do
      name { "PD Line Change" }
      slug { "pd_line_changes" }
    end
    factory :investigation_event_type do
      name { "Investigation" }
      slug { "investigations" }
      event_class_name { "Renalware::Events::Investigation" }
    end
    factory :vaccination_event_type do
      name { "Vaccination" }
      slug { "vaccinations" }
      event_class_name { "Renalware::Virology::Vaccination" }
    end
    factory :advanced_care_plan_event_type do
      name { "AdvancedCarePlan" }
      slug { "advanced_care_plans" }
      event_class_name { "Renalware::Events::AdvancedCarePlan" }
    end
    factory :clinical_frailty_score_event_type do
      name { "Clinical Frailty Score" }
      slug { "clinical_frailty_score" }
      event_class_name { "Renalware::Events::ClinicalFrailtyScore" }
    end
    factory :medication_review_event_type do
      name { "Medication Review" }
      event_class_name { "Renalware::Medications::Review" }
    end
    factory :research_study_event_type do
      name { "Research Study" }
      event_class_name { "Renalware::Research::StudyEvent" }
    end
  end
end
| 25.901408 | 68 | 0.654704 |
793959fbddd5a0e58b65ef6f34d9a7bfcf8122b7 | 6,192 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Hdinsight::Mgmt::V2018_06_01_preview
#
# A service client - single point of access to the REST API.
#
class HDInsightManagementClient < MsRestAzure::AzureServiceClient
include MsRestAzure
include MsRestAzure::Serialization
# @return [String] the base URI of the service.
attr_accessor :base_url
# @return Credentials needed for the client to connect to Azure.
attr_reader :credentials
# @return [String] The subscription credentials which uniquely identify
# Microsoft Azure subscription. The subscription ID forms part of the URI
# for every service call.
attr_accessor :subscription_id
# @return [String] The HDInsight client API Version.
attr_reader :api_version
# @return [String] The preferred language for the response.
attr_accessor :accept_language
# @return [Integer] The retry timeout in seconds for Long Running
# Operations. Default value is 30.
attr_accessor :long_running_operation_retry_timeout
# @return [Boolean] Whether a unique x-ms-client-request-id should be
# generated. When set to true a unique x-ms-client-request-id value is
# generated and included in each request. Default is true.
attr_accessor :generate_client_request_id
# @return [Clusters] clusters
attr_reader :clusters
# @return [Applications] applications
attr_reader :applications
# @return [Locations] locations
attr_reader :locations
# @return [Configurations] configurations
attr_reader :configurations
# @return [Extensions] extensions
attr_reader :extensions
# @return [ScriptActions] script_actions
attr_reader :script_actions
# @return [ScriptExecutionHistory] script_execution_history
attr_reader :script_execution_history
# @return [Operations] operations
attr_reader :operations
#
# Creates initializes a new instance of the HDInsightManagementClient class.
# @param credentials [MsRest::ServiceClientCredentials] credentials to authorize HTTP requests made by the service client.
# @param base_url [String] the base URI of the service.
# @param options [Array] filters to be applied to the HTTP requests.
#
def initialize(credentials = nil, base_url = nil, options = nil)
  super(credentials, options)
  # Fall back to the public Azure Resource Manager endpoint.
  @base_url = base_url || 'https://management.azure.com'

  # Credentials are optional, but when supplied they must be an
  # MsRest::ServiceClientCredentials (the double `unless` skips the type
  # check entirely when credentials is nil).
  fail ArgumentError, 'invalid type of credentials input parameter' unless credentials.is_a?(MsRest::ServiceClientCredentials) unless credentials.nil?
  @credentials = credentials

  # Instantiate one operation group per REST API area, each holding a
  # back-reference to this client.
  @clusters = Clusters.new(self)
  @applications = Applications.new(self)
  @locations = Locations.new(self)
  @configurations = Configurations.new(self)
  @extensions = Extensions.new(self)
  @script_actions = ScriptActions.new(self)
  @script_execution_history = ScriptExecutionHistory.new(self)
  @operations = Operations.new(self)

  # Client-wide defaults used by every request.
  @api_version = '2018-06-01-preview'
  @accept_language = 'en-US'
  @long_running_operation_retry_timeout = 30
  @generate_client_request_id = true
  add_telemetry
end
#
# Makes a request and returns the body of the response.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [Hash{String=>String}] containing the body of the response.
# Example:
#
# request_content = "{'location':'westus','tags':{'tag1':'val1','tag2':'val2'}}"
# path = "/path"
# options = {
# body: request_content,
# query_params: {'api-version' => '2016-02-01'}
# }
# result = @client.make_request(:put, path, options)
#
# Performs an HTTP request and returns only the deserialized body of the
# response (or nil when the request produced no operation response).
#
# @param method [Symbol] one of :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] request options such as :body.
# @return [Hash{String=>String}, nil] the parsed response body.
def make_request(method, path, options = {})
  response = make_request_with_http_info(method, path, options)
  return if response.nil?

  response.body
end
#
# Makes a request and returns the operation response.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [MsRestAzure::AzureOperationResponse] Operation response containing the request, response and status.
#
def make_request_with_http_info(method, path, options = {})
  # Block on the async request; #value! re-raises any failure.
  result = make_request_async(method, path, options).value!
  # Replace the raw body with parsed JSON, or nil for an empty body.
  result.body = result.response.body.to_s.empty? ? nil : JSON.load(result.response.body)
  result
end
#
# Makes a request asynchronously.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def make_request_async(method, path, options = {})
  fail ArgumentError, 'method is nil' if method.nil?
  fail ArgumentError, 'path is nil' if path.nil?

  # A per-call base_url overrides the client default.
  request_url = options[:base_url] || @base_url
  # NOTE(review): a caller-supplied Content-Type is copied into the shared
  # @request_headers hash, so it persists for subsequent requests on this
  # client — presumably intentional in the generated code.
  if(!options[:headers].nil? && !options[:headers]['Content-Type'].nil?)
    @request_headers['Content-Type'] = options[:headers]['Content-Type']
  end

  # Merge default headers with per-call headers; per-call values win.
  request_headers = @request_headers
  request_headers.merge!({'accept-language' => @accept_language}) unless @accept_language.nil?
  options.merge!({headers: request_headers.merge(options[:headers] || {})})
  options.merge!({credentials: @credentials}) unless @credentials.nil?

  # Delegate the actual HTTP round trip to MsRestAzure::AzureServiceClient.
  super(request_url, method, path, options)
end
private
#
# Adds telemetry information.
#
# Registers this SDK's name and version in the User-Agent header so Azure
# can attribute requests to azure_mgmt_hdinsight 0.17.3.
def add_telemetry
  add_user_agent_information('azure_mgmt_hdinsight/0.17.3')
end
end
end
| 38.222222 | 154 | 0.698482 |
e856e14ca9d143f9d40458d658c77448036ede25 | 43 | class PaymentType < ActiveRecord::Base
end
| 14.333333 | 38 | 0.813953 |
e8d79d22ea797daa77cdd41d1e5c09ec59023307 | 105 | require 'rubygems'
require 'bundler/setup'
# our gem
require 'db_clone'
RSpec.configure do |config|
end | 13.125 | 27 | 0.761905 |
26142dc8dc925f60d3ce71b8d37d11e73e7f14a8 | 1,766 | module Neography
class Rest
module NodePaths
include Neography::Rest::Helpers
# Finds a single path between two nodes via the Neo4j REST API.
# Returns the path hash, or an empty hash when no path exists.
def get_path(from, to, relationships, depth = 1, algorithm = "shortestPath")
  body = path_options(to, relationships, depth, algorithm)
  response = @connection.post("/node/#{get_id(from)}/path", body)
  response || {}
end
# Finds every path between two nodes via the Neo4j REST API.
# Returns an array of path hashes, or an empty array when none exist.
def get_paths(from, to, relationships, depth = 1, algorithm = "allPaths")
  body = path_options(to, relationships, depth, algorithm)
  response = @connection.post("/node/#{get_id(from)}/paths", body)
  response || []
end
# Finds the cheapest path(s) between two nodes using Dijkstra's algorithm,
# weighting each relationship by the given cost property.
# Returns the REST API response, or an empty hash when no path exists.
def get_shortest_weighted_path(from, to, relationships, weight_attribute = "weight", depth = 1, algorithm = "dijkstra")
  body = path_options(to, relationships, depth, algorithm, cost_property: weight_attribute)
  response = @connection.post("/node/#{get_id(from)}/paths", body)
  response || {}
end
private
# Normalises the many accepted spellings (symbol or string) of a
# path-finding algorithm into the identifier expected by the Neo4j REST
# API. Unrecognised values fall back to "allPaths".
def get_algorithm(algorithm)
  shortest_aliases = [:shortest, "shortest", :shortestPath, "shortestPath", :short, "short"]
  simple_aliases   = [:allSimplePaths, "allSimplePaths", :simple, "simple"]
  dijkstra_aliases = [:dijkstra, "dijkstra"]

  if shortest_aliases.include?(algorithm)
    "shortestPath"
  elsif simple_aliases.include?(algorithm)
    "allSimplePaths"
  elsif dijkstra_aliases.include?(algorithm)
    "dijkstra"
  else
    "allPaths"
  end
end
# Builds the POST options hash for the path-finding endpoints: a JSON body
# describing the target node, traversal constraints and algorithm, plus
# JSON content-type headers. extra_body entries are merged into the body
# (e.g. :cost_property for weighted searches).
def path_options(to, relationships, depth, algorithm, extra_body = {})
  payload = {
    "to"            => @connection.configuration + "/node/#{get_id(to)}",
    "relationships" => relationships,
    "max_depth"     => depth,
    "algorithm"     => get_algorithm(algorithm)
  }

  {
    body: payload.merge(extra_body).to_json,
    headers: json_content_type
  }
end
end
end
end
| 35.32 | 125 | 0.582673 |
1c5209ef21b9b16cae7f1ce1bf0cffc9e139411d | 1,974 | class YelpTools < Formula
desc "Tools that help create and edit Mallard or DocBook documentation"
homepage "https://github.com/GNOME/yelp-tools"
url "https://download.gnome.org/sources/yelp-tools/3.32/yelp-tools-3.32.1.tar.xz"
sha256 "99a7c312a5fcb427870dc198af02801eb0f8ea63317e20110fc0303eb44636e3"
bottle do
cellar :any_skip_relocation
sha256 "2ac8c83a70c72d104f3b6ce093ccd989dacaa1ab4de9b895adeebdffa50b6663" => :mojave
sha256 "2ac8c83a70c72d104f3b6ce093ccd989dacaa1ab4de9b895adeebdffa50b6663" => :high_sierra
sha256 "2c11165b029ef8e8aa56ff802ccbf8d6cc1409187b42bee33a4e972bd60094f0" => :sierra
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "gettext" => :build
depends_on "intltool" => :build
depends_on "itstool" => :build
depends_on "libtool" => :build
depends_on "libxml2" => :build
depends_on "libxslt" => :build
depends_on "pkg-config" => :build
depends_on "gtk+3"
resource "yelp-xsl" do
url "https://download.gnome.org/sources/yelp-xsl/3.32/yelp-xsl-3.32.1.tar.xz"
sha256 "cac31bc150545d6aa0de15dce04560cbf591008d17a783a1d1d9cdd47b147f04"
end
def install
  # Build and install the vendored yelp-xsl resource first; yelp-tools
  # needs its stylesheets and pkg-config file at configure time.
  resource("yelp-xsl").stage do
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make", "install"
    # Let the main build's pkg-config find the freshly installed yelp-xsl.
    ENV.append_path "PKG_CONFIG_PATH", "#{share}/pkgconfig"
  end

  # Regenerate the build system, then build and install yelp-tools itself.
  system "autoreconf", "-fi"
  system "./configure", "--prefix=#{prefix}"
  system "make", "install"
end
def post_install
system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache",
"-f", "-t", "#{HOMEBREW_PREFIX}/share/icons/hicolor"
end
test do
system "#{bin}/yelp-new", "task", "ducksinarow"
system "#{bin}/yelp-build", "html", "ducksinarow.page"
system "#{bin}/yelp-check", "validate", "ducksinarow.page"
end
end
| 35.25 | 93 | 0.677812 |
e8429bac3bb609d0cd1412496ca933dd1005ac1c | 12,543 | # frozen_string_literal: true
# Selenium specific implementation of the Capybara::Driver::Node API
require 'capybara/selenium/extensions/find'
require 'capybara/selenium/extensions/scroll'
class Capybara::Selenium::Node < Capybara::Driver::Node
include Capybara::Selenium::Find
include Capybara::Selenium::Scroll
def visible_text
native.text
end
# Returns the element's full text content (including text hidden by CSS),
# normalised to match what a user would conceptually read.
def all_text
  text = driver.evaluate_script('arguments[0].textContent', self)
  text.gsub(/[\u200b\u200e\u200f]/, '')                # drop zero-width space and LTR/RTL marks
      .gsub(/[\ \n\f\t\v\u2028\u2029]+/, ' ')          # collapse whitespace runs (incl. line/para separators) to one space
      .gsub(/\A[[:space:]&&[^\u00a0]]+/, '')           # strip leading whitespace, but keep non-breaking spaces
      .gsub(/[[:space:]&&[^\u00a0]]+\z/, '')           # strip trailing whitespace, but keep non-breaking spaces
      .tr("\u00a0", ' ')                               # finally render non-breaking spaces as plain spaces
end
# Reads the given attribute from the underlying Selenium element.
# Returns nil if the driver raises (e.g. the element has gone stale),
# mirroring "attribute not present" for callers.
def [](name)
  native.attribute(name.to_s)
rescue Selenium::WebDriver::Error::WebDriverError
  nil
end
# Returns the element's current value. For a multiple-select this is the
# array of selected option values (falling back to each option's text when
# it has no value attribute); for everything else, the value attribute.
def value
  if tag_name == 'select' && multiple?
    native.find_elements(:css, 'option:checked').map { |el| el[:value] || el.text }
  else
    native[:value]
  end
end
# Returns a hash mapping each requested CSS property name to its computed
# value on this element.
def style(styles)
  styles.map { |property| [property, native.css_value(property)] }.to_h
end
##
#
# Set the value of the form element to the given value.
#
# @param [String] value The new value
# @param [Hash{}] options Driver specific options for how to set the value
# @option options [Symbol,Array] :clear (nil) The method used to clear the previous value <br/>
# nil => clear via javascript <br/>
# :none => append the new value to the existing value <br/>
# :backspace => send backspace keystrokes to clear the field <br/>
# Array => an array of keys to send before the value being set, e.g. [[:command, 'a'], :backspace]
def set(value, **options)
raise ArgumentError, "Value cannot be an Array when 'multiple' attribute is not present. Not a #{value.class}" if value.is_a?(Array) && !multiple?
tag_name, type = attrs(:tagName, :type).map { |val| val&.downcase }
@tag_name ||= tag_name
case tag_name
when 'input'
case type
when 'radio'
click
when 'checkbox'
click if value ^ checked?
when 'file'
set_file(value)
when 'date'
set_date(value)
when 'time'
set_time(value)
when 'datetime-local'
set_datetime_local(value)
else
set_text(value, options)
end
when 'textarea'
set_text(value, options)
else
set_content_editable(value)
end
end
def select_option
click unless selected? || disabled?
end
def unselect_option
raise Capybara::UnselectNotAllowed, 'Cannot unselect option from single select box.' unless select_node.multiple?
click if selected?
end
def click(keys = [], **options)
click_options = ClickOptions.new(keys, options)
return native.click if click_options.empty?
click_with_options(click_options)
rescue StandardError => e
if e.is_a?(::Selenium::WebDriver::Error::ElementClickInterceptedError) ||
e.message.match?(/Other element would receive the click/)
scroll_to_center
end
raise e
end
def right_click(keys = [], **options)
click_options = ClickOptions.new(keys, options)
click_with_options(click_options) do |action|
click_options.coords? ? action.context_click : action.context_click(native)
end
end
def double_click(keys = [], **options)
click_options = ClickOptions.new(keys, options)
click_with_options(click_options) do |action|
click_options.coords? ? action.double_click : action.double_click(native)
end
end
def send_keys(*args)
native.send_keys(*args)
end
def hover
scroll_if_needed { browser_action.move_to(native).perform }
end
def drag_to(element)
# Due to W3C spec compliance - The Actions API no longer scrolls to elements when necessary
# which means Seleniums `drag_and_drop` is now broken - do it manually
scroll_if_needed { browser_action.click_and_hold(native).perform }
element.scroll_if_needed { browser_action.move_to(element.native).release.perform }
end
def tag_name
@tag_name ||= native.tag_name.downcase
end
def visible?; boolean_attr(native.displayed?); end
def readonly?; boolean_attr(self[:readonly]); end
def multiple?; boolean_attr(self[:multiple]); end
def selected?; boolean_attr(native.selected?); end
alias :checked? :selected?
def disabled?
return true unless native.enabled?
# WebDriver only defines `disabled?` for form controls but fieldset makes sense too
find_xpath('self::fieldset/ancestor-or-self::fieldset[@disabled]').any?
end
def content_editable?
native.attribute('isContentEditable')
end
def ==(other)
native == other.native
end
def path
driver.evaluate_script GET_XPATH_SCRIPT, self
end
protected
def scroll_if_needed
yield
rescue ::Selenium::WebDriver::Error::MoveTargetOutOfBoundsError
scroll_to_center
yield
end
private
def sibling_index(parent, node, selector)
siblings = parent.find_xpath(selector)
case siblings.size
when 0
'[ERROR]' # IE doesn't support full XPath (namespace-uri, etc)
when 1
'' # index not necessary when only one matching element
else
idx = siblings.index(node)
# Element may not be found in the siblings if it has gone away
idx.nil? ? '[ERROR]' : "[#{idx + 1}]"
end
end
# Interprets a DOM attribute value as a boolean: nil/false pass through
# unchanged (falsy), the literal string 'false' becomes false, and any
# other truthy value becomes true.
def boolean_attr(val)
  return val unless val

  val != 'false'
end
# a reference to the select node if this is an option node
def select_node
find_xpath(XPath.ancestor(:select)[1]).first
end
# Types +value+ into a text input/textarea, first clearing the existing
# contents according to the :clear strategy:
#   nil        => native clear for empty values, else JS value reset
#   :backspace => END key followed by one backspace per existing character
#   Array      => the given keystrokes are sent before the new value
#   :none      => append without clearing
def set_text(value, clear: nil, **_unused)
  value = value.to_s
  if value.empty? && clear.nil?
    native.clear
  elsif clear == :backspace
    # Clear field by sending the correct number of backspace keys.
    backspaces = [:backspace] * self.value.to_s.length
    send_keys(*([:end] + backspaces + [value]))
  elsif clear.is_a? Array
    send_keys(*clear, value)
  else
    # Clear field by JavaScript assignment of the value property.
    # Script can change a readonly element which user input cannot, so
    # don't execute if readonly.
    driver.execute_script "if (!arguments[0].readOnly){ arguments[0].value = '' }", self unless clear == :none
    send_keys(value)
  end
end
def click_with_options(click_options)
scroll_if_needed do
action_with_modifiers(click_options) do |action|
if block_given?
yield action
else
click_options.coords? ? action.click : action.click(native)
end
end
end
end
def scroll_to_center
script = <<-'JS'
try {
arguments[0].scrollIntoView({behavior: 'instant', block: 'center', inline: 'center'});
} catch(e) {
arguments[0].scrollIntoView(true);
}
JS
begin
driver.execute_script(script, self)
rescue StandardError # rubocop:disable Lint/HandleExceptions
# Swallow error if scrollIntoView with options isn't supported
end
end
def set_date(value) # rubocop:disable Naming/AccessorMethodName
value = SettableValue.new(value)
return set_text(value) unless value.dateable?
# TODO: this would be better if locale can be detected and correct keystrokes sent
update_value_js(value.to_date_str)
end
def set_time(value) # rubocop:disable Naming/AccessorMethodName
value = SettableValue.new(value)
return set_text(value) unless value.timeable?
# TODO: this would be better if locale can be detected and correct keystrokes sent
update_value_js(value.to_time_str)
end
def set_datetime_local(value) # rubocop:disable Naming/AccessorMethodName
value = SettableValue.new(value)
return set_text(value) unless value.timeable?
# TODO: this would be better if locale can be detected and correct keystrokes sent
update_value_js(value.to_datetime_str)
end
def update_value_js(value)
driver.execute_script(<<-JS, self, value)
if (arguments[0].readOnly) { return };
if (document.activeElement !== arguments[0]){
arguments[0].focus();
}
if (arguments[0].value != arguments[1]) {
arguments[0].value = arguments[1]
arguments[0].dispatchEvent(new InputEvent('input'));
arguments[0].dispatchEvent(new Event('change', { bubbles: true }));
}
JS
end
def set_file(value) # rubocop:disable Naming/AccessorMethodName
path_names = value.to_s.empty? ? [] : value
native.send_keys(Array(path_names).join("\n"))
end
def set_content_editable(value) # rubocop:disable Naming/AccessorMethodName
# Ensure we are focused on the element
click
editable = driver.execute_script <<-JS, self
if (arguments[0].isContentEditable) {
var range = document.createRange();
var sel = window.getSelection();
arguments[0].focus();
range.selectNodeContents(arguments[0]);
sel.removeAllRanges();
sel.addRange(range);
return true;
}
return false;
JS
# The action api has a speed problem but both chrome and firefox 58 raise errors
# if we use the faster direct send_keys. For now just send_keys to the element
# we've already focused.
# native.send_keys(value.to_s)
browser_action.send_keys(value.to_s).perform if editable
end
def action_with_modifiers(click_options)
actions = browser_action.move_to(native, *click_options.coords)
modifiers_down(actions, click_options.keys)
yield actions
modifiers_up(actions, click_options.keys)
actions.perform
ensure
act = browser_action
act.release_actions if act.respond_to?(:release_actions)
end
def modifiers_down(actions, keys)
each_key(keys) { |key| actions.key_down(key) }
end
def modifiers_up(actions, keys)
each_key(keys) { |key| actions.key_up(key) }
end
def browser
driver.browser
end
def browser_action
browser.action
end
# Yields each modifier key with Capybara's aliases translated to the
# canonical Selenium names (:ctrl -> :control, :command/:cmd -> :meta);
# any other key is yielded unchanged.
def each_key(keys)
  aliases = { ctrl: :control, command: :meta, cmd: :meta }
  keys.each { |key| yield(aliases.fetch(key, key)) }
end
def find_context
native
end
def build_node(native_node, initial_cache = {})
self.class.new(driver, native_node, initial_cache)
end
def attrs(*attr_names)
return attr_names.map { |name| self[name.to_s] } if ENV['CAPYBARA_THOROUGH']
driver.evaluate_script <<~'JS', self, attr_names.map(&:to_s)
(function(el, names){
return names.map(function(name){
return el[name]
});
})(arguments[0], arguments[1]);
JS
end
GET_XPATH_SCRIPT = <<~'JS'
(function(el, xml){
var xpath = '';
var pos, tempitem2;
while(el !== xml.documentElement) {
pos = 0;
tempitem2 = el;
while(tempitem2) {
if (tempitem2.nodeType === 1 && tempitem2.nodeName === el.nodeName) { // If it is ELEMENT_NODE of the same name
pos += 1;
}
tempitem2 = tempitem2.previousSibling;
}
if (el.namespaceURI != xml.documentElement.namespaceURI) {
xpath = "*[local-name()='"+el.nodeName+"' and namespace-uri()='"+(el.namespaceURI===null?'':el.namespaceURI)+"']["+pos+']'+'/'+xpath;
} else {
xpath = el.nodeName.toUpperCase()+"["+pos+"]/"+xpath;
}
el = el.parentNode;
}
xpath = '/'+xml.documentElement.nodeName.toUpperCase()+'/'+xpath;
xpath = xpath.replace(/\/$/, '');
return xpath;
})(arguments[0], document)
JS
# SettableValue encapsulates time/date field formatting
# SettableValue wraps a value destined for a date/time form field and
# renders it in the wire formats the HTML controls expect. Strings are
# deliberately never treated as date/time objects, so they fall through
# to plain keystroke entry.
class SettableValue
  attr_reader :value

  def initialize(value)
    @value = value
  end

  def to_s
    value.to_s
  end

  # True when the value is a non-String object convertible to a Date.
  def dateable?
    value.respond_to?(:to_date) && !value.is_a?(String)
  end

  # Calendar date in ISO 8601 form, e.g. "2017-06-13".
  def to_date_str
    value.to_date.strftime('%Y-%m-%d')
  end

  # True when the value is a non-String object convertible to a Time.
  def timeable?
    value.respond_to?(:to_time) && !value.is_a?(String)
  end

  # 24-hour wall-clock time, e.g. "14:05".
  def to_time_str
    value.to_time.strftime('%H:%M')
  end

  # Format accepted by <input type="datetime-local">.
  def to_datetime_str
    value.to_time.strftime('%Y-%m-%dT%H:%M')
  end
end
private_constant :SettableValue
# ClickOptions encapsulates click option logic
# ClickOptions bundles the modifier keys and positional options passed to
# click-style actions, exposing small predicates used to decide whether a
# plain native click suffices or an action chain is required.
class ClickOptions
  attr_reader :keys, :options

  def initialize(keys, options)
    @keys = keys
    @options = options
  end

  # Truthy when both :x and :y offsets were supplied.
  def coords?
    options[:x] && options[:y]
  end

  # The [x, y] offset pair (entries may be nil when not supplied).
  def coords
    options.values_at(:x, :y)
  end

  # True when neither modifier keys nor coordinates were given.
  def empty?
    !coords? && keys.empty?
  end
end
private_constant :ClickOptions
end
| 27.208243 | 150 | 0.657259 |
33a1c88e33c126e35a4f61efe5a0dff55f704274 | 2,568 | class Spinach::Features::Dashboard < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedIssuable
step 'I should see "New Project" link' do
expect(page).to have_link "New project"
end
step 'I should see "Shop" project link' do
expect(page).to have_link "Shop"
end
step 'I should see "Shop" project CI status' do
expect(page).to have_link "Commit: skipped"
end
step 'I should see last push widget' do
expect(page).to have_content "You pushed to fix"
expect(page).to have_link "Create Merge Request"
end
step 'I click "Create Merge Request" link' do
click_link "Create Merge Request"
end
step 'I see prefilled new Merge Request page' do
expect(page).to have_selector('.merge-request-form')
expect(current_path).to eq new_namespace_project_merge_request_path(@project.namespace, @project)
expect(find("#merge_request_target_project_id").value).to eq @project.id.to_s
expect(find("input#merge_request_source_branch").value).to eq "fix"
expect(find("input#merge_request_target_branch").value).to eq "master"
end
step 'I have group with projects' do
@group = create(:group)
@project = create(:empty_project, namespace: @group)
@event = create(:closed_issue_event, project: @project)
@project.team << [current_user, :master]
end
step 'I should see projects list' do
@user.authorized_projects.all.each do |project|
expect(page).to have_link project.name_with_namespace
end
end
step 'I should see groups list' do
Group.all.each do |group|
expect(page).to have_link group.name
end
end
step 'group has a projects that does not belongs to me' do
@forbidden_project1 = create(:empty_project, group: @group)
@forbidden_project2 = create(:empty_project, group: @group)
end
step 'I should see 1 project at group list' do
expect(find('span.last_activity/span')).to have_content('1')
end
step 'I filter the list by label "feature"' do
page.within ".labels-filter" do
find('.dropdown').click
click_link "feature"
end
end
step 'I should see "Bugfix1" in issues list' do
page.within "ul.content-list" do
expect(page).to have_content "Bugfix1"
end
end
step 'project "Shop" has issue "Bugfix1" with label "feature"' do
project = Project.find_by(name: "Shop")
issue = create(:issue, title: "Bugfix1", project: project, assignee: current_user)
issue.labels << project.labels.find_by(title: 'feature')
end
end
| 30.571429 | 101 | 0.706386 |
1a9cddb7f7bd3557334f3293f798edf291f61c5d | 1,395 | module Caboose
module LogicParser
# This module holds our recursive descent parser that take a logic string
# the logic string is tested by the enclosing Handler class' #check method
# Include this module in your Handler class.
# recursively processes an permission string and returns true or false
# Recursively evaluates a boolean permission expression such as
# "admin & (editor | owner)" against +context+. Individual terms are
# resolved by the including class's #check method; operator precedence is
# parentheses, then !, then &, then |. NOTE: the string is rewritten in
# place via sub!, so the caller's argument is mutated.
def process(logicstring, context)
  # Reduce every parenthesised group to "true"/"false" by recursing on it.
  while logicstring =~ /\(/
    logicstring.sub!(/\(([^\)]+)\)/) {
      process($1, context)
    }
  end
  # Negation: replace each !term with the inverted result of #check.
  # (logicstring[$1] returns the captured term itself, since it was just
  # matched inside logicstring.)
  while logicstring =~ /!/
    logicstring.sub!(/!([^ &|]+)/) {
      (!check(logicstring[$1], context)).to_s
    }
  end
  # Conjunction: split on the first '&' and require both sides.
  if logicstring =~ /&/
    return (process(logicstring[/^[^&]+/], context) and process(logicstring[/^[^&]+&(.*)$/,1], context))
  end
  # Disjunction: split on the first '|' and accept either side.
  if logicstring =~ /\|/
    return (process(logicstring[/^[^\|]+/], context) or process(logicstring[/^[^\|]+\|(.*)$/,1], context))
  end
  # Boolean literals (case-insensitive), typically produced by the
  # substitutions above.
  if logicstring =~ /^\s*true\s*$/i
    return true
  elsif logicstring =~ /^\s*false\s*$/i
    return false
  end
  # A single bare term: delegate to the includer's #check.
  (check(logicstring.strip, context))
end
end # LogicParser
end | 29.0625 | 110 | 0.560573 |
38c30247a6ae63101e3fb608be4aa11169a8b34a | 1,808 | # frozen_string_literal: true
require 'rubygems'
require 'bundler/setup'
require 'rspec'
require 'rspec/its'
require 'mongoid'
require 'mongoid/relations_dirty_tracking'
Mongo::Logger.logger.level = 2
Mongoid.configure do |config|
config.logger.level = 2
config.connect_to('mongoid_relations_dirty_tracking_test')
config.belongs_to_required_by_default = false
end
RSpec.configure do |config|
config.mock_with :rspec
config.after(:all) { Mongoid.purge! }
end
class TestDocument
include Mongoid::Document
include Mongoid::RelationsDirtyTracking
embeds_one :one_document, class_name: 'TestEmbeddedDocument'
embeds_many :many_documents, class_name: 'TestEmbeddedDocument'
has_one :one_related, class_name: 'TestRelatedDocument'
has_many :many_related, class_name: 'TestRelatedDocument'
has_and_belongs_to_many :many_to_many_related, class_name: 'TestRelatedDocument'
end
class TestEmbeddedDocument
include Mongoid::Document
embedded_in :test_document
field :title, type: String
end
class TestRelatedDocument
include Mongoid::Document
include Mongoid::RelationsDirtyTracking
belongs_to :test_document, inverse_of: :one_related
field :title, type: String
end
class TestDocumentWithOnlyOption
include Mongoid::Document
include Mongoid::RelationsDirtyTracking
embeds_many :many_documents, class_name: 'TestEmbeddedDocument'
has_one :one_related, class_name: 'TestRelatedDocument'
relations_dirty_tracking only: :many_documents
end
class TestDocumentWithExceptOption
include Mongoid::Document
include Mongoid::RelationsDirtyTracking
embeds_many :many_documents, class_name: 'TestEmbeddedDocument'
has_one :one_related, class_name: 'TestRelatedDocument'
relations_dirty_tracking except: 'many_documents'
end
| 25.464789 | 82 | 0.798119 |
f724c69fa59d070db095c8cc8eca066afc06fbaa | 221 | module TZInfo
# Generated TZInfo data: America/Coral_Harbour is a link (alias) to
# America/Atikokan rather than a zone with its own transition rules.
module Definitions
  module America
    module Coral_Harbour
      include TimezoneDefinition

      linked_timezone 'America/Coral_Harbour', 'America/Atikokan'
    end
  end
end
end
| 18.416667 | 67 | 0.669683 |
f725ac7b6817b9875b888f2e495876427fc9bd67 | 929 | # encoding: utf-8
# Gem specification for the gherkin parser gem.
Gem::Specification.new do |s|
  s.name        = 'gherkin'
  s.version     = '4.1.0'
  s.authors     = ["Gáspár Nagy", "Aslak Hellesøy", "Steve Tooke"]
  s.description = 'Gherkin parser'
  s.summary     = "gherkin-#{s.version}"
  s.email       = '[email protected]'
  s.homepage    = "https://github.com/cucumber/gherkin-ruby"
  s.platform    = Gem::Platform::RUBY
  s.license     = "MIT"
  s.required_ruby_version = ">= 1.9.3"

  s.add_development_dependency 'bundler'
  s.add_development_dependency 'rake', '~> 10.5'
  s.add_development_dependency 'rspec', '~> 3.5'

  # For coverage reports
  s.add_development_dependency 'coveralls'

  s.rubygems_version = ">= 1.6.1"
  # Package every tracked file except .gitignore entries.
  s.files            = `git ls-files`.split("\n").reject {|path| path =~ /\.gitignore$/ }
  s.test_files       = `git ls-files -- spec/*`.split("\n")
  s.rdoc_options     = ["--charset=UTF-8"]
  s.require_path     = "lib"
end
| 34.407407 | 89 | 0.612487 |
1a940475bb07e26d88706067701ab3b67a203b7b | 2,247 | class Lha < Formula
desc "Utility for creating and opening lzh archives"
homepage "https://lha.osdn.jp/"
# Canonical: https://osdn.net/dl/lha/lha-1.14i-ac20050924p1.tar.gz
url "https://dotsrc.dl.osdn.net/osdn/lha/22231/lha-1.14i-ac20050924p1.tar.gz"
version "1.14i-ac20050924p1"
sha256 "b5261e9f98538816aa9e64791f23cb83f1632ecda61f02e54b6749e9ca5e9ee4"
license "MIT"
# OSDN releases pages use asynchronous requests to fetch the archive
# information for each release, rather than including this information in the
# page source. As such, we identify versions from the release names instead.
# The portion of the regex that captures the version is looser than usual
# because the version format is unusual and may change in the future.
livecheck do
url "https://osdn.net/projects/lha/releases/"
regex(%r{href=.*?/projects/lha/releases/[^>]+?>\s*?v?(\d+(?:[.-][\da-z]+)+)}im)
end
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "bd78eb55cbce8091fd07d82ec486bfd67fc8079b2fe6385c8374b2e7c5171528" => :big_sur
sha256 "d328d1b1740353a2e04c6f79dc863f3fa2caca9380e76b3e48b4b72f5e1ad32b" => :arm64_big_sur
sha256 "429d3165a0f986e815f09ea3f6b2d93e1bd0feef01b6df6159a983e8118244a4" => :catalina
sha256 "12b5c79de56f71138c64d517ffc0091bc313f4cc0f174e10276b248b06e2fa0f" => :mojave
sha256 "208546ee6f604a3ee6f83ab6e66387c71b349d33aeaada51dcdf81c4c9d28543" => :x86_64_linux
end
head do
url "https://github.com/jca02266/lha.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
end
conflicts_with "lhasa", because: "both install a `lha` binary"
def install
# Work around configure/build issues with Xcode 12
ENV.append "CFLAGS", "-Wno-implicit-function-declaration"
system "autoreconf", "-is" if build.head?
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
end
test do
(testpath/"foo").write "test"
system "#{bin}/lha", "c", "foo.lzh", "foo"
assert_equal "::::::::\nfoo\n::::::::\ntest",
shell_output("#{bin}/lha p foo.lzh")
end
end
| 39.421053 | 95 | 0.696039 |
082c98c0104364c9d40ced441e4e8c49e74b434f | 64,621 | # frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class User < ApplicationRecord
extend Gitlab::ConfigHelper
include Gitlab::ConfigHelper
include Gitlab::SQL::Pattern
include AfterCommitQueue
include Avatarable
include Referable
include Sortable
include CaseSensitivity
include TokenAuthenticatable
include FeatureGate
include CreatedAtFilterable
include BulkMemberAccessLoad
include BlocksJsonSerialization
include WithUploads
include OptionallySearch
include FromUnion
include BatchDestroyDependentAssociations
include HasUniqueInternalUsers
include IgnorableColumns
include UpdateHighestRole
include HasUserType
DEFAULT_NOTIFICATION_LEVEL = :participating
INSTANCE_ACCESS_REQUEST_APPROVERS_TO_BE_NOTIFIED_LIMIT = 10
add_authentication_token_field :incoming_email_token, token_generator: -> { SecureRandom.hex.to_i(16).to_s(36) }
add_authentication_token_field :feed_token
add_authentication_token_field :static_object_token
default_value_for :admin, false
default_value_for(:external) { Gitlab::CurrentSettings.user_default_external }
default_value_for :can_create_group, gitlab_config.default_can_create_group
default_value_for :can_create_team, false
default_value_for :hide_no_ssh_key, false
default_value_for :hide_no_password, false
default_value_for :project_view, :files
default_value_for :notified_of_own_activity, false
default_value_for :preferred_language, I18n.default_locale
default_value_for :theme_id, gitlab_config.default_theme
attr_encrypted :otp_secret,
key: Gitlab::Application.secrets.otp_key_base,
mode: :per_attribute_iv_and_salt,
insecure_mode: true,
algorithm: 'aes-256-cbc'
devise :two_factor_authenticatable,
otp_secret_encryption_key: Gitlab::Application.secrets.otp_key_base
devise :two_factor_backupable, otp_number_of_backup_codes: 10
serialize :otp_backup_codes, JSON # rubocop:disable Cop/ActiveRecordSerialize
devise :lockable, :recoverable, :rememberable, :trackable,
:validatable, :omniauthable, :confirmable, :registerable
include AdminChangedPasswordNotifier
# This module adds async behaviour to Devise emails
# and should be added after Devise modules are initialized.
include AsyncDeviseEmail
MINIMUM_INACTIVE_DAYS = 90
# Override Devise::Models::Trackable#update_tracked_fields! so that the
# sign-in tracking columns are persisted at most once per hour per user.
# rubocop: disable CodeReuse/ServiceClass
def update_tracked_fields!(request)
  # Never attempt writes against a read-only database.
  return if Gitlab::Database.read_only?

  update_tracked_fields(request)

  # The exclusive lease throttles persistence: only the holder writes.
  throttle = Gitlab::ExclusiveLease.new("user_update_tracked_fields:#{id}", timeout: 1.hour.to_i)
  return unless throttle.try_obtain

  Users::UpdateService.new(self, user: self).execute(validate: false)
end
# rubocop: enable CodeReuse/ServiceClass
attr_accessor :force_random_password
# Virtual attribute for authenticating by either username or email
attr_accessor :login
# Virtual attribute for impersonator
attr_accessor :impersonator
#
# Relations
#
# Namespace for personal projects
has_one :namespace, -> { where(type: nil) }, dependent: :destroy, foreign_key: :owner_id, inverse_of: :owner, autosave: true # rubocop:disable Cop/ActiveRecordDependent
# Profile
has_many :keys, -> { regular_keys }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :deploy_keys, -> { where(type: 'DeployKey') }, dependent: :nullify # rubocop:disable Cop/ActiveRecordDependent
has_many :group_deploy_keys
has_many :gpg_keys
has_many :emails
has_many :personal_access_tokens, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :identities, dependent: :destroy, autosave: true # rubocop:disable Cop/ActiveRecordDependent
has_many :u2f_registrations, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :webauthn_registrations
has_many :chat_names, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :user_synced_attributes_metadata, autosave: true
has_one :aws_role, class_name: 'Aws::Role'
# Groups
has_many :members
# group_members excludes pending access requests and minimal-access members.
has_many :group_members, -> { where(requested_at: nil).where("access_level >= ?", Gitlab::Access::GUEST) }, source: 'GroupMember'
has_many :groups, through: :group_members
has_many :owned_groups, -> { where(members: { access_level: Gitlab::Access::OWNER }) }, through: :group_members, source: :group
has_many :maintainers_groups, -> { where(members: { access_level: Gitlab::Access::MAINTAINER }) }, through: :group_members, source: :group
has_many :developer_groups, -> { where(members: { access_level: ::Gitlab::Access::DEVELOPER }) }, through: :group_members, source: :group
has_many :owned_or_maintainers_groups,
-> { where(members: { access_level: [Gitlab::Access::MAINTAINER, Gitlab::Access::OWNER] }) },
through: :group_members,
source: :group
# Legacy alias: "master" was renamed to "maintainer".
alias_attribute :masters_groups, :maintainers_groups
has_many :reporter_developer_maintainer_owned_groups,
-> { where(members: { access_level: [Gitlab::Access::REPORTER, Gitlab::Access::DEVELOPER, Gitlab::Access::MAINTAINER, Gitlab::Access::OWNER] }) },
through: :group_members,
source: :group
has_many :minimal_access_group_members, -> { where(access_level: [Gitlab::Access::MINIMAL_ACCESS]) }, source: 'GroupMember', class_name: 'GroupMember'
has_many :minimal_access_groups, through: :minimal_access_group_members, source: :group
# Projects
has_many :groups_projects, through: :groups, source: :projects
has_many :personal_projects, through: :namespace, source: :projects
has_many :project_members, -> { where(requested_at: nil) }
has_many :projects, through: :project_members
has_many :created_projects, foreign_key: :creator_id, class_name: 'Project'
has_many :users_star_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :starred_projects, through: :users_star_projects, source: :project
has_many :project_authorizations, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :authorized_projects, through: :project_authorizations, source: :project
has_many :user_interacted_projects
has_many :project_interactions, through: :user_interacted_projects, source: :project, class_name: 'Project'
has_many :snippets, dependent: :destroy, foreign_key: :author_id # rubocop:disable Cop/ActiveRecordDependent
has_many :notes, dependent: :destroy, foreign_key: :author_id # rubocop:disable Cop/ActiveRecordDependent
has_many :issues, dependent: :destroy, foreign_key: :author_id # rubocop:disable Cop/ActiveRecordDependent
has_many :merge_requests, dependent: :destroy, foreign_key: :author_id # rubocop:disable Cop/ActiveRecordDependent
has_many :events, dependent: :delete_all, foreign_key: :author_id # rubocop:disable Cop/ActiveRecordDependent
has_many :releases, dependent: :nullify, foreign_key: :author_id # rubocop:disable Cop/ActiveRecordDependent
has_many :subscriptions, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :oauth_applications, class_name: 'Doorkeeper::Application', as: :owner, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :abuse_report, dependent: :destroy, foreign_key: :user_id # rubocop:disable Cop/ActiveRecordDependent
has_many :reported_abuse_reports, dependent: :destroy, foreign_key: :reporter_id, class_name: "AbuseReport" # rubocop:disable Cop/ActiveRecordDependent
has_many :spam_logs, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :builds, dependent: :nullify, class_name: 'Ci::Build' # rubocop:disable Cop/ActiveRecordDependent
has_many :pipelines, dependent: :nullify, class_name: 'Ci::Pipeline' # rubocop:disable Cop/ActiveRecordDependent
has_many :todos
has_many :notification_settings
has_many :award_emoji, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :triggers, dependent: :destroy, class_name: 'Ci::Trigger', foreign_key: :owner_id # rubocop:disable Cop/ActiveRecordDependent
has_many :issue_assignees, inverse_of: :assignee
has_many :merge_request_assignees, inverse_of: :assignee
has_many :assigned_issues, class_name: "Issue", through: :issue_assignees, source: :issue
has_many :assigned_merge_requests, class_name: "MergeRequest", through: :merge_request_assignees, source: :merge_request
has_many :bulk_imports
has_many :custom_attributes, class_name: 'UserCustomAttribute'
has_many :callouts, class_name: 'UserCallout'
has_many :term_agreements
belongs_to :accepted_term, class_name: 'ApplicationSetting::Term'
has_many :metrics_users_starred_dashboards, class_name: 'Metrics::UsersStarredDashboard', inverse_of: :user
has_one :status, class_name: 'UserStatus'
has_one :user_preference
has_one :user_detail
has_one :user_highest_role
has_one :user_canonical_email
has_one :atlassian_identity, class_name: 'Atlassian::Identity'
has_many :reviews, foreign_key: :author_id, inverse_of: :author
#
# Validations
#
# Note: devise :validatable above adds validations for :email and :password
validates :name, presence: true, length: { maximum: 255 }
validates :first_name, length: { maximum: 127 }
validates :last_name, length: { maximum: 127 }
validates :email, confirmation: true
validates :notification_email, presence: true
validates :notification_email, devise_email: true, if: ->(user) { user.notification_email != user.email }
validates :public_email, presence: true, uniqueness: true, devise_email: true, allow_blank: true
validates :commit_email, devise_email: true, allow_nil: true, if: ->(user) { user.commit_email != user.email }
validates :projects_limit,
presence: true,
numericality: { greater_than_or_equal_to: 0, less_than_or_equal_to: Gitlab::Database::MAX_INT_VALUE }
validates :username, presence: true
validates :namespace, presence: true
validate :namespace_move_dir_allowed, if: :username_changed?
validate :unique_email, if: :email_changed?
validate :owns_notification_email, if: :notification_email_changed?
validate :owns_public_email, if: :public_email_changed?
validate :owns_commit_email, if: :commit_email_changed?
validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
validate :check_email_restrictions, on: :create, if: ->(user) { !user.created_by_id }
validates :theme_id, allow_nil: true, inclusion: { in: Gitlab::Themes.valid_ids,
message: _("%{placeholder} is not a valid theme") % { placeholder: '%{value}' } }
validates :color_scheme_id, allow_nil: true, inclusion: { in: Gitlab::ColorSchemes.valid_ids,
message: _("%{placeholder} is not a valid color scheme") % { placeholder: '%{value}' } }
# Callbacks. Some before_save hooks duplicate before_validation hooks on
# purpose, for code paths that skip validation (see inline comments).
before_validation :sanitize_attrs
before_validation :set_notification_email, if: :new_record?
before_validation :set_public_email, if: :public_email_changed?
before_validation :set_commit_email, if: :commit_email_changed?
before_save :default_private_profile_to_false
before_save :set_public_email, if: :public_email_changed? # in case validation is skipped
before_save :set_commit_email, if: :commit_email_changed? # in case validation is skipped
before_save :ensure_incoming_email_token
before_save :ensure_user_rights_and_limits, if: ->(user) { user.new_record? || user.external_changed? }
before_save :skip_reconfirmation!, if: ->(user) { user.email_changed? && user.read_only_attribute?(:email) }
before_save :check_for_verified_email, if: ->(user) { user.email_changed? && !user.new_record? }
before_validation :ensure_namespace_correct
before_save :ensure_namespace_correct # in case validation is skipped
after_validation :set_username_errors
after_update :username_changed_hook, if: :saved_change_to_username?
after_destroy :post_destroy_hook
after_destroy :remove_key_cache
# When the primary email changes, demote the old email to a secondary
# email record and keep notification_email pointed at the primary.
after_commit(on: :update) do
if previous_changes.key?('email')
# Grab previous_email here since previous_changes changes after
# #update_emails_with_primary_email and #update_notification_email are called
previous_confirmed_at = previous_changes.key?('confirmed_at') ? previous_changes['confirmed_at'][0] : confirmed_at
previous_email = previous_changes[:email][0]
update_emails_with_primary_email(previous_confirmed_at, previous_email)
update_invalid_gpg_signatures
if previous_email == notification_email
self.notification_email = email
save
end
end
end
after_initialize :set_projects_limit
# User's Layout preference
enum layout: { fixed: 0, fluid: 1 }
# User's Dashboard preference
enum dashboard: { projects: 0, stars: 1, project_activity: 2, starred_project_activity: 3, groups: 4, todos: 5, issues: 6, merge_requests: 7, operations: 8 }
# User's Project preference
enum project_view: { readme: 0, activity: 1, files: 2 }
# User's role
enum role: { software_developer: 0, development_team_lead: 1, devops_engineer: 2, systems_administrator: 3, security_analyst: 4, data_analyst: 5, product_manager: 6, product_designer: 7, other: 8 }, _suffix: true
# UI/display preferences live on the associated UserPreference record.
delegate :notes_filter_for,
:set_notes_filter,
:first_day_of_week, :first_day_of_week=,
:timezone, :timezone=,
:time_display_relative, :time_display_relative=,
:time_format_in_24h, :time_format_in_24h=,
:show_whitespace_in_diffs, :show_whitespace_in_diffs=,
:view_diffs_file_by_file, :view_diffs_file_by_file=,
:tab_width, :tab_width=,
:sourcegraph_enabled, :sourcegraph_enabled=,
:gitpod_enabled, :gitpod_enabled=,
:setup_for_company, :setup_for_company=,
:render_whitespace_in_code, :render_whitespace_in_code=,
:experience_level, :experience_level=,
to: :user_preference
delegate :path, to: :namespace, allow_nil: true, prefix: true
delegate :job_title, :job_title=, to: :user_detail, allow_nil: true
delegate :bio, :bio=, :bio_html, to: :user_detail, allow_nil: true
delegate :webauthn_xid, :webauthn_xid=, to: :user_detail, allow_nil: true
accepts_nested_attributes_for :user_preference, update_only: true
accepts_nested_attributes_for :user_detail, update_only: true
# Account lifecycle: active / blocked / ldap_blocked /
# blocked_pending_approval / deactivated.
state_machine :state, initial: :active do
event :block do
transition active: :blocked
transition deactivated: :blocked
transition ldap_blocked: :blocked
transition blocked_pending_approval: :blocked
end
event :ldap_block do
transition active: :ldap_blocked
transition deactivated: :ldap_blocked
end
event :activate do
transition deactivated: :active
transition blocked: :active
transition ldap_blocked: :active
transition blocked_pending_approval: :active
end
event :block_pending_approval do
transition active: :blocked_pending_approval
end
event :deactivate do
transition active: :deactivated
end
# All three blocked-style states report blocked? as true.
state :blocked, :ldap_blocked, :blocked_pending_approval do
def blocked?
true
end
end
# Abort any transition when the database is read-only.
before_transition do
!Gitlab::Database.read_only?
end
# rubocop: disable CodeReuse/ServiceClass
# Ideally we should not call a service object here but user.block
# is also called by Users::MigrateToGhostUserService which references
# this state transition object in order to do a rollback.
# For this reason the tradeoff is to disable this cop.
after_transition any => :blocked do |user|
Ci::CancelUserPipelinesService.new.execute(user)
end
# rubocop: enable CodeReuse/ServiceClass
end
# Scopes
scope :admins, -> { where(admin: true) }
scope :instance_access_request_approvers_to_be_notified, -> { admins.active.order_recent_sign_in.limit(INSTANCE_ACCESS_REQUEST_APPROVERS_TO_BE_NOTIFIED_LIMIT) }
scope :blocked, -> { with_states(:blocked, :ldap_blocked) }
scope :blocked_pending_approval, -> { with_states(:blocked_pending_approval) }
scope :external, -> { where(external: true) }
scope :confirmed, -> { where.not(confirmed_at: nil) }
scope :active, -> { with_state(:active).non_internal }
scope :active_without_ghosts, -> { with_state(:active).without_ghosts }
scope :deactivated, -> { with_state(:deactivated).non_internal }
# Users with no project authorizations at all (LEFT JOIN anti-join).
scope :without_projects, -> { joins('LEFT JOIN project_authorizations ON users.id = project_authorizations.user_id').where(project_authorizations: { user_id: nil }) }
scope :by_username, -> (usernames) { iwhere(username: Array(usernames).map(&:to_s)) }
scope :for_todos, -> (todos) { where(id: todos.select(:user_id)) }
scope :with_emails, -> { preload(:emails) }
scope :with_dashboard, -> (dashboard) { where(dashboard: dashboard) }
scope :with_public_profile, -> { where(private_profile: false) }
scope :with_expiring_and_not_notified_personal_access_tokens, ->(at) do
where('EXISTS (?)',
::PersonalAccessToken
.where('personal_access_tokens.user_id = users.id')
.without_impersonation
.expiring_and_not_notified(at).select(1))
end
scope :with_personal_access_tokens_expired_today, -> do
where('EXISTS (?)',
::PersonalAccessToken
.select(1)
.where('personal_access_tokens.user_id = users.id')
.without_impersonation
.expired_today_and_not_notified)
end
# Ordering scopes push NULL timestamps to the end/front as appropriate.
scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) }
scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) }
scope :order_recent_last_activity, -> { reorder(Gitlab::Database.nulls_last_order('last_activity_on', 'DESC')) }
scope :order_oldest_last_activity, -> { reorder(Gitlab::Database.nulls_first_order('last_activity_on', 'ASC')) }
scope :by_id_and_login, ->(id, login) { where(id: id).where('username = LOWER(:login) OR email = LOWER(:login)', login: login) }
# The user's stored locale, falling back to the instance default locale
# (when it is an available UI language) and finally to English.
def preferred_language
  stored = read_attribute('preferred_language')
  return stored if stored

  instance_default = I18n.default_locale.to_s.presence_in(Gitlab::I18n::AVAILABLE_LANGUAGES.keys)
  instance_default || 'en'
end
# Devise hook: a user may sign in only when active AND policy allows :log_in.
def active_for_authentication?
super && can?(:log_in)
end
# The messages for these keys are defined in `devise.en.yml`
def inactive_message
if blocked_pending_approval?
:blocked_pending_approval
elsif blocked?
:blocked
elsif internal?
:forbidden
else
super
end
end
# Users whose profile the given user may see: all for admins, public
# profiles plus their own for regular users, public only for anonymous.
def self.with_visible_profile(user)
return with_public_profile if user.nil?
if user.admin?
all
else
with_public_profile.or(where(id: user.id))
end
end
# Limits the users to those that have TODOs, optionally in the given state.
#
# user - The user to get the todos for.
#
# with_todos - If we should limit the result set to users that are the
# authors of todos.
#
# todo_state - An optional state to require the todos to be in.
def self.limit_to_todo_authors(user: nil, with_todos: false, todo_state: nil)
if user && with_todos
where(id: Todo.where(user: user, state: todo_state).select(:author_id))
else
all
end
end
# Returns a relation that optionally includes the given user.
#
# user_id - The ID of the user to include.
def self.union_with_user(user_id = nil)
if user_id.present?
# We use "unscoped" here so that any inner conditions are not repeated for
# the outer query, which would be redundant.
User.unscoped.from_union([all, User.unscoped.where(id: user_id)])
else
all
end
end
# Users with any second factor enabled: a U2F device, OTP, or a WebAuthn
# device. The `?` placeholder is bound to `true` below.
def self.with_two_factor
with_u2f_registrations = <<-SQL
EXISTS (
SELECT *
FROM u2f_registrations AS u2f
WHERE u2f.user_id = users.id
) OR users.otp_required_for_login = ?
OR
EXISTS (
SELECT *
FROM webauthn_registrations AS webauthn
WHERE webauthn.user_id = users.id
)
SQL
where(with_u2f_registrations, true)
end
# Complement of with_two_factor: no U2F device, no WebAuthn device, no OTP.
def self.without_two_factor
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id
LEFT OUTER JOIN webauthn_registrations AS webauthn ON webauthn.user_id = users.id")
.where("u2f.id IS NULL AND webauthn.id IS NULL AND users.otp_required_for_login = ?", false)
end
#
# Class methods
#
class << self
# Devise method overridden to allow support for dynamic password lengths
def password_length
Gitlab::CurrentSettings.minimum_password_length..Devise.password_length.max
end
# Generate a random password that conforms to the current password length settings
def random_password
Devise.friendly_token(password_length.max)
end
# Devise method overridden to allow sign in with email or username
def find_for_database_authentication(warden_conditions)
conditions = warden_conditions.dup
# The :login virtual attribute matches either username or email,
# case-insensitively and stripped of surrounding whitespace.
if login = conditions.delete(:login)
where(conditions).find_by("lower(username) = :value OR lower(email) = :value", value: login.downcase.strip)
else
find_by(conditions)
end
end
# Orders the relation by a named sort option, defaulting to 'id_desc'.
# Unrecognised options are delegated to the generic order_by scope.
def sort_by_attribute(method)
  sort_key = method || 'id_desc'

  dispatch = {
    'recent_sign_in' => -> { order_recent_sign_in },
    'oldest_sign_in' => -> { order_oldest_sign_in },
    'last_activity_on_desc' => -> { order_recent_last_activity },
    'last_activity_on_asc' => -> { order_oldest_last_activity }
  }

  handler = dispatch[sort_key.to_s]
  handler ? handler.call : order_by(sort_key)
end
# Users linked to the given GitHub account via an Identity record.
def for_github_id(id)
joins(:identities).merge(Identity.with_extern_uid(:github, id))
end
# Find a User by their primary email or any associated secondary email
def find_by_any_email(email, confirmed: false)
return unless email
by_any_email(email, confirmed: confirmed).take
end
# Returns a relation containing all the users for the given email addresses
#
# @param emails [String, Array<String>] email addresses to check
# @param confirmed [Boolean] Only return users where the email is confirmed
def by_any_email(emails, confirmed: false)
emails = Array(emails).map(&:downcase)
from_users = where(email: emails)
from_users = from_users.confirmed if confirmed
from_emails = joins(:emails).where(emails: { email: emails })
from_emails = from_emails.confirmed.merge(Email.confirmed) if confirmed
items = [from_users, from_emails]
# Private commit emails (user-scoped noreply addresses) map back to users too.
user_ids = Gitlab::PrivateCommitEmail.user_ids_for_emails(emails)
items << where(id: user_ids) if user_ids.present?
from_union(items)
end
# Resolves a private commit email address back to its owning user, if any.
def find_by_private_commit_email(email)
user_id = Gitlab::PrivateCommitEmail.user_id_for_email(email)
find_by(id: user_id)
end
# Maps an admin-area filter name onto the matching scope. Unknown filter
# names fall back to active users excluding ghost/internal users.
def filter_items(filter_name)
  scope_name = {
    'admins' => :admins,
    'blocked' => :blocked,
    'blocked_pending_approval' => :blocked_pending_approval,
    'two_factor_disabled' => :without_two_factor,
    'two_factor_enabled' => :with_two_factor,
    'wop' => :without_projects,
    'external' => :external,
    'deactivated' => :deactivated
  }[filter_name]

  scope_name ? public_send(scope_name) : active_without_ghosts
end
# Searches users matching the given query.
#
# This method uses ILIKE on PostgreSQL.
#
# query - The search query as a String
#
# Returns an ActiveRecord::Relation.
def search(query, **options)
# Allow searching with a leading "@" as used in user references.
query = query&.delete_prefix('@')
return none if query.blank?
query = query.downcase
# Exact name matches sort first, then username, then email, then the rest.
order = <<~SQL
CASE
WHEN users.name = :query THEN 0
WHEN users.username = :query THEN 1
WHEN users.email = :query THEN 2
ELSE 3
END
SQL
sanitized_order_sql = Arel.sql(sanitize_sql_array([order, query: query]))
where(
fuzzy_arel_match(:name, query, lower_exact_match: true)
.or(fuzzy_arel_match(:username, query, lower_exact_match: true))
.or(arel_table[:email].eq(query))
).reorder(sanitized_order_sql, :name)
end
# Excludes the given users (an ActiveRecord::Relation or an Array of IDs)
# from the result set. With no argument the relation is returned unfiltered.
def where_not_in(users = nil)
  return all unless users

  where.not(id: users)
end
# Replaces any existing ordering with an alphabetical sort by name.
def reorder_by_name
reorder(:name)
end
# searches user by given pattern
# it compares name, email, username fields and user's secondary emails with given pattern
# This method uses ILIKE on PostgreSQL.
def search_with_secondary_emails(query)
return none if query.blank?
query = query.downcase
email_table = Email.arel_table
# Subquery of user ids owning a secondary email exactly matching the query.
matched_by_emails_user_ids = email_table
.project(email_table[:user_id])
.where(email_table[:email].eq(query))
where(
fuzzy_arel_match(:name, query)
.or(fuzzy_arel_match(:username, query))
.or(arel_table[:email].eq(query))
.or(arel_table[:id].in(matched_by_emails_user_ids))
)
end
# Finds a single user by login: treated as an email when it contains "@",
# otherwise as a username. Case-insensitive; nil for a blank login.
def by_login(login)
return unless login
if login.include?('@')
unscoped.iwhere(email: login).take
else
unscoped.iwhere(username: login).take
end
end
def find_by_username(username)
by_username(username).take
end
# Same as find_by_username but raises ActiveRecord::RecordNotFound when absent.
def find_by_username!(username)
by_username(username).take!
end
# Returns a user for the given SSH key.
def find_by_ssh_key_id(key_id)
find_by('EXISTS (?)', Key.select(1).where('keys.user_id = users.id').where(id: key_id))
end
# Resolves a user from a namespace path (optionally following redirects).
def find_by_full_path(path, follow_redirects: false)
namespace = Namespace.for_user.find_by_full_path(path, follow_redirects: follow_redirects)
namespace&.owner
end
def reference_prefix
'@'
end
# Pattern used to extract `@user` user references from text
def reference_pattern
@reference_pattern ||=
%r{
(?<!\w)
#{Regexp.escape(reference_prefix)}
(?<user>#{Gitlab::PathRegex::FULL_NAMESPACE_FORMAT_REGEX})
}x
end
# Return (create if necessary) the ghost user. The ghost user
# owns records previously belonging to deleted users.
def ghost
# The '%s' placeholder is filled in by unique_internal to guarantee a
# unique address, matching the email patterns of the bot users below.
# (The literal had been mangled to 'ghost%[email protected]' by an
# email-obfuscation pass, which would break unique_internal's format call.)
email = 'ghost%s@example.com'
unique_internal(where(user_type: :ghost), 'ghost', email) do |u|
u.bio = _('This is a "Ghost User", created to hold all issues authored by users that have since been deleted. This user cannot be removed.')
u.name = 'Ghost User'
end
end
# Internal bot accounts. Each is created lazily via unique_internal; the
# '%s' in the email pattern is replaced to keep the address unique.
def alert_bot
email_pattern = "alert%s@#{Settings.gitlab.host}"
unique_internal(where(user_type: :alert_bot), 'alert-bot', email_pattern) do |u|
u.bio = 'The GitLab alert bot'
u.name = 'GitLab Alert Bot'
u.avatar = bot_avatar(image: 'alert-bot.png')
end
end
def migration_bot
email_pattern = "noreply+gitlab-migration-bot%s@#{Settings.gitlab.host}"
unique_internal(where(user_type: :migration_bot), 'migration-bot', email_pattern) do |u|
u.bio = 'The GitLab migration bot'
u.name = 'GitLab Migration Bot'
u.confirmed_at = Time.zone.now
end
end
def security_bot
email_pattern = "security-bot%s@#{Settings.gitlab.host}"
unique_internal(where(user_type: :security_bot), 'GitLab-Security-Bot', email_pattern) do |u|
u.bio = 'System bot that monitors detected vulnerabilities for solutions and creates merge requests with the fixes.'
u.name = 'GitLab Security Bot'
u.website_url = Gitlab::Routing.url_helpers.help_page_url('user/application_security/security_bot/index.md')
u.avatar = bot_avatar(image: 'security-bot.png')
end
end
def support_bot
email_pattern = "support%s@#{Settings.gitlab.host}"
unique_internal(where(user_type: :support_bot), 'support-bot', email_pattern) do |u|
u.bio = 'The GitLab support bot used for Service Desk'
u.name = 'GitLab Support Bot'
u.avatar = bot_avatar(image: 'support-bot.png')
end
end
# Return true if there is only single non-internal user in the deployment,
# ghost user is ignored.
def single_user?
User.non_internal.limit(2).count == 1
end
# That single human user, or nil when there are zero or several.
def single_user
User.non_internal.first if single_user?
end
end
#
# Instance methods
#
# Users are routed by username, so full_path and to_param are the username.
def full_path
username
end
def to_param
username
end
# Markdown reference for this user, e.g. "@alice".
def to_reference(_from = nil, target_project: nil, full: nil)
"#{self.class.reference_prefix}#{username}"
end
# Setter-style wrappers around Devise's confirmation-skipping hooks so
# they can be driven from mass-assigned params.
def skip_confirmation=(bool)
skip_confirmation! if bool
end
def skip_reconfirmation=(bool)
skip_reconfirmation! if bool
end
# Creates a Devise reset-password token pair: stores the encrypted half,
# returns the raw half (also kept in @reset_token) for the reset email.
def generate_reset_token
@reset_token, enc = Devise.token_generator.generate(self.class, :reset_password_token)
self.reset_password_token = enc
self.reset_password_sent_at = Time.current.utc
@reset_token
end
def recently_sent_password_reset?
reset_password_sent_at.present? && reset_password_sent_at >= 1.minute.ago
end
# Devise session hooks are no-ops while the database is read-only.
def remember_me!
super if ::Gitlab::Database.read_write?
end
def forget_me!
super if ::Gitlab::Database.read_write?
end
# Removes every second factor (OTP secret, backup codes, U2F and WebAuthn
# devices) in a single transaction.
def disable_two_factor!
transaction do
update(
otp_required_for_login: false,
encrypted_otp_secret: nil,
encrypted_otp_secret_iv: nil,
encrypted_otp_secret_salt: nil,
otp_grace_period_started_at: nil,
otp_backup_codes: nil
)
self.u2f_registrations.destroy_all # rubocop: disable Cop/DestroyAll
self.webauthn_registrations.destroy_all # rubocop: disable Cop/DestroyAll
end
end
def two_factor_enabled?
two_factor_otp_enabled? || two_factor_webauthn_u2f_enabled?
end
# NOTE(review): the forti_authenticator feature flag alone marks OTP as
# enabled here — confirm that is intended for flag-enabled instances.
def two_factor_otp_enabled?
otp_required_for_login? || Feature.enabled?(:forti_authenticator, self)
end
# Avoids an extra query when the association is already loaded.
def two_factor_u2f_enabled?
if u2f_registrations.loaded?
u2f_registrations.any?
else
u2f_registrations.exists?
end
end
def two_factor_webauthn_u2f_enabled?
two_factor_u2f_enabled? || two_factor_webauthn_enabled?
end
def two_factor_webauthn_enabled?
return false unless Feature.enabled?(:webauthn)
(webauthn_registrations.loaded? && webauthn_registrations.any?) || (!webauthn_registrations.loaded? && webauthn_registrations.exists?)
end
# Validation: the username (and hence namespace path) cannot change while
# any personal project still has container registry tags.
def namespace_move_dir_allowed
if namespace&.any_project_has_container_registry_tags?
errors.add(:username, _('cannot be changed if a personal project has container registry tags.'))
end
end
# will_save_change_to_attribute? is used by Devise to check if it is necessary
# to clear any existing reset_password_tokens before updating an authentication_key
# and login in our case is a virtual attribute to allow login by username or email.
def will_save_change_to_login?
will_save_change_to_username? || will_save_change_to_email?
end
# Validation: rejects an email already claimed as another user's secondary email.
def unique_email
if !emails.exists?(email: email) && Email.exists?(email: email)
errors.add(:email, _('has already been taken'))
end
end
# Validations ensuring notification/public/commit emails belong to this user.
def owns_notification_email
return if new_record? || temp_oauth_email?
errors.add(:notification_email, _("is not an email you own")) unless verified_emails.include?(notification_email)
end
def owns_public_email
return if public_email.blank?
errors.add(:public_email, _("is not an email you own")) unless verified_emails.include?(public_email)
end
def owns_commit_email
return if read_attribute(:commit_email).blank?
errors.add(:commit_email, _("is not an email you own")) unless verified_emails.include?(commit_email)
end
# Define commit_email-related attribute methods explicitly instead of relying
# on ActiveRecord to provide them. Some of the specs use the current state of
# the model code but an older database schema, so we need to guard against the
# possibility of the commit_email column not existing.
def commit_email
return self.email unless has_attribute?(:commit_email)
# A stored sentinel token means "use my private (noreply) commit email".
if super == Gitlab::PrivateCommitEmail::TOKEN
return private_commit_email
end
# The commit email is the same as the primary email if undefined
super.presence || self.email
end
def commit_email=(email)
super if has_attribute?(:commit_email)
end
def commit_email_changed?
has_attribute?(:commit_email) && super
end
def private_commit_email
Gitlab::PrivateCommitEmail.for_user(self)
end
# see if the new email is already a verified secondary email
def check_for_verified_email
skip_reconfirmation! if emails.confirmed.where(email: self.email).any?
end
# Note: the use of the Emails services will cause `saves` on the user object, running
# through the callbacks again and can have side effects, such as the `previous_changes`
# hash and `_was` variables getting munged.
# By using an `after_commit` instead of `after_update`, we avoid the recursive callback
# scenario, though it then requires us to use the `previous_changes` hash
# rubocop: disable CodeReuse/ServiceClass
def update_emails_with_primary_email(previous_confirmed_at, previous_email)
primary_email_record = emails.find_by(email: email)
Emails::DestroyService.new(self, user: self).execute(primary_email_record) if primary_email_record
# the original primary email was confirmed, and we want that to carry over. We don't
# have access to the original confirmation values at this point, so just set confirmed_at
Emails::CreateService.new(self, user: self, email: previous_email).execute(confirmed_at: previous_confirmed_at)
update_columns(confirmed_at: primary_email_record.confirmed_at) if primary_email_record&.confirmed_at
end
# rubocop: enable CodeReuse/ServiceClass
def update_invalid_gpg_signatures
gpg_keys.each(&:update_invalid_gpg_signatures)
end
# Returns the groups a user has access to, either through a membership or a project authorization
def authorized_groups
Group.unscoped do
if Feature.enabled?(:shared_group_membership_auth, self)
authorized_groups_with_shared_membership
else
authorized_groups_without_shared_membership
end
end
end
# Returns the groups a user is a member of, either directly or through a parent group
def membership_groups
Gitlab::ObjectHierarchy.new(groups).base_and_descendants
end
# Returns a relation of groups the user has access to, including their parent
# and child groups (recursively).
def all_expanded_groups
Gitlab::ObjectHierarchy.new(groups).all_objects
end
def expanded_groups_requiring_two_factor_authentication
all_expanded_groups.where(require_two_factor_authentication: true)
end
# The subset of the user's own groups from which a 2FA requirement originates.
def source_groups_of_two_factor_authentication_requirement
Gitlab::ObjectHierarchy.new(expanded_groups_requiring_two_factor_authentication)
.all_objects
.where(id: groups)
end
# rubocop: disable CodeReuse/ServiceClass
# Recomputes the project_authorizations rows for this user.
def refresh_authorized_projects
Users::RefreshAuthorizedProjectsService.new(self).execute
end
# rubocop: enable CodeReuse/ServiceClass
def remove_project_authorizations(project_ids)
project_authorizations.where(project_id: project_ids).delete_all
end
def authorized_projects(min_access_level = nil)
# We're overriding an association, so explicitly call super with no
# arguments or it would be passed as `force_reload` to the association
projects = super()
if min_access_level
projects = projects
.where('project_authorizations.access_level >= ?', min_access_level)
end
projects
end
def authorized_project?(project, min_access_level = nil)
authorized_projects(min_access_level).exists?({ id: project.id })
end
# Typically used in conjunction with projects table to get projects
# a user has been given access to.
# The param `related_project_column` is the column to compare to the
# project_authorizations. By default is projects.id
#
# Example use:
# `Project.where('EXISTS(?)', user.authorizations_for_projects)`
def authorizations_for_projects(min_access_level: nil, related_project_column: 'projects.id')
# NOTE(review): related_project_column is interpolated into SQL; callers
# must only pass trusted, code-defined column names — never user input.
authorizations = project_authorizations
.select(1)
.where("project_authorizations.project_id = #{related_project_column}")
return authorizations unless min_access_level.present?
authorizations.where('project_authorizations.access_level >= ?', min_access_level)
end
# Returns the projects this user has reporter (or greater) access to, limited
# to at most the given projects.
#
# This method is useful when you have a list of projects and want to
# efficiently check to which of these projects the user has at least reporter
# access.
def projects_with_reporter_access_limited_to(projects)
authorized_projects(Gitlab::Access::REPORTER).where(id: projects)
end
# Personal-namespace projects plus any project the user owns directly.
def owned_projects
@owned_projects ||= Project.from_union(
[
Project.where(namespace: namespace),
Project.joins(:project_authorizations)
.where("projects.namespace_id <> ?", namespace.id)
.where(project_authorizations: { user_id: id, access_level: Gitlab::Access::OWNER })
],
remove_duplicates: false
)
end
# Returns projects which user can admin issues on (for example to move an issue to that project).
#
# This logic is duplicated from `Ability#project_abilities` into a SQL form.
def projects_where_can_admin_issues
authorized_projects(Gitlab::Access::REPORTER).non_archived.with_issues_enabled
end
# rubocop: disable CodeReuse/ServiceClass
# True when the user has no SSH keys but the instance allows SSH access.
def require_ssh_key?
count = Users::KeysCountService.new(self).count
count == 0 && Gitlab::ProtocolAccess.allowed?('ssh')
end
# rubocop: enable CodeReuse/ServiceClass
def require_password_creation_for_web?
allow_password_authentication_for_web? && password_automatically_set?
end
def require_password_creation_for_git?
allow_password_authentication_for_git? && password_automatically_set?
end
def require_personal_access_token_creation_for_git_auth?
return false if allow_password_authentication_for_git? || ldap_user?
PersonalAccessTokensFinder.new(user: self, impersonation: false, state: 'active').execute.none?
end
def require_extra_setup_for_git_auth?
require_password_creation_for_git? || require_personal_access_token_creation_for_git_auth?
end
# Password authentication policy: instance settings apply, LDAP users excluded.
def allow_password_authentication?
allow_password_authentication_for_web? || allow_password_authentication_for_git?
end
def allow_password_authentication_for_web?
Gitlab::CurrentSettings.password_authentication_enabled_for_web? && !ldap_user?
end
def allow_password_authentication_for_git?
Gitlab::CurrentSettings.password_authentication_enabled_for_git? && !ldap_user?
end
def can_change_username?
gitlab_config.username_changing_enabled
end
def can_create_project?
projects_limit_left > 0
end
def can_create_group?
can?(:create_group)
end
def can_select_namespace?
several_namespaces? || admin
end
# Central authorization entry point, delegating to the Ability policy layer.
def can?(action, subject = :global)
Ability.allowed?(self, action, subject)
end
def confirm_deletion_with_password?
!password_automatically_set? && allow_password_authentication?
end
def first_name
read_attribute(:first_name) || begin
name.split(' ').first unless name.blank?
end
end
def last_name
read_attribute(:last_name) || begin
name.split(' ').drop(1).join(' ') unless name.blank?
end
end
def projects_limit_left
projects_limit - personal_projects_count
end
# rubocop: disable CodeReuse/ServiceClass
def recent_push(project = nil)
service = Users::LastPushEventService.new(self)
if project
service.last_event_for_project(project)
else
service.last_event_for_user
end
end
# rubocop: enable CodeReuse/ServiceClass
def several_namespaces?
union_sql = ::Gitlab::SQL::Union.new(
[owned_groups,
maintainers_groups,
groups_with_developer_maintainer_project_access]).to_sql
::Group.from("(#{union_sql}) #{::Group.table_name}").any?
end
def namespace_id
namespace.try :id
end
def name_with_username
"#{name} (#{username})"
end
def already_forked?(project)
!!fork_of(project)
end
def fork_of(project)
namespace.find_fork_of(project)
end
def ldap_user?
if identities.loaded?
identities.find { |identity| Gitlab::Auth::OAuth::Provider.ldap_provider?(identity.provider) && !identity.extern_uid.nil? }
else
identities.exists?(["provider LIKE ? AND extern_uid IS NOT NULL", "ldap%"])
end
end
def ldap_identity
@ldap_identity ||= identities.find_by(["provider LIKE ?", "ldap%"])
end
def matches_identity?(provider, extern_uid)
identities.where(provider: provider, extern_uid: extern_uid).exists?
end
def project_deploy_keys
@project_deploy_keys ||= DeployKey.in_projects(authorized_projects.select(:id)).distinct(:id)
end
def highest_role
user_highest_role&.highest_access_level || Gitlab::Access::NO_ACCESS
end
def accessible_deploy_keys
DeployKey.from_union([
DeployKey.where(id: project_deploy_keys.select(:deploy_key_id)),
DeployKey.are_public
])
end
def created_by
User.find_by(id: created_by_id) if created_by_id
end
def sanitize_attrs
%i[skype linkedin twitter].each do |attr|
value = self[attr]
self[attr] = Sanitize.clean(value) if value.present?
end
end
def set_notification_email
if notification_email.blank? || all_emails.exclude?(notification_email)
self.notification_email = email
end
end
def set_public_email
if public_email.blank? || all_emails.exclude?(public_email)
self.public_email = ''
end
end
def set_commit_email
if commit_email.blank? || verified_emails.exclude?(commit_email)
self.commit_email = nil
end
end
def update_secondary_emails!
set_notification_email
set_public_email
set_commit_email
save if notification_email_changed? || public_email_changed? || commit_email_changed?
end
def set_projects_limit
# `User.select(:id)` raises
# `ActiveModel::MissingAttributeError: missing attribute: projects_limit`
# without this safeguard!
return unless has_attribute?(:projects_limit) && projects_limit.nil?
self.projects_limit = Gitlab::CurrentSettings.default_projects_limit
end
def requires_ldap_check?
if !Gitlab.config.ldap.enabled
false
elsif ldap_user?
!last_credential_check_at || (last_credential_check_at + ldap_sync_time) < Time.current
else
false
end
end
def ldap_sync_time
# This number resides in this method so it can be redefined in EE.
1.hour
end
def try_obtain_ldap_lease
# After obtaining this lease LDAP checks will be blocked for 600 seconds
# (10 minutes) for this user.
lease = Gitlab::ExclusiveLease.new("user_ldap_check:#{id}", timeout: 600)
lease.try_obtain
end
def solo_owned_groups
@solo_owned_groups ||= owned_groups.select do |group|
group.owners == [self]
end
end
def with_defaults
User.defaults.each do |k, v|
public_send("#{k}=", v) # rubocop:disable GitlabSecurity/PublicSend
end
self
end
def can_leave_project?(project)
project.namespace != namespace &&
project.project_member(self)
end
def full_website_url
return "http://#{website_url}" if website_url !~ %r{\Ahttps?://}
website_url
end
def short_website_url
website_url.sub(%r{\Ahttps?://}, '')
end
def all_ssh_keys
keys.map(&:publishable_key)
end
def temp_oauth_email?
email.start_with?('temp-email-for-oauth')
end
# rubocop: disable CodeReuse/ServiceClass
def avatar_url(size: nil, scale: 2, **args)
GravatarService.new.execute(email, size, scale, username: username)
end
# rubocop: enable CodeReuse/ServiceClass
def primary_email_verified?
confirmed? && !temp_oauth_email?
end
def accept_pending_invitations!
pending_invitations.select do |member|
member.accept_invite!(self)
end
end
def pending_invitations
Member.where(invite_email: verified_emails).invite
end
def all_emails(include_private_email: true)
all_emails = []
all_emails << email unless temp_oauth_email?
all_emails << private_commit_email if include_private_email
all_emails.concat(emails.map(&:email))
all_emails
end
def verified_emails(include_private_email: true)
verified_emails = []
verified_emails << email if primary_email_verified?
verified_emails << private_commit_email if include_private_email
verified_emails.concat(emails.confirmed.pluck(:email))
verified_emails
end
def public_verified_emails
emails = verified_emails(include_private_email: false)
emails << email unless temp_oauth_email?
emails.uniq
end
def any_email?(check_email)
downcased = check_email.downcase
# handle the outdated private commit email case
return true if persisted? &&
id == Gitlab::PrivateCommitEmail.user_id_for_email(downcased)
all_emails.include?(check_email.downcase)
end
def verified_email?(check_email)
downcased = check_email.downcase
# handle the outdated private commit email case
return true if persisted? &&
id == Gitlab::PrivateCommitEmail.user_id_for_email(downcased)
verified_emails.include?(check_email.downcase)
end
def hook_attrs
{
name: name,
username: username,
avatar_url: avatar_url(only_path: false),
email: email
}
end
def ensure_namespace_correct
if namespace
namespace.path = username if username_changed?
namespace.name = name if name_changed?
else
namespace = build_namespace(path: username, name: name)
namespace.build_namespace_settings
end
end
def set_username_errors
namespace_path_errors = self.errors.delete(:"namespace.path")
self.errors[:username].concat(namespace_path_errors) if namespace_path_errors
end
def username_changed_hook
system_hook_service.execute_hooks_for(self, :rename)
end
def post_destroy_hook
log_info("User \"#{name}\" (#{email}) was removed")
system_hook_service.execute_hooks_for(self, :destroy)
end
# rubocop: disable CodeReuse/ServiceClass
def remove_key_cache
Users::KeysCountService.new(self).delete_cache
end
# rubocop: enable CodeReuse/ServiceClass
def delete_async(deleted_by:, params: {})
block if params[:hard_delete]
DeleteUserWorker.perform_async(deleted_by.id, id, params.to_h)
end
# rubocop: disable CodeReuse/ServiceClass
def notification_service
NotificationService.new
end
# rubocop: enable CodeReuse/ServiceClass
def log_info(message)
Gitlab::AppLogger.info message
end
# rubocop: disable CodeReuse/ServiceClass
def system_hook_service
SystemHooksService.new
end
# rubocop: enable CodeReuse/ServiceClass
def starred?(project)
starred_projects.exists?(project.id)
end
def toggle_star(project)
UsersStarProject.transaction do
user_star_project = users_star_projects
.where(project: project, user: self).lock(true).first
if user_star_project
user_star_project.destroy
else
UsersStarProject.create!(project: project, user: self)
end
end
end
def manageable_namespaces
@manageable_namespaces ||= [namespace] + manageable_groups
end
def manageable_groups(include_groups_with_developer_maintainer_access: false)
owned_and_maintainer_group_hierarchy = Gitlab::ObjectHierarchy.new(owned_or_maintainers_groups).base_and_descendants
if include_groups_with_developer_maintainer_access
union_sql = ::Gitlab::SQL::Union.new(
[owned_and_maintainer_group_hierarchy,
groups_with_developer_maintainer_project_access]).to_sql
::Group.from("(#{union_sql}) #{::Group.table_name}")
else
owned_and_maintainer_group_hierarchy
end
end
def manageable_groups_with_routes(include_groups_with_developer_maintainer_access: false)
manageable_groups(include_groups_with_developer_maintainer_access: include_groups_with_developer_maintainer_access)
.eager_load(:route)
.order('routes.path')
end
def namespaces
namespace_ids = groups.pluck(:id)
namespace_ids.push(namespace.id)
Namespace.where(id: namespace_ids)
end
def oauth_authorized_tokens
Doorkeeper::AccessToken.where(resource_owner_id: id, revoked_at: nil)
end
# Returns the projects a user contributed to in the last year.
#
# This method relies on a subquery as this performs significantly better
# compared to a JOIN when coupled with, for example,
# `Project.visible_to_user`. That is, consider the following code:
#
# some_user.contributed_projects.visible_to_user(other_user)
#
# If this method were to use a JOIN the resulting query would take roughly 200
# ms on a database with a similar size to GitLab.com's database. On the other
# hand, using a subquery means we can get the exact same data in about 40 ms.
def contributed_projects
events = Event.select(:project_id)
.contributions.where(author_id: self)
.where("created_at > ?", Time.current - 1.year)
.distinct
.reorder(nil)
Project.where(id: events)
end
def can_be_removed?
!solo_owned_groups.present?
end
def ci_owned_runners
@ci_owned_runners ||= begin
project_runners = Ci::RunnerProject
.where(project: authorized_projects(Gitlab::Access::MAINTAINER))
.joins(:runner)
.select('ci_runners.*')
group_runners = Ci::RunnerNamespace
.where(namespace_id: Gitlab::ObjectHierarchy.new(owned_groups).base_and_descendants.select(:id))
.joins(:runner)
.select('ci_runners.*')
Ci::Runner.from_union([project_runners, group_runners])
end
end
def notification_email_for(notification_group)
# Return group-specific email address if present, otherwise return global notification email address
notification_group&.notification_email_for(self) || notification_email
end
def notification_settings_for(source, inherit: false)
if notification_settings.loaded?
notification_settings.find do |notification|
notification.source_type == source.class.base_class.name &&
notification.source_id == source.id
end
else
notification_settings.find_or_initialize_by(source: source) do |ns|
next unless source.is_a?(Group) && inherit
# If we're here it means we're trying to create a NotificationSetting for a group that doesn't have one.
# Find the closest parent with a notification_setting that's not Global level, or that has an email set.
ancestor_ns = source
.notification_settings(hierarchy_order: :asc)
.where(user: self)
.find_by('level != ? OR notification_email IS NOT NULL', NotificationSetting.levels[:global])
# Use it to seed the settings
ns.assign_attributes(ancestor_ns&.slice(*NotificationSetting.allowed_fields))
ns.source = source
ns.user = self
end
end
end
def notification_settings_for_groups(groups)
ids = groups.is_a?(ActiveRecord::Relation) ? groups.select(:id) : groups.map(&:id)
notification_settings.for_groups.where(source_id: ids)
end
# Lazy load global notification setting
# Initializes User setting with Participating level if setting not persisted
def global_notification_setting
return @global_notification_setting if defined?(@global_notification_setting)
@global_notification_setting = notification_settings.find_or_initialize_by(source: nil)
@global_notification_setting.update(level: NotificationSetting.levels[DEFAULT_NOTIFICATION_LEVEL]) unless @global_notification_setting.persisted?
@global_notification_setting
end
def assigned_open_merge_requests_count(force: false)
Rails.cache.fetch(['users', id, 'assigned_open_merge_requests_count'], force: force, expires_in: 20.minutes) do
MergeRequestsFinder.new(self, assignee_id: self.id, state: 'opened', non_archived: true).execute.count
end
end
def assigned_open_issues_count(force: false)
Rails.cache.fetch(['users', id, 'assigned_open_issues_count'], force: force, expires_in: 20.minutes) do
IssuesFinder.new(self, assignee_id: self.id, state: 'opened', non_archived: true).execute.count
end
end
def todos_done_count(force: false)
Rails.cache.fetch(['users', id, 'todos_done_count'], force: force, expires_in: 20.minutes) do
TodosFinder.new(self, state: :done).execute.count
end
end
def todos_pending_count(force: false)
Rails.cache.fetch(['users', id, 'todos_pending_count'], force: force, expires_in: 20.minutes) do
TodosFinder.new(self, state: :pending).execute.count
end
end
def personal_projects_count(force: false)
Rails.cache.fetch(['users', id, 'personal_projects_count'], force: force, expires_in: 24.hours, raw: true) do
personal_projects.count
end.to_i
end
def update_todos_count_cache
todos_done_count(force: true)
todos_pending_count(force: true)
end
def invalidate_cache_counts
invalidate_issue_cache_counts
invalidate_merge_request_cache_counts
invalidate_todos_done_count
invalidate_todos_pending_count
invalidate_personal_projects_count
end
def invalidate_issue_cache_counts
Rails.cache.delete(['users', id, 'assigned_open_issues_count'])
end
def invalidate_merge_request_cache_counts
Rails.cache.delete(['users', id, 'assigned_open_merge_requests_count'])
end
def invalidate_todos_done_count
Rails.cache.delete(['users', id, 'todos_done_count'])
end
def invalidate_todos_pending_count
Rails.cache.delete(['users', id, 'todos_pending_count'])
end
def invalidate_personal_projects_count
Rails.cache.delete(['users', id, 'personal_projects_count'])
end
# This is copied from Devise::Models::Lockable#valid_for_authentication?, as our auth
# flow means we don't call that automatically (and can't conveniently do so).
#
# See:
# <https://github.com/plataformatec/devise/blob/v4.7.1/lib/devise/models/lockable.rb#L104>
#
# rubocop: disable CodeReuse/ServiceClass
def increment_failed_attempts!
return if ::Gitlab::Database.read_only?
increment_failed_attempts
if attempts_exceeded?
lock_access! unless access_locked?
else
Users::UpdateService.new(self, user: self).execute(validate: false)
end
end
# rubocop: enable CodeReuse/ServiceClass
def access_level
if admin?
:admin
else
:regular
end
end
def access_level=(new_level)
new_level = new_level.to_s
return unless %w(admin regular).include?(new_level)
self.admin = (new_level == 'admin')
end
def can_read_all_resources?
can?(:read_all_resources)
end
def update_two_factor_requirement
periods = expanded_groups_requiring_two_factor_authentication.pluck(:two_factor_grace_period)
self.require_two_factor_authentication_from_group = periods.any?
self.two_factor_grace_period = periods.min || User.column_defaults['two_factor_grace_period']
save
end
# each existing user needs to have an `feed_token`.
# we do this on read since migrating all existing users is not a feasible
# solution.
def feed_token
ensure_feed_token!
end
# Each existing user needs to have a `static_object_token`.
# We do this on read since migrating all existing users is not a feasible
# solution.
def static_object_token
ensure_static_object_token!
end
def sync_attribute?(attribute)
return true if ldap_user? && attribute == :email
attributes = Gitlab.config.omniauth.sync_profile_attributes
if attributes.is_a?(Array)
attributes.include?(attribute.to_s)
else
attributes
end
end
def read_only_attribute?(attribute)
user_synced_attributes_metadata&.read_only?(attribute)
end
# override, from Devise
def lock_access!
Gitlab::AppLogger.info("Account Locked: username=#{username}")
super
end
# Determine the maximum access level for a group of projects in bulk.
#
# Returns a Hash mapping project ID -> maximum access level.
def max_member_access_for_project_ids(project_ids)
max_member_access_for_resource_ids(Project, project_ids) do |project_ids|
project_authorizations.where(project: project_ids)
.group(:project_id)
.maximum(:access_level)
end
end
def max_member_access_for_project(project_id)
max_member_access_for_project_ids([project_id])[project_id]
end
# Determine the maximum access level for a group of groups in bulk.
#
# Returns a Hash mapping project ID -> maximum access level.
def max_member_access_for_group_ids(group_ids)
max_member_access_for_resource_ids(Group, group_ids) do |group_ids|
group_members.where(source: group_ids).group(:source_id).maximum(:access_level)
end
end
def max_member_access_for_group(group_id)
max_member_access_for_group_ids([group_id])[group_id]
end
def terms_accepted?
return true if project_bot?
accepted_term_id.present?
end
def required_terms_not_accepted?
Gitlab::CurrentSettings.current_application_settings.enforce_terms? &&
!terms_accepted?
end
def requires_usage_stats_consent?
self.admin? && 7.days.ago > self.created_at && !has_current_license? && User.single_user? && !consented_usage_stats?
end
# Avoid migrations only building user preference object when needed.
def user_preference
super.presence || build_user_preference
end
def user_detail
super.presence || build_user_detail
end
def pending_todo_for(target)
todos.find_by(target: target, state: :pending)
end
def password_expired?
!!(password_expires_at && password_expires_at < Time.current)
end
def can_be_deactivated?
active? && no_recent_activity? && !internal?
end
def last_active_at
last_activity = last_activity_on&.to_time&.in_time_zone
last_sign_in = current_sign_in_at
[last_activity, last_sign_in].compact.max
end
REQUIRES_ROLE_VALUE = 99
def role_required?
role_before_type_cast == REQUIRES_ROLE_VALUE
end
def set_role_required!
update_column(:role, REQUIRES_ROLE_VALUE)
end
def dismissed_callout?(feature_name:, ignore_dismissal_earlier_than: nil)
callouts = self.callouts.with_feature_name(feature_name)
callouts = callouts.with_dismissed_after(ignore_dismissal_earlier_than) if ignore_dismissal_earlier_than
callouts.any?
end
# Load the current highest access by looking directly at the user's memberships
def current_highest_access_level
members.non_request.maximum(:access_level)
end
def confirmation_required_on_sign_in?
!confirmed? && !confirmation_period_valid?
end
def impersonated?
impersonator.present?
end
def created_recently?
created_at > Devise.confirm_within.ago
end
protected
# override, from Devise::Validatable
def password_required?
return false if internal? || project_bot?
super
end
# override from Devise::Confirmable
def confirmation_period_valid?
return false if Feature.disabled?(:soft_email_confirmation)
super
end
# This is copied from Devise::Models::TwoFactorAuthenticatable#consume_otp!
#
# An OTP cannot be used more than once in a given timestep
# Storing timestep of last valid OTP is sufficient to satisfy this requirement
#
# See:
# <https://github.com/tinfoil/devise-two-factor/blob/master/lib/devise_two_factor/models/two_factor_authenticatable.rb#L66>
#
def consume_otp!
if self.consumed_timestep != current_otp_timestep
self.consumed_timestep = current_otp_timestep
return Gitlab::Database.read_only? ? true : save(validate: false)
end
false
end
private
def authorized_groups_without_shared_membership
Group.from_union([
groups,
authorized_projects.joins(:namespace).select('namespaces.*')
])
end
def authorized_groups_with_shared_membership
cte = Gitlab::SQL::CTE.new(:direct_groups, authorized_groups_without_shared_membership)
cte_alias = cte.table.alias(Group.table_name)
Group
.with(cte.to_arel)
.from_union([
Group.from(cte_alias),
Group.joins(:shared_with_group_links)
.where(group_group_links: { shared_with_group_id: Group.from(cte_alias) })
])
end
def default_private_profile_to_false
return unless private_profile_changed? && private_profile.nil?
self.private_profile = false
end
def has_current_license?
false
end
def consented_usage_stats?
# Bypass the cache here because it's possible the admin enabled the
# usage ping, and we don't want to annoy the user again if they
# already set the value. This is a bit of hack, but the alternative
# would be to put in a more complex cache invalidation step. Since
# this call only gets called in the uncommon situation where the
# user is an admin and the only user in the instance, this shouldn't
# cause too much load on the system.
ApplicationSetting.current_without_cache&.usage_stats_set_by_user_id == self.id
end
def ensure_user_rights_and_limits
if external?
self.can_create_group = false
self.projects_limit = 0
else
# Only revert these back to the default if they weren't specifically changed in this update.
self.can_create_group = gitlab_config.default_can_create_group unless can_create_group_changed?
self.projects_limit = Gitlab::CurrentSettings.default_projects_limit unless projects_limit_changed?
end
end
def signup_domain_valid?
valid = true
error = nil
if Gitlab::CurrentSettings.domain_denylist_enabled?
blocked_domains = Gitlab::CurrentSettings.domain_denylist
if domain_matches?(blocked_domains, email)
error = 'is not from an allowed domain.'
valid = false
end
end
allowed_domains = Gitlab::CurrentSettings.domain_allowlist
unless allowed_domains.blank?
if domain_matches?(allowed_domains, email)
valid = true
else
error = "domain is not authorized for sign-up"
valid = false
end
end
errors.add(:email, error) unless valid
valid
end
def domain_matches?(email_domains, email)
signup_domain = Mail::Address.new(email).domain
email_domains.any? do |domain|
escaped = Regexp.escape(domain).gsub('\*', '.*?')
regexp = Regexp.new "^#{escaped}$", Regexp::IGNORECASE
signup_domain =~ regexp
end
end
def check_email_restrictions
return unless Gitlab::CurrentSettings.email_restrictions_enabled?
restrictions = Gitlab::CurrentSettings.email_restrictions
return if restrictions.blank?
if Gitlab::UntrustedRegexp.new(restrictions).match?(email)
errors.add(:email, _('is not allowed. Try again with a different email address, or contact your GitLab admin.'))
end
end
def groups_with_developer_maintainer_project_access
project_creation_levels = [::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS]
if ::Gitlab::CurrentSettings.default_project_creation == ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS
project_creation_levels << nil
end
developer_groups_hierarchy = ::Gitlab::ObjectHierarchy.new(developer_groups).base_and_descendants
::Group.where(id: developer_groups_hierarchy.select(:id),
project_creation_level: project_creation_levels)
end
def no_recent_activity?
last_active_at.to_i <= MINIMUM_INACTIVE_DAYS.days.ago.to_i
end
def update_highest_role?
return false unless persisted?
(previous_changes.keys & %w(state user_type)).any?
end
def update_highest_role_attribute
id
end
end
User.prepend_if_ee('EE::User')
| 33.292633 | 214 | 0.728556 |
797e0dc8d80ae608b4fe9b423c7cc51ad9641c20 | 154 | require 'test_helper'
class Api::V1::UserSchedulesControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
| 19.25 | 76 | 0.74026 |
3334f5dfd945684b5395df9243331fb1d07cca54 | 697 |
class VerizonApi::Api
include HTTParty
require 'yaml'
require 'json'
# Here, we bring in all our subordinate classes
require 'verizon_api/waf'
headers 'Host' => 'api.edgecast.com', 'Accept' => 'Application/JSON', 'Content-Type' => 'Application/JSON'
format :json
MEDIA_TYPES = {:flash_media_streaming => 2, :http_large_object => 3, :http_small_object => 8, :application_delivery_network => 14}
def initialize(account_id = VerizonApi::Config['account_id'], api_token = VerizonApi::Config['api_token'])
self.class.base_uri "https://api.edgecast.com/v2/mcc/customers/#{account_id}"
self.class.default_options[:headers]['Authorization'] = "TOK:#{api_token}"
end
end
| 31.681818 | 132 | 0.715925 |
21e54dd25378ebbe96cd5c7862e985746e2af296 | 933 | cask 'lando' do
version '3.0.0-rc.15'
sha256 '0b5864e764cb1437423790cbd1e3c05bdd46a042ae34f51ff209ee9bcd21a4ba'
# github.com/lando/lando was verified as official when first introduced to the cask
url "https://github.com/lando/lando/releases/download/v#{version}/lando-v#{version}.dmg"
appcast 'https://github.com/lando/lando/releases.atom'
name 'Lando'
homepage 'https://docs.devwithlando.io/'
depends_on cask: 'docker'
pkg 'LandoInstaller.pkg',
choices: [
{
'choiceIdentifier' => 'choiceDocker',
'choiceAttribute' => 'selected',
'attributeSetting' => 0,
},
{
'choiceIdentifier' => 'choiceLando',
'choiceAttribute' => 'selected',
'attributeSetting' => 1,
},
]
uninstall pkgutil: 'io.lando.pkg.lando'
end
| 32.172414 | 90 | 0.566988 |
036aec1768cf40b7f5910fcfffd53133fd0de0f1 | 190 | module Features
def sign_in_as(user)
visit("/")
click_on t("application.header.sign_in")
fill_form_and_submit(:session, email: user.email, password: user.password)
end
end
| 19 | 78 | 0.715789 |
33480b05570070151595e4a858c55b5db65c971d | 283 | # frozen_string_literal: true
module Types
PhotoType = GraphQL::ObjectType.define do
name "PhotoType"
description "Photo associated with sound"
field :id, !types.ID
field :file_name, !types.String
field :user, !UserType
field :sound, !SoundType
end
end
| 20.214286 | 45 | 0.70318 |
18d143716abd3842fc3bef76ecaadf110d80fb59 | 1,580 | # encoding: utf-8
include SecQuery
require 'spec_helper'
describe SecQuery::SecURI do
describe '#browse_edgar_uri' do
it 'builds a default /browse-edgar/ url' do
uri = SecQuery::SecURI.browse_edgar_uri
expect(uri.to_s).to eq('https://www.sec.gov/cgi-bin/browse-edgar')
end
it 'builds a default /browse-edgar/ url with options: {symbol: "AAPL"}' do
uri = SecQuery::SecURI.browse_edgar_uri(symbol: 'AAPL')
expect(uri.to_s)
.to include('https://www.sec.gov/cgi-bin/browse-edgar?CIK=AAPL')
end
it 'builds a default /browse-edgar/ url with options: {cik: "AAPL"}' do
uri = SecQuery::SecURI.browse_edgar_uri(cik: 'AAPL')
expect(uri.to_s)
.to include('https://www.sec.gov/cgi-bin/browse-edgar?CIK=AAPL')
end
it 'builds a default /browse-edgar/ url with options: "AAPL"' do
uri = SecQuery::SecURI.browse_edgar_uri('AAPL')
expect(uri.to_s)
.to eq('https://www.sec.gov/cgi-bin/browse-edgar?CIK=AAPL')
end
it 'builds a default /browse-edgar/ url with options: "Apple Inc"' do
uri = SecQuery::SecURI.browse_edgar_uri('Apple Inc')
expect(uri.to_s)
.to eq('https://www.sec.gov/cgi-bin/browse-edgar?company=Apple%20Inc')
end
end
describe 'Date additions' do
subject(:d) { Date.parse('2012-04-26') }
it 'calculates the correct quarter' do
expect(d.quarter).to eq(2)
end
it 'calculates the correct sec formatted path uri for a date' do
expect(d.to_sec_uri_format).to eq('2012/QTR2/company.20120426.idx')
end
end
end
| 31.6 | 78 | 0.660127 |
01b8c0456bb1170ae976d63a9798fa63066b05f5 | 221 |
id = ARGV.first
out = []
$stdin.each_line do |line|
out << id + ' ' + line.strip unless /BATCH/ =~ line
if /BATCH/ =~ line or out.size >= 8
out.each {|line| puts line}
out = []
$stdout.flush
end
end
| 14.733333 | 53 | 0.556561 |
03d3fe71ec810ad63b754581a9e6fabc2dbeb0d1 | 807 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.nic.coop/property_status_multiple.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/record/parser/whois.nic.coop.rb'
describe Whois::Record::Parser::WhoisNicCoop, "property_status_multiple.expected" do
subject do
file = fixture("responses", "whois.nic.coop/property_status_multiple.txt")
part = Whois::Record::Part.new(body: File.read(file))
described_class.new(part)
end
describe "#status" do
it do
expect(subject.status).to eq(["clientDeleteProhibited", "clientTransferProhibited", "clientUpdateProhibited"])
end
end
end
| 26.9 | 116 | 0.739777 |
ac25d47fa63d61a2487387cbf1162e542ce565dd | 2,043 | require File.join(File.dirname(__FILE__), "helper")
HUMAN_ID = '[email protected]'
ROBOT_ID = '[email protected]'
URL = 'http://googlewave.com'
describe Rave::Models::User do
it_should_behave_like "Component id()"
it_should_behave_like "Component initialize()"
describe "robot?()" do
it "should return false for a human user" do
human = Rave::Models::User.new(:id => HUMAN_ID)
human.robot?.should be_false
end
it "should return true for a robot" do
robot = Rave::Models::User.new(:id => ROBOT_ID)
robot.robot?.should be_true
end
end
describe "generated?" do
it "should always be false" do
human = Rave::Models::User.new(:id => HUMAN_ID)
human.generated?.should be_false
end
end
describe "profile_url()" do
it "Should be the :url passed in the constructor" do
user = Rave::Models::User.new(:profile_url => URL, :id => "bleh")
user.profile_url.should == URL
end
it "should default to an empty string" do
user = Rave::Models::User.new(:id => "bleh")
user.profile_url.should == ''
end
end
describe "image_url()" do
it "Should be the url passed in the constructor" do
user = Rave::Models::User.new(:image_url => URL, :id => "bleh")
user.image_url.should == URL
end
it "should default to an empty string" do
user = Rave::Models::User.new(:id => "bleh")
user.image_url.should == ''
end
end
describe "name()" do
it "should return the name passed in the constructor" do
user = Rave::Models::User.new(:id => HUMAN_ID, :name => 'fred')
user.name.should == 'fred'
end
it "should default to the ID" do
user = Rave::Models::User.new(:id => HUMAN_ID)
user.name.should == HUMAN_ID
end
end
describe "to_s()" do
user = Rave::Models::User.new(:id => HUMAN_ID)
user.to_s.should == HUMAN_ID
end
describe "to_json()" do
user = Rave::Models::User.new(:id => HUMAN_ID)
user.to_json.should == HUMAN_ID.to_json
end
end
| 26.881579 | 71 | 0.631914 |
6aa2325c8918462f89df5ad3859797af9a432f7c | 1,203 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'redcli/version'
Gem::Specification.new do |spec|
spec.name = "redcli"
spec.version = Redcli::VERSION
spec.authors = ["Kavinder Dhaliwal"]
spec.email = ["[email protected]"]
spec.summary = %q{Reddit Cli reader}
spec.description = %q{}
spec.homepage = "https://github.com/kavinderd/redcli"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "pry", '~> 0.10'
spec.add_development_dependency "pry-debugger", '~> 0.2'
spec.add_development_dependency "fakeweb", "~> 1.3"
spec.add_development_dependency 'vcr', '~> 2.9'
spec.add_runtime_dependency "faraday", '~> 0.9'
spec.add_runtime_dependency "main", '~> 6.1'
spec.add_runtime_dependency "colorize", "~> 0.7"
end
| 37.59375 | 74 | 0.65586 |
f84fdc99f58e880b72244d3fc052e4ab68316185 | 4,439 | require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Include generic and useful information about system operation, but avoid logging too much
# information to avoid inadvertent exposure of personally identifiable information (PII).
config.log_level = :info
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "dummy_production"
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Log disallowed deprecations.
config.active_support.disallowed_deprecation = :log
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require "syslog/logger"
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.39 | 114 | 0.766614 |
335c4f980a3346b3940bbe0ce64874c6f471f793 | 936 | require 'test_helper'
class MaintainersControllerTest < ActionDispatch::IntegrationTest
setup do
@maintainer = maintainers(:one)
end
test "should get index" do
get maintainers_url, as: :json
assert_response :success
end
test "should create maintainer" do
assert_difference('Maintainer.count') do
post maintainers_url, params: { maintainer: { user_id: @maintainer.user_id } }, as: :json
end
assert_response 201
end
test "should show maintainer" do
get maintainer_url(@maintainer), as: :json
assert_response :success
end
test "should update maintainer" do
patch maintainer_url(@maintainer), params: { maintainer: { user_id: @maintainer.user_id } }, as: :json
assert_response 200
end
test "should destroy maintainer" do
assert_difference('Maintainer.count', -1) do
delete maintainer_url(@maintainer), as: :json
end
assert_response 204
end
end
| 24 | 106 | 0.711538 |
87577fc3a97341262da04592df38aae71a896614 | 216 | # frozen_string_literal: true
# We need to have a policy for StudentTableRow model which is used in some views. The policy should be the same as the
# one for student
class StudentTableRowPolicy < StudentPolicy
end
| 30.857143 | 118 | 0.800926 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.