hexsha stringlengths 40 40 | size int64 2 1.01M | content stringlengths 2 1.01M | avg_line_length float64 1.5 100 | max_line_length int64 2 1k | alphanum_fraction float64 0.25 1 |
---|---|---|---|---|---|
01bd562f77c0bbdb442ed522bca7a0a173de31b1 | 56 | class Parties::RoleKind
include Mongoid::Document
end
| 14 | 27 | 0.803571 |
acb31d335e3effe734e825fbd12e8ccc535aef62 | 3,575 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Web::Mgmt::V2020_09_01
module Models
#
# Model object.
#
#
class AzureActiveDirectoryRegistration < ProxyOnlyResource
include MsRestAzure
# @return [String]
attr_accessor :open_id_issuer
# @return [String]
attr_accessor :client_id
# @return [String]
attr_accessor :client_secret_setting_name
# @return [String]
attr_accessor :client_secret_certificate_thumbprint
#
# Mapper for AzureActiveDirectoryRegistration class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'AzureActiveDirectoryRegistration',
type: {
name: 'Composite',
class_name: 'AzureActiveDirectoryRegistration',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
kind: {
client_side_validation: true,
required: false,
serialized_name: 'kind',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
system_data: {
client_side_validation: true,
required: false,
serialized_name: 'systemData',
type: {
name: 'Composite',
class_name: 'SystemData'
}
},
open_id_issuer: {
client_side_validation: true,
required: false,
serialized_name: 'properties.openIdIssuer',
type: {
name: 'String'
}
},
client_id: {
client_side_validation: true,
required: false,
serialized_name: 'properties.clientId',
type: {
name: 'String'
}
},
client_secret_setting_name: {
client_side_validation: true,
required: false,
serialized_name: 'properties.clientSecretSettingName',
type: {
name: 'String'
}
},
client_secret_certificate_thumbprint: {
client_side_validation: true,
required: false,
serialized_name: 'properties.clientSecretCertificateThumbprint',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.6 | 80 | 0.46014 |
217b53e1ca588a0e6e8382c2c62f740cdf6e2351 | 250 | class CreateInspirationalQuotes < ActiveRecord::Migration[6.1]
def change
create_table :inspirational_quotes do |t|
t.string :quote
t.string :author
t.string :meaning
t.string :date
t.timestamps
end
end
end
| 19.230769 | 62 | 0.668 |
264692f9aabea0dd7bc935ab1719cb8e10df705a | 3,478 | require 'spec_helper'
describe Qa::Authorities::Getty::TGN do
let(:authority) { described_class.new }
describe "#build_query_url" do
subject { authority.build_query_url("foo") }
it { is_expected.to match(/^http:\/\/vocab\.getty\.edu\//) }
end
describe "#find_url" do
subject { authority.find_url("1028772") }
it { is_expected.to eq "http://vocab.getty.edu/tgn/1028772.json" }
end
describe "#search" do
context "authorities" do
before do
stub_request(:get, /vocab\.getty\.edu.*/)
.to_return(body: webmock_fixture("tgn-response.txt"), status: 200)
end
subject { authority.search('whatever') }
it "has id and label keys" do
expect(subject.first).to eq("id" => 'http://vocab.getty.edu/tgn/2058300', "label" => "Cawood (Andrew, Missouri, United States)")
expect(subject.last).to eq("id" => 'http://vocab.getty.edu/tgn/7022503', "label" => "Cawood Branch (Kentucky, United States)")
expect(subject.size).to eq(6)
end
end
end
describe "#untaint" do
subject { authority.untaint(value) }
context "with a good string" do
let(:value) { 'Cawood' }
it { is_expected.to eq 'Cawood' }
end
context "bad stuff" do
let(:value) { './"' }
it { is_expected.to eq '' }
end
end
describe "#find" do
context "using a subject id" do
before do
stub_request(:get, "http://vocab.getty.edu/tgn/1028772.json")
.to_return(status: 200, body: webmock_fixture("getty-tgn-find-response.json"))
end
subject { authority.find("1028772") }
it "returns the complete record for a given subject" do
expect(subject['results']['bindings'].size).to eq 103
expect(subject['results']['bindings']).to all(have_key('Subject'))
expect(subject['results']['bindings']).to all(have_key('Predicate'))
expect(subject['results']['bindings']).to all(have_key('Object'))
end
end
end
describe "#request_options" do
subject { authority.request_options }
it { is_expected.to eq(accept: "application/sparql-results+json") }
end
describe "#sparql" do
context "using a single subject term" do
subject { authority.sparql('search_term') }
it {
is_expected.to eq 'SELECT DISTINCT ?s ?name ?par {
?s a skos:Concept; luc:term "search_term";
skos:inScheme <http://vocab.getty.edu/tgn/> ;
gvp:prefLabelGVP [skosxl:literalForm ?name] ;
gvp:parentString ?par .
FILTER regex(?name, "search_term", "i") .
} ORDER BY ?name ASC(?par)' }
end
context "using a two subject terms" do
subject { authority.sparql('search term') }
# rubocop:disable Metrics/LineLength
it {
is_expected.to eq "SELECT DISTINCT ?s ?name ?par {
?s a skos:Concept; luc:term \"search term\";
skos:inScheme <http://vocab.getty.edu/tgn/> ;
gvp:prefLabelGVP [skosxl:literalForm ?name] ;
gvp:parentString ?par .
FILTER ((regex(CONCAT(?name, ', ', REPLACE(str(?par), \",[^,]+,[^,]+$\", \"\")), \"search\",\"i\" ) && regex(CONCAT(?name, ', ', REPLACE(str(?par), \",[^,]+,[^,]+$\", \"\")), \"term\",\"i\" ) ) && (regex(?name, \"search\",\"i\" ) || regex(?name, \"term\",\"i\" ) ) ) .
} ORDER BY ?name ASC(?par)" }
# rubocop:enable Metrics/LineLength
end
end
end
| 36.229167 | 282 | 0.586544 |
333e46ae5329a0cdd73bd9c9d2f1f136b7aa5c36 | 3,083 | module SolidusSalePrices
module Spree
module PriceDecorator
def self.prepended(base)
base.has_many :sale_prices, dependent: :destroy
base.has_many :active_sale_prices, -> { merge(::Spree::SalePrice.active) }, class_name: '::Spree::SalePrice'
base.after_save :update_calculated_sale_prices
base.after_discard do
sale_prices.discard_all
end
end
def update_calculated_sale_prices
reload
sale_prices.each(&:update_calculated_price!)
end
def put_on_sale(value, params = {})
new_sale(value, params).save
end
def new_sale(value, params = {})
sale_price_params = {
value: value,
start_at: params.fetch(:start_at, Time.now),
end_at: params.fetch(:end_at, nil),
enabled: params.fetch(:enabled, true),
calculator: params.fetch(:calculator_type, ::Spree::Calculator::FixedAmountSalePriceCalculator.new)
}
return sale_prices.new(sale_price_params)
end
# TODO make update_sale method
def active_sale
first_sale(active_sale_prices) if on_sale?
end
alias :current_sale :active_sale
def next_active_sale
first_sale(sale_prices) if sale_prices.present?
end
alias :next_current_sale :next_active_sale
def sale_price
active_sale.calculated_price if on_sale?
end
def sale_price=(value)
if on_sale?
active_sale.update_attribute(:value, value)
else
put_on_sale(value)
end
end
def discount_percent
return 0.0 unless original_price > 0
return 0.0 unless on_sale?
(1 - (sale_price / original_price)) * 100
end
def on_sale?
return false unless (first_active_sale_calculated_price = first_sale(active_sale_prices)&.calculated_price)
first_active_sale_calculated_price < original_price
end
def original_price
self[:amount]
end
def original_price=(value)
self[:amount] = ::Spree::LocalizedNumber.parse(value)
end
def price
on_sale? ? sale_price : original_price
end
def price=(price)
if on_sale?
self.sale_price = price
else
self[:amount] = ::Spree::LocalizedNumber.parse(price)
end
end
def amount
price
end
def enable_sale
next_active_sale.enable if next_active_sale.present?
end
def disable_sale
active_sale.disable if active_sale.present?
end
def start_sale(end_time = nil)
next_active_sale.start(end_time) if next_active_sale.present?
end
def stop_sale
active_sale.stop if active_sale.present?
end
private
def first_sale(scope)
# adding 'order' to scope will invalidate any eager loading so
# better do it in memory
scope.sort { |p1, p2| p1.created_at <=> p2.created_at }.first
end
::Spree::Price.prepend self
end
end
end
| 25.691667 | 116 | 0.628933 |
bb6aa2f78ac098ee6a6d989edd5fc974c30917f3 | 844 | # frozen_string_literal: true
class PipelineDetailsEntity < Ci::PipelineEntity
expose :project, using: ProjectEntity
expose :flags do
expose :latest?, as: :latest
end
expose :details do
expose :artifacts do |pipeline, options|
rel = pipeline.downloadable_artifacts
if Feature.enabled?(:non_public_artifacts, type: :development)
rel = rel.select { |artifact| can?(request.current_user, :read_job_artifacts, artifact.job) }
end
BuildArtifactEntity.represent(rel, options.merge(project: pipeline.project))
end
expose :manual_actions, using: BuildActionEntity
expose :scheduled_actions, using: BuildActionEntity
end
expose :triggered_by_pipeline, as: :triggered_by, with: TriggeredPipelineEntity
expose :triggered_pipelines, as: :triggered, using: TriggeredPipelineEntity
end
| 31.259259 | 101 | 0.748815 |
d55eed0739d68a80c746d9d8ba0c2c6ab62da4bf | 795 |
# Configure Rails Environment
ENV["RAILS_ENV"] = "test"
ENGINE_RAILS_ROOT=File.join(File.dirname(__FILE__), '../')
require File.expand_path("../dummy/config/environment.rb", __FILE__)
require "rails/test_help"
ActionMailer::Base.delivery_method = :test
ActionMailer::Base.perform_deliveries = true
ActionMailer::Base.default_url_options[:host] = "test.com"
Rails.backtrace_cleaner.remove_silencers!
# Configure capybara for integration testing
require "capybara/rails"
require 'capybara/poltergeist'
Capybara.javascript_driver = :poltergeist
Capybara.default_selector = :css
# Run any available migration
ActiveRecord::Migrator.migrate File.expand_path("../dummy/db/migrate/", __FILE__)
# Load support files
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f } | 24.84375 | 81 | 0.771069 |
796f9ca719b876efc855c18418f2a222429d43ae | 1,764 | require 'test_helper'
class UsersEditTest < ActionDispatch::IntegrationTest
def setup
@user = users(:jack)
end
test "unsuccessful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
patch user_path(@user), params: { user: { name: "",
email: "foo@invalid",
password: "foo",
password_confirmation: "bar" } }
assert_template 'users/edit'
end
test "successful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
name = "Foo Bar"
email = "[email protected]"
patch user_path(@user), params: { user: { name: name,
email: email,
password: "",
password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
test "successful edit with friendly forwarding" do
get edit_user_path(@user)
log_in_as(@user)
assert_redirected_to edit_user_url(@user)
name = "Foo Bar"
email = "[email protected]"
patch user_path(@user), params: { user: { name: name,
email: email,
password: "",
password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
end
| 32.072727 | 78 | 0.501134 |
87ea719b6a8661d401ec4cf58eee387a7b82583f | 13,762 | # encoding: UTF-8
require "csv"
module Jekyll
class Site
attr_reader :source, :dest, :config
attr_accessor :layouts, :pages, :static_files, :drafts,
:exclude, :include, :lsi, :highlighter, :permalink_style,
:time, :future, :unpublished, :safe, :plugins, :limit_posts,
:show_drafts, :keep_files, :baseurl, :data, :file_read_opts,
:gems, :plugin_manager, :theme
attr_accessor :converters, :generators, :reader
attr_reader :regenerator, :liquid_renderer, :includes_load_paths
# Public: Initialize a new Site.
#
# config - A Hash containing site configuration details.
def initialize(config)
# Source and destination may not be changed after the site has been created.
@source = File.expand_path(config["source"]).freeze
@dest = File.expand_path(config["destination"]).freeze
self.config = config
@reader = Reader.new(self)
@regenerator = Regenerator.new(self)
@liquid_renderer = LiquidRenderer.new(self)
Jekyll.sites << self
reset
setup
Jekyll::Hooks.trigger :site, :after_init, self
end
# Public: Set the site's configuration. This handles side-effects caused by
# changing values in the configuration.
#
# config - a Jekyll::Configuration, containing the new configuration.
#
# Returns the new configuration.
def config=(config)
@config = config.clone
%w(safe lsi highlighter baseurl exclude include future unpublished
show_drafts limit_posts keep_files gems).each do |opt|
self.send("#{opt}=", config[opt])
end
configure_plugins
configure_theme
configure_include_paths
configure_file_read_opts
self.permalink_style = config["permalink"].to_sym
@config
end
# Public: Read, process, and write this Site to output.
#
# Returns nothing.
def process
reset
read
generate
render
cleanup
write
print_stats
end
def print_stats
if @config["profile"]
puts @liquid_renderer.stats_table
end
end
# Reset Site details.
#
# Returns nothing
def reset
if config["time"]
self.time = Utils.parse_date(config["time"].to_s, "Invalid time in _config.yml.")
else
self.time = Time.now
end
self.layouts = {}
self.pages = []
self.static_files = []
self.data = {}
@collections = nil
@regenerator.clear_cache
@liquid_renderer.reset
if limit_posts < 0
raise ArgumentError, "limit_posts must be a non-negative number"
end
Jekyll::Hooks.trigger :site, :after_reset, self
end
# Load necessary libraries, plugins, converters, and generators.
#
# Returns nothing.
def setup
ensure_not_in_dest
plugin_manager.conscientious_require
self.converters = instantiate_subclasses(Jekyll::Converter)
self.generators = instantiate_subclasses(Jekyll::Generator)
end
# Check that the destination dir isn't the source dir or a directory
# parent to the source dir.
def ensure_not_in_dest
dest_pathname = Pathname.new(dest)
Pathname.new(source).ascend do |path|
if path == dest_pathname
raise(
Errors::FatalException,
"Destination directory cannot be or contain the Source directory."
)
end
end
end
# The list of collections and their corresponding Jekyll::Collection instances.
# If config['collections'] is set, a new instance is created
# for each item in the collection, a new hash is returned otherwise.
#
# Returns a Hash containing collection name-to-instance pairs.
def collections
@collections ||= Hash[collection_names.map do |coll|
[coll, Jekyll::Collection.new(self, coll)]
end]
end
# The list of collection names.
#
# Returns an array of collection names from the configuration,
# or an empty array if the `collections` key is not set.
def collection_names
case config["collections"]
when Hash
config["collections"].keys
when Array
config["collections"]
when nil
[]
else
raise ArgumentError, "Your `collections` key must be a hash or an array."
end
end
# Read Site data from disk and load it into internal data structures.
#
# Returns nothing.
def read
reader.read
limit_posts!
Jekyll::Hooks.trigger :site, :post_read, self
end
# Run each of the Generators.
#
# Returns nothing.
def generate
generators.each do |generator|
start = Time.now
generator.generate(self)
Jekyll.logger.debug "Generating:",
"#{generator.class} finished in #{Time.now - start} seconds."
end
end
# Render the site to the destination.
#
# Returns nothing.
def render
relative_permalinks_are_deprecated
payload = site_payload
Jekyll::Hooks.trigger :site, :pre_render, self, payload
render_docs(payload)
render_pages(payload)
Jekyll::Hooks.trigger :site, :post_render, self, payload
end
# Remove orphaned files and empty directories in destination.
#
# Returns nothing.
def cleanup
site_cleaner.cleanup!
end
# Write static files, pages, and posts.
#
# Returns nothing.
def write
each_site_file do |item|
item.write(dest) if regenerator.regenerate?(item)
end
regenerator.write_metadata
Jekyll::Hooks.trigger :site, :post_write, self
end
def posts
collections["posts"] ||= Collection.new(self, "posts")
end
# Construct a Hash of Posts indexed by the specified Post attribute.
#
# post_attr - The String name of the Post attribute.
#
# Examples
#
# post_attr_hash('categories')
# # => { 'tech' => [<Post A>, <Post B>],
# # 'ruby' => [<Post B>] }
#
# Returns the Hash: { attr => posts } where
# attr - One of the values for the requested attribute.
# posts - The Array of Posts with the given attr value.
def post_attr_hash(post_attr)
# Build a hash map based on the specified post attribute ( post attr =>
# array of posts ) then sort each array in reverse order.
hash = Hash.new { |h, key| h[key] = [] }
posts.docs.each do |p|
p.data[post_attr].each { |t| hash[t] << p } if p.data[post_attr]
end
hash.values.each { |posts| posts.sort!.reverse! }
hash
end
def tags
post_attr_hash("tags")
end
def categories
post_attr_hash("categories")
end
# Prepare site data for site payload. The method maintains backward compatibility
# if the key 'data' is already used in _config.yml.
#
# Returns the Hash to be hooked to site.data.
def site_data
config["data"] || data
end
# The Hash payload containing site-wide data.
#
# Returns the Hash: { "site" => data } where data is a Hash with keys:
# "time" - The Time as specified in the configuration or the
# current time if none was specified.
# "posts" - The Array of Posts, sorted chronologically by post date
# and then title.
# "pages" - The Array of all Pages.
# "html_pages" - The Array of HTML Pages.
# "categories" - The Hash of category values and Posts.
# See Site#post_attr_hash for type info.
# "tags" - The Hash of tag values and Posts.
# See Site#post_attr_hash for type info.
def site_payload
Drops::UnifiedPayloadDrop.new self
end
alias_method :to_liquid, :site_payload
# Get the implementation class for the given Converter.
# Returns the Converter instance implementing the given Converter.
# klass - The Class of the Converter to fetch.
def find_converter_instance(klass)
converters.find { |klass_| klass_.instance_of?(klass) } || \
raise("No Converters found for #{klass}")
end
# klass - class or module containing the subclasses.
# Returns array of instances of subclasses of parameter.
# Create array of instances of the subclasses of the class or module
# passed in as argument.
def instantiate_subclasses(klass)
klass.descendants.select { |c| !safe || c.safe }.sort.map do |c|
c.new(config)
end
end
# Warns the user if permanent links are relative to the parent
# directory. As this is a deprecated function of Jekyll.
#
# Returns
def relative_permalinks_are_deprecated
if config["relative_permalinks"]
Jekyll.logger.abort_with "Since v3.0, permalinks for pages" \
" in subfolders must be relative to the" \
" site source directory, not the parent" \
" directory. Check https://jekyllrb.com/docs/upgrading/"\
" for more info."
end
end
# Get the to be written documents
#
# Returns an Array of Documents which should be written
def docs_to_write
documents.select(&:write?)
end
# Get all the documents
#
# Returns an Array of all Documents
def documents
collections.reduce(Set.new) do |docs, (_, collection)|
docs + collection.docs + collection.files
end.to_a
end
def each_site_file
%w(pages static_files docs_to_write).each do |type|
send(type).each do |item|
yield item
end
end
end
# Returns the FrontmatterDefaults or creates a new FrontmatterDefaults
# if it doesn't already exist.
#
# Returns The FrontmatterDefaults
def frontmatter_defaults
@frontmatter_defaults ||= FrontmatterDefaults.new(self)
end
# Whether to perform a full rebuild without incremental regeneration
#
# Returns a Boolean: true for a full rebuild, false for normal build
def incremental?(override = {})
override["incremental"] || config["incremental"]
end
# Returns the publisher or creates a new publisher if it doesn't
# already exist.
#
# Returns The Publisher
def publisher
@publisher ||= Publisher.new(self)
end
# Public: Prefix a given path with the source directory.
#
# paths - (optional) path elements to a file or directory within the
# source directory
#
# Returns a path which is prefixed with the source directory.
def in_source_dir(*paths)
paths.reduce(source) do |base, path|
Jekyll.sanitized_path(base, path)
end
end
# Public: Prefix a given path with the theme directory.
#
# paths - (optional) path elements to a file or directory within the
# theme directory
#
# Returns a path which is prefixed with the theme root directory.
def in_theme_dir(*paths)
return nil unless theme
paths.reduce(theme.root) do |base, path|
Jekyll.sanitized_path(base, path)
end
end
# Public: Prefix a given path with the destination directory.
#
# paths - (optional) path elements to a file or directory within the
# destination directory
#
# Returns a path which is prefixed with the destination directory.
def in_dest_dir(*paths)
paths.reduce(dest) do |base, path|
Jekyll.sanitized_path(base, path)
end
end
# Limits the current posts; removes the posts which exceed the limit_posts
#
# Returns nothing
private
def limit_posts!
if limit_posts > 0
limit = posts.docs.length < limit_posts ? posts.docs.length : limit_posts
self.posts.docs = posts.docs[-limit, limit]
end
end
# Returns the Cleaner or creates a new Cleaner if it doesn't
# already exist.
#
# Returns The Cleaner
private
def site_cleaner
@site_cleaner ||= Cleaner.new(self)
end
private
def configure_plugins
self.plugin_manager = Jekyll::PluginManager.new(self)
self.plugins = plugin_manager.plugins_path
end
private
def configure_theme
self.theme = nil
return if config["theme"].nil?
self.theme =
if config["theme"].is_a?(String)
Jekyll::Theme.new(config["theme"])
else
Jekyll.logger.warn "Theme:", "value of 'theme' in config should be " \
"String to use gem-based themes, but got #{config["theme"].class}"
nil
end
end
private
def configure_include_paths
@includes_load_paths = Array(in_source_dir(config["includes_dir"].to_s))
@includes_load_paths << theme.includes_path if theme && theme.includes_path
end
private
def configure_file_read_opts
self.file_read_opts = {}
self.file_read_opts[:encoding] = config["encoding"] if config["encoding"]
end
private
def render_docs(payload)
collections.each do |_, collection|
collection.docs.each do |document|
if regenerator.regenerate?(document)
document.output = Jekyll::Renderer.new(self, document, payload).run
document.trigger_hooks(:post_render)
end
end
end
end
private
def render_pages(payload)
pages.flatten.each do |page|
if regenerator.regenerate?(page)
page.output = Jekyll::Renderer.new(self, page, payload).run
page.trigger_hooks(:post_render)
end
end
end
end
end
| 29.280851 | 89 | 0.62767 |
1cb12612616a49dbcfaa01e015baaf42dd152972 | 4,290 | require 'pycall/wrapper_object_cache'
module PyCall
module PyObjectWrapper
attr_reader :__pyptr__
def self.extend_object(obj)
pyptr = obj.instance_variable_get(:@__pyptr__)
unless pyptr.kind_of? PyPtr
raise TypeError, "@__pyptr__ should have PyCall::PyPtr object"
end
super
end
OPERATOR_METHOD_NAMES = {
:+ => :__add__,
:- => :__sub__,
:* => :__mul__,
:/ => :__truediv__,
:% => :__mod__,
:** => :__pow__,
:<< => :__lshift__,
:>> => :__rshift__,
:& => :__and__,
:^ => :__xor__,
:| => :__or__
}.freeze
def method_missing(name, *args)
name_str = name.to_s if name.kind_of?(Symbol)
name_str.chop! if name_str.end_with?('=')
case name
when *OPERATOR_METHOD_NAMES.keys
op_name = OPERATOR_METHOD_NAMES[name]
if LibPython::Helpers.hasattr?(__pyptr__, op_name)
LibPython::Helpers.define_wrapper_method(self, op_name)
singleton_class.__send__(:alias_method, name, op_name)
return self.__send__(name, *args)
end
else
if LibPython::Helpers.hasattr?(__pyptr__, name_str)
LibPython::Helpers.define_wrapper_method(self, name)
return self.__send__(name, *args)
end
end
super
end
def respond_to_missing?(name, include_private)
return true if LibPython::Helpers.hasattr?(__pyptr__, name)
super
end
def kind_of?(cls)
case cls
when PyTypeObjectWrapper
__pyptr__.kind_of?(cls.__pyptr__)
else
super
end
end
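# Generate Ruby comparison operators (==, !=, <, <=, >, >=) that delegate to Python's rich comparisons via LibPython::Helpers.compare.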
[:==, :!=, :<, :<=, :>, :>=].each do |op|
class_eval("#{<<-"begin;"}\n#{<<-"end;"}", __FILE__, __LINE__+1)
begin;
def #{op}(other)
case other
when PyObjectWrapper
LibPython::Helpers.compare(:#{op}, __pyptr__, other.__pyptr__)
else
other = Conversion.from_ruby(other)
LibPython::Helpers.compare(:#{op}, __pyptr__, other)
end
end
end;
end
def [](*key)
LibPython::Helpers.getitem(__pyptr__, key)
end
def []=(*key, value)
LibPython::Helpers.setitem(__pyptr__, key, value)
end
def call(*args)
LibPython::Helpers.call_object(__pyptr__, *args)
end
class SwappedOperationAdapter
def initialize(obj)
@obj = obj
end
attr_reader :obj
def +(other)
other.__radd__(self.obj)
end
def -(other)
other.__rsub__(self.obj)
end
def *(other)
other.__rmul__(self.obj)
end
def /(other)
other.__rtruediv__(self.obj)
end
def %(other)
other.__rmod__(self.obj)
end
def **(other)
other.__rpow__(self.obj)
end
def <<(other)
other.__rlshift__(self.obj)
end
def >>(other)
other.__rrshift__(self.obj)
end
def &(other)
other.__rand__(self.obj)
end
def ^(other)
other.__rxor__(self.obj)
end
def |(other)
other.__ror__(self.obj)
end
end
def coerce(other)
[SwappedOperationAdapter.new(other), self]
end
def dup
super.tap do |duped|
copied = PyCall.import_module('copy').copy(__pyptr__)
copied = copied.__pyptr__ if copied.kind_of? PyObjectWrapper
duped.instance_variable_set(:@__pyptr__, copied)
end
end
def inspect
PyCall.builtins.repr(__pyptr__)
end
def to_s
LibPython::Helpers.str(__pyptr__)
end
def to_i
LibPython::Helpers.call_object(PyCall::builtins.int.__pyptr__, __pyptr__)
end
def to_f
LibPython::Helpers.call_object(PyCall::builtins.float.__pyptr__, __pyptr__)
end
end
module_function
def check_ismodule(pyptr)
return if pyptr.kind_of? LibPython::API::PyModule_Type
raise TypeError, "PyModule object is required"
end
def check_isclass(pyptr)
pyptr = pyptr.__pyptr__ if pyptr.kind_of? PyObjectWrapper
return if pyptr.kind_of? LibPython::API::PyType_Type
return if defined?(LibPython::API::PyClass_Type) && pyptr.kind_of?(LibPython::API::PyClass_Type)
raise TypeError, "PyType object is required"
end
end
| 23.189189 | 97 | 0.59697 |
ff447d24772554ca04886c4fac3c4de831505e2d | 461 | require 'set'
module BotManager
module Alexa
module Manifest
class PublishingOptions
attr_reader :distribution_countries
attr_accessor :is_available_worldwide, :testing_instructions, :category, :distribution_mode
def initialize
@distribution_countries = Set.new
end
def add_distribution_country country
@distribution_countries.add country
end
end
end
end
end | 16.464286 | 99 | 0.67462 |
ffcb5a0109121c561e55f870adee9c94e28705b3 | 1,647 | # encoding: utf-8
class AvatarUploader < CarrierWave::Uploader::Base
# Include RMagick or MiniMagick support:
# include CarrierWave::RMagick
include CarrierWave::MiniMagick
# Choose what kind of storage to use for this uploader:
storage :file
# storage :fog
# Override the directory where uploaded files will be stored.
# This is a sensible default for uploaders that are meant to be mounted:
def store_dir
"avatars/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
end
version :thumb do
process :resize_to_fill => [50, 50]
end
version :avatar do
process :resize_to_fill => [120, 120]
end
# Provide a default URL as a default if there hasn't been a file uploaded:
# def default_url
# # For Rails 3.1+ asset pipeline compatibility:
# # ActionController::Base.helpers.asset_path("fallback/" + [version_name, "default.png"].compact.join('_'))
#
# "/images/fallback/" + [version_name, "default.png"].compact.join('_')
# end
# Process files as they are uploaded:
# process :scale => [200, 300]
#
# def scale(width, height)
# # do something
# end
# Create different versions of your uploaded files:
# version :thumb do
# process :resize_to_fit => [50, 50]
# end
# Add a white list of extensions which are allowed to be uploaded.
# For images you might use something like this:
# def extension_white_list
# %w(jpg jpeg gif png)
# end
# Override the filename of the uploaded files:
# Avoid using model.id or version_name here, see uploader/store.rb for details.
# def filename
# "something.jpg" if original_filename
# end
end
| 27 | 112 | 0.690953 |
e9d81bd4bed332d597c43c0b898897236c4aa054 | 2,343 | class ProtectedController < ApplicationController
before_action :login_required
protected
def login_required
if logged_in?
@account = Account.find(session[:account_id])
true
else
store_location
flash[:error] = "You must be logged in to the see this page."
redirect_to login_accounts_path and return false
end
end
def logged_in?
session[:account_id] || false
end
def store_location
session[:return_to] = last_location
end
def last_location
request.get? ? request.url :
(request.env["HTTP_REFERER"] || request.env["REQUEST_URI"])
end
def redirect_back_or_default(default)
redirect_to(session[:return_to] || default)
session[:return_to] = nil
end
def is_arp_admin?
if @account && @account.arp_admin?
return true
else
flash[:error] = "You took a wrong turn at Albuquerque"
redirect_to login_accounts_path
return false
end
end
def is_arp_sub_admin?
if (@account && @account.arp_sub_admin?) ||
@account.arp_admin?
return true
else
flash[:error] = "You took a wrong turn at Albuquerque"
redirect_to login_accounts_path
return false
end
end
# For services
def instantiate_resources_of_service(service)
@resources = service.resources
@ip_blocks = service.ip_blocks.sort do |a, b|
a.version <=> b.version # IPv4 to appear before IPv6
end
end
def verify_otp
begin
otp = params[:otp]
raise ArgumentError unless otp
# Our identity
raise ArgumentError if otp[0..11] != $OTP_PREFIX
otp = Yubikey::OTP::Verify.new(otp)
return true if otp.valid?
raise ArgumentError
rescue
flash[:error] = "You took a wrong turn at Albuquerque"
redirect_to login_accounts_path
return false
end
end
# For dispatcher, originally from VM controller
def write_request(vm, action, other = nil)
ts = Time.new.to_i
File.open("tmp/requests/#{vm.uuid}-#{ts}", "w") do |f|
f.puts "#{action} #{vm.uuid} #{vm.host} #{other}"
end
end
def iso_files
begin
files = File.readlines("config/arp/iso-files.txt").map do |item|
item.strip
end
files.sort do |a,b|
a.downcase <=> b.downcase
end
rescue
[]
end
end
end
| 21.897196 | 70 | 0.645327 |
01a09fa861d934e1346ed16dd4f4375986fdfc65 | 813 | class ProgressBar < Struct.new :log, :file_name, :total_units, :completed_units
TOTAL_BAR_LENGTH = 40.freeze
def print
log.print(header + bar + progress_text)
end
private
def header
"\r #{file_name}#{padding_after_file_name}"
end
def padding_after_file_name
' ' * (24 - file_name.size)
end
def bar
"[#{''.ljust(bar_progress, '=')}#{''.ljust(bar_remainder, ' ')}]"
end
def bar_progress
(fractional_progress * TOTAL_BAR_LENGTH).to_i
end
def fractional_progress
completed_units.to_f / total_units.to_f
end
def bar_remainder
TOTAL_BAR_LENGTH - bar_progress
end
def progress_text
" #{percentage_progress} (#{completed_units} out of #{total_units}) "
end
def percentage_progress
(fractional_progress * 100).to_i.to_s + "%"
end
end
| 18.906977 | 79 | 0.686347 |
e85be4105d5bf1fe9ad94c600d9337998bfbcd17 | 2,110 | Pod::Spec.new do |s|
s.name = 'MWPhotoBrowser'
s.version = '2.1.3'
s.license = 'MIT'
s.summary = 'A simple iOS photo and video browser with optional grid view, captions and selections.'
s.description = <<-DESCRIPTION
MWPhotoBrowser can display one or more images or videos by providing either UIImage
objects, PHAsset objects, or URLs to library assets, web images/videos or local files.
The photo browser handles the downloading and caching of photos from the web seamlessly.
Photos can be zoomed and panned, and optional (customisable) captions can be displayed.
DESCRIPTION
s.screenshots = [
'https://raw.github.com/mwaterfall/MWPhotoBrowser/master/Screenshots/MWPhotoBrowser1.png',
'https://raw.github.com/mwaterfall/MWPhotoBrowser/master/Screenshots/MWPhotoBrowser2.png',
'https://raw.github.com/mwaterfall/MWPhotoBrowser/master/Screenshots/MWPhotoBrowser3.png',
'https://raw.github.com/mwaterfall/MWPhotoBrowser/master/Screenshots/MWPhotoBrowser4.png',
'https://raw.github.com/mwaterfall/MWPhotoBrowser/master/Screenshots/MWPhotoBrowser5.png',
'https://raw.github.com/mwaterfall/MWPhotoBrowser/master/Screenshots/MWPhotoBrowser6.png'
]
s.homepage = 'https://github.com/mwaterfall/MWPhotoBrowser'
s.author = { 'Michael Waterfall' => '[email protected]' }
s.social_media_url = 'https://twitter.com/mwaterfall'
s.source = {
:git => 'https://github.com/y500/MWPhotoBrowser',
:tag => '2.1.3'
}
s.platform = :ios, '7.0'
s.source_files = 'Pod/Classes/**/*'
s.resource_bundles = {
'MWPhotoBrowser' => ['Pod/Assets/*.png']
}
s.requires_arc = true
s.frameworks = 'ImageIO', 'QuartzCore', 'AssetsLibrary', 'MediaPlayer'
s.weak_frameworks = 'Photos'
s.dependency 'MBProgressHUD', '~> 1.0.0'
s.dependency 'DACircularProgress', '~> 2.3.1'
# SDWebImage
# 3.7.2 contains bugs downloading local files
# https://github.com/rs/SDWebImage/issues/1109
s.dependency 'SDWebImage', '~> 4.0.0'
s.dependency 'ASValueTrackingSlider'
end
| 42.2 | 106 | 0.699052 |
1d9557b5dc6755b0e9f82907da9458bb96329a09 | 151 | class AddUserIdToCategories < ActiveRecord::Migration[6.0]
def change
add_reference :categories, :user, null: false, foreign_key: true
end
end
| 25.166667 | 68 | 0.761589 |
215d2973b5703ac9442510943f660274f94b3d75 | 1,313 | =begin
#Selling Partner API for Catalog Items
#The Selling Partner API for Catalog Items helps you programmatically retrieve item details for items in the catalog.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::CatalogItemsApiModel::DecimalWithUnits
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'DecimalWithUnits' do
before do
# run before each test
@instance = AmzSpApi::CatalogItemsApiModel::DecimalWithUnits.new
end
after do
# run after each test
end
describe 'test an instance of DecimalWithUnits' do
it 'should create an instance of DecimalWithUnits' do
expect(@instance).to be_instance_of(AmzSpApi::CatalogItemsApiModel::DecimalWithUnits)
end
end
describe 'test attribute "value"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "units"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 27.354167 | 117 | 0.751714 |
f80f4ce6cb4c5400320245d5d4834d8b1bdd6dbe | 826 | RSpec.feature "Session based flow navigation", flow_dir: :fixture do
scenario "User completes a flow" do
visit "/session-based/s"
choose "Response 1"
click_button "Continue"
fill_in "response", with: "Response"
click_button "Continue"
expect(page).to have_text("Results title")
end
scenario "User changes their answer to previous question" do
visit "/session-based/s"
choose "Response 1"
click_button "Continue"
click_on "Change"
expect(page).to have_text("Question 1 title")
end
scenario "User start the flow again" do
visit "/session-based/s"
choose "Response 1"
click_button "Continue"
fill_in "response", with: "Response"
click_button "Continue"
click_on "Start again"
expect(page).to have_text("This is a test flow")
end
end
| 21.179487 | 68 | 0.684019 |
e821a2b7df11adff60febe500515ed92ba5b1491 | 330 | name 'pw_base'
maintainer 'Christoph Lukas'
maintainer_email '[email protected]'
license 'All rights reserved'
description 'base cookbook to setup hosts file and name resolution'
long_description 'base cookbook to setup hosts file and name resolution'
version '0.2.0'
depends 'apt'
| 33 | 72 | 0.70303 |
79129255bdb0b2c8bfb4dd0e70254ff730b34b41 | 321 | require "time"
require "date"
require "json"
require "yaml"
require "tmpdir"
require "pathname"
REPO_ROOT = Pathname.new(File.expand_path("../../../", __FILE__)).freeze
require_relative "util"
require_relative "deploy_doc_test"
require_relative "travis"
require_relative "find_changes"
require_relative "test_spawner"
| 20.0625 | 72 | 0.778816 |
bf468c8ffeffc4254b8a7122e25c1d0df6d59389 | 1,064 | cask "gemini" do
version "2.7.1,357:1605619595"
sha256 "ce1fc36bed622a9419f7ecde0a3c671766bee87e8ec280c2e6c0c58700639aa3"
# dl.devmate.com/com.macpaw.site.Gemini was verified as official when first introduced to the cask
url "https://dl.devmate.com/com.macpaw.site.Gemini#{version.major}/#{version.after_comma.before_colon}/#{version.after_colon}/Gemini#{version.major}-#{version.after_comma.before_colon}.zip"
appcast "https://updates.devmate.com/com.macpaw.site.Gemini#{version.major}.xml"
name "Gemini"
desc "Disk space cleaner that finds and deletes duplicated and similar files"
homepage "https://macpaw.com/gemini"
app "Gemini #{version.major}.app"
zap trash: [
"~/Library/Application Support/Gemini*",
"~/Library/Caches/com.macpaw.site.Gemini*",
"~/Library/Cookies/com.macpaw.site.Gemini*.binarycookies",
"~/Library/Logs/com.macpaw.site.Gemini*",
"~/Library/Preferences/com.macpaw.site.Gemini*",
"~/Library/Saved Application State/com.macpaw.site.Gemini*",
"/Users/Shared/Gemini #{version.major}",
]
end
| 44.333333 | 191 | 0.739662 |
7a41fac58574d75a64b6334046f6921db3de672e | 155 | require File.expand_path('../../../spec_helper', __FILE__)
describe "MonitorMixin#mon_try_enter" do
it "needs to be reviewed for spec completeness"
end
| 25.833333 | 58 | 0.754839 |
f8416b0adc0932602f35b0b81c833e202402af09 | 43 | sprout_osx_apps_homebrew_cask "virtualbox"
| 21.5 | 42 | 0.906977 |
28874bc0f3ae565c89ea12b2101bcd4fdae4aac6 | 6,101 | #
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
class NoWhyrunDemonstrator < Chef::Provider
attr_reader :system_state_altered
def whyrun_supported?
false
end
def load_current_resource; end
def action_foo
@system_state_altered = true
end
end
class ConvergeActionDemonstrator < Chef::Provider
attr_reader :system_state_altered
def whyrun_supported?
true
end
def load_current_resource; end
def action_foo
converge_by("running a state changing action") do
@system_state_altered = true
end
end
end
class CheckResourceSemanticsDemonstrator < ConvergeActionDemonstrator
def check_resource_semantics!
raise Chef::Exceptions::InvalidResourceSpecification.new("check_resource_semantics!")
end
end
describe Chef::Provider do
before(:each) do
@cookbook_collection = Chef::CookbookCollection.new([])
@node = Chef::Node.new
@node.name "latte"
@events = Chef::EventDispatch::Dispatcher.new
@run_context = Chef::RunContext.new(@node, @cookbook_collection, @events)
@resource = Chef::Resource.new("funk", @run_context)
@resource.cookbook_name = "a_delicious_pie"
@provider = Chef::Provider.new(@resource, @run_context)
end
it "should mixin shell_out" do
expect(@provider.respond_to?(:shell_out)).to be true
end
it "should mixin shell_out!" do
expect(@provider.respond_to?(:shell_out!)).to be true
end
it "should store the resource passed to new as new_resource" do
expect(@provider.new_resource).to eql(@resource)
end
it "should store the node passed to new as node" do
expect(@provider.node).to eql(@node)
end
it "should have nil for current_resource by default" do
expect(@provider.current_resource).to eql(nil)
end
it "should support whyrun by default" do
expect(@provider.send(:whyrun_supported?)).to eql(true)
end
it "should do nothing for check_resource_semantics! by default" do
expect { @provider.check_resource_semantics! }.not_to raise_error
end
it "should return true for action_nothing" do
expect(@provider.action_nothing).to eql(true)
end
it "evals embedded recipes with a pristine resource collection" do
@provider.run_context.instance_variable_set(:@resource_collection, "doesn't matter what this is")
temporary_collection = nil
snitch = Proc.new { temporary_collection = @run_context.resource_collection }
@provider.send(:recipe_eval, &snitch)
expect(temporary_collection).to be_an_instance_of(Chef::ResourceCollection)
expect(@provider.run_context.instance_variable_get(:@resource_collection)).to eq("doesn't matter what this is")
end
it "does not re-load recipes when creating the temporary run context" do
expect_any_instance_of(Chef::RunContext).not_to receive(:load)
snitch = Proc.new { temporary_collection = @run_context.resource_collection }
@provider.send(:recipe_eval, &snitch)
end
context "when no converge actions are queued" do
before do
allow(@provider).to receive(:whyrun_supported?).and_return(true)
allow(@provider).to receive(:load_current_resource)
end
it "does not mark the new resource as updated" do
expect(@resource).not_to be_updated
expect(@resource).not_to be_updated_by_last_action
end
end
context "when converge actions have been added to the queue" do
describe "and provider supports whyrun mode" do
before do
@provider = ConvergeActionDemonstrator.new(@resource, @run_context)
end
it "should tell us that it does support whyrun" do
expect(@provider).to be_whyrun_supported
end
it "queues up converge actions" do
@provider.action_foo
expect(@provider.send(:converge_actions).actions.size).to eq(1)
end
it "executes pending converge actions to converge the system" do
@provider.run_action(:foo)
expect(@provider.instance_variable_get(:@system_state_altered)).to be_truthy
end
it "marks the resource as updated" do
@provider.run_action(:foo)
expect(@resource).to be_updated
expect(@resource).to be_updated_by_last_action
end
end
describe "and provider does not support whyrun mode" do
before do
Chef::Config[:why_run] = true
@provider = NoWhyrunDemonstrator.new(@resource, @run_context)
end
after do
Chef::Config[:why_run] = false
end
it "should tell us that it doesn't support whyrun" do
expect(@provider).not_to be_whyrun_supported
end
it "should automatically generate a converge_by block on the provider's behalf" do
@provider.run_action(:foo)
expect(@provider.send(:converge_actions).actions.size).to eq(0)
expect(@provider.system_state_altered).to be_falsey
end
it "should automatically execute the generated converge_by block" do
@provider.run_action(:foo)
expect(@provider.system_state_altered).to be_falsey
expect(@resource).not_to be_updated
expect(@resource).not_to be_updated_by_last_action
end
end
describe "and the resource is invalid" do
let(:provider) { CheckResourceSemanticsDemonstrator.new(@resource, @run_context) }
it "fails with InvalidResourceSpecification when run" do
expect { provider.run_action(:foo) }.to raise_error(Chef::Exceptions::InvalidResourceSpecification)
end
end
end
end
| 31.611399 | 115 | 0.720865 |
4aa4a2fa446b8c57e7e41ff7fc17be53b04fc618 | 157 | # frozen_string_literal: true
require 'moji_nested_hash/version'
require 'moji_nested_hash/hash/keys'
module MojiNestedHash
# Your code goes here...
end
| 17.444444 | 36 | 0.796178 |
1d1a8fe2ac1ac25f04b4c09762a44ec4777f3422 | 4,905 | Pod::Spec.new do |s|
s.name = 'FirebaseFirestore'
s.version = '8.3.0'
s.summary = 'Google Cloud Firestore'
s.description = <<-DESC
Google Cloud Firestore is a NoSQL document database built for automatic scaling, high performance, and ease of application development.
DESC
s.homepage = 'https://developers.google.com/'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-' + s.version.to_s
}
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '10.0'
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
# Header files that constitute the interface to this module. Only Objective-C
# headers belong here, since FirebaseFirestore is primarily an Objective-C
# framework.
s.public_header_files = 'Firestore/Source/Public/FirebaseFirestore/*.h'
# source_files contains most of the header and source files for the project.
# This includes files named in `public_header_files`.
#
# Each header in this list must be globally unique, even within customer
# projects. This generally means that only Objective-C Headers with a `FIR`
# or `FST` prefix can be in `source_files`. Non-public C++ headers that have
# no filename prefix must be in `preserve_paths`. See
# https://github.com/firebase/firebase-ios-sdk/issues/4035 for more details.
#
# Note: headers from FirebaseCore can be in this list because while they're
# not globally unique, each copy will be the same. It doesn't matter which
# version wins in the global header map. The benefit of keeping them here is
# that "quick open" by filename in Xcode will continue to work.
s.source_files = [
'FirebaseCore/Sources/Private/*.h',
'Firestore/Source/Public/FirebaseFirestore/*.h',
'Firestore/Source/**/*.{m,mm}',
'Firestore/Protos/nanopb/**/*.cc',
'Firestore/core/include/**/*.{cc,mm}',
'Firestore/core/src/**/*.{cc,mm}',
'Interop/Auth/Public/*.h',
]
# Internal headers that aren't necessarily globally unique. Most C++ internal
# headers should be here to avoid polluting the global header map with
# unprefixed filenames.
#
# These filenames won't be available in Xcode's "quick open" but the types
# inside these files will be available.
s.preserve_paths = [
'Firestore/Source/API/*.h',
'Firestore/Source/Core/*.h',
'Firestore/Source/Local/*.h',
'Firestore/Source/Remote/*.h',
'Firestore/Source/Util/*.h',
'Firestore/Protos/nanopb/**/*.h',
'Firestore/core/include/**/*.h',
'Firestore/core/src/**/*.h',
'Firestore/third_party/nlohmann_json/json.hpp',
]
s.requires_arc = [
'Firestore/Source/**/*',
'Firestore/core/src/**/*.mm',
]
# Exclude alternate implementations for other platforms. These types depend
# upon link-time substitution, and there's no provision within CocoaPods for
# selecting files dynamically.
s.exclude_files = [
'Firestore/core/src/api/input_validation_std.cc',
'Firestore/core/src/remote/connectivity_monitor_noop.cc',
'Firestore/core/src/util/filesystem_win.cc',
'Firestore/core/src/util/hard_assert_stdio.cc',
'Firestore/core/src/util/log_stdio.cc',
'Firestore/core/src/util/secure_random_openssl.cc'
]
s.dependency 'FirebaseCore', '~> 8.0'
abseil_version = '0.20200225.0'
s.dependency 'abseil/algorithm', abseil_version
s.dependency 'abseil/base', abseil_version
s.dependency 'abseil/memory', abseil_version
s.dependency 'abseil/meta', abseil_version
s.dependency 'abseil/strings/strings', abseil_version
s.dependency 'abseil/time', abseil_version
s.dependency 'abseil/types', abseil_version
s.dependency 'gRPC-C++', '~> 1.28.0'
s.dependency 'leveldb-library', '~> 1.22'
s.dependency 'nanopb', '~> 2.30908.0'
s.ios.frameworks = 'SystemConfiguration', 'UIKit'
s.osx.frameworks = 'SystemConfiguration'
s.tvos.frameworks = 'SystemConfiguration', 'UIKit'
s.library = 'c++'
s.pod_target_xcconfig = {
'CLANG_CXX_LANGUAGE_STANDARD' => 'c++0x',
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
"FIRFirestore_VERSION=#{s.version} " +
# The nanopb pod sets these defs, so we must too. (We *do* require 16bit
# (or larger) fields, so we'd have to set at least PB_FIELD_16BIT
# anyways.)
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' =>
'"${PODS_TARGET_SRCROOT}" ' +
'"${PODS_TARGET_SRCROOT}/Firestore/Source/Public/FirebaseFirestore" ' +
'"${PODS_ROOT}/nanopb" ' +
'"${PODS_TARGET_SRCROOT}/Firestore/Protos/nanopb"'
}
s.compiler_flags = '$(inherited) -Wreorder -Werror=reorder -Wno-comma'
end
| 38.928571 | 135 | 0.683384 |
7a0c1aa93c1be76aab5fe2f58688d21f1367189a | 306 | # card_href is the stored href for the Card
# order_href is the stored href for the Order
card = Balanced::Card.fetch(card_href)
card.debit(
:amount => 5000,
:appears_on_statement_as => 'Statement text',
:description => 'Some descriptive text for the debit in the dashboard',
:order => order_href
) | 34 | 73 | 0.735294 |
3941bc6bf5c929636fed54fef50c0e7d53440a25 | 415 | RSpec.describe "Relaton Extract" do
describe "relaton extract" do
it "sends extract message to the extractor" do
allow(Relaton::Cli::RelatonFile).to receive(:extract)
command = %w(extract spec/fixtures ./tmp -x rxml)
Relaton::Cli.start(command)
expect(Relaton::Cli::RelatonFile).to have_received(:extract).
with("spec/fixtures", "./tmp", extension: "rxml")
end
end
end
| 29.642857 | 67 | 0.672289 |
911d56139d18fc7babad78688834f6e5a17fdca7 | 862 | module SessionsHelper
def log_in(user)
session[:user_id] = user.id
end
def current_user
if (user_id = session[:user_id])
@current_user ||= User.find_by(id: user_id)
elsif (user_id = cookies.encrypted[:user_id])
user = User.find_by(id: user_id)
if user && user.authenticated?(cookies[:remember_token])
log_in user
@current_user = user
end
end
end
def logged_in?
!current_user.nil?
end
def log_out
forget(current_user)
session.delete(:user_id)
@current_user = nil
end
def remember(user)
user.remember
cookies.permanent.encrypted[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
# Forgets a persistent session.
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
end
| 20.52381 | 62 | 0.669374 |
b91afd94ec93e7b5fdf734fd29f9149180d8b221 | 3,874 | include InstructorInterfaceHelperSpec
def create_assignment_questionnaire survey_name
visit '/questionnaires/new?model=Assignment+SurveyQuestionnaire&private=0'
fill_in 'questionnaire_name', with: survey_name
find('input[name="commit"]').click
end
def deploy_survey(start_date, end_date, survey_name)
login_as('instructor6')
expect(page).to have_content('Manage content')
create_assignment_questionnaire survey_name
survey = Questionnaire.where(name: survey_name)
instructor = User.where(name: 'instructor6').first
assignment = Assignment.where(instructor_id: instructor.id).first
visit '/survey_deployment/new?id=' + assignment.id.to_s + '&type=AssignmentSurveyDeployment'
expect(page).to have_content('New Survey Deployment')
fill_in 'survey_deployment_start_date', with: start_date
fill_in 'survey_deployment_end_date', with: end_date
select survey.name, from: "survey_deployment_questionnaire_id"
find('input[name="commit"]').click
end
describe "Survey questionnaire tests for instructor interface" do
before(:each) do
assignment_setup
@previous_day = (Time.now.getlocal - 1 * 86_400).strftime("%Y-%m-%d %H:%M:%S")
@next_day = (Time.now.getlocal + 1 * 86_400).strftime("%Y-%m-%d %H:%M:%S")
@next_to_next_day = (Time.now.getlocal + 2 * 86_400).strftime("%Y-%m-%d %H:%M:%S")
end
it "is able to create a survey" do
login_as('instructor6')
survey_name = "Survey Questionnaire 1"
create_assignment_questionnaire survey_name
expect(Questionnaire.where(name: survey_name)).to exist
end
it "is able to deploy a survey with valid dates" do
survey_name = 'Survey Questionnaire 1'
# passing current time + 1 day for start date and current time + 2 days for end date
deploy_survey(@next_day, @next_to_next_day, survey_name)
expect(page).to have_content(survey_name)
end
it "is not able to deploy a survey with invalid dates" do
survey_name = 'Survey Questionnaire 1'
# passing current time - 1 day for start date and current time + 2 days for end date
deploy_survey(@previous_day, @next_day, survey_name)
expect(page).to have_content(survey_name)
end
it "is able to view statistics of a survey" do
survey_name = 'Survey Questionnaire 1'
deploy_survey(@next_day, @next_to_next_day, survey_name)
survey_questionnaire_1 = Questionnaire.where(name: survey_name).first
# adding some questions for the deployed survey
visit '/questionnaires/' + survey_questionnaire_1.id.to_s + '/edit'
fill_in('question_total_num', with: '1')
select('Criterion', from: 'question_type')
click_button "Add"
expect(page).to have_content('Remove')
fill_in "Edit question content here", with: "Test question 1"
click_button "Save assignment survey questionnaire"
expect(page).to have_content('All questions have been successfully saved!')
survey_deployment = SurveyDeployment.where(questionnaire_id: survey_questionnaire_1.id).first
question = Question.find_by_sql("select * from questions where questionnaire_id = " + survey_questionnaire_1.id.to_s +
" and (type = 'Criterion' OR type = 'Checkbox')")
visit '/survey_deployment/generate_statistics/' + survey_deployment.id.to_s
question.each do |q|
expect(page).to have_content(q.txt)
end
expect(page).to have_content("No responses for this question")
end
it "is able to view responses of a survey" do
survey_name = 'Survey Questionnaire 1'
deploy_survey(@next_day, @next_to_next_day, survey_name)
survey_questionnaire_1 = Questionnaire.where(name: survey_name).first
survey_deployment = SurveyDeployment.where(questionnaire_id: survey_questionnaire_1.id).first
# after adding a response:
visit '/survey_deployment/view_responses/' + survey_deployment.id.to_s
expect(page).to have_content(survey_name)
end
end
| 40.778947 | 122 | 0.744966 |
1c3225b21ee85c3e1c98bf1c21f4ccc1852c0ea7 | 284 | class CreateDanvanthiriCoreQuotations < ActiveRecord::Migration
def change
create_table :danvanthiri_core_quotations do |t|
t.integer :owner_id
t.string :owner_type
t.integer :pharmacy_id
t.string :photo
t.timestamps null: false
end
end
end
| 21.846154 | 63 | 0.707746 |
e86e5e924625cbcb1f8acd460c96b196d9dd7a02 | 159 | #!/usr/bin/ruby
=begin
This version simply retrieves the user's name and displays it directly.
=end
user_name = gets.chomp
puts user_name | 22.714286 | 92 | 0.786164 |
6a264e733b4de8ac95b010f04e3842e164b6edb8 | 3,419 | require 'test_helper'
class UserTest < ActiveSupport::TestCase
def setup
@user = User.new(name: "Example User", email: "[email protected]", password: "foobar", password_confirmation: "foobar")
end
# Validity test
test "should be valid" do
assert @user.valid?
end
# Presence validation tests
test "name should be present" do
@user.name = " "
assert_not @user.valid?
end
test "email should be present" do
@user.email = " "
assert_not @user.valid?
end
# Length validation tests
test "name should not be too long" do
@user.name = "a" * 51
assert_not @user.valid?
end
test "email should not be too long" do
@user.email = "a" * 244 + "@example.com"
assert_not @user.valid?
end
# Test valid email formats
test "email validation should accept valid addresses" do
valid_addresses = %w[[email protected] [email protected] [email protected] [email protected] [email protected]]
valid_addresses.each do |valid_address|
@user.email = valid_address
assert @user.valid?, "#{valid_address.inspect} should be valid"
end
end
test "email validation should reject invalid addresses" do
invalid_addresses = %w[user@example,com user_at_foo.org user.name@example. foo@bar_baz.com foo@bar+baz.com [email protected]]
invalid_addresses.each do |invalid_address|
@user.email = invalid_address
assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
end
end
# Test that duplicate email addresses are rejected
test "email addresses should be unique" do
duplicate_user = @user.dup # duplicate a user with the same attributes
duplicate_user.email = @user.email.upcase # because email addresses are case-insensitive
@user.save
assert_not duplicate_user.valid?
end
# Test that email addresses are saved as lower-case
test "email addresses should be saved as lower-case" do
mixed_case_email = "[email protected]"
@user.email = mixed_case_email
@user.save
assert_equal mixed_case_email.downcase, @user.reload.email
end
test "password should be present (nonblank)" do
@user.password = @user.password_confirmation = " " * 6
assert_not @user.valid?
end
test "password should have a minimum length" do
@user.password = @user.password_confirmation = "a" * 5
assert_not @user.valid?
end
test "authenticated? should return false for a user with nil digest" do
assert_not @user.authenticated?(:remember, '')
end
test "associated microposts should be destroyed" do
@user.save
@user.microposts.create!(content: "Lorem ipsum")
assert_difference 'Micropost.count', -1 do
@user.destroy
end
end
test "should follow and unfollow a user" do
michael = users(:michael)
archer = users(:archer)
assert_not michael.following?(archer)
michael.follow(archer)
assert michael.following?(archer)
assert archer.followers.include?(michael)
michael.unfollow(archer)
assert_not michael.following?(archer)
end
test "feed should have the right posts" do
michael = users(:michael)
archer = users(:archer)
lana = users(:lana)
# Check posts from followed users
lana.microposts.each do |post_following|
assert michael.feed.include?(post_following)
end
# Check the user's own posts
michael.microposts.each do |post_self|
assert michael.feed.include?(post_self)
end
# Check posts from unfollowed users
archer.microposts.each do |post_unfollowed|
assert_not michael.feed.include?(post_unfollowed)
end
end
end
| 28.491667 | 124 | 0.697865 |
1c842fc30764dba6058b01c6df0f7b587e00c9f7 | 28,069 | require "#{File.dirname(__FILE__)}/../abstract_unit"
silence_warnings do
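  # Lightweight stand-ins for ActiveRecord models so the form helpers can be exercised without a database.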
Post = Struct.new(:title, :author_name, :body, :secret, :written_on, :cost)
Post.class_eval do
alias_method :title_before_type_cast, :title unless respond_to?(:title_before_type_cast)
alias_method :body_before_type_cast, :body unless respond_to?(:body_before_type_cast)
alias_method :author_name_before_type_cast, :author_name unless respond_to?(:author_name_before_type_cast)
def new_record=(boolean)
@new_record = boolean
end
def new_record?
@new_record
end
end
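  # Minimal fake comment model: saving assigns fixed ids so record identification and URL helpers have something to work with.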
class Comment
attr_reader :id
attr_reader :post_id
def save; @id = 1; @post_id = 1 end
def new_record?; @id.nil? end
def name
@id.nil? ? 'new comment' : "comment ##{@id}"
end
end
end
class Comment::Nested < Comment; end
class FormHelperTest < Test::Unit::TestCase
include ActionView::Helpers::FormHelper
include ActionView::Helpers::FormTagHelper
include ActionView::Helpers::UrlHelper
include ActionView::Helpers::TagHelper
include ActionView::Helpers::TextHelper
include ActionView::Helpers::ActiveRecordHelper
include ActionView::Helpers::RecordIdentificationHelper
include ActionController::PolymorphicRoutes
def setup
@post = Post.new
@comment = Comment.new
def @post.errors()
Class.new{
def on(field); "can't be empty" if field == "author_name"; end
def empty?() false end
def count() 1 end
def full_messages() [ "Author name can't be empty" ] end
}.new
end
def @post.id; 123; end
def @post.id_before_type_cast; 123; end
@post.title = "Hello World"
@post.author_name = ""
@post.body = "Back to the hill and over it again!"
@post.secret = 1
@post.written_on = Date.new(2004, 6, 15)
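    # Stub controller: records the options passed to url_for and always returns a fixed URL.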
@controller = Class.new do
attr_reader :url_for_options
def url_for(options)
@url_for_options = options
"http://www.example.com"
end
end
@controller = @controller.new
end
def test_label
assert_dom_equal('<label for="post_title">Title</label>', label("post", "title"))
assert_dom_equal('<label for="post_title">The title goes here</label>', label("post", "title", "The title goes here"))
assert_dom_equal(
'<label class="title_label" for="post_title">Title</label>',
label("post", "title", nil, :class => 'title_label')
)
end
def test_label_with_symbols
assert_dom_equal('<label for="post_title">Title</label>', label(:post, :title))
end
def test_text_field
assert_dom_equal(
'<input id="post_title" name="post[title]" size="30" type="text" value="Hello World" />', text_field("post", "title")
)
assert_dom_equal(
'<input id="post_title" name="post[title]" size="30" type="password" value="Hello World" />', password_field("post", "title")
)
assert_dom_equal(
'<input id="person_name" name="person[name]" size="30" type="password" />', password_field("person", "name")
)
end
def test_text_field_with_escapes
@post.title = "<b>Hello World</b>"
assert_dom_equal(
'<input id="post_title" name="post[title]" size="30" type="text" value="<b>Hello World</b>" />', text_field("post", "title")
)
end
def test_text_field_with_options
expected = '<input id="post_title" name="post[title]" size="35" type="text" value="Hello World" />'
assert_dom_equal expected, text_field("post", "title", "size" => 35)
assert_dom_equal expected, text_field("post", "title", :size => 35)
end
def test_text_field_assuming_size
expected = '<input id="post_title" maxlength="35" name="post[title]" size="35" type="text" value="Hello World" />'
assert_dom_equal expected, text_field("post", "title", "maxlength" => 35)
assert_dom_equal expected, text_field("post", "title", :maxlength => 35)
end
def test_text_field_removing_size
expected = '<input id="post_title" maxlength="35" name="post[title]" type="text" value="Hello World" />'
assert_dom_equal expected, text_field("post", "title", "maxlength" => 35, "size" => nil)
assert_dom_equal expected, text_field("post", "title", :maxlength => 35, :size => nil)
end
def test_text_field_doesnt_change_param_values
object_name = 'post[]'
expected = '<input id="post_123_title" name="post[123][title]" size="30" type="text" value="Hello World" />'
assert_equal expected, text_field(object_name, "title")
assert_equal object_name, "post[]"
end
def test_hidden_field
assert_dom_equal '<input id="post_title" name="post[title]" type="hidden" value="Hello World" />',
hidden_field("post", "title")
end
def test_hidden_field_with_escapes
@post.title = "<b>Hello World</b>"
assert_dom_equal '<input id="post_title" name="post[title]" type="hidden" value="<b>Hello World</b>" />',
hidden_field("post", "title")
end
  def test_hidden_field_with_options
assert_dom_equal '<input id="post_title" name="post[title]" type="hidden" value="Something Else" />',
hidden_field("post", "title", :value => "Something Else")
end
def test_check_box
assert_dom_equal(
'<input checked="checked" id="post_secret" name="post[secret]" type="checkbox" value="1" /><input name="post[secret]" type="hidden" value="0" />',
check_box("post", "secret")
)
@post.secret = 0
assert_dom_equal(
'<input id="post_secret" name="post[secret]" type="checkbox" value="1" /><input name="post[secret]" type="hidden" value="0" />',
check_box("post", "secret")
)
assert_dom_equal(
'<input checked="checked" id="post_secret" name="post[secret]" type="checkbox" value="1" /><input name="post[secret]" type="hidden" value="0" />',
check_box("post", "secret" ,{"checked"=>"checked"})
)
@post.secret = true
assert_dom_equal(
'<input checked="checked" id="post_secret" name="post[secret]" type="checkbox" value="1" /><input name="post[secret]" type="hidden" value="0" />',
check_box("post", "secret")
)
end
def test_check_box_with_explicit_checked_and_unchecked_values
@post.secret = "on"
assert_dom_equal(
'<input checked="checked" id="post_secret" name="post[secret]" type="checkbox" value="on" /><input name="post[secret]" type="hidden" value="off" />',
check_box("post", "secret", {}, "on", "off")
)
end
def test_radio_button
assert_dom_equal('<input checked="checked" id="post_title_hello_world" name="post[title]" type="radio" value="Hello World" />',
radio_button("post", "title", "Hello World")
)
assert_dom_equal('<input id="post_title_goodbye_world" name="post[title]" type="radio" value="Goodbye World" />',
radio_button("post", "title", "Goodbye World")
)
end
def test_radio_button_is_checked_with_integers
assert_dom_equal('<input checked="checked" id="post_secret_1" name="post[secret]" type="radio" value="1" />',
radio_button("post", "secret", "1")
)
end
def test_radio_button_respects_passed_in_id
assert_dom_equal('<input checked="checked" id="foo" name="post[secret]" type="radio" value="1" />',
radio_button("post", "secret", "1", :id=>"foo")
)
end
def test_text_area
assert_dom_equal(
'<textarea cols="40" id="post_body" name="post[body]" rows="20">Back to the hill and over it again!</textarea>',
text_area("post", "body")
)
end
def test_text_area_with_escapes
@post.body = "Back to <i>the</i> hill and over it again!"
assert_dom_equal(
'<textarea cols="40" id="post_body" name="post[body]" rows="20">Back to <i>the</i> hill and over it again!</textarea>',
text_area("post", "body")
)
end
def test_text_area_with_alternate_value
assert_dom_equal(
'<textarea cols="40" id="post_body" name="post[body]" rows="20">Testing alternate values.</textarea>',
text_area("post", "body", :value => 'Testing alternate values.')
)
end
def test_text_area_with_size_option
assert_dom_equal(
'<textarea cols="183" id="post_body" name="post[body]" rows="820">Back to the hill and over it again!</textarea>',
text_area("post", "body", :size => "183x820")
)
end
def test_explicit_name
assert_dom_equal(
'<input id="post_title" name="dont guess" size="30" type="text" value="Hello World" />', text_field("post", "title", "name" => "dont guess")
)
assert_dom_equal(
'<textarea cols="40" id="post_body" name="really!" rows="20">Back to the hill and over it again!</textarea>',
text_area("post", "body", "name" => "really!")
)
assert_dom_equal(
'<input checked="checked" id="post_secret" name="i mean it" type="checkbox" value="1" /><input name="i mean it" type="hidden" value="0" />',
check_box("post", "secret", "name" => "i mean it")
)
assert_dom_equal text_field("post", "title", "name" => "dont guess"),
text_field("post", "title", :name => "dont guess")
assert_dom_equal text_area("post", "body", "name" => "really!"),
text_area("post", "body", :name => "really!")
assert_dom_equal check_box("post", "secret", "name" => "i mean it"),
check_box("post", "secret", :name => "i mean it")
end
def test_explicit_id
assert_dom_equal(
'<input id="dont guess" name="post[title]" size="30" type="text" value="Hello World" />', text_field("post", "title", "id" => "dont guess")
)
assert_dom_equal(
'<textarea cols="40" id="really!" name="post[body]" rows="20">Back to the hill and over it again!</textarea>',
text_area("post", "body", "id" => "really!")
)
assert_dom_equal(
'<input checked="checked" id="i mean it" name="post[secret]" type="checkbox" value="1" /><input name="post[secret]" type="hidden" value="0" />',
check_box("post", "secret", "id" => "i mean it")
)
assert_dom_equal text_field("post", "title", "id" => "dont guess"),
text_field("post", "title", :id => "dont guess")
assert_dom_equal text_area("post", "body", "id" => "really!"),
text_area("post", "body", :id => "really!")
assert_dom_equal check_box("post", "secret", "id" => "i mean it"),
check_box("post", "secret", :id => "i mean it")
end
def test_auto_index
pid = @post.id
assert_dom_equal(
"<input id=\"post_#{pid}_title\" name=\"post[#{pid}][title]\" size=\"30\" type=\"text\" value=\"Hello World\" />", text_field("post[]","title")
)
assert_dom_equal(
"<textarea cols=\"40\" id=\"post_#{pid}_body\" name=\"post[#{pid}][body]\" rows=\"20\">Back to the hill and over it again!</textarea>",
text_area("post[]", "body")
)
assert_dom_equal(
"<input checked=\"checked\" id=\"post_#{pid}_secret\" name=\"post[#{pid}][secret]\" type=\"checkbox\" value=\"1\" /><input name=\"post[#{pid}][secret]\" type=\"hidden\" value=\"0\" />",
check_box("post[]", "secret")
)
assert_dom_equal(
"<input checked=\"checked\" id=\"post_#{pid}_title_hello_world\" name=\"post[#{pid}][title]\" type=\"radio\" value=\"Hello World\" />",
radio_button("post[]", "title", "Hello World")
)
assert_dom_equal("<input id=\"post_#{pid}_title_goodbye_world\" name=\"post[#{pid}][title]\" type=\"radio\" value=\"Goodbye World\" />",
radio_button("post[]", "title", "Goodbye World")
)
end
def test_form_for
_erbout = ''
form_for(:post, @post, :html => { :id => 'create-post' }) do |f|
_erbout.concat f.label(:title)
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
_erbout.concat f.submit('Create post')
end
expected =
"<form action='http://www.example.com' id='create-post' method='post'>" +
"<label for='post_title'>Title</label>" +
"<input name='post[title]' size='30' type='text' id='post_title' value='Hello World' />" +
"<textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' />" +
"<input name='commit' id='post_submit' type='submit' value='Create post' />" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_form_for_with_method
_erbout = ''
form_for(:post, @post, :html => { :id => 'create-post', :method => :put }) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<form action='http://www.example.com' id='create-post' method='post'>" +
"<div style='margin:0;padding:0'><input name='_method' type='hidden' value='put' /></div>" +
"<input name='post[title]' size='30' type='text' id='post_title' value='Hello World' />" +
"<textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' />" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_form_for_without_object
_erbout = ''
form_for(:post, :html => { :id => 'create-post' }) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<form action='http://www.example.com' id='create-post' method='post'>" +
"<input name='post[title]' size='30' type='text' id='post_title' value='Hello World' />" +
"<textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' />" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_form_for_with_index
_erbout = ''
form_for("post[]", @post) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<form action='http://www.example.com' method='post'>" +
"<input name='post[123][title]' size='30' type='text' id='post_123_title' value='Hello World' />" +
"<textarea name='post[123][body]' id='post_123_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='post[123][secret]' checked='checked' type='checkbox' id='post_123_secret' value='1' />" +
"<input name='post[123][secret]' type='hidden' value='0' />" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_nested_fields_for
_erbout = ''
form_for(:post, @post) do |f|
f.fields_for(:comment, @post) do |c|
_erbout.concat c.text_field(:title)
end
end
expected = "<form action='http://www.example.com' method='post'>" +
"<input name='post[comment][title]' size='30' type='text' id='post_comment_title' value='Hello World' />" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_fields_for
_erbout = ''
fields_for(:post, @post) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<input name='post[title]' size='30' type='text' id='post_title' value='Hello World' />" +
"<textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' />"
assert_dom_equal expected, _erbout
end
def test_fields_for_without_object
_erbout = ''
fields_for(:post) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<input name='post[title]' size='30' type='text' id='post_title' value='Hello World' />" +
"<textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' />"
assert_dom_equal expected, _erbout
end
def test_fields_for_object_with_bracketed_name
_erbout = ''
fields_for("author[post]", @post) do |f|
_erbout.concat f.text_field(:title)
end
assert_dom_equal "<input name='author[post][title]' size='30' type='text' id='author_post_title' value='Hello World' />",
_erbout
end
def test_form_builder_does_not_have_form_for_method
assert ! ActionView::Helpers::FormBuilder.instance_methods.include?('form_for')
end
def test_form_for_and_fields_for
_erbout = ''
form_for(:post, @post, :html => { :id => 'create-post' }) do |post_form|
_erbout.concat post_form.text_field(:title)
_erbout.concat post_form.text_area(:body)
fields_for(:parent_post, @post) do |parent_fields|
_erbout.concat parent_fields.check_box(:secret)
end
end
expected =
"<form action='http://www.example.com' id='create-post' method='post'>" +
"<input name='post[title]' size='30' type='text' id='post_title' value='Hello World' />" +
"<textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea>" +
"<input name='parent_post[secret]' checked='checked' type='checkbox' id='parent_post_secret' value='1' />" +
"<input name='parent_post[secret]' type='hidden' value='0' />" +
"</form>"
assert_dom_equal expected, _erbout
end
class LabelledFormBuilder < ActionView::Helpers::FormBuilder
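    # Wrap every field helper except hidden_field so it renders a label, the original tag, and a trailing <br/>.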
(field_helpers - %w(hidden_field)).each do |selector|
src = <<-END_SRC
def #{selector}(field, *args, &proc)
"<label for='\#{field}'>\#{field.to_s.humanize}:</label> " + super + "<br/>"
end
END_SRC
class_eval src, __FILE__, __LINE__
end
end
def test_form_for_with_labelled_builder
_erbout = ''
form_for(:post, @post, :builder => LabelledFormBuilder) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<form action='http://www.example.com' method='post'>" +
"<label for='title'>Title:</label> <input name='post[title]' size='30' type='text' id='post_title' value='Hello World' /><br/>" +
"<label for='body'>Body:</label> <textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea><br/>" +
"<label for='secret'>Secret:</label> <input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' /><br/>" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_default_form_builder
old_default_form_builder, ActionView::Base.default_form_builder =
ActionView::Base.default_form_builder, LabelledFormBuilder
_erbout = ''
form_for(:post, @post) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<form action='http://www.example.com' method='post'>" +
"<label for='title'>Title:</label> <input name='post[title]' size='30' type='text' id='post_title' value='Hello World' /><br/>" +
"<label for='body'>Body:</label> <textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea><br/>" +
"<label for='secret'>Secret:</label> <input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' /><br/>" +
"</form>"
assert_dom_equal expected, _erbout
ensure
ActionView::Base.default_form_builder = old_default_form_builder
end
def test_default_form_builder_with_active_record_helpers
_erbout = ''
form_for(:post, @post) do |f|
_erbout.concat f.error_message_on('author_name')
_erbout.concat f.error_messages
end
expected = %(<form action='http://www.example.com' method='post'>) +
%(<div class='formError'>can't be empty</div>) +
%(<div class="errorExplanation" id="errorExplanation"><h2>1 error prohibited this post from being saved</h2><p>There were problems with the following fields:</p><ul><li>Author name can't be empty</li></ul></div>) +
%(</form>)
assert_dom_equal expected, _erbout
end
# Perhaps this test should be moved to prototype helper tests.
def test_remote_form_for_with_labelled_builder
self.extend ActionView::Helpers::PrototypeHelper
_erbout = ''
remote_form_for(:post, @post, :builder => LabelledFormBuilder) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
%(<form action="http://www.example.com" onsubmit="new Ajax.Request('http://www.example.com', {asynchronous:true, evalScripts:true, parameters:Form.serialize(this)}); return false;" method="post">) +
"<label for='title'>Title:</label> <input name='post[title]' size='30' type='text' id='post_title' value='Hello World' /><br/>" +
"<label for='body'>Body:</label> <textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea><br/>" +
"<label for='secret'>Secret:</label> <input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' /><br/>" +
"</form>"
assert_dom_equal expected, _erbout
end
def test_fields_for_with_labelled_builder
_erbout = ''
fields_for(:post, @post, :builder => LabelledFormBuilder) do |f|
_erbout.concat f.text_field(:title)
_erbout.concat f.text_area(:body)
_erbout.concat f.check_box(:secret)
end
expected =
"<label for='title'>Title:</label> <input name='post[title]' size='30' type='text' id='post_title' value='Hello World' /><br/>" +
"<label for='body'>Body:</label> <textarea name='post[body]' id='post_body' rows='20' cols='40'>Back to the hill and over it again!</textarea><br/>" +
"<label for='secret'>Secret:</label> <input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
"<input name='post[secret]' type='hidden' value='0' /><br/>"
assert_dom_equal expected, _erbout
end
def test_form_for_with_html_options_adds_options_to_form_tag
_erbout = ''
form_for(:post, @post, :html => {:id => 'some_form', :class => 'some_class'}) do |f| end
expected = "<form action=\"http://www.example.com\" class=\"some_class\" id=\"some_form\" method=\"post\"></form>"
assert_dom_equal expected, _erbout
end
def test_form_for_with_string_url_option
_erbout = ''
form_for(:post, @post, :url => 'http://www.otherdomain.com') do |f| end
assert_equal '<form action="http://www.otherdomain.com" method="post"></form>', _erbout
end
def test_form_for_with_hash_url_option
_erbout = ''
form_for(:post, @post, :url => {:controller => 'controller', :action => 'action'}) do |f| end
assert_equal 'controller', @controller.url_for_options[:controller]
assert_equal 'action', @controller.url_for_options[:action]
end
def test_form_for_with_record_url_option
_erbout = ''
form_for(:post, @post, :url => @post) do |f| end
expected = "<form action=\"/posts/123\" method=\"post\"></form>"
assert_equal expected, _erbout
end
def test_form_for_with_existing_object
_erbout = ''
form_for(@post) do |f| end
expected = "<form action=\"/posts/123\" class=\"edit_post\" id=\"edit_post_123\" method=\"post\"><div style=\"margin:0;padding:0\"><input name=\"_method\" type=\"hidden\" value=\"put\" /></div></form>"
assert_equal expected, _erbout
end
def test_form_for_with_new_object
_erbout = ''
post = Post.new
post.new_record = true
def post.id() nil end
form_for(post) do |f| end
expected = "<form action=\"/posts\" class=\"new_post\" id=\"new_post\" method=\"post\"></form>"
assert_equal expected, _erbout
end
def test_form_for_with_existing_object_in_list
@post.new_record = false
@comment.save
_erbout = ''
form_for([@post, @comment]) {}
expected = %(<form action="#{comment_path(@post, @comment)}" class="edit_comment" id="edit_comment_1" method="post"><div style="margin:0;padding:0"><input name="_method" type="hidden" value="put" /></div></form>)
assert_dom_equal expected, _erbout
end
def test_form_for_with_new_object_in_list
@post.new_record = false
_erbout = ''
form_for([@post, @comment]) {}
expected = %(<form action="#{comments_path(@post)}" class="new_comment" id="new_comment" method="post"></form>)
assert_dom_equal expected, _erbout
end
def test_form_for_with_existing_object_and_namespace_in_list
@post.new_record = false
@comment.save
_erbout = ''
form_for([:admin, @post, @comment]) {}
expected = %(<form action="#{admin_comment_path(@post, @comment)}" class="edit_comment" id="edit_comment_1" method="post"><div style="margin:0;padding:0"><input name="_method" type="hidden" value="put" /></div></form>)
assert_dom_equal expected, _erbout
end
def test_form_for_with_new_object_and_namespace_in_list
@post.new_record = false
_erbout = ''
form_for([:admin, @post, @comment]) {}
expected = %(<form action="#{admin_comments_path(@post)}" class="new_comment" id="new_comment" method="post"></form>)
assert_dom_equal expected, _erbout
end
def test_form_for_with_existing_object_and_custom_url
_erbout = ''
form_for(@post, :url => "/super_posts") do |f| end
expected = "<form action=\"/super_posts\" class=\"edit_post\" id=\"edit_post_123\" method=\"post\"><div style=\"margin:0;padding:0\"><input name=\"_method\" type=\"hidden\" value=\"put\" /></div></form>"
assert_equal expected, _erbout
end
def test_remote_form_for_with_html_options_adds_options_to_form_tag
self.extend ActionView::Helpers::PrototypeHelper
_erbout = ''
remote_form_for(:post, @post, :html => {:id => 'some_form', :class => 'some_class'}) do |f| end
expected = "<form action=\"http://www.example.com\" class=\"some_class\" id=\"some_form\" method=\"post\" onsubmit=\"new Ajax.Request('http://www.example.com', {asynchronous:true, evalScripts:true, parameters:Form.serialize(this)}); return false;\"></form>"
assert_dom_equal expected, _erbout
end
protected
def comments_path(post)
"/posts/#{post.id}/comments"
end
alias_method :post_comments_path, :comments_path
def comment_path(post, comment)
"/posts/#{post.id}/comments/#{comment.id}"
end
alias_method :post_comment_path, :comment_path
def admin_comments_path(post)
"/admin/posts/#{post.id}/comments"
end
alias_method :admin_post_comments_path, :admin_comments_path
def admin_comment_path(post, comment)
"/admin/posts/#{post.id}/comments/#{comment.id}"
end
alias_method :admin_post_comment_path, :admin_comment_path
def posts_path
"/posts"
end
def post_path(post)
"/posts/#{post.id}"
end
def protect_against_forgery?
false
end
end
| 39.038943 | 261 | 0.644091 |
f89395fccaf8fab6bea80801eb14cd0d4274ab43 | 44,119 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Git LFS API and storage' do
include LfsHttpHelpers
include ProjectForksHelper
include WorkhorseHelpers
let_it_be(:project, reload: true) { create(:project, :empty_repo) }
let_it_be(:user) { create(:user) }
context 'with projects' do
it_behaves_like 'LFS http requests' do
let_it_be(:other_project, reload: true) { create(:project, :empty_repo) }
let(:container) { project }
let(:authorize_guest) { project.add_guest(user) }
let(:authorize_download) { project.add_reporter(user) }
let(:authorize_upload) { project.add_developer(user) }
context 'project specific LFS settings' do
let(:body) { upload_body(sample_object) }
before do
authorize_upload
project.update_attribute(:lfs_enabled, project_lfs_enabled)
subject
end
describe 'LFS disabled in project' do
let(:project_lfs_enabled) { false }
context 'when uploading' do
subject(:request) { post_lfs_json(batch_url(project), body, headers) }
it_behaves_like 'LFS http 404 response'
end
context 'when downloading' do
subject(:request) { get(objects_url(project, sample_oid), params: {}, headers: headers) }
it_behaves_like 'LFS http 404 response'
end
end
describe 'LFS enabled in project' do
let(:project_lfs_enabled) { true }
context 'when uploading' do
subject(:request) { post_lfs_json(batch_url(project), body, headers) }
it_behaves_like 'LFS http 200 response'
end
context 'when downloading' do
subject(:request) { get(objects_url(project, sample_oid), params: {}, headers: headers) }
it_behaves_like 'LFS http 200 blob response'
end
end
end
describe 'when fetching LFS object' do
subject(:request) { get objects_url(project, sample_oid), params: {}, headers: headers }
let(:response) { request && super() }
before do
project.lfs_objects << lfs_object
end
context 'when LFS uses object storage' do
before do
authorize_download
end
context 'when proxy download is enabled' do
before do
stub_lfs_object_storage(proxy_download: true)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with the workhorse send-url' do
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
before do
stub_lfs_object_storage(proxy_download: false)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
expect(response).to have_gitlab_http_status(:found)
end
it 'responds with the file location' do
expect(response.location).to include(lfs_object.reload.file.path)
end
end
end
context 'when deploy key is authorized' do
let_it_be(:key) { create(:deploy_key) }
let(:authorization) { authorize_deploy_key }
before do
project.deploy_keys << key
end
it_behaves_like 'LFS http 200 blob response'
end
context 'when using a user key (LFSToken)' do
let(:authorization) { authorize_user_key }
context 'when user allowed' do
before do
authorize_download
end
it_behaves_like 'LFS http 200 blob response'
context 'when user password is expired' do
let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
it_behaves_like 'LFS http 401 response'
end
context 'when user is blocked' do
let_it_be(:user) { create(:user, :blocked)}
it_behaves_like 'LFS http 401 response'
end
end
context 'when user not allowed' do
it_behaves_like 'LFS http 404 response'
end
end
context 'when build is authorized as' do
let(:authorization) { authorize_ci_project }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
shared_examples 'can download LFS only from own projects' do
context 'for owned project' do
let_it_be(:project) { create(:project, namespace: user.namespace) }
it_behaves_like 'LFS http 200 blob response'
end
context 'for member of project' do
before do
authorize_download
end
it_behaves_like 'LFS http 200 blob response'
end
context 'for other project' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
it 'rejects downloading code' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'administrator', :enable_admin_mode do
let_it_be(:user) { create(:admin) }
it_behaves_like 'can download LFS only from own projects'
end
context 'regular user' do
it_behaves_like 'can download LFS only from own projects'
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it_behaves_like 'can download LFS only from own projects'
end
end
end
describe 'when handling LFS batch request' do
subject(:request) { post_lfs_json batch_url(project), body, headers }
let(:response) { request && super() }
before do
project.lfs_objects << lfs_object
end
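        # Checks whether the batch response re-issues an LFS token (user credentials) or echoes back the original Authorization header (token credentials).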
shared_examples 'process authorization header' do |renew_authorization:|
let(:response_authorization) do
authorization_in_action(lfs_actions.first)
end
if renew_authorization
context 'when the authorization comes from a user' do
it 'returns a new valid LFS token authorization' do
expect(response_authorization).not_to eq(authorization)
end
it 'returns a valid token' do
username, token = ::Base64.decode64(response_authorization.split(' ', 2).last).split(':', 2)
expect(username).to eq(user.username)
expect(Gitlab::LfsToken.new(user).token_valid?(token)).to be_truthy
end
it 'generates only one new token per each request' do
authorizations = lfs_actions.map do |action|
authorization_in_action(action)
end.compact
expect(authorizations.uniq.count).to eq 1
end
end
else
context 'when the authorization comes from a token' do
it 'returns the same authorization header' do
expect(response_authorization).to eq(authorization)
end
end
end
def lfs_actions
json_response['objects'].map { |a| a['actions'] }.compact
end
def authorization_in_action(action)
(action['upload'] || action['download']).dig('header', 'Authorization')
end
end
describe 'download' do
let(:body) { download_body(sample_object) }
shared_examples 'an authorized request' do |renew_authorization:|
context 'when downloading an LFS object that is assigned to our project' do
it_behaves_like 'LFS http 200 response'
it 'with href to download' do
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first['actions']['download']['href']).to eq(objects_url(project, sample_oid))
end
it_behaves_like 'process authorization header', renew_authorization: renew_authorization
end
context 'when downloading an LFS object that is assigned to other project' do
before do
lfs_object.update!(projects: [other_project])
end
it_behaves_like 'LFS http 200 response'
it 'with an 404 for specific object' do
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first['error']).to include('code' => 404, 'message' => "Object does not exist on the server or you don't have permissions to access it")
end
end
context 'when downloading a LFS object that does not exist' do
let(:body) { download_body(non_existing_object) }
it_behaves_like 'LFS http 200 response'
it 'with an 404 for specific object' do
expect(json_response['objects'].first).to include(non_existing_object)
expect(json_response['objects'].first['error']).to include('code' => 404, 'message' => "Object does not exist on the server or you don't have permissions to access it")
end
end
context 'when downloading one existing and one missing LFS object' do
let(:body) { download_body(multiple_objects) }
it_behaves_like 'LFS http 200 response'
it 'responds with download hypermedia link for the existing object' do
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first['actions']['download']).to include('href' => objects_url(project, sample_oid))
expect(json_response['objects'].last).to eq({
'oid' => non_existing_object_oid,
'size' => non_existing_object_size,
'error' => {
'code' => 404,
'message' => "Object does not exist on the server or you don't have permissions to access it"
}
})
end
it_behaves_like 'process authorization header', renew_authorization: renew_authorization
end
context 'when downloading two existing LFS objects' do
let(:body) { download_body(multiple_objects) }
let(:other_object) { create(:lfs_object, :with_file, oid: non_existing_object_oid, size: non_existing_object_size) }
before do
project.lfs_objects << other_object
end
it 'responds with the download hypermedia link for each object' do
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first['actions']['download']).to include('href' => objects_url(project, sample_oid))
expect(json_response['objects'].last).to include(non_existing_object)
expect(json_response['objects'].last['actions']['download']).to include('href' => objects_url(project, non_existing_object_oid))
end
it_behaves_like 'process authorization header', renew_authorization: renew_authorization
end
end
context 'when user is authenticated' do
before do
project.add_role(user, role) if role
end
it_behaves_like 'an authorized request', renew_authorization: true do
let(:role) { :reporter }
end
context 'when user is not a member of the project' do
let(:role) { nil }
it_behaves_like 'LFS http 404 response'
end
context 'when user does not have download access' do
let(:role) { :guest }
it_behaves_like 'LFS http 404 response'
end
context 'when user password is expired' do
let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
let(:role) { :reporter}
it_behaves_like 'LFS http 401 response'
end
context 'when user is blocked' do
let_it_be(:user) { create(:user, :blocked)}
let(:role) { :reporter}
it_behaves_like 'LFS http 401 response'
end
end
context 'when using Deploy Tokens' do
let(:authorization) { authorize_deploy_token }
context 'when Deploy Token is not valid' do
let(:deploy_token) { create(:deploy_token, projects: [project], read_repository: false) }
it_behaves_like 'LFS http 401 response'
end
context 'when Deploy Token is not related to the project' do
let(:deploy_token) { create(:deploy_token, projects: [other_project]) }
it_behaves_like 'LFS http 401 response'
end
            # TODO: We should fix this test case that causes flakiness by alternating the result of the above test cases.
context 'when Deploy Token is valid' do
let(:deploy_token) { create(:deploy_token, projects: [project]) }
it_behaves_like 'an authorized request', renew_authorization: false
end
end
context 'when build is authorized as' do
let(:authorization) { authorize_ci_project }
shared_examples 'can download LFS only from own projects' do |renew_authorization:|
context 'for own project' do
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
before do
authorize_download
end
it_behaves_like 'an authorized request', renew_authorization: renew_authorization
end
context 'for other project' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
it 'rejects downloading code' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'administrator', :enable_admin_mode do
let_it_be(:user) { create(:admin) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects', renew_authorization: true
end
context 'regular user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects', renew_authorization: true
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it_behaves_like 'can download LFS only from own projects', renew_authorization: false
end
end
context 'when user is not authenticated' do
let(:authorization) { nil }
describe 'is accessing public project' do
let_it_be(:project) { create(:project, :public) }
it_behaves_like 'LFS http 200 response'
it 'returns href to download' do
expect(json_response).to eq({
'objects' => [
{
'oid' => sample_oid,
'size' => sample_size,
'authenticated' => true,
'actions' => {
'download' => {
'href' => objects_url(project, sample_oid),
'header' => {}
}
}
}
]
})
end
end
describe 'is accessing non-public project' do
it_behaves_like 'LFS http 401 response'
end
end
end
describe 'upload' do
let_it_be(:project) { create(:project, :public) }
let(:body) { upload_body(sample_object) }
shared_examples 'pushes new LFS objects' do |renew_authorization:|
let(:sample_size) { 150.megabytes }
let(:sample_oid) { non_existing_object_oid }
it_behaves_like 'LFS http 200 response'
it 'responds with upload hypermedia link' do
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first['actions']['upload']['href']).to eq(objects_url(project, sample_oid, sample_size))
headers = json_response['objects'].first['actions']['upload']['header']
expect(headers['Content-Type']).to eq('application/octet-stream')
expect(headers['Transfer-Encoding']).to eq('chunked')
end
it_behaves_like 'process authorization header', renew_authorization: renew_authorization
end
describe 'when request is authenticated' do
describe 'when user has project push access' do
before do
authorize_upload
end
context 'when pushing an LFS object that already exists' do
shared_examples_for 'batch upload with existing LFS object' do
it_behaves_like 'LFS http 200 response'
it 'responds with links to the object in the project' do
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first).to include(sample_object)
expect(lfs_object.projects.pluck(:id)).not_to include(project.id)
expect(lfs_object.projects.pluck(:id)).to include(other_project.id)
expect(json_response['objects'].first['actions']['upload']['href']).to eq(objects_url(project, sample_oid, sample_size))
headers = json_response['objects'].first['actions']['upload']['header']
expect(headers['Content-Type']).to eq('application/octet-stream')
expect(headers['Transfer-Encoding']).to eq('chunked')
end
it_behaves_like 'process authorization header', renew_authorization: true
end
context 'in another project' do
before do
lfs_object.update!(projects: [other_project])
end
it_behaves_like 'batch upload with existing LFS object'
end
context 'in source of fork project' do
let(:other_project) { create(:project, :empty_repo) }
let(:project) { fork_project(other_project) }
before do
lfs_object.update!(projects: [other_project])
end
context 'when user has access to both the parent and fork' do
before do
project.add_developer(user)
other_project.add_developer(user)
end
it 'links existing LFS objects to other project' do
expect(Gitlab::AppJsonLogger).to receive(:info).with(
message: "LFS object auto-linked to forked project",
lfs_object_oid: lfs_object.oid,
lfs_object_size: lfs_object.size,
source_project_id: other_project.id,
source_project_path: other_project.full_path,
target_project_id: project.id,
target_project_path: project.full_path).and_call_original
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first).not_to have_key('actions')
expect(lfs_object.reload.projects.pluck(:id)).to match_array([other_project.id, project.id])
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(lfs_auto_link_fork_source: false)
end
it_behaves_like 'batch upload with existing LFS object'
end
end
context 'when user does not have access to parent' do
before do
project.add_developer(user)
end
it_behaves_like 'batch upload with existing LFS object'
end
end
end
context 'when pushing a LFS object that does not exist' do
it_behaves_like 'pushes new LFS objects', renew_authorization: true
end
context 'when pushing one new and one existing LFS object' do
let(:body) { upload_body(multiple_objects) }
it_behaves_like 'LFS http 200 response'
it 'responds with upload hypermedia link for the new object' do
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first).to include(sample_object)
expect(json_response['objects'].first).not_to have_key('actions')
expect(json_response['objects'].last).to include(non_existing_object)
expect(json_response['objects'].last['actions']['upload']['href']).to eq(objects_url(project, non_existing_object_oid, non_existing_object_size))
headers = json_response['objects'].last['actions']['upload']['header']
expect(headers['Content-Type']).to eq('application/octet-stream')
expect(headers['Transfer-Encoding']).to eq('chunked')
end
it_behaves_like 'process authorization header', renew_authorization: true
end
end
context 'when user does not have push access' do
it_behaves_like 'LFS http 403 response'
end
context 'when build is authorized' do
let(:authorization) { authorize_ci_project }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
context 'build has an user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
context 'tries to push to own project' do
it_behaves_like 'LFS http 403 response'
end
context 'tries to push to other project' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
# I'm not sure what this tests that is different from the previous test
it_behaves_like 'LFS http 403 response'
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it_behaves_like 'LFS http 403 response'
end
end
context 'when deploy key has project push access' do
let(:key) { create(:deploy_key) }
let(:authorization) { authorize_deploy_key }
before do
project.deploy_keys_projects.create!(deploy_key: key, can_push: true)
end
it_behaves_like 'pushes new LFS objects', renew_authorization: false
end
end
context 'when user is not authenticated' do
let(:authorization) { nil }
context 'when user has push access' do
before do
authorize_upload
end
it_behaves_like 'LFS http 401 response'
end
context 'when user does not have push access' do
it_behaves_like 'LFS http 401 response'
end
end
end
describe 'unsupported' do
let(:body) { request_body('other', sample_object) }
it_behaves_like 'LFS http 404 response'
end
end
describe 'when handling LFS batch request on a read-only GitLab instance' do
subject { post_lfs_json(batch_url(project), body, headers) }
before do
allow(Gitlab::Database).to receive(:read_only?) { true }
project.add_maintainer(user)
subject
end
context 'when downloading' do
let(:body) { download_body(sample_object) }
it_behaves_like 'LFS http 200 response'
end
context 'when uploading' do
let(:body) { upload_body(sample_object) }
it_behaves_like 'LFS http expected response code and message' do
let(:response_code) { 403 }
let(:message) { 'You cannot write to this read-only GitLab instance.' }
end
end
end
describe 'when pushing a LFS object' do
let(:include_workhorse_jwt_header) { true }
shared_examples 'unauthorized' do
context 'and request is sent by gitlab-workhorse to authorize the request' do
before do
put_authorize
end
it_behaves_like 'LFS http 401 response'
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it_behaves_like 'LFS http 401 response'
end
context 'and request is sent with a malformed headers' do
before do
put_finalize('/etc/passwd')
end
it_behaves_like 'LFS http 401 response'
end
end
shared_examples 'forbidden' do
context 'and request is sent by gitlab-workhorse to authorize the request' do
before do
put_authorize
end
it_behaves_like 'LFS http 403 response'
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it_behaves_like 'LFS http 403 response'
end
context 'and request is sent with a malformed headers' do
before do
put_finalize('/etc/passwd')
end
it_behaves_like 'LFS http 403 response'
end
end
describe 'to one project' do
describe 'when user is authenticated' do
describe 'when user has push access to the project' do
before do
project.add_developer(user)
end
context 'and the request bypassed workhorse' do
it 'raises an exception' do
expect { put_authorize(verified: false) }.to raise_error JWT::DecodeError
end
end
context 'and request is sent by gitlab-workhorse to authorize the request' do
shared_examples 'a valid response' do
before do
put_authorize
end
it_behaves_like 'LFS http 200 workhorse response'
end
shared_examples 'a local file' do
it_behaves_like 'a valid response' do
it 'responds with status 200, location of LFS store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
end
context 'when using local storage' do
it_behaves_like 'a local file'
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_lfs_object_storage(enabled: true, direct_upload: true)
end
it_behaves_like 'a valid response' do
it 'responds with status 200, location of LFS remote store and object details' do
expect(json_response).not_to have_key('TempPath')
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
expect(json_response['RemoteObject']).to have_key('MultipartUpload')
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
end
context 'when direct upload is disabled' do
before do
stub_lfs_object_storage(enabled: true, direct_upload: false)
end
it_behaves_like 'a local file'
end
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it_behaves_like 'LFS http 200 response'
it 'LFS object is linked to the project' do
expect(lfs_object.projects.pluck(:id)).to include(project.id)
end
end
context 'and request to finalize the upload is not sent by gitlab-workhorse' do
it 'fails with a JWT decode error' do
expect { put_finalize(lfs_tmp_file, verified: false) }.to raise_error(JWT::DecodeError)
end
end
context 'and workhorse requests upload finalize for a new LFS object' do
before do
lfs_object.destroy!
end
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
end
context 'with object storage enabled' do
context 'and direct upload enabled' do
let!(:fog_connection) do
stub_lfs_object_storage(direct_upload: true)
end
let(:tmp_object) do
fog_connection.directories.new(key: 'lfs-objects').files.create( # rubocop: disable Rails/SaveBang
key: 'tmp/uploads/12312300',
body: 'content'
)
end
['123123', '../../123123'].each do |remote_id|
context "with invalid remote_id: #{remote_id}" do
subject do
put_finalize(remote_object: tmp_object, args: {
'file.remote_id' => remote_id
})
end
it 'responds with status 403' do
subject
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'with valid remote_id' do
subject do
put_finalize(remote_object: tmp_object, args: {
'file.remote_id' => '12312300',
'file.name' => 'name'
})
end
it 'responds with status 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
object = LfsObject.find_by_oid(sample_oid)
expect(object).to be_present
expect(object.file.read).to eq(tmp_object.body)
end
it 'schedules migration of file to object storage' do
subject
expect(LfsObject.last.projects).to include(project)
end
it 'have valid file' do
subject
expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(LfsObject.last.file).to be_exists
end
end
end
context 'and background upload enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules migration of file to object storage' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
end
end
end
context 'without the lfs object' do
before do
lfs_object.destroy!
end
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('../bar', with_tempfile: true)
expect(response).to have_gitlab_http_status(:bad_request)
end
context 'not sending the workhorse jwt header' do
let(:include_workhorse_jwt_header) { false }
it 'rejects the request' do
put_finalize(with_tempfile: true)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
end
describe 'and user does not have push access' do
before do
project.add_reporter(user)
end
it_behaves_like 'forbidden'
end
end
context 'when build is authorized' do
let(:authorization) { authorize_ci_project }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
context 'build has an user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
context 'tries to push to own project' do
before do
project.add_developer(user)
put_authorize
end
it_behaves_like 'LFS http 403 response'
end
context 'tries to push to other project' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
before do
put_authorize
end
it_behaves_like 'LFS http 404 response'
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
before do
put_authorize
end
it_behaves_like 'LFS http 403 response'
end
end
describe 'when using a user key (LFSToken)' do
let(:authorization) { authorize_user_key }
context 'when user allowed' do
before do
project.add_developer(user)
put_authorize
end
it_behaves_like 'LFS http 200 workhorse response'
context 'when user password is expired' do
let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago) }
it_behaves_like 'LFS http 401 response'
end
context 'when user is blocked' do
let_it_be(:user) { create(:user, :blocked)}
it_behaves_like 'LFS http 401 response'
end
end
context 'when user not allowed' do
before do
put_authorize
end
it_behaves_like 'LFS http 404 response'
end
end
context 'for unauthenticated' do
let(:authorization) { nil }
it_behaves_like 'unauthorized'
end
end
describe 'to a forked project' do
let_it_be(:upstream_project) { create(:project, :public) }
let_it_be(:project_owner) { create(:user) }
let(:project) { fork_project(upstream_project, project_owner) }
describe 'when user is authenticated' do
describe 'when user has push access to the project' do
before do
project.add_developer(user)
end
context 'and request is sent by gitlab-workhorse to authorize the request' do
before do
put_authorize
end
it_behaves_like 'LFS http 200 workhorse response'
it 'with location of LFS store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it_behaves_like 'LFS http 200 response'
it 'LFS object is linked to the forked project' do
expect(lfs_object.projects.pluck(:id)).to include(project.id)
end
end
end
describe 'and user does not have push access' do
it_behaves_like 'forbidden'
end
end
context 'when build is authorized' do
let(:authorization) { authorize_ci_project }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
before do
put_authorize
end
context 'build has an user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
context 'tries to push to own project' do
it_behaves_like 'LFS http 403 response'
end
context 'tries to push to other project' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
# I'm not sure what this tests that is different from the previous test
it_behaves_like 'LFS http 403 response'
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it_behaves_like 'LFS http 403 response'
end
end
context 'for unauthenticated' do
let(:authorization) { nil }
it_behaves_like 'unauthorized'
end
describe 'and second project not related to fork or a source project' do
let_it_be(:second_project) { create(:project) }
before do
second_project.add_maintainer(user)
upstream_project.lfs_objects << lfs_object
end
context 'when pushing the same LFS object to the second project' do
before do
finalize_headers = headers
.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp_file)
.merge(workhorse_internal_api_request_header)
put objects_url(second_project, sample_oid, sample_size),
params: {},
headers: finalize_headers
end
it_behaves_like 'LFS http 200 response'
it 'links the LFS object to the project' do
expect(lfs_object.projects.pluck(:id)).to include(second_project.id, upstream_project.id)
end
end
end
end
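        # Sends the gitlab-workhorse authorize request for the sample LFS object, optionally without the Workhorse JWT header.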
def put_authorize(verified: true)
authorize_headers = headers
authorize_headers.merge!(workhorse_internal_api_request_header) if verified
put authorize_url(project, sample_oid, sample_size), params: {}, headers: authorize_headers
end
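        # Simulates gitlab-workhorse finalizing the upload, either with a local tempfile or a remote object-storage file.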
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, verified: true, remote_object: nil, args: {})
uploaded_file = nil
if with_tempfile
upload_path = LfsObjectUploader.workhorse_local_upload_path
file_path = upload_path + '/' + lfs_tmp if lfs_tmp
FileUtils.mkdir_p(upload_path)
FileUtils.touch(file_path)
uploaded_file = UploadedFile.new(file_path, filename: File.basename(file_path))
elsif remote_object
uploaded_file = fog_to_uploaded_file(remote_object)
end
finalize_headers = headers
finalize_headers.merge!(workhorse_internal_api_request_header) if verified
workhorse_finalize(
objects_url(project, sample_oid, sample_size),
method: :put,
file_key: :file,
params: args.merge(file: uploaded_file),
headers: finalize_headers,
send_rewritten_field: include_workhorse_jwt_header
)
end
def lfs_tmp_file
"#{sample_oid}012345678"
end
end
end
end
context 'with project wikis' do
it_behaves_like 'LFS http requests' do
let(:container) { create(:project_wiki, :empty_repo, project: project) }
let(:authorize_guest) { project.add_guest(user) }
let(:authorize_download) { project.add_reporter(user) }
let(:authorize_upload) { project.add_developer(user) }
end
end
context 'with snippets' do
# LFS is not supported on snippets, so we override the shared examples
# to expect 404 responses instead.
[
'LFS http 200 response',
'LFS http 200 blob response',
'LFS http 403 response'
].each do |examples|
shared_examples_for(examples) { it_behaves_like 'LFS http 404 response' }
end
context 'with project snippets' do
it_behaves_like 'LFS http requests' do
let(:container) { create(:project_snippet, :empty_repo, project: project) }
let(:authorize_guest) { project.add_guest(user) }
let(:authorize_download) { project.add_reporter(user) }
let(:authorize_upload) { project.add_developer(user) }
end
end
context 'with personal snippets' do
it_behaves_like 'LFS http requests' do
let(:container) { create(:personal_snippet, :empty_repo) }
let(:authorize_upload) { container.update!(author: user) }
end
end
end
end
| 36.341845 | 184 | 0.553775 |
038234e494136ad656254c99f28969ec87b19794 | 11,318 | require File.dirname(__FILE__) + '/../spec_helper'
require 'active_support/time'
Time.zone = 'Eastern Time (US & Canada)'
describe IceCube::Schedule, 'to_yaml' do
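  # Serialization round trips: schedules rebuilt from YAML must produce the same occurrences.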
[:yearly, :monthly, :weekly, :daily, :hourly, :minutely, :secondly].each do |type|
it "should make a #{type} round trip with to_yaml - github issue 47" do
s = Schedule.new(Time.now)
s.add_recurrence_rule Rule.send(type, 3)
Schedule.from_yaml(s.to_yaml).first(3).should == s.first(3)
end
end
it 'should be able to let rules take round trips to yaml' do
schedule = IceCube::Schedule.new
schedule.add_recurrence_rule IceCube::Rule.monthly
schedule = IceCube::Schedule.from_yaml schedule.to_yaml
rule = schedule.rrules.first
rule.is_a?(IceCube::MonthlyRule)
end
it 'should respond to .to_yaml' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.until(Time.now)
#check assumption
schedule.should respond_to('to_yaml')
end
it 'should be able to make a round-trip to YAML' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.until(Time.now + 10)
result1 = schedule.all_occurrences
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
result2 = schedule2.all_occurrences
# compare without usecs
result1.map { |r| r.to_s }.should == result2.map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .day' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.day(:monday, :wednesday)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .day_of_month' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.monthly.day_of_month(10, 20)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .day_of_week' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.weekly.day_of_week(:monday => [1, -2])
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .day_of_year' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.yearly.day_of_year(100, 200)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .hour_of_day' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.hour_of_day(1, 2)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .minute_of_hour' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.minute_of_hour(0, 30)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .month_of_year' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.yearly.month_of_year(:april, :may)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should be able to make a round-trip to YAML with .second_of_minute' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.second_of_minute(1, 2)
yaml_string = schedule.to_yaml
schedule2 = IceCube::Schedule.from_yaml(yaml_string)
# compare without usecs
schedule.first(10).map { |r| r.to_s }.should == schedule2.first(10).map { |r| r.to_s }
end
it 'should have a to_yaml representation of a rule that does not contain ruby objects' do
rule = IceCube::Rule.daily.day_of_week(:monday => [1, -1]).month_of_year(:april)
rule.to_yaml.include?('object').should be_false
end
it 'should have a to_yaml representation of a schedule that does not contain ruby objects' do
schedule = IceCube::Schedule.new(Time.now)
schedule.add_recurrence_rule IceCube::Rule.daily.day_of_week(:monday => [1, -1]).month_of_year(:april)
schedule.to_yaml.include?('object').should be_false
end
# This test will fail when not run in Eastern Time
# This is a bug because to_datetime will always convert to system local time
it 'should be able to roll forward times and get back times in an array - TimeWithZone' do
Time.zone = "Eastern Time (US & Canada)"
start_date = Time.zone.local(2011, 11, 5, 12, 0, 0)
schedule = IceCube::Schedule.new(start_date)
schedule = IceCube::Schedule.from_yaml(schedule.to_yaml) # round trip
ice_cube_start_date = schedule.start_time
ice_cube_start_date.should == start_date
ice_cube_start_date.utc_offset.should == start_date.utc_offset
end
it 'should be able to roll forward times and get back times in an array - Time' do
start_date = Time.now
schedule = IceCube::Schedule.new(start_date)
schedule = IceCube::Schedule.from_yaml(schedule.to_yaml) # round trip
ice_cube_start_date = schedule.start_time
ice_cube_start_date.to_s.should == start_date.to_s
ice_cube_start_date.class.should == Time
ice_cube_start_date.utc_offset.should == start_date.utc_offset
end
it 'should be able to go back and forth to yaml and then call occurrences' do
start_date = Time.local(2011, 5, 10, 12, 0, 0)
schedule1 = IceCube::Schedule.new(start_date)
schedule1.add_recurrence_date start_date
schedule2 = IceCube::Schedule.from_yaml(schedule1.to_yaml) # round trip
end_time = Time.now + IceCube::ONE_DAY
schedule1.occurrences(end_time).should == schedule2.occurrences(end_time)
end
it 'should be able to make a round trip with an exdate' do
schedule = IceCube::Schedule.new
schedule.add_exception_time(time = Time.now)
schedule = IceCube::Schedule.from_yaml schedule.to_yaml
schedule.exdates.map(&:to_s).should == [time.to_s]
end
  it 'should survive a hash and yaml round trip with a rule and a recurrence date' do
start_date = Time.zone.now
schedule = IceCube::Schedule.new(start_date)
schedule.add_recurrence_rule IceCube::Rule.weekly.day(:wednesday)
schedule.add_recurrence_date start_date
schedule = IceCube::Schedule.from_hash(schedule.to_hash)
schedule = IceCube::Schedule.from_yaml(schedule.to_yaml)
schedule.occurrences(start_date + IceCube::ONE_DAY * 14)
end
it 'should be able to make a round trip to hash with a duration' do
schedule = IceCube::Schedule.new Time.now, :duration => 3600
IceCube::Schedule.from_hash(schedule.to_hash).duration.should == 3600
end
it 'should be able to be serialized to yaml as part of a hash' do
schedule = IceCube::Schedule.new Time.now
hash = { :schedule => schedule }
lambda do
hash.to_yaml
end.should_not raise_error
end
it 'should be able to roll forward and back in time' do
schedule = IceCube::Schedule.new(Time.now)
rt_schedule = IceCube::Schedule.from_yaml(schedule.to_yaml)
rt_schedule.start_time.utc_offset.should == schedule.start_time.utc_offset
end
it 'should be backward compatible with old yaml Time format' do
pacific_time = 'Pacific Time (US & Canada)'
yaml = "---\n:end_time:\n:rdates: []\n:rrules: []\n:duration:\n:exdates: []\n:exrules: []\n:start_date: 2010-10-18T14:35:47-07:00"
schedule = IceCube::Schedule.from_yaml(yaml)
schedule.start_time.should be_a(Time)
end
it 'should work to_yaml with non-TimeWithZone' do
schedule = IceCube::Schedule.new(Time.now)
schedule.to_yaml.length.should be < 200
end
it 'should work with occurs_on and TimeWithZone' do
pacific_time = 'Pacific Time (US & Canada)'
Time.zone = pacific_time
schedule = IceCube::Schedule.new(Time.zone.now)
schedule.add_recurrence_rule IceCube::Rule.weekly
schedule.occurs_on?(schedule.start_time.to_date + 6).should be_false
schedule.occurs_on?(schedule.start_time.to_date + 7).should be_true
schedule.occurs_on?(schedule.start_time.to_date + 8).should be_false
end
it 'should work with occurs_on and TimeWithZone' do
start_time = Time.zone.local(2012, 7, 15, 12, 0, 0)
pacific_time = 'Pacific Time (US & Canada)'
Time.zone = pacific_time
schedule = IceCube::Schedule.new(start_time)
schedule.add_recurrence_time start_time + 7 * IceCube::ONE_DAY
schedule.occurs_on?(schedule.start_time.to_date + 6).should be_false
schedule.occurs_on?(schedule.start_time.to_date + 7).should be_true
schedule.occurs_on?(schedule.start_time.to_date + 8).should be_false
end
  it 'should match occurs_on? only on the correct date for an early-morning recurrence time' do
Time.zone = 'Pacific Time (US & Canada)'
day = Time.zone.parse('21 Oct 2010 02:00:00')
schedule = IceCube::Schedule.new(day)
schedule.add_recurrence_date(day)
schedule.occurs_on?(Date.new(2010, 10, 20)).should be_false
schedule.occurs_on?(Date.new(2010, 10, 21)).should be_true
schedule.occurs_on?(Date.new(2010, 10, 22)).should be_false
end
it 'should be able to bring a Rule to_yaml and back with a timezone' do
Time.zone = 'Pacific Time (US & Canada)'
time = Time.now
offset = time.utc_offset
rule = IceCube::Rule.daily.until(time)
rule = IceCube::Rule.from_yaml(rule.to_yaml)
rule.until_date.utc_offset.should == offset
end
it 'should be able to bring a Rule to_yaml and back with a count' do
rule = IceCube::Rule.daily.count(5)
rule = IceCube::Rule.from_yaml rule.to_yaml
rule.occurrence_count.should == 5
end
it 'should be able to bring in a schedule with a rule from hash with symbols or strings' do
time = Time.zone.now
symbol_data = { :start_date => time, :rrules => [ { :validations => { :day => [1] }, :rule_type => "IceCube::DailyRule", :interval => 1 } ], :exrules => [], :rtimes => [], :extimes => [] }
string_data = { 'start_date' => time, 'rrules' => [ { 'validations' => { 'day' => [1] }, 'rule_type' => "IceCube::DailyRule", 'interval' => 1 } ], 'exrules' => [], 'rtimes' => [], 'extimes' => [] }
symbol_yaml = IceCube::Schedule.from_hash(symbol_data).to_yaml
string_yaml = IceCube::Schedule.from_hash(string_data).to_yaml
symbol_yaml.should == string_yaml
end
end
| 39.852113 | 201 | 0.710108 |
bf3d8fc6d7fd931debd08b63f42ebdc3bf736aab | 5,461 | require 'cfndsl'
require 'digest/md5'
require 'cfnguardian/cloudwatch'
module CfnGuardian
module Stacks
class Resources
include CfnDsl::CloudFormation
attr_reader :template
def initialize(parameters,stack_id)
@stack_id = stack_id
@template = CloudFormation("Guardian nested - stack-id:stk#{@stack_id}")
parameters.each do |name|
parameter = @template.Parameter(name)
parameter.Type 'String'
end
end
def build_template(resources)
resources.each do |resource|
case resource.type
when 'Alarm'
add_alarm(resource)
when 'Event'
add_event(resource)
when 'Composite'
add_composite_alarm(resource)
when 'MetricFilter'
add_metric_filter(resource)
when 'EventSubscription'
add_event_subscription(resource)
else
puts "Warn: #{resource.type} is a unsuported resource type"
end
end
end
def add_alarm(alarm)
actions = alarm.alarm_action.kind_of?(Array) ? alarm.alarm_action.map{|action| Ref(action)} : [Ref(alarm.alarm_action)]
actions.concat alarm.maintenance_groups.map {|mg| Ref(mg)} if alarm.maintenance_groups.any?
stack_id = @stack_id
@template.declare do
CloudWatch_Alarm("#{alarm.resource_hash}#{alarm.group}#{alarm.name.gsub(/[^0-9a-zA-Z]/i, '')}#{alarm.type}"[0..255]) do
ActionsEnabled true
AlarmDescription "Guardian alarm #{alarm.name} for the resource #{alarm.resource_id} in alarm group #{alarm.group}"
AlarmName CfnGuardian::CloudWatch.get_alarm_name(alarm) + "-stk#{stack_id}"
ComparisonOperator alarm.comparison_operator
Dimensions alarm.dimensions.map {|k,v| {Name: k, Value: v}} unless alarm.dimensions.nil?
EvaluationPeriods alarm.evaluation_periods
Statistic alarm.statistic if alarm.extended_statistic.nil?
Period alarm.period
Threshold alarm.threshold
MetricName alarm.metric_name
Namespace alarm.namespace
AlarmActions actions
OKActions actions
TreatMissingData alarm.treat_missing_data unless alarm.treat_missing_data.nil?
DatapointsToAlarm alarm.datapoints_to_alarm unless alarm.datapoints_to_alarm.nil?
ExtendedStatistic alarm.extended_statistic unless alarm.extended_statistic.nil?
EvaluateLowSampleCountPercentile alarm.evaluate_low_sample_count_percentile unless alarm.evaluate_low_sample_count_percentile.nil?
Unit alarm.unit unless alarm.unit.nil?
end
end
end
def add_event(event)
@template.declare do
Events_Rule("#{event.group}#{event.type}#{event.hash}"[0..255]) do
State 'ENABLED'
Description "Guardian scheduled #{event.group} #{event.type}"
ScheduleExpression "cron(#{event.cron})"
Targets([
{
Arn: Ref(event.target),
Id: event.hash,
Input: FnSub(event.payload)
}
])
end
end
end
def add_composite_alarm(alarm)
stack_id = @stack_id
@template.declare do
CloudWatch_CompositeAlarm(alarm.name.gsub(/[^0-9a-zA-Z]/i, '')) do
AlarmDescription alarm.description
AlarmName "guardian-#{alarm.name}-stk#{stack_id}"
AlarmRule alarm.rule
unless alarm.alarm_action.nil?
ActionsEnabled true
AlarmActions [Ref(alarm.alarm_action)]
# InsufficientDataActions [Ref(alarm.alarm_action)]
# OKActions [Ref(alarm.alarm_action)]
end
end
end
end
def add_metric_filter(filter)
@template.declare do
Logs_MetricFilter("#{filter.name.gsub(/[^0-9a-zA-Z]/i, '')}#{filter.type}") do
LogGroupName filter.log_group
FilterPattern filter.pattern
MetricTransformations([
{
MetricValue: filter.metric_value,
MetricName: filter.metric_name,
MetricNamespace: filter.metric_namespace
}
])
end
end
end
def add_event_subscription(subscription)
event_pattern = {}
event_pattern['detail-type'] = [subscription.detail_type]
event_pattern['source'] = [subscription.source]
event_pattern['resources'] = [subscription.resource_arn] unless subscription.resource_arn.empty?
event_pattern['detail'] = subscription.detail unless subscription.detail.empty?
@template.declare do
Events_Rule("#{subscription.group}#{subscription.name}#{subscription.hash}"[0..255]) do
State subscription.enabled ? 'ENABLED' : 'DISABLED'
Description "Guardian event subscription #{subscription.group} #{subscription.name} for resource #{subscription.resource_id}"
EventPattern FnSub(event_pattern.to_json)
Targets [
{
Arn: Ref(subscription.topic),
Id: "#{subscription.topic}Notifier"
}
]
end
end
end
end
end
end
| 36.651007 | 142 | 0.598425 |
61c91694a3670570b8a1d91397365ffbf32561d7 | 2,708 | require 'renegade/branch_name'
require 'net/https'
require 'uri'
require 'nokogiri'
module Xambassador
  # Updates the title of a newly opened (or reopened) pull request from the linked VersionOne story or Bugzilla bug
class UpdateTitle
def initialize(connection, pull_request, action)
@connection = connection
@pull_request = pull_request
      # only update newly opened or reopened requests
run(pull_request) if action == 'opened' || action == 'reopened'
end
def run(pull_request)
data = Renegade::BranchName.extract_id(pull_request['head']['ref'])
if data['type'] == 'story'
fetch_story_data(story_url(data['id']))
elsif data['type'] == 'bug'
fetch_bug_data(bug_url(data['id']))
end
end
def bug_url(bug_id)
ENV['BUGZILLA_URL'] + "/bug/#{bug_id}?"\
"Bugzilla_api_key=#{ENV['BUGZILLA_API_KEY']}"
end
def story_url(story_id)
ENV['VERSION_ONE_URL'] + "/Data/Story"\
"?sel=Number,Estimate,Name&where=Number='B-#{story_id}'"
end
def fetch_story_data(url)
uri = URI.parse(url)
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
request = Net::HTTP::Get.new(url)
request['Authorization'] = "Bearer #{ENV['VERSION_ONE_TOKEN']}"
response = http.request(request)
title = extract_story_title(response.body)
update_title(title)
end
def update_title(title)
number = @pull_request['number']
repo = @pull_request['head']['repo']['full_name']
@connection.client.update_pull_request(repo, number, title: title)
end
def extract_story_title(xml)
doc = Nokogiri::XML(xml)
if doc.at_xpath('//Assets')['total'] == '1'
story_title(doc)
else
'INVALID STORY ID'
end
end
def estimate_format(estimate)
", #{estimate} pts" unless estimate.nil?
end
def story_title(xml_doc)
story_name = xml_doc.css('Attribute[name=Name]').text
estimate = estimate_format(xml_doc.css('Attribute[name=Estimate]').text)
number = xml_doc.css('Attribute[name=Number]').text
"(Story: #{number}) #{story_name}#{estimate}"
end
def fetch_bug_data(url)
uri = URI.parse(url)
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = false
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
request = Net::HTTP::Get.new(url)
response = http.request(request)
update_title(bug_title(JSON.parse(response.body)))
end
def bug_title(json)
bug = json['bugs'][0]
severity = bug['severity']
severity = " [#{severity}]" unless severity == ''
"(Bug: #{bug['id']}) #{bug['summary']}#{severity}"
end
end
end
| 26.811881 | 78 | 0.627031 |
28c249253f90bab5e25325a614dca3f3b998b13d | 2,500 | require 'spec_helper'
describe Automata::DFA do
context "Initializing from a valid file" do
before do
@dfa = Automata::DFA.new(file: 'examples/dfa_sample.yml')
end
it "should be valid" do
@dfa.should be_valid
end
it "should accept '00'" do
@dfa.accepts?('00').should == true
end
it "should accept '001101'" do
@dfa.accepts?('001101').should == true
end
it "should not accept the empty string" do
@dfa.accepts?('').should == false
end
it "should not accept '0'" do
@dfa.accepts?('0').should == false
end
it "should not accept '1'" do
@dfa.accepts?('1').should == false
end
it "should not accept '01'" do
@dfa.accepts?('01').should == false
end
it "should not accept '100'" do
@dfa.accepts?('100').should == false
end
end
context "Initializing an empty DFA" do
before do
@dfa = Automata::DFA.new
end
it "should be created successfully" do
@dfa.should be_an_instance_of Automata::DFA
end
end
context "Initializing a DFA by params" do
before do
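      # This DFA over the alphabet {0,1} accepts exactly the strings that start with "00":
      # A -0-> B -0-> C (an accepting sink); reading a 1 before reaching C falls into the dead state D.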
states = %w( A B C D )
alphabet = %w( 0 1 )
start = 'A'
accept = %w( C )
transitions = {
'A' => {
'0' => 'B',
'1' => 'D'
},
'B' => {
'0' => 'C',
'1' => 'D'
},
'C' => {
'0' => 'C',
'1' => 'C'
},
'D' => {
'0' => 'D',
'1' => 'D'
}
}
params = {
states: states,
alphabet: alphabet,
start: start,
accept: accept,
transitions: transitions
}
@dfa = Automata::DFA.new(params)
end
it "should create a valid DFA" do
@dfa.should be_valid
end
it "should accept '00'" do
@dfa.accepts?('00').should == true
end
it "should accept '001101'" do
@dfa.accepts?('001101').should == true
end
it "should not accept the empty string" do
@dfa.accepts?('').should == false
end
it "should not accept '0'" do
@dfa.accepts?('0').should == false
end
it "should not accept '1'" do
@dfa.accepts?('1').should == false
end
it "should not accept '01'" do
@dfa.accepts?('01').should == false
end
it "should not accept '100'" do
@dfa.accepts?('100').should == false
end
end
end | 21.008403 | 63 | 0.4948 |
623478d094c1b24ef45f3af16723174c78018d0a | 355 | class Supplier < ActiveRecord::Base
default_scope { order('supplier_name') }
has_many :products, :dependent => :nullify
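  # `store` keeps the whole address hash serialized in the single `address` column and
  # defines an accessor per listed key, which is what lets #whole_address read them directly.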
store :address, accessors: [ :name , :street , :city , :country , :phone ] #, coder: JSON
validates :supplier_name, :presence => true
def whole_address
[ name , street , city , country , phone ].join(" ")
end
end
| 22.1875 | 91 | 0.659155 |
79284cf18443f75a3ebf31a8c88070739bac49f4 | 947 | # OK, that’s it for this lesson!
#
# ## Quick Review
#
# * We parsed an Avro schema, prepped some hashes to be compatible with that
# schema, serialized those hashes with that schema, and then produced those
# serialized hashes to a Kafka topic. Pretty rad!
#
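# A minimal sketch of that flow, assuming the `avro` and `ruby-kafka` gems; the
# schema file, field names, broker address, and topic below are illustrative, not
# necessarily the ones used earlier in this lesson:

require 'avro'
require 'kafka'
require 'stringio'

schema  = Avro::Schema.parse(File.read('person.avsc'))   # parse the Avro schema
buffer  = StringIO.new
writer  = Avro::IO::DatumWriter.new(schema)               # writes records that conform to the schema
encoder = Avro::IO::BinaryEncoder.new(buffer)
writer.write({ 'name' => 'Ada', 'age' => 36 }, encoder)   # the hash must match the schema's fields

kafka = Kafka.new(['localhost:9092'])
kafka.deliver_message(buffer.string, topic: 'people')     # produce the serialized bytes
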
# ## Links and Further Reading
#
# * How I Learned to Stop Worrying and Love the Schema:
# https://www.confluent.io/blog/how-i-learned-to-stop-worrying-and-love-the-schema-part-1/
#
# * Avro:
# http://avro.apache.org/
#
# * Avro docs:
# http://avro.apache.org/docs/current/
#
# * Martin Kleppmann explains and compares how Avro, Protocol Buffers, and
# Thrift approach schema evolution in “Schema evolution in Avro, Protocol
# Buffers and Thrift”. Schema evolution is a fairly advanced topic, but
# Kleppmann’s introduction is broadly helpful, so it’s recommended:
# https://martin.kleppmann.com/2012/12/05/schema-evolution-in-avro-protocol-buffers-thrift.html
#
| 36.423077 | 98 | 0.724393 |
1c0bd7c8fd52b50519c1160bf1530fe95248587e | 60 | class Mascot < ActiveRecord::Base
belongs_to :company
end
| 15 | 33 | 0.783333 |
1a1ff63d215b631b7046a3f092cee0a2dcf20968 | 3,247 | require 'tempfile'
require 'shellwords'
module Reviewit
class Action
def initialize(app, api)
@api = api
@linter = app.linter
inject_default_params(app) if app.action_name != 'config'
@options = self.class.parse_options
end
protected
NO_COLOR = "\033[0m"
RED = "\033[0;31m"
GREEN = "\033[0;32m"
WHITE = "\033[1;37m"
CYAN = "\033[0;36m"
MR_STAMP = 'Reviewit-MR-id:'
MR_STAMP_REGEX = /^#{MR_STAMP} (?<id>\d+)$/
attr_reader :api
attr_reader :options
attr_reader :linter
def commit_message
@commit_message ||= `git show -s --format="%B"`.strip
end
def mr_id_from_head
match = MR_STAMP_REGEX.match(commit_message)
match[:id] if match
end
def read_user_single_line_message(prompt)
print prompt
STDIN.readline.strip
end
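    # Collects the merge request message: uses --message when given, otherwise opens
    # $EDITOR/$VISUAL (falling back to nano) on a temp file and keeps only the non-comment lines.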
def read_user_message
return @options[:message] if @options[:message_given]
editor = (ENV['EDITOR'] or ENV['VISUAL'] or 'nano')
message_file = Tempfile.new 'reviewit'
message_file.puts '# Write something about your changes.'
message_file.flush
res = system("#{editor} #{message_file.path}")
      raise 'Can\'t open an editor, set the EDITOR or VISUAL environment variables. Or just install nano :-)' if res.nil?
comments = File.read message_file.path
comments = comments.lines.select { |line| line =~ /^[^#]/ }
comments.join.strip
end
def check_dirty_working_copy!
git_status = `git status --porcelain`
return if git_status.empty?
if @options[:'allow-dirty']
puts "#{RED}Your workingcopy is dirty! The following files wont be sent in this merge request:#{NO_COLOR}\n\n"
git_status.lines.each do |line|
puts " #{RED}#{line.split(' ', 2)[1].strip}#{NO_COLOR}"
end
puts
else
raise 'Your working copy is dirty, use git stash and try again.'
end
end
def self.parse_options
Optimist.options
end
def root_dir
@root_dir ||= `git rev-parse --show-toplevel`.strip
end
def copy_to_clipboard(text)
text = Shellwords.escape(text)
case RUBY_PLATFORM
when /linux/
copy_to_clipboard_linux(text)
when /darwin/
copy_to_clipboard_mac(text)
end
rescue StandardError
false
end
    def copy_to_clipboard_linux(text)
IO.popen('xclip -selection clipboard', 'w') { |f| f << text }
end
    def copy_to_clipboard_mac(text)
IO.popen('pbcopy', 'w') { |f| f << text }
end
private
def inject_default_params(app)
return if ARGV.include?('--help') || ARGV.include?('-h')
raw_default_params = app.git_config("reviewit.config#{app.action_name}")
return if raw_default_params.empty?
default_params = JSON.parse(raw_default_params)
puts "#{RED}Buggy default params! Ignoring them.#{NO_COLOR}" unless default_params.is_a?(Array)
puts "Using custom default params: #{GREEN}#{default_params.join(' ')}#{NO_COLOR}"
default_params.each { |param| ARGV << param }
ARGV.uniq!
rescue JSON::ParserError
raise 'JSON load error while loading default params for this action.'
end
end
end
| 26.614754 | 121 | 0.635664 |
794e64d752e76940000e60c48e550a4f9780397e | 632 | name 'hadoop'
maintainer 'Cask Data, Inc.'
maintainer_email '[email protected]'
license 'Apache 2.0'
description 'Installs/Configures Hadoop (HDFS/YARN/MRv2), HBase, Hive, Flume, Oozie, Pig, and ZooKeeper'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.8.2'
depends 'yum', '>= 3.0'
depends 'apt'
recommends 'java', '~> 1.21'
%w(amazon centos debian redhat scientific ubuntu).each do |os|
supports os
end
%w(hadoop_cluster hadoop_cluster_rpm hadoop_for_hbase hbase hbase_cluster hive pig spark zookeeper zookeeper_cluster).each do |cb|
conflicts cb
end
| 30.095238 | 130 | 0.707278 |
0833a59b7992428ee523a0671e3648d608a64e8a | 217 | class DropViewsTable < ActiveRecord::Migration[5.1]
def up
drop_table :views
end
def down
create_table :views do |t|
t.references :article, foreign_key: true
t.timestamps
end
end
end
| 15.5 | 51 | 0.668203 |
33b03cb6afa126a15709b9670acd356d2c3f33b7 | 10,574 | #
# a language data file for Ruby/CLDR
#
# Generated by: CLDR::Generator
#
# CLDR version: 1.3
#
# Original file name: common/main/ar_DZ.xml
# Original file revision: 1.28 $
#
# Copyright (C) 2006 Masao Mutoh
#
# This file is distributed under the same license as the Ruby/CLDR.
#
private
def init_data
@hourformat = "+HH:mm;-HH:mm"
@hoursformat = "{0}/{1}"
@regionformat = "{0}"
@fallbackformat = "{0} ({1})"
@abbreviationfallback = "standard"
@preferenceordering = ""
@singlecountries = "Africa/Bamako America/Godthab America/Santiago America/Guayaquil Asia/Shanghai Asia/Tashkent Asia/Kuala_Lumpur Europe/Madrid Europe/Lisbon Europe/London Pacific/Auckland Pacific/Tahiti"
@exemplarcities = {}
@exemplarcities["Africa/Bamako"] = "باماكو"
@exemplarcities["Africa/Ceuta"] = "سيتا"
@exemplarcities["Africa/Kinshasa"] = "كينشاسا"
@exemplarcities["Africa/Lubumbashi"] = "لومبباشا"
@exemplarcities["Africa/Timbuktu"] = "تيمبيوكتو"
@exemplarcities["America/Adak"] = "أداك"
@exemplarcities["America/Anchorage"] = "أنشوراج"
@exemplarcities["America/Araguaina"] = "أروجوانيا"
@exemplarcities["America/Argentina/ComodRivadavia"] = "كومودريفادافيا"
@exemplarcities["America/Argentina/La_Rioja"] = "لا ريوجا"
@exemplarcities["America/Argentina/Rio_Gallegos"] = "ريو جالييوس"
@exemplarcities["America/Argentina/San_Juan"] = "سان جوان"
@exemplarcities["America/Argentina/Tucuman"] = "تاكمان"
@exemplarcities["America/Argentina/Ushuaia"] = "أشوا"
@exemplarcities["America/Bahia"] = "باهيا"
@exemplarcities["America/Belem"] = "بلم"
@exemplarcities["America/Boa_Vista"] = "باو فيستا"
@exemplarcities["America/Boise"] = "بويس"
@exemplarcities["America/Buenos_Aires"] = "بوينوس أيرس"
@exemplarcities["America/Cambridge_Bay"] = "كامبرديج باي"
@exemplarcities["America/Campo_Grande"] = "كومبو جراند"
@exemplarcities["America/Cancun"] = "كانكن"
@exemplarcities["America/Catamarca"] = "كاتاماركا"
@exemplarcities["America/Chicago"] = "شيكاجو"
@exemplarcities["America/Chihuahua"] = "تشيواوا"
@exemplarcities["America/Cordoba"] = "كوردوبا"
@exemplarcities["America/Cuiaba"] = "كيابا"
@exemplarcities["America/Danmarkshavn"] = "دانمرك شافن"
@exemplarcities["America/Dawson"] = "داوسان"
@exemplarcities["America/Dawson_Creek"] = "داوسن كريك"
@exemplarcities["America/Denver"] = "دنفر"
@exemplarcities["America/Detroit"] = "ديترويت"
@exemplarcities["America/Edmonton"] = "ايدمونتون"
@exemplarcities["America/Eirunepe"] = "ايرونبي"
@exemplarcities["America/Fortaleza"] = "فورتاليزا"
@exemplarcities["America/Glace_Bay"] = "جلاس باي"
@exemplarcities["America/Godthab"] = "جودثاب"
@exemplarcities["America/Goose_Bay"] = "جوس باي"
@exemplarcities["America/Guayaquil"] = "جواياكيل"
@exemplarcities["America/Halifax"] = "هاليفاكس"
@exemplarcities["America/Hermosillo"] = "هيرموسيلو"
@exemplarcities["America/Indiana/Knox"] = "كونكس"
@exemplarcities["America/Indiana/Marengo"] = "مارنجو"
@exemplarcities["America/Indiana/Vevay"] = "فيفاي"
@exemplarcities["America/Indianapolis"] = "انديانانابوليس"
@exemplarcities["America/Inuvik"] = "اينوفيك"
@exemplarcities["America/Iqaluit"] = "اكويلت"
@exemplarcities["America/Jujuy"] = "جوجو"
@exemplarcities["America/Juneau"] = "جوني"
@exemplarcities["America/Kentucky/Monticello"] = "مونتيسيلو"
@exemplarcities["America/Los_Angeles"] = "لوس انجلوس"
@exemplarcities["America/Louisville"] = "لويس فيل"
@exemplarcities["America/Maceio"] = "ماشيو"
@exemplarcities["America/Manaus"] = "ماناوس"
@exemplarcities["America/Mazatlan"] = "مازاتلان"
@exemplarcities["America/Mendoza"] = "ميندوزا"
@exemplarcities["America/Menominee"] = "مينوميني"
@exemplarcities["America/Merida"] = "ميريدا"
@exemplarcities["America/Mexico_City"] = "مدينة المكسيك"
@exemplarcities["America/Monterrey"] = "مونتيري"
@exemplarcities["America/Montreal"] = "مونتريال"
@exemplarcities["America/New_York"] = "نيويورك"
@exemplarcities["America/Nipigon"] = "نيبيجون"
@exemplarcities["America/Nome"] = "نوم"
@exemplarcities["America/Noronha"] = "نوروناه"
@exemplarcities["America/North_Dakota/Center"] = "سنتر"
@exemplarcities["America/Pangnirtung"] = "بانجينتينج"
@exemplarcities["America/Phoenix"] = "فونكس"
@exemplarcities["America/Porto_Velho"] = "بورتو فيلو"
@exemplarcities["America/Rainy_River"] = "راني ريفر"
@exemplarcities["America/Rankin_Inlet"] = "رانكن انلت"
@exemplarcities["America/Recife"] = "ريسيف"
@exemplarcities["America/Regina"] = "ريجينا"
@exemplarcities["America/Rio_Branco"] = "ريوبرانكو"
@exemplarcities["America/Santiago"] = "سانتيجو"
@exemplarcities["America/Sao_Paulo"] = "ساو باولو"
@exemplarcities["America/Scoresbysund"] = "سكورسبيسند"
@exemplarcities["America/Shiprock"] = "شيبروك"
@exemplarcities["America/St_Johns"] = "عيد سانت جونس"
@exemplarcities["America/Swift_Current"] = "سوفت كارنت"
@exemplarcities["America/Thule"] = "ثيل"
@exemplarcities["America/Thunder_Bay"] = "ثندر باي"
@exemplarcities["America/Tijuana"] = "تيجوانا"
@exemplarcities["America/Toronto"] = "تورونتو"
@exemplarcities["America/Vancouver"] = "فانكوفر"
@exemplarcities["America/Whitehorse"] = "هوايت هورس"
@exemplarcities["America/Winnipeg"] = "وينيبيج"
@exemplarcities["America/Yakutat"] = "ياكوتات"
@exemplarcities["America/Yellowknife"] = "يلونيف"
@exemplarcities["Antarctica/Casey"] = "كاساي"
@exemplarcities["Antarctica/Davis"] = "دافيز"
@exemplarcities["Antarctica/DumontDUrville"] = "دي مونت دو روفيل"
@exemplarcities["Antarctica/Mawson"] = "ماوسون"
@exemplarcities["Antarctica/McMurdo"] = "ماك موردو"
@exemplarcities["Antarctica/Palmer"] = "بالميرا"
@exemplarcities["Antarctica/Rothera"] = "روثيرا"
@exemplarcities["Antarctica/South_Pole"] = "القطب الجنوبي"
@exemplarcities["Antarctica/Syowa"] = "سايووا"
@exemplarcities["Antarctica/Vostok"] = "فوستوك"
@exemplarcities["Arctic/Longyearbyen"] = "لونجيربن"
@exemplarcities["Asia/Almaty"] = "ألماتي"
@exemplarcities["Asia/Anadyr"] = "أندير"
@exemplarcities["Asia/Aqtau"] = "أكتاو"
@exemplarcities["Asia/Aqtobe"] = "أكتوب"
@exemplarcities["Asia/Choibalsan"] = "تشوبالسان"
@exemplarcities["Asia/Chongqing"] = "تشونجكينج"
@exemplarcities["Asia/Harbin"] = "هاربين"
@exemplarcities["Asia/Hovd"] = "هوفد"
@exemplarcities["Asia/Irkutsk"] = "ايركيتسك"
@exemplarcities["Asia/Jakarta"] = "جاكرتا"
@exemplarcities["Asia/Jayapura"] = "جايابيورا"
@exemplarcities["Asia/Kamchatka"] = "كامتشاتكا"
@exemplarcities["Asia/Kashgar"] = "كاشجار"
@exemplarcities["Asia/Krasnoyarsk"] = "كراسنويارسك"
@exemplarcities["Asia/Kuala_Lumpur"] = "كوالالمبور"
@exemplarcities["Asia/Kuching"] = "كيشينج"
@exemplarcities["Asia/Magadan"] = "مجادن"
@exemplarcities["Asia/Makassar"] = "ماكسار"
@exemplarcities["Asia/Novosibirsk"] = "نوفوسبيرسك"
@exemplarcities["Asia/Omsk"] = "أومسك"
@exemplarcities["Asia/Oral"] = "أورال"
@exemplarcities["Asia/Pontianak"] = "بونتيانك"
@exemplarcities["Asia/Qyzylorda"] = "كيزيلوردا"
@exemplarcities["Asia/Sakhalin"] = "سكالين"
@exemplarcities["Asia/Samarkand"] = "سمرقند"
@exemplarcities["Asia/Shanghai"] = "العالمية"
@exemplarcities["Asia/Tashkent"] = "طشقند"
@exemplarcities["Asia/Ulaanbaatar"] = "آلانباتار"
@exemplarcities["Asia/Urumqi"] = "أرومكي"
@exemplarcities["Asia/Vladivostok"] = "فلاديفوستك"
@exemplarcities["Asia/Yakutsk"] = "ياكتسك"
@exemplarcities["Asia/Yekaterinburg"] = "يكاترنبيرج"
@exemplarcities["Atlantic/Azores"] = "أزورس"
@exemplarcities["Atlantic/Canary"] = "كناري"
@exemplarcities["Atlantic/Jan_Mayen"] = "جان ماين"
@exemplarcities["Atlantic/Madeira"] = "ماديرا"
@exemplarcities["Australia/Adelaide"] = "أدليادا"
@exemplarcities["Australia/Brisbane"] = "برسيبان"
@exemplarcities["Australia/Broken_Hill"] = "بروكن هيل"
@exemplarcities["Australia/Darwin"] = "دارون"
@exemplarcities["Australia/Hobart"] = "هوبارت"
@exemplarcities["Australia/Lindeman"] = "ليندمان"
@exemplarcities["Australia/Lord_Howe"] = "لورد هاو"
@exemplarcities["Australia/Melbourne"] = "ميلبورن"
@exemplarcities["Australia/Perth"] = "برثا"
@exemplarcities["Australia/Sydney"] = "سيدني"
@exemplarcities["Europe/Belfast"] = "بلفاست"
@exemplarcities["Europe/Kaliningrad"] = "كالينجراد"
@exemplarcities["Europe/Kiev"] = "كيف"
@exemplarcities["Europe/Lisbon"] = "ليسبون"
@exemplarcities["Europe/London"] = "لندن"
@exemplarcities["Europe/Madrid"] = "مدريد"
@exemplarcities["Europe/Moscow"] = "موسكو"
@exemplarcities["Europe/Samara"] = "سمراء"
@exemplarcities["Europe/Simferopol"] = "سيمفروبول"
@exemplarcities["Europe/Uzhgorod"] = "أوزجرود"
@exemplarcities["Europe/Zaporozhye"] = "زابوروزي"
@exemplarcities["Pacific/Auckland"] = "أوكلاند"
@exemplarcities["Pacific/Chatham"] = "تشاثام"
@exemplarcities["Pacific/Easter"] = "استر"
@exemplarcities["Pacific/Enderbury"] = "اندربيرج"
@exemplarcities["Pacific/Galapagos"] = "جلاباجوس"
@exemplarcities["Pacific/Gambier"] = "جامبير"
@exemplarcities["Pacific/Honolulu"] = "هونولولو"
@exemplarcities["Pacific/Johnston"] = "جونستون"
@exemplarcities["Pacific/Kiritimati"] = "كيريتي ماتي"
@exemplarcities["Pacific/Kosrae"] = "كوسرا"
@exemplarcities["Pacific/Kwajalein"] = "كواجالين"
@exemplarcities["Pacific/Majuro"] = "ماجورو"
@exemplarcities["Pacific/Marquesas"] = "ماركيساس"
@exemplarcities["Pacific/Midway"] = "ميدواي"
@exemplarcities["Pacific/Ponape"] = "باناب"
@exemplarcities["Pacific/Tahiti"] = "تاهيتي"
@exemplarcities["Pacific/Tarawa"] = "تاراوا"
@exemplarcities["Pacific/Truk"] = "ترك"
@exemplarcities["Pacific/Wake"] = "واك"
@exemplarcities["Pacific/Yap"] = "ياب"
@long_generics = {}
@long_standards = {}
@long_daylights = {}
@short_generics = {}
@short_standards = {}
@short_daylights = {}
end
public
attr_reader :hourformat
attr_reader :hoursformat
attr_reader :regionformat
attr_reader :fallbackformat
attr_reader :abbreviationfallback
attr_reader :preferenceordering
attr_reader :singlecountries
attr_reader :exemplarcities
attr_reader :long_generics
attr_reader :long_standards
attr_reader :long_daylights
attr_reader :short_generics
attr_reader :short_standards
attr_reader :short_daylights
| 46.787611 | 217 | 0.693021 |
bb59d1042665ad3593e73b312c89d40f1f79ce0f | 2,299 | require File.dirname(__FILE__) + '/spec_helper'
describe "generating a random base32 secret" do
it "should be 16 characters by default" do
ROTP::Base32.random_base32.length.should == 16
ROTP::Base32.random_base32.should match /[a-z2-7].+/
end
it "should be allow a specific length" do
ROTP::Base32.random_base32(32).length.should == 32
end
end
describe "HOTP example values from the rfc" do
it "should match the RFC" do
# 12345678901234567890 in Bas32
# GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ
hotp = ROTP::HOTP.new("GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ")
hotp.at(0).should ==(755224)
hotp.at(1).should ==(287082)
hotp.at(2).should ==(359152)
hotp.at(3).should ==(969429)
hotp.at(4).should ==(338314)
hotp.at(5).should ==(254676)
hotp.at(6).should ==(287922)
hotp.at(7).should ==(162583)
hotp.at(8).should ==(399871)
hotp.at(9).should ==(520489)
end
it "should verify an OTP and now allow reuse" do
hotp = ROTP::HOTP.new("GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ")
hotp.verify(520489, 9).should be_true
hotp.verify(520489, 10).should be_false
end
it "should output its provisioning URI" do
hotp = ROTP::HOTP.new("wrn3pqx5uqxqvnqr")
hotp.provisioning_uri('mark@percival').should == "otpauth://hotp/mark@percival?secret=wrn3pqx5uqxqvnqr&counter=0"
end
end
describe "TOTP example values from the rfc" do
it "should match the RFC" do
totp = ROTP::TOTP.new("GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ")
totp.at(1111111111).should ==(50471)
totp.at(1234567890).should ==(5924)
totp.at(2000000000).should ==(279037)
end
it "should match the Google Authenticator output" do
totp = ROTP::TOTP.new("wrn3pqx5uqxqvnqr")
Timecop.freeze(Time.at(1297553958)) do
totp.now.should ==(102705)
end
end
it "should validate a time based OTP" do
totp = ROTP::TOTP.new("wrn3pqx5uqxqvnqr")
Timecop.freeze(Time.at(1297553958)) do
totp.verify(102705).should be_true
end
Timecop.freeze(Time.at(1297553958 + 30)) do
totp.verify(102705).should be_false
end
end
it "should output its provisioning URI" do
totp = ROTP::TOTP.new("wrn3pqx5uqxqvnqr")
totp.provisioning_uri('mark@percival').should == "otpauth://totp/mark@percival?secret=wrn3pqx5uqxqvnqr"
end
end
| 32.842857 | 117 | 0.69552 |
918bc45df9b52d5828bc5c6d39e255cbfe4c2cda | 993 | class Qd < Formula
desc "C++/Fortran-90 double-double and quad-double package"
homepage "https://crd.lbl.gov/~dhbailey/mpdist/"
url "https://crd.lbl.gov/~dhbailey/mpdist/qd-2.3.17.tar.gz"
sha256 "c58e276f6fcf5f2f442c525f3de42ea00004734572b29c74028bbda0ad81096d"
bottle do
cellar :any
sha256 "02f2e11cae957f20fee46218a559368ac2c44cfdf6edd042c7430c3f5e3c5227" => :el_capitan
sha256 "bd53e8612f09d48ffcfd9d981717e94ae5c617c08c0e2b0e8250ea085a75dd57" => :yosemite
sha256 "35c7acae6a87c02301cde8c5d76b59bb696d9c3dd04970948c5fdbe3c1c6776e" => :mavericks
end
depends_on fortran: :recommended
def install
args = ["--disable-dependency-tracking", "--enable-shared", "--prefix=#{prefix}"]
args << "--enable-fortran=no" if build.without? :fortran
system "./configure", *args
system "make"
system "make", "check"
system "make", "install"
end
test do
assert_match version.to_s, shell_output("#{bin}/qd-config --configure-args")
end
end
| 34.241379 | 92 | 0.736153 |
d5c96fb8ca17396eb10199303a0e0d46faebff14 | 115 | class HomeController < ApplicationController
before_action :authenticate_user!
def index
render
end
end
| 14.375 | 44 | 0.782609 |
f8c29ab1c017586510dfb1459f33a3c9bb1195a5 | 770 | require "rails_helper"
describe SendVaccinationCenterConfirmationEmailJob do
let!(:partner) { create(:partner) }
let!(:vaccination_center) { create(:vaccination_center, lat: 42, lon: 2, partner: partner) }
subject { SendVaccinationCenterConfirmationEmailJob.new.perform(vaccination_center.id) }
context "vaccination center is confirmed by a volunteer" do
it "sends the email" do
mail = double(:mail)
allow(VaccinationCenterMailer).to receive_message_chain("with.confirmed_vaccination_center_onboarding").and_return(mail)
expect(mail).to receive(:deliver_now)
subject
end
it "set confirmation_mail_sent_at" do
subject
expect(vaccination_center.reload.confirmation_mail_sent_at).to_not be(nil)
end
end
end
| 33.478261 | 126 | 0.754545 |
b9c8d61ac31441c7d05b552460b7b6a538392a03 | 254 | module ApplicationHelper
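  # Returns the full title on a per-page basis, e.g.
  #   full_title("Help") # => "Help | Ruby on Rails Tutorial Sample App"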
def full_title(page_title = '')
base_title = "Ruby on Rails Tutorial Sample App"
if page_title.empty?
base_title
else
page_title + " | " + base_title
end
end
end
| 21.166667 | 56 | 0.566929 |
e96b850747c202bea8bf5f7ef5cb4a9c189954ee | 1,478 | # frozen_string_literal: true
module RatingChgkV2
module Request
include RatingChgkV2::Connection
include RatingChgkV2::JsonHandler
def get(path, client, params = {})
respond_with(
connection(client).get(prepare(path), params),
client
)
end
def post(path, client, params = {})
respond_with(
connection(client).post(prepare(path), custom_dump(params)),
client
)
end
def put(path, client, params = {})
respond_with(
connection(client).put(prepare(path), custom_dump(params)),
client
)
end
def delete(path, client, _params = {})
respond_with(
# Rubocop tries to replace `delete` with `gsub` but that's a different method here!
connection(client).delete(prepare(path)),
client
)
end
private
# Get rid of double slashes in the `path`, leading and trailing slash
def prepare(path)
path.delete_prefix('/').gsub(%r{//}, '/').gsub(%r{/+\z}, '')
end
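    # Decodes the JSON body (an empty body is passed through as-is) and raises a
    # typed error for any 4xx/5xx status.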
def respond_with(response, _client)
body = response.body.empty? ? response.body : custom_load(response.body)
status = response.status
respond_with_error status, body if status.between?(400, 599)
body
end
def respond_with_error(code, body)
raise(RatingChgkV2::Error, body) unless RatingChgkV2::Error::ERRORS.key? code
raise RatingChgkV2::Error::ERRORS[code].from_response(body)
end
end
end
| 25.050847 | 91 | 0.632612 |
1ca5d9f950aa0671388dd62ddcf8952b1d6c2417 | 849 | module TD::Types
# Contains information about the total amount of data that was used to send and receive files.
#
# @attr file_type [TD::Types::FileType, nil] Type of the file the data is part of; pass null if the data isn't
# related to files.
# @attr network_type [TD::Types::NetworkType] Type of the network the data was sent through.
# Call setNetworkType to maintain the actual network type.
# @attr sent_bytes [Integer] Total number of bytes sent.
# @attr received_bytes [Integer] Total number of bytes received.
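  #
  # @example Building an entry by hand (illustrative values; in practice these come from TDLib,
  #   and `some_network_type` stands in for a TD::Types::NetworkType instance):
  #   TD::Types::NetworkStatisticsEntry::File.new(file_type: nil,
  #                                               network_type: some_network_type,
  #                                               sent_bytes: 1024,
  #                                               received_bytes: 4096)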
class NetworkStatisticsEntry::File < NetworkStatisticsEntry
attribute :file_type, TD::Types::FileType.optional.default(nil)
attribute :network_type, TD::Types::NetworkType
attribute :sent_bytes, TD::Types::Coercible::Integer
attribute :received_bytes, TD::Types::Coercible::Integer
end
end
| 49.941176 | 112 | 0.745583 |
9109eb72b99afd67b1951373e2ceff4fd19857ec | 158 | require 'bundler/setup'
require 'goroutine'
chan = Channel.new
go do
loop do
chan << :hello!
sleep 1
end
end
while chan.ready
p chan.pop
end
| 9.875 | 23 | 0.664557 |
1c97a5e914fdd4b5feaedca53f5f6d55548e8810 | 451 | Rails.application.routes.draw do
  root 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'
get '/signup', to: 'users#new'
post '/signup', to: 'users#create'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
resources :users
resources :microposts, only: [:create, :destroy]
end
| 32.214286 | 51 | 0.667406 |
1a0732c14b7a65db979717e8b463ad17873ba595 | 59,506 | # frozen_string_literal: true
require "cases/helper"
require "models/author"
require "models/bird"
require "models/post"
require "models/comment"
require "models/company"
require "models/contract"
require "models/customer"
require "models/developer"
require "models/computer"
require "models/invoice"
require "models/line_item"
require "models/order"
require "models/parrot"
require "models/pirate"
require "models/project"
require "models/ship"
require "models/ship_part"
require "models/tag"
require "models/tagging"
require "models/treasure"
require "models/eye"
require "models/electron"
require "models/molecule"
require "models/member"
require "models/member_detail"
require "models/organization"
require "models/guitar"
require "models/tuning_peg"
require "models/reply"
class TestAutosaveAssociationsInGeneral < ActiveRecord::TestCase
def test_autosave_validation
person = Class.new(ActiveRecord::Base) {
self.table_name = "people"
validate :should_be_cool, on: :create
def self.name; "Person"; end
private
def should_be_cool
unless first_name == "cool"
errors.add :first_name, "not cool"
end
end
}
reference = Class.new(ActiveRecord::Base) {
self.table_name = "references"
def self.name; "Reference"; end
belongs_to :person, autosave: true, anonymous_class: person
}
u = person.create!(first_name: "cool")
u.update!(first_name: "nah") # still valid because validation only applies on 'create'
assert_predicate reference.create!(person: u), :persisted?
end
def test_should_not_add_the_same_callbacks_multiple_times_for_has_one
assert_no_difference_when_adding_callbacks_twice_for Pirate, :ship
end
def test_should_not_add_the_same_callbacks_multiple_times_for_belongs_to
assert_no_difference_when_adding_callbacks_twice_for Ship, :pirate
end
def test_should_not_add_the_same_callbacks_multiple_times_for_has_many
assert_no_difference_when_adding_callbacks_twice_for Pirate, :birds
end
def test_should_not_add_the_same_callbacks_multiple_times_for_has_and_belongs_to_many
assert_no_difference_when_adding_callbacks_twice_for Pirate, :parrots
end
def test_cyclic_autosaves_do_not_add_multiple_validations
ship = ShipWithoutNestedAttributes.new
ship.prisoners.build
assert_not_predicate ship, :valid?
assert_equal 1, ship.errors[:name].length
end
private
def assert_no_difference_when_adding_callbacks_twice_for(model, association_name)
reflection = model.reflect_on_association(association_name)
assert_no_difference "callbacks_for_model(#{model.name}).length" do
model.send(:add_autosave_association_callbacks, reflection)
end
end
def callbacks_for_model(model)
model.instance_variables.grep(/_callbacks$/).flat_map do |ivar|
model.instance_variable_get(ivar)
end
end
end
class TestDefaultAutosaveAssociationOnAHasOneAssociation < ActiveRecord::TestCase
fixtures :companies, :accounts
def test_should_save_parent_but_not_invalid_child
firm = Firm.new(name: "GlobalMegaCorp")
assert_predicate firm, :valid?
firm.build_account_using_primary_key
assert_not_predicate firm.build_account_using_primary_key, :valid?
assert firm.save
assert_not_predicate firm.account_using_primary_key, :persisted?
end
def test_save_fails_for_invalid_has_one
firm = Firm.first
assert_predicate firm, :valid?
firm.build_account
assert_not_predicate firm.account, :valid?
assert_not_predicate firm, :valid?
assert_not firm.save
assert_equal ["is invalid"], firm.errors["account"]
end
def test_save_succeeds_for_invalid_has_one_with_validate_false
firm = Firm.first
assert_predicate firm, :valid?
firm.build_unvalidated_account
assert_not_predicate firm.unvalidated_account, :valid?
assert_predicate firm, :valid?
assert firm.save
end
def test_build_before_child_saved
firm = Firm.find(1)
account = firm.build_account("credit_limit" => 1000)
assert_equal account, firm.account
assert_not_predicate account, :persisted?
assert firm.save
assert_equal account, firm.account
assert_predicate account, :persisted?
end
def test_build_before_either_saved
firm = Firm.new("name" => "GlobalMegaCorp")
firm.account = account = Account.new("credit_limit" => 1000)
assert_equal account, firm.account
assert_not_predicate account, :persisted?
assert firm.save
assert_equal account, firm.account
assert_predicate account, :persisted?
end
def test_assignment_before_parent_saved
firm = Firm.new("name" => "GlobalMegaCorp")
firm.account = a = Account.find(1)
assert_not_predicate firm, :persisted?
assert_equal a, firm.account
assert firm.save
assert_equal a, firm.account
firm.association(:account).reload
assert_equal a, firm.account
end
def test_assignment_before_either_saved
firm = Firm.new("name" => "GlobalMegaCorp")
firm.account = a = Account.new("credit_limit" => 1000)
assert_not_predicate firm, :persisted?
assert_not_predicate a, :persisted?
assert_equal a, firm.account
assert firm.save
assert_predicate firm, :persisted?
assert_predicate a, :persisted?
assert_equal a, firm.account
firm.association(:account).reload
assert_equal a, firm.account
end
def test_not_resaved_when_unchanged
firm = Firm.all.merge!(includes: :account).first
firm.name += "-changed"
assert_queries(1) { firm.save! }
firm = Firm.first
firm.account = Account.first
assert_queries(Firm.partial_writes? ? 0 : 1) { firm.save! }
firm = Firm.first.dup
firm.account = Account.first
assert_queries(2) { firm.save! }
firm = Firm.first.dup
firm.account = Account.first.dup
assert_queries(2) { firm.save! }
end
def test_callbacks_firing_order_on_create
eye = Eye.create(iris_attributes: { color: "honey" })
assert_equal [true, false], eye.after_create_callbacks_stack
end
def test_callbacks_firing_order_on_update
eye = Eye.create(iris_attributes: { color: "honey" })
eye.update(iris_attributes: { color: "green" })
assert_equal [true, false], eye.after_update_callbacks_stack
end
def test_callbacks_firing_order_on_save
eye = Eye.create(iris_attributes: { color: "honey" })
assert_equal [false, false], eye.after_save_callbacks_stack
eye.update(iris_attributes: { color: "blue" })
assert_equal [false, false, false, false], eye.after_save_callbacks_stack
end
end
class TestDefaultAutosaveAssociationOnABelongsToAssociation < ActiveRecord::TestCase
fixtures :companies, :posts, :tags, :taggings
def test_should_save_parent_but_not_invalid_child
client = Client.new(name: "Joe (the Plumber)")
assert_predicate client, :valid?
client.build_firm
assert_not_predicate client.firm, :valid?
assert client.save
assert_not_predicate client.firm, :persisted?
end
def test_save_fails_for_invalid_belongs_to
# Oracle saves empty string as NULL therefore :message changed to one space
assert log = AuditLog.create(developer_id: 0, message: " ")
log.developer = Developer.new
assert_not_predicate log.developer, :valid?
assert_not_predicate log, :valid?
assert_not log.save
assert_equal ["is invalid"], log.errors["developer"]
end
def test_save_succeeds_for_invalid_belongs_to_with_validate_false
# Oracle saves empty string as NULL therefore :message changed to one space
assert log = AuditLog.create(developer_id: 0, message: " ")
log.unvalidated_developer = Developer.new
assert_not_predicate log.unvalidated_developer, :valid?
assert_predicate log, :valid?
assert log.save
end
def test_assignment_before_parent_saved
client = Client.first
apple = Firm.new("name" => "Apple")
client.firm = apple
assert_equal apple, client.firm
assert_not_predicate apple, :persisted?
assert client.save
assert apple.save
assert_predicate apple, :persisted?
assert_equal apple, client.firm
client.association(:firm).reload
assert_equal apple, client.firm
end
def test_assignment_before_either_saved
final_cut = Client.new("name" => "Final Cut")
apple = Firm.new("name" => "Apple")
final_cut.firm = apple
assert_not_predicate final_cut, :persisted?
assert_not_predicate apple, :persisted?
assert final_cut.save
assert_predicate final_cut, :persisted?
assert_predicate apple, :persisted?
assert_equal apple, final_cut.firm
final_cut.association(:firm).reload
assert_equal apple, final_cut.firm
end
def test_store_two_association_with_one_save
num_orders = Order.count
num_customers = Customer.count
order = Order.new
customer1 = order.billing = Customer.new
customer2 = order.shipping = Customer.new
assert order.save
assert_equal customer1, order.billing
assert_equal customer2, order.shipping
order.reload
assert_equal customer1, order.billing
assert_equal customer2, order.shipping
assert_equal num_orders + 1, Order.count
assert_equal num_customers + 2, Customer.count
end
def test_store_association_in_two_relations_with_one_save
num_orders = Order.count
num_customers = Customer.count
order = Order.new
customer = order.billing = order.shipping = Customer.new
assert order.save
assert_equal customer, order.billing
assert_equal customer, order.shipping
order.reload
assert_equal customer, order.billing
assert_equal customer, order.shipping
assert_equal num_orders + 1, Order.count
assert_equal num_customers + 1, Customer.count
end
def test_store_association_in_two_relations_with_one_save_in_existing_object
num_orders = Order.count
num_customers = Customer.count
order = Order.create
customer = order.billing = order.shipping = Customer.new
assert order.save
assert_equal customer, order.billing
assert_equal customer, order.shipping
order.reload
assert_equal customer, order.billing
assert_equal customer, order.shipping
assert_equal num_orders + 1, Order.count
assert_equal num_customers + 1, Customer.count
end
def test_store_association_in_two_relations_with_one_save_in_existing_object_with_values
num_orders = Order.count
num_customers = Customer.count
order = Order.create
customer = order.billing = order.shipping = Customer.new
assert order.save
assert_equal customer, order.billing
assert_equal customer, order.shipping
order.reload
customer = order.billing = order.shipping = Customer.new
assert order.save
order.reload
assert_equal customer, order.billing
assert_equal customer, order.shipping
assert_equal num_orders + 1, Order.count
assert_equal num_customers + 2, Customer.count
end
def test_store_association_with_a_polymorphic_relationship
num_tagging = Tagging.count
tags(:misc).create_tagging(taggable: posts(:thinking))
assert_equal num_tagging + 1, Tagging.count
end
def test_build_and_then_save_parent_should_not_reload_target
client = Client.first
apple = client.build_firm(name: "Apple")
client.save!
assert_no_queries { assert_equal apple, client.firm }
end
def test_validation_does_not_validate_stale_association_target
valid_developer = Developer.create!(name: "Dude", salary: 50_000)
invalid_developer = Developer.new()
auditlog = AuditLog.new(message: "foo")
auditlog.developer = invalid_developer
auditlog.developer_id = valid_developer.id
assert_predicate auditlog, :valid?
end
end
class TestDefaultAutosaveAssociationOnAHasManyAssociationWithAcceptsNestedAttributes < ActiveRecord::TestCase
def test_invalid_adding_with_nested_attributes
molecule = Molecule.new
valid_electron = Electron.new(name: "electron")
invalid_electron = Electron.new
molecule.electrons = [valid_electron, invalid_electron]
molecule.save
assert_not_predicate invalid_electron, :valid?
assert_predicate valid_electron, :valid?
assert_not molecule.persisted?, "Molecule should not be persisted when its electrons are invalid"
end
def test_errors_should_be_indexed_when_passed_as_array
guitar = Guitar.new
tuning_peg_valid = TuningPeg.new
tuning_peg_valid.pitch = 440.0
tuning_peg_invalid = TuningPeg.new
guitar.tuning_pegs = [tuning_peg_valid, tuning_peg_invalid]
assert_not_predicate tuning_peg_invalid, :valid?
assert_predicate tuning_peg_valid, :valid?
assert_not_predicate guitar, :valid?
assert_equal ["is not a number"], guitar.errors["tuning_pegs[1].pitch"]
assert_not_equal ["is not a number"], guitar.errors["tuning_pegs.pitch"]
end
def test_errors_should_be_indexed_when_global_flag_is_set
old_attribute_config = ActiveRecord::Base.index_nested_attribute_errors
ActiveRecord::Base.index_nested_attribute_errors = true
molecule = Molecule.new
valid_electron = Electron.new(name: "electron")
invalid_electron = Electron.new
molecule.electrons = [valid_electron, invalid_electron]
assert_not_predicate invalid_electron, :valid?
assert_predicate valid_electron, :valid?
assert_not_predicate molecule, :valid?
assert_equal ["can't be blank"], molecule.errors["electrons[1].name"]
assert_not_equal ["can't be blank"], molecule.errors["electrons.name"]
ensure
ActiveRecord::Base.index_nested_attribute_errors = old_attribute_config
end
def test_errors_details_should_be_set
molecule = Molecule.new
valid_electron = Electron.new(name: "electron")
invalid_electron = Electron.new
molecule.electrons = [valid_electron, invalid_electron]
assert_not_predicate invalid_electron, :valid?
assert_predicate valid_electron, :valid?
assert_not_predicate molecule, :valid?
assert_equal [{ error: :blank }], molecule.errors.details[:"electrons.name"]
end
def test_errors_details_should_be_indexed_when_passed_as_array
guitar = Guitar.new
tuning_peg_valid = TuningPeg.new
tuning_peg_valid.pitch = 440.0
tuning_peg_invalid = TuningPeg.new
guitar.tuning_pegs = [tuning_peg_valid, tuning_peg_invalid]
assert_not_predicate tuning_peg_invalid, :valid?
assert_predicate tuning_peg_valid, :valid?
assert_not_predicate guitar, :valid?
assert_equal [{ error: :not_a_number, value: nil }], guitar.errors.details[:"tuning_pegs[1].pitch"]
assert_equal [], guitar.errors.details[:"tuning_pegs.pitch"]
end
def test_errors_details_should_be_indexed_when_global_flag_is_set
old_attribute_config = ActiveRecord::Base.index_nested_attribute_errors
ActiveRecord::Base.index_nested_attribute_errors = true
molecule = Molecule.new
valid_electron = Electron.new(name: "electron")
invalid_electron = Electron.new
molecule.electrons = [valid_electron, invalid_electron]
assert_not_predicate invalid_electron, :valid?
assert_predicate valid_electron, :valid?
assert_not_predicate molecule, :valid?
assert_equal [{ error: :blank }], molecule.errors.details[:"electrons[1].name"]
assert_equal [], molecule.errors.details[:"electrons.name"]
ensure
ActiveRecord::Base.index_nested_attribute_errors = old_attribute_config
end
def test_valid_adding_with_nested_attributes
molecule = Molecule.new
valid_electron = Electron.new(name: "electron")
molecule.electrons = [valid_electron]
molecule.save
assert_predicate valid_electron, :valid?
assert_predicate molecule, :persisted?
assert_equal 1, molecule.electrons.count
end
end
class TestDefaultAutosaveAssociationOnAHasManyAssociation < ActiveRecord::TestCase
fixtures :companies, :developers
def test_invalid_adding
firm = Firm.find(1)
assert_not (firm.clients_of_firm << c = Client.new)
assert_not_predicate c, :persisted?
assert_not_predicate firm, :valid?
assert_not firm.save
assert_not_predicate c, :persisted?
end
def test_invalid_adding_before_save
new_firm = Firm.new("name" => "A New Firm, Inc")
new_firm.clients_of_firm.concat([c = Client.new, Client.new("name" => "Apple")])
assert_not_predicate c, :persisted?
assert_not_predicate c, :valid?
assert_not_predicate new_firm, :valid?
assert_not new_firm.save
assert_not_predicate c, :persisted?
assert_not_predicate new_firm, :persisted?
end
def test_adding_unsavable_association
new_firm = Firm.new("name" => "A New Firm, Inc")
client = new_firm.clients.new("name" => "Apple")
client.throw_on_save = true
assert_predicate client, :valid?
assert_predicate new_firm, :valid?
assert_not new_firm.save
assert_not_predicate new_firm, :persisted?
assert_not_predicate client, :persisted?
end
def test_invalid_adding_with_validate_false
firm = Firm.first
client = Client.new
firm.unvalidated_clients_of_firm << client
assert_predicate firm, :valid?
assert_not_predicate client, :valid?
assert firm.save
assert_not_predicate client, :persisted?
end
def test_valid_adding_with_validate_false
no_of_clients = Client.count
firm = Firm.first
client = Client.new("name" => "Apple")
assert_predicate firm, :valid?
assert_predicate client, :valid?
assert_not_predicate client, :persisted?
firm.unvalidated_clients_of_firm << client
assert firm.save
assert_predicate client, :persisted?
assert_equal no_of_clients + 1, Client.count
end
def test_parent_should_save_children_record_with_foreign_key_validation_set_in_before_save_callback
company = NewlyContractedCompany.new(name: "test")
assert company.save
assert_not_empty company.reload.new_contracts
end
def test_parent_should_not_get_saved_with_duplicate_children_records
assert_no_difference "Reply.count" do
assert_no_difference "SillyUniqueReply.count" do
reply = Reply.new
reply.silly_unique_replies.build([
{ content: "Best content" },
{ content: "Best content" }
])
assert_not reply.save
assert_equal ["is invalid"], reply.errors[:silly_unique_replies]
assert_empty reply.silly_unique_replies.first.errors
assert_equal(
["has already been taken"],
reply.silly_unique_replies.last.errors[:content]
)
end
end
end
def test_invalid_build
new_client = companies(:first_firm).clients_of_firm.build
assert_not_predicate new_client, :persisted?
assert_not_predicate new_client, :valid?
assert_equal new_client, companies(:first_firm).clients_of_firm.last
assert_not companies(:first_firm).save
assert_not_predicate new_client, :persisted?
assert_equal 2, companies(:first_firm).clients_of_firm.reload.size
end
def test_adding_before_save
no_of_firms = Firm.count
no_of_clients = Client.count
new_firm = Firm.new("name" => "A New Firm, Inc")
c = Client.new("name" => "Apple")
new_firm.clients_of_firm.push Client.new("name" => "Natural Company")
assert_equal 1, new_firm.clients_of_firm.size
new_firm.clients_of_firm << c
assert_equal 2, new_firm.clients_of_firm.size
assert_equal no_of_firms, Firm.count # Firm was not saved to database.
assert_equal no_of_clients, Client.count # Clients were not saved to database.
assert new_firm.save
assert_predicate new_firm, :persisted?
assert_predicate c, :persisted?
assert_equal new_firm, c.firm
assert_equal no_of_firms + 1, Firm.count # Firm was saved to database.
assert_equal no_of_clients + 2, Client.count # Clients were saved to database.
assert_equal 2, new_firm.clients_of_firm.size
assert_equal 2, new_firm.clients_of_firm.reload.size
end
def test_assign_ids
firm = Firm.new("name" => "Apple")
firm.client_ids = [companies(:first_client).id, companies(:second_client).id]
firm.save
firm.reload
assert_equal 2, firm.clients.length
assert_includes firm.clients, companies(:second_client)
end
def test_assign_ids_for_through_a_belongs_to
firm = Firm.new("name" => "Apple")
firm.developer_ids = [developers(:david).id, developers(:jamis).id]
firm.save
firm.reload
assert_equal 2, firm.developers.length
assert_includes firm.developers, developers(:david)
end
def test_build_before_save
company = companies(:first_firm)
# Load schema information so we don't query below if running just this test.
Client.define_attribute_methods
new_client = assert_no_queries { company.clients_of_firm.build("name" => "Another Client") }
assert_not_predicate company.clients_of_firm, :loaded?
company.name += "-changed"
assert_queries(2) { assert company.save }
assert_predicate new_client, :persisted?
assert_equal 3, company.clients_of_firm.reload.size
end
def test_build_many_before_save
company = companies(:first_firm)
# Load schema information so we don't query below if running just this test.
Client.define_attribute_methods
assert_no_queries { company.clients_of_firm.build([{ "name" => "Another Client" }, { "name" => "Another Client II" }]) }
company.name += "-changed"
assert_queries(3) { assert company.save }
assert_equal 4, company.clients_of_firm.reload.size
end
def test_build_via_block_before_save
company = companies(:first_firm)
# Load schema information so we don't query below if running just this test.
Client.define_attribute_methods
new_client = assert_no_queries { company.clients_of_firm.build { |client| client.name = "Another Client" } }
assert_not_predicate company.clients_of_firm, :loaded?
company.name += "-changed"
assert_queries(2) { assert company.save }
assert_predicate new_client, :persisted?
assert_equal 3, company.clients_of_firm.reload.size
end
def test_build_many_via_block_before_save
company = companies(:first_firm)
# Load schema information so we don't query below if running just this test.
Client.define_attribute_methods
assert_no_queries do
company.clients_of_firm.build([{ "name" => "Another Client" }, { "name" => "Another Client II" }]) do |client|
client.name = "changed"
end
end
company.name += "-changed"
assert_queries(3) { assert company.save }
assert_equal 4, company.clients_of_firm.reload.size
end
def test_replace_on_new_object
firm = Firm.new("name" => "New Firm")
firm.clients = [companies(:second_client), Client.new("name" => "New Client")]
assert firm.save
firm.reload
assert_equal 2, firm.clients.length
assert_includes firm.clients, Client.find_by_name("New Client")
end
end
class TestDefaultAutosaveAssociationOnNewRecord < ActiveRecord::TestCase
def test_autosave_new_record_on_belongs_to_can_be_disabled_per_relationship
new_account = Account.new("credit_limit" => 1000)
new_firm = Firm.new("name" => "some firm")
assert_not_predicate new_firm, :persisted?
new_account.firm = new_firm
new_account.save!
assert_predicate new_firm, :persisted?
new_account = Account.new("credit_limit" => 1000)
new_autosaved_firm = Firm.new("name" => "some firm")
assert_not_predicate new_autosaved_firm, :persisted?
new_account.unautosaved_firm = new_autosaved_firm
new_account.save!
assert_not_predicate new_autosaved_firm, :persisted?
end
def test_autosave_new_record_on_has_one_can_be_disabled_per_relationship
firm = Firm.new("name" => "some firm")
account = Account.new("credit_limit" => 1000)
assert_not_predicate account, :persisted?
firm.account = account
firm.save!
assert_predicate account, :persisted?
firm = Firm.new("name" => "some firm")
account = Account.new("credit_limit" => 1000)
firm.unautosaved_account = account
assert_not_predicate account, :persisted?
firm.unautosaved_account = account
firm.save!
assert_not_predicate account, :persisted?
end
def test_autosave_new_record_on_has_many_can_be_disabled_per_relationship
firm = Firm.new("name" => "some firm")
account = Account.new("credit_limit" => 1000)
assert_not_predicate account, :persisted?
firm.accounts << account
firm.save!
assert_predicate account, :persisted?
firm = Firm.new("name" => "some firm")
account = Account.new("credit_limit" => 1000)
assert_not_predicate account, :persisted?
firm.unautosaved_accounts << account
firm.save!
assert_not_predicate account, :persisted?
end
def test_autosave_new_record_with_after_create_callback
post = PostWithAfterCreateCallback.new(title: "Captain Murphy", body: "is back")
post.comments.build(body: "foo")
post.save!
assert_not_nil post.author_id
end
end
class TestDestroyAsPartOfAutosaveAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false
setup do
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@ship = @pirate.create_ship(name: "Nights Dirty Lightning")
end
teardown do
# We are running without transactional tests and need to clean up.
Bird.delete_all
Parrot.delete_all
@ship.delete
@pirate.delete
end
# reload
def test_a_marked_for_destruction_record_should_not_be_marked_after_reload
@pirate.mark_for_destruction
@pirate.ship.mark_for_destruction
assert_not_predicate @pirate.reload, :marked_for_destruction?
assert_not_predicate @pirate.ship.reload, :marked_for_destruction?
end
# has_one
def test_should_destroy_a_child_association_as_part_of_the_save_transaction_if_it_was_marked_for_destruction
assert_not_predicate @pirate.ship, :marked_for_destruction?
@pirate.ship.mark_for_destruction
id = @pirate.ship.id
assert_predicate @pirate.ship, :marked_for_destruction?
assert Ship.find_by_id(id)
@pirate.save
assert_nil @pirate.reload.ship
assert_nil Ship.find_by_id(id)
end
def test_should_skip_validation_on_a_child_association_if_marked_for_destruction
@pirate.ship.name = ""
assert_not_predicate @pirate, :valid?
@pirate.ship.mark_for_destruction
assert_not_called(@pirate.ship, :valid?) do
assert_difference("Ship.count", -1) { @pirate.save! }
end
end
def test_a_child_marked_for_destruction_should_not_be_destroyed_twice
@pirate.ship.mark_for_destruction
assert @pirate.save
class << @pirate.ship
def destroy; raise "Should not be called" end
end
assert @pirate.save
end
def test_should_rollback_destructions_if_an_exception_occurred_while_saving_a_child
# Stub the save method of the @pirate.ship instance to destroy and then raise an exception
class << @pirate.ship
def save(*args)
super
destroy
raise "Oh noes!"
end
end
@ship.pirate.catchphrase = "Changed Catchphrase"
@ship.name_will_change!
assert_raise(RuntimeError) { assert_not @pirate.save }
assert_not_nil @pirate.reload.ship
end
def test_should_save_changed_has_one_changed_object_if_child_is_saved
@pirate.ship.name = "NewName"
assert @pirate.save
assert_equal "NewName", @pirate.ship.reload.name
end
def test_should_not_save_changed_has_one_unchanged_object_if_child_is_saved
assert_not_called(@pirate.ship, :save) do
assert @pirate.save
end
end
# belongs_to
def test_should_destroy_a_parent_association_as_part_of_the_save_transaction_if_it_was_marked_for_destruction
assert_not_predicate @ship.pirate, :marked_for_destruction?
@ship.pirate.mark_for_destruction
id = @ship.pirate.id
assert_predicate @ship.pirate, :marked_for_destruction?
assert Pirate.find_by_id(id)
@ship.save
assert_nil @ship.reload.pirate
assert_nil Pirate.find_by_id(id)
end
def test_should_skip_validation_on_a_parent_association_if_marked_for_destruction
@ship.pirate.catchphrase = ""
assert_not_predicate @ship, :valid?
@ship.pirate.mark_for_destruction
assert_not_called(@ship.pirate, :valid?) do
assert_difference("Pirate.count", -1) { @ship.save! }
end
end
def test_a_parent_marked_for_destruction_should_not_be_destroyed_twice
@ship.pirate.mark_for_destruction
assert @ship.save
class << @ship.pirate
def destroy; raise "Should not be called" end
end
assert @ship.save
end
def test_should_rollback_destructions_if_an_exception_occurred_while_saving_a_parent
# Stub the save method of the @ship.pirate instance to destroy and then raise an exception
class << @ship.pirate
def save(*args)
super
destroy
raise "Oh noes!"
end
end
@ship.pirate.catchphrase = "Changed Catchphrase"
assert_raise(RuntimeError) { assert_not @ship.save }
assert_not_nil @ship.reload.pirate
end
def test_should_save_changed_child_objects_if_parent_is_saved
@pirate = @ship.create_pirate(catchphrase: "Don' botharrr talkin' like one, savvy?")
@parrot = @pirate.parrots.create!(name: "Posideons Killer")
@parrot.name = "NewName"
@ship.save
assert_equal "NewName", @parrot.reload.name
end
def test_should_destroy_has_many_as_part_of_the_save_transaction_if_they_were_marked_for_destruction
2.times { |i| @pirate.birds.create!(name: "birds_#{i}") }
assert_not @pirate.birds.any?(&:marked_for_destruction?)
@pirate.birds.each(&:mark_for_destruction)
klass = @pirate.birds.first.class
ids = @pirate.birds.map(&:id)
assert @pirate.birds.all?(&:marked_for_destruction?)
ids.each { |id| assert klass.find_by_id(id) }
@pirate.save
assert_empty @pirate.reload.birds
ids.each { |id| assert_nil klass.find_by_id(id) }
end
def test_should_not_resave_destroyed_association
@pirate.birds.create!(name: :parrot)
@pirate.birds.first.destroy
@pirate.save!
assert_empty @pirate.reload.birds
end
def test_should_skip_validation_on_has_many_if_marked_for_destruction
2.times { |i| @pirate.birds.create!(name: "birds_#{i}") }
@pirate.birds.each { |bird| bird.name = "" }
assert_not_predicate @pirate, :valid?
@pirate.birds.each(&:mark_for_destruction)
assert_not_called(@pirate.birds.first, :valid?) do
assert_not_called(@pirate.birds.last, :valid?) do
assert_difference("Bird.count", -2) { @pirate.save! }
end
end
end
def test_should_skip_validation_on_has_many_if_destroyed
@pirate.birds.create!(name: "birds_1")
@pirate.birds.each { |bird| bird.name = "" }
assert_not_predicate @pirate, :valid?
@pirate.birds.each(&:destroy)
assert_predicate @pirate, :valid?
end
def test_a_child_marked_for_destruction_should_not_be_destroyed_twice_while_saving_has_many
@pirate.birds.create!(name: "birds_1")
@pirate.birds.each(&:mark_for_destruction)
assert @pirate.save
@pirate.birds.each do |bird|
assert_not_called(bird, :destroy) do
assert @pirate.save
end
end
end
def test_should_rollback_destructions_if_an_exception_occurred_while_saving_has_many
2.times { |i| @pirate.birds.create!(name: "birds_#{i}") }
before = @pirate.birds.map { |c| c.mark_for_destruction ; c }
# Stub the destroy method of the second child to raise an exception
class << before.last
def destroy(*args)
super
raise "Oh noes!"
end
end
assert_raise(RuntimeError) { assert_not @pirate.save }
assert_equal before, @pirate.reload.birds
end
def test_when_new_record_a_child_marked_for_destruction_should_not_affect_other_records_from_saving
@pirate = @ship.build_pirate(catchphrase: "Arr' now I shall keep me eye on you matey!") # new record
3.times { |i| @pirate.birds.build(name: "birds_#{i}") }
@pirate.birds[1].mark_for_destruction
@pirate.save!
assert_equal 2, @pirate.birds.reload.length
end
def test_should_save_new_record_that_has_same_value_as_existing_record_marked_for_destruction_on_field_that_has_unique_index
Bird.connection.add_index :birds, :name, unique: true
3.times { |i| @pirate.birds.create(name: "unique_birds_#{i}") }
@pirate.birds[0].mark_for_destruction
@pirate.birds.build(name: @pirate.birds[0].name)
@pirate.save!
assert_equal 3, @pirate.birds.reload.length
ensure
Bird.connection.remove_index :birds, column: :name
end
# Tests for add and remove callbacks on association collections.
%w{ method proc }.each do |callback_type|
define_method("test_should_run_add_callback_#{callback_type}s_for_has_many") do
association_name_with_callbacks = "birds_with_#{callback_type}_callbacks"
pirate = Pirate.new(catchphrase: "Arr")
pirate.send(association_name_with_callbacks).build(name: "Crowe the One-Eyed")
expected = [
"before_adding_#{callback_type}_bird_<new>",
"after_adding_#{callback_type}_bird_<new>"
]
assert_equal expected, pirate.ship_log
end
define_method("test_should_run_remove_callback_#{callback_type}s_for_has_many") do
association_name_with_callbacks = "birds_with_#{callback_type}_callbacks"
@pirate.send(association_name_with_callbacks).create!(name: "Crowe the One-Eyed")
@pirate.send(association_name_with_callbacks).each(&:mark_for_destruction)
child_id = @pirate.send(association_name_with_callbacks).first.id
@pirate.ship_log.clear
@pirate.save
expected = [
"before_removing_#{callback_type}_bird_#{child_id}",
"after_removing_#{callback_type}_bird_#{child_id}"
]
assert_equal expected, @pirate.ship_log
end
end
def test_should_destroy_habtm_as_part_of_the_save_transaction_if_they_were_marked_for_destruction
2.times { |i| @pirate.parrots.create!(name: "parrots_#{i}") }
assert_not @pirate.parrots.any?(&:marked_for_destruction?)
@pirate.parrots.each(&:mark_for_destruction)
assert_no_difference "Parrot.count" do
@pirate.save
end
assert_empty @pirate.reload.parrots
join_records = Pirate.connection.select_all("SELECT * FROM parrots_pirates WHERE pirate_id = #{@pirate.id}")
assert_empty join_records
end
def test_should_skip_validation_on_habtm_if_marked_for_destruction
2.times { |i| @pirate.parrots.create!(name: "parrots_#{i}") }
@pirate.parrots.each { |parrot| parrot.name = "" }
assert_not_predicate @pirate, :valid?
@pirate.parrots.each { |parrot| parrot.mark_for_destruction }
assert_not_called(@pirate.parrots.first, :valid?) do
assert_not_called(@pirate.parrots.last, :valid?) do
@pirate.save!
end
end
assert_empty @pirate.reload.parrots
end
def test_should_skip_validation_on_habtm_if_destroyed
@pirate.parrots.create!(name: "parrots_1")
@pirate.parrots.each { |parrot| parrot.name = "" }
assert_not_predicate @pirate, :valid?
@pirate.parrots.each(&:destroy)
assert_predicate @pirate, :valid?
end
def test_a_child_marked_for_destruction_should_not_be_destroyed_twice_while_saving_habtm
@pirate.parrots.create!(name: "parrots_1")
@pirate.parrots.each(&:mark_for_destruction)
assert @pirate.save
Pirate.transaction do
assert_no_queries do
assert @pirate.save
end
end
end
def test_should_rollback_destructions_if_an_exception_occurred_while_saving_habtm
2.times { |i| @pirate.parrots.create!(name: "parrots_#{i}") }
before = @pirate.parrots.map { |c| c.mark_for_destruction ; c }
class << @pirate.association(:parrots)
def destroy(*args)
super
raise "Oh noes!"
end
end
assert_raise(RuntimeError) { assert_not @pirate.save }
assert_equal before, @pirate.reload.parrots
end
# Tests for add and remove callbacks on association collections.
%w{ method proc }.each do |callback_type|
define_method("test_should_run_add_callback_#{callback_type}s_for_habtm") do
association_name_with_callbacks = "parrots_with_#{callback_type}_callbacks"
pirate = Pirate.new(catchphrase: "Arr")
pirate.send(association_name_with_callbacks).build(name: "Crowe the One-Eyed")
expected = [
"before_adding_#{callback_type}_parrot_<new>",
"after_adding_#{callback_type}_parrot_<new>"
]
assert_equal expected, pirate.ship_log
end
define_method("test_should_run_remove_callback_#{callback_type}s_for_habtm") do
association_name_with_callbacks = "parrots_with_#{callback_type}_callbacks"
@pirate.send(association_name_with_callbacks).create!(name: "Crowe the One-Eyed")
@pirate.send(association_name_with_callbacks).each(&:mark_for_destruction)
child_id = @pirate.send(association_name_with_callbacks).first.id
@pirate.ship_log.clear
@pirate.save
expected = [
"before_removing_#{callback_type}_parrot_#{child_id}",
"after_removing_#{callback_type}_parrot_#{child_id}"
]
assert_equal expected, @pirate.ship_log
end
end
end
class TestAutosaveAssociationOnAHasOneAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@ship = @pirate.create_ship(name: "Nights Dirty Lightning")
end
def test_should_still_work_without_an_associated_model
@ship.destroy
@pirate.reload.catchphrase = "Arr"
@pirate.save
assert_equal "Arr", @pirate.reload.catchphrase
end
def test_should_automatically_save_the_associated_model
@pirate.ship.name = "The Vile Insanity"
@pirate.save
assert_equal "The Vile Insanity", @pirate.reload.ship.name
end
def test_changed_for_autosave_should_handle_cycles
@ship.pirate = @pirate
assert_no_queries { @ship.save! }
@parrot = @pirate.parrots.create(name: "some_name")
@parrot.name = "changed_name"
assert_queries(1) { @ship.save! }
assert_no_queries { @ship.save! }
end
def test_should_automatically_save_bang_the_associated_model
@pirate.ship.name = "The Vile Insanity"
@pirate.save!
assert_equal "The Vile Insanity", @pirate.reload.ship.name
end
def test_should_automatically_validate_the_associated_model
@pirate.ship.name = ""
assert_predicate @pirate, :invalid?
assert_predicate @pirate.errors[:"ship.name"], :any?
end
def test_should_merge_errors_on_the_associated_models_onto_the_parent_even_if_it_is_not_valid
@pirate.ship.name = nil
@pirate.catchphrase = nil
assert_predicate @pirate, :invalid?
assert_predicate @pirate.errors[:"ship.name"], :any?
assert_predicate @pirate.errors[:catchphrase], :any?
end
def test_should_not_ignore_different_error_messages_on_the_same_attribute
old_validators = Ship._validators.deep_dup
old_callbacks = Ship._validate_callbacks.deep_dup
Ship.validates_format_of :name, with: /\w/
@pirate.ship.name = ""
@pirate.catchphrase = nil
assert_predicate @pirate, :invalid?
assert_equal ["can't be blank", "is invalid"], @pirate.errors[:"ship.name"]
ensure
Ship._validators = old_validators if old_validators
Ship._validate_callbacks = old_callbacks if old_callbacks
end
def test_should_still_allow_to_bypass_validations_on_the_associated_model
@pirate.catchphrase = ""
@pirate.ship.name = ""
@pirate.save(validate: false)
# Oracle saves empty string as NULL
if current_adapter?(:OracleAdapter)
assert_equal [nil, nil], [@pirate.reload.catchphrase, @pirate.ship.name]
else
assert_equal ["", ""], [@pirate.reload.catchphrase, @pirate.ship.name]
end
end
def test_should_allow_to_bypass_validations_on_associated_models_at_any_depth
2.times { |i| @pirate.ship.parts.create!(name: "part #{i}") }
@pirate.catchphrase = ""
@pirate.ship.name = ""
@pirate.ship.parts.each { |part| part.name = "" }
@pirate.save(validate: false)
values = [@pirate.reload.catchphrase, @pirate.ship.name, *@pirate.ship.parts.map(&:name)]
# Oracle saves empty string as NULL
if current_adapter?(:OracleAdapter)
assert_equal [nil, nil, nil, nil], values
else
assert_equal ["", "", "", ""], values
end
end
def test_should_still_raise_an_ActiveRecordRecord_Invalid_exception_if_we_want_that
@pirate.ship.name = ""
assert_raise(ActiveRecord::RecordInvalid) do
@pirate.save!
end
end
def test_should_not_save_and_return_false_if_a_callback_cancelled_saving
pirate = Pirate.new(catchphrase: "Arr")
ship = pirate.build_ship(name: "The Vile Insanity")
ship.cancel_save_from_callback = true
assert_no_difference "Pirate.count" do
assert_no_difference "Ship.count" do
assert_not pirate.save
end
end
end
def test_should_rollback_any_changes_if_an_exception_occurred_while_saving
before = [@pirate.catchphrase, @pirate.ship.name]
@pirate.catchphrase = "Arr"
@pirate.ship.name = "The Vile Insanity"
# Stub the save method of the @pirate.ship instance to raise an exception
class << @pirate.ship
def save(*args)
super
raise "Oh noes!"
end
end
assert_raise(RuntimeError) { assert_not @pirate.save }
assert_equal before, [@pirate.reload.catchphrase, @pirate.ship.name]
end
def test_should_not_load_the_associated_model
assert_queries(1) { @pirate.catchphrase = "Arr"; @pirate.save! }
end
def test_mark_for_destruction_is_ignored_without_autosave_true
ship = ShipWithoutNestedAttributes.new(name: "The Black Flag")
ship.parts.build.mark_for_destruction
assert_not_predicate ship, :valid?
end
end
class TestAutosaveAssociationOnAHasOneThroughAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def create_member_with_organization
organization = Organization.create
member = Member.create
MemberDetail.create(organization: organization, member: member)
member
end
def test_should_not_save_has_one_through_model
member = create_member_with_organization
class << member.organization
def save(*args)
super
raise "Oh noes!"
end
end
assert_nothing_raised { member.save }
end
def create_author_with_post_with_comment
Author.create! name: "David" # make comment_id not match author_id
author = Author.create! name: "Sergiy"
post = Post.create! author: author, title: "foo", body: "bar"
Comment.create! post: post, body: "cool comment"
author
end
def test_should_not_save_reversed_has_one_through_model
author = create_author_with_post_with_comment
class << author.comment_on_first_post
def save(*args)
super
raise "Oh noes!"
end
end
assert_nothing_raised { author.save }
end
end
class TestAutosaveAssociationOnABelongsToAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@ship = Ship.create(name: "Nights Dirty Lightning")
@pirate = @ship.create_pirate(catchphrase: "Don' botharrr talkin' like one, savvy?")
end
def test_should_still_work_without_an_associated_model
@pirate.destroy
@ship.reload.name = "The Vile Insanity"
@ship.save
assert_equal "The Vile Insanity", @ship.reload.name
end
def test_should_automatically_save_the_associated_model
@ship.pirate.catchphrase = "Arr"
@ship.save
assert_equal "Arr", @ship.reload.pirate.catchphrase
end
def test_should_automatically_save_bang_the_associated_model
@ship.pirate.catchphrase = "Arr"
@ship.save!
assert_equal "Arr", @ship.reload.pirate.catchphrase
end
def test_should_automatically_validate_the_associated_model
@ship.pirate.catchphrase = ""
assert_predicate @ship, :invalid?
assert_predicate @ship.errors[:"pirate.catchphrase"], :any?
end
def test_should_merge_errors_on_the_associated_model_onto_the_parent_even_if_it_is_not_valid
@ship.name = nil
@ship.pirate.catchphrase = nil
assert_predicate @ship, :invalid?
assert_predicate @ship.errors[:name], :any?
assert_predicate @ship.errors[:"pirate.catchphrase"], :any?
end
def test_should_still_allow_to_bypass_validations_on_the_associated_model
@ship.pirate.catchphrase = ""
@ship.name = ""
@ship.save(validate: false)
# Oracle saves empty string as NULL
if current_adapter?(:OracleAdapter)
assert_equal [nil, nil], [@ship.reload.name, @ship.pirate.catchphrase]
else
assert_equal ["", ""], [@ship.reload.name, @ship.pirate.catchphrase]
end
end
def test_should_still_raise_an_ActiveRecordRecord_Invalid_exception_if_we_want_that
@ship.pirate.catchphrase = ""
assert_raise(ActiveRecord::RecordInvalid) do
@ship.save!
end
end
def test_should_not_save_and_return_false_if_a_callback_cancelled_saving
ship = Ship.new(name: "The Vile Insanity")
pirate = ship.build_pirate(catchphrase: "Arr")
pirate.cancel_save_from_callback = true
assert_no_difference "Ship.count" do
assert_no_difference "Pirate.count" do
assert_not ship.save
end
end
end
def test_should_rollback_any_changes_if_an_exception_occurred_while_saving
before = [@ship.pirate.catchphrase, @ship.name]
@ship.pirate.catchphrase = "Arr"
@ship.name = "The Vile Insanity"
# Stub the save method of the @ship.pirate instance to raise an exception
class << @ship.pirate
def save(*args)
super
raise "Oh noes!"
end
end
assert_raise(RuntimeError) { assert_not @ship.save }
assert_equal before, [@ship.pirate.reload.catchphrase, @ship.reload.name]
end
def test_should_not_load_the_associated_model
assert_queries(1) { @ship.name = "The Vile Insanity"; @ship.save! }
end
end
module AutosaveAssociationOnACollectionAssociationTests
def test_should_automatically_save_the_associated_models
new_names = ["Grace OMalley", "Privateers Greed"]
@pirate.send(@association_name).each_with_index { |child, i| child.name = new_names[i] }
@pirate.save
assert_equal new_names.sort, @pirate.reload.send(@association_name).map(&:name).sort
end
def test_should_automatically_save_bang_the_associated_models
new_names = ["Grace OMalley", "Privateers Greed"]
@pirate.send(@association_name).each_with_index { |child, i| child.name = new_names[i] }
@pirate.save!
assert_equal new_names.sort, @pirate.reload.send(@association_name).map(&:name).sort
end
def test_should_update_children_when_autosave_is_true_and_parent_is_new_but_child_is_not
parrot = Parrot.create!(name: "Polly")
parrot.name = "Squawky"
pirate = Pirate.new(parrots: [parrot], catchphrase: "Arrrr")
pirate.save!
assert_equal "Squawky", parrot.reload.name
end
def test_should_not_update_children_when_parent_is_created_with_no_reason
parrot = Parrot.create!(name: "Polly")
assert_equal 0, parrot.updated_count
Pirate.create!(parrot_ids: [parrot.id], catchphrase: "Arrrr")
assert_equal 0, parrot.reload.updated_count
end
def test_should_automatically_validate_the_associated_models
@pirate.send(@association_name).each { |child| child.name = "" }
assert_not_predicate @pirate, :valid?
assert_equal ["can't be blank"], @pirate.errors["#{@association_name}.name"]
assert_empty @pirate.errors[@association_name]
end
def test_should_not_use_default_invalid_error_on_associated_models
@pirate.send(@association_name).build(name: "")
assert_not_predicate @pirate, :valid?
assert_equal ["can't be blank"], @pirate.errors["#{@association_name}.name"]
assert_empty @pirate.errors[@association_name]
end
def test_should_default_invalid_error_from_i18n
I18n.backend.store_translations(:en, activerecord: { errors: { models:
{ @associated_model_name.to_s.to_sym => { blank: "cannot be blank" } }
} })
@pirate.send(@association_name).build(name: "")
assert_not_predicate @pirate, :valid?
assert_equal ["cannot be blank"], @pirate.errors["#{@association_name}.name"]
assert_equal ["#{@association_name.to_s.humanize} name cannot be blank"], @pirate.errors.full_messages
assert_empty @pirate.errors[@association_name]
ensure
I18n.backend = I18n::Backend::Simple.new
end
def test_should_merge_errors_on_the_associated_models_onto_the_parent_even_if_it_is_not_valid
@pirate.send(@association_name).each { |child| child.name = "" }
@pirate.catchphrase = nil
assert_not_predicate @pirate, :valid?
assert_equal ["can't be blank"], @pirate.errors["#{@association_name}.name"]
assert_predicate @pirate.errors[:catchphrase], :any?
end
def test_should_allow_to_bypass_validations_on_the_associated_models_on_update
@pirate.catchphrase = ""
@pirate.send(@association_name).each { |child| child.name = "" }
assert @pirate.save(validate: false)
# Oracle saves empty string as NULL
if current_adapter?(:OracleAdapter)
assert_equal [nil, nil, nil], [
@pirate.reload.catchphrase,
@pirate.send(@association_name).first.name,
@pirate.send(@association_name).last.name
]
else
assert_equal ["", "", ""], [
@pirate.reload.catchphrase,
@pirate.send(@association_name).first.name,
@pirate.send(@association_name).last.name
]
end
end
def test_should_validate_the_associated_models_on_create
assert_no_difference("#{ @association_name == :birds ? 'Bird' : 'Parrot' }.count") do
2.times { @pirate.send(@association_name).build }
@pirate.save
end
end
def test_should_allow_to_bypass_validations_on_the_associated_models_on_create
assert_difference("#{ @association_name == :birds ? 'Bird' : 'Parrot' }.count", 2) do
2.times { @pirate.send(@association_name).build }
@pirate.save(validate: false)
end
end
def test_should_not_save_and_return_false_if_a_callback_cancelled_saving_in_either_create_or_update
@pirate.catchphrase = "Changed"
@child_1.name = "Changed"
@child_1.cancel_save_from_callback = true
assert_not @pirate.save
assert_equal "Don' botharrr talkin' like one, savvy?", @pirate.reload.catchphrase
assert_equal "Posideons Killer", @child_1.reload.name
new_pirate = Pirate.new(catchphrase: "Arr")
new_child = new_pirate.send(@association_name).build(name: "Grace OMalley")
new_child.cancel_save_from_callback = true
assert_no_difference "Pirate.count" do
assert_no_difference "#{new_child.class.name}.count" do
assert_not new_pirate.save
end
end
end
def test_should_rollback_any_changes_if_an_exception_occurred_while_saving
before = [@pirate.catchphrase, *@pirate.send(@association_name).map(&:name)]
new_names = ["Grace OMalley", "Privateers Greed"]
@pirate.catchphrase = "Arr"
@pirate.send(@association_name).each_with_index { |child, i| child.name = new_names[i] }
# Stub the save method of the first child instance to raise an exception
class << @pirate.send(@association_name).first
def save(*args)
super
raise "Oh noes!"
end
end
assert_raise(RuntimeError) { assert_not @pirate.save }
assert_equal before, [@pirate.reload.catchphrase, *@pirate.send(@association_name).map(&:name)]
end
def test_should_still_raise_an_ActiveRecordRecord_Invalid_exception_if_we_want_that
@pirate.send(@association_name).each { |child| child.name = "" }
assert_raise(ActiveRecord::RecordInvalid) do
@pirate.save!
end
end
def test_should_not_load_the_associated_models_if_they_were_not_loaded_yet
assert_queries(1) { @pirate.catchphrase = "Arr"; @pirate.save! }
@pirate.send(@association_name).load_target
assert_queries(3) do
@pirate.catchphrase = "Yarr"
new_names = ["Grace OMalley", "Privateers Greed"]
@pirate.send(@association_name).each_with_index { |child, i| child.name = new_names[i] }
@pirate.save!
end
end
end
class TestAutosaveAssociationOnAHasManyAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@association_name = :birds
@associated_model_name = :bird
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@child_1 = @pirate.birds.create(name: "Posideons Killer")
@child_2 = @pirate.birds.create(name: "Killer bandita Dionne")
end
include AutosaveAssociationOnACollectionAssociationTests
end
class TestAutosaveAssociationOnAHasAndBelongsToManyAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@association_name = :autosaved_parrots
@associated_model_name = :parrot
@habtm = true
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@child_1 = @pirate.parrots.create(name: "Posideons Killer")
@child_2 = @pirate.parrots.create(name: "Killer bandita Dionne")
end
include AutosaveAssociationOnACollectionAssociationTests
end
class TestAutosaveAssociationOnAHasAndBelongsToManyAssociationWithAcceptsNestedAttributes < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@association_name = :parrots
@associated_model_name = :parrot
@habtm = true
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@child_1 = @pirate.parrots.create(name: "Posideons Killer")
@child_2 = @pirate.parrots.create(name: "Killer bandita Dionne")
end
include AutosaveAssociationOnACollectionAssociationTests
end
class TestAutosaveAssociationValidationsOnAHasManyAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@pirate.birds.create(name: "cookoo")
end
test "should automatically validate associations" do
assert_predicate @pirate, :valid?
@pirate.birds.each { |bird| bird.name = "" }
assert_not_predicate @pirate, :valid?
end
end
class TestAutosaveAssociationValidationsOnAHasOneAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
@pirate.create_ship(name: "titanic")
end
test "should automatically validate associations with :validate => true" do
assert_predicate @pirate, :valid?
@pirate.ship.name = ""
assert_not_predicate @pirate, :valid?
end
test "should not automatically add validate associations without :validate => true" do
assert_predicate @pirate, :valid?
@pirate.non_validated_ship.name = ""
assert_predicate @pirate, :valid?
end
end
class TestAutosaveAssociationValidationsOnABelongsToAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
end
test "should automatically validate associations with :validate => true" do
assert_predicate @pirate, :valid?
@pirate.parrot = Parrot.new(name: "")
assert_not_predicate @pirate, :valid?
end
test "should not automatically validate associations without :validate => true" do
assert_predicate @pirate, :valid?
@pirate.non_validated_parrot = Parrot.new(name: "")
assert_predicate @pirate, :valid?
end
end
class TestAutosaveAssociationValidationsOnAHABTMAssociation < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@pirate = Pirate.create(catchphrase: "Don' botharrr talkin' like one, savvy?")
end
test "should automatically validate associations with :validate => true" do
assert_predicate @pirate, :valid?
@pirate.parrots = [ Parrot.new(name: "popuga") ]
@pirate.parrots.each { |parrot| parrot.name = "" }
assert_not_predicate @pirate, :valid?
end
test "should not automatically validate associations without :validate => true" do
assert_predicate @pirate, :valid?
@pirate.non_validated_parrots = [ Parrot.new(name: "popuga") ]
@pirate.non_validated_parrots.each { |parrot| parrot.name = "" }
assert_predicate @pirate, :valid?
end
end
class TestAutosaveAssociationValidationMethodsGeneration < ActiveRecord::TestCase
self.use_transactional_tests = false unless supports_savepoints?
def setup
super
@pirate = Pirate.new
end
test "should generate validation methods for has_many associations" do
assert_respond_to @pirate, :validate_associated_records_for_birds
end
test "should generate validation methods for has_one associations with :validate => true" do
assert_respond_to @pirate, :validate_associated_records_for_ship
end
test "should not generate validation methods for has_one associations without :validate => true" do
assert_not_respond_to @pirate, :validate_associated_records_for_non_validated_ship
end
test "should generate validation methods for belongs_to associations with :validate => true" do
assert_respond_to @pirate, :validate_associated_records_for_parrot
end
test "should not generate validation methods for belongs_to associations without :validate => true" do
assert_not_respond_to @pirate, :validate_associated_records_for_non_validated_parrot
end
test "should generate validation methods for HABTM associations with :validate => true" do
assert_respond_to @pirate, :validate_associated_records_for_parrots
end
end
class TestAutosaveAssociationWithTouch < ActiveRecord::TestCase
def test_autosave_with_touch_should_not_raise_system_stack_error
invoice = Invoice.create
assert_nothing_raised { invoice.line_items.create(amount: 10) }
end
end
class TestAutosaveAssociationOnAHasManyAssociationWithInverse < ActiveRecord::TestCase
class Post < ActiveRecord::Base
has_many :comments, inverse_of: :post
end
class Comment < ActiveRecord::Base
belongs_to :post, inverse_of: :comments
attr_accessor :post_comments_count
after_save do
self.post_comments_count = post.comments.count
end
end
def setup
Comment.delete_all
end
def test_after_save_callback_with_autosave
post = Post.new(title: "Test", body: "...")
comment = post.comments.build(body: "...")
post.save!
assert_equal 1, post.comments.count
assert_equal 1, comment.post_comments_count
end
end
class TestAutosaveAssociationOnAHasManyAssociationDefinedInSubclassWithAcceptsNestedAttributes < ActiveRecord::TestCase
def test_should_update_children_when_association_redefined_in_subclass
agency = Agency.create!(name: "Agency")
valid_project = Project.create!(firm: agency, name: "Initial")
agency.update!(
"projects_attributes" => {
"0" => {
"name" => "Updated",
"id" => valid_project.id
}
}
)
valid_project.reload
assert_equal "Updated", valid_project.name
end
end
| 32.165405 | 126 | 0.739236 |
1d895d4229e367fbd020301fdb3d2a03bc65d145 | 3,105 | # -*- encoding: utf-8 -*-
require 'test_helper'
require 'hexapdf/configuration'
describe HexaPDF::Configuration do
before do
@config = HexaPDF::Configuration.new
@config['test'] = :test
end
it "can create a config based on the default one with certain values overwritten" do
config = HexaPDF::Configuration.with_defaults('io.chunk_size' => 10)
assert_equal(10, config['io.chunk_size'])
assert_equal(:A4, config['page.default_media_box'])
end
it "can check the availabilty of an option" do
assert(@config.option?('test'))
end
it "can return the value for an option" do
assert_equal(:test, @config['test'])
end
it "can set the value for an option" do
@config['test'] = :other
assert_equal(:other, @config['test'])
end
it "can create a new config object by merging another one or a hash" do
@config['hash'] = {'test' => :test, 'other' => :other}
@config['array'] = [5, 6]
config = @config.merge('test' => :other)
assert_equal(:other, config['test'])
config['hash']['test'] = :other
config1 = @config.merge(config)
assert_equal(:other, config1['hash']['test'])
assert_equal(:other, config1['hash']['other'])
config2 = @config.merge(config)
config2['array'].unshift(4)
assert_equal([4, 5, 6], config2['array'])
assert_equal([5, 6], config['array'])
end
describe "constantize" do
it "returns a constant for an option with a string value" do
@config['test'] = 'HexaPDF'
assert_equal(HexaPDF, @config.constantize('test'))
end
it "returns a constant for an option with a constant as value" do
@config['test'] = HexaPDF
assert_equal(HexaPDF, @config.constantize('test'))
end
it "returns a constant for a nested option" do
@config['test'] = {'test' => ['HexaPDF'], 'const' => {'const' => HexaPDF}}
assert_equal(HexaPDF, @config.constantize('test', 'test', 0))
assert_equal(HexaPDF, @config.constantize('test', 'const', 'const'))
@config['test'] = ['HexaPDF', HexaPDF]
assert_equal(HexaPDF, @config.constantize('test', 0))
assert_equal(HexaPDF, @config.constantize('test', 1))
end
def assert_constantize_error(&block) # :nodoc:
exp = assert_raises(HexaPDF::Error, &block)
assert_match(/Error getting constant for configuration option/, exp.message)
end
it "raises an error for an unknown option" do
assert_constantize_error { @config.constantize('unknown') }
end
it "raises an error for an unknown constant" do
@config['test'] = 'SomeUnknownConstant'
assert_constantize_error { @config.constantize('test') }
end
it "raises an error for an unknown constant using a nested option" do
@config['test'] = {}
assert_constantize_error { @config.constantize('test', 'test') }
assert_constantize_error { @config.constantize('test', nil) }
end
it "returns the result of the given block when no constant is found" do
assert_equal(:test, @config.constantize('unk') {|name| assert_equal('unk', name); :test })
end
end
end
| 33.031915 | 96 | 0.656039 |
08f63295665ab01345460f25e4fdc9695267f619 | 502 | class Number < ActiveRecord::Base
has_many :comments, dependent: :destroy
validates :digits, presence: true
validates_length_of :digits, is: 10, message: "must be 10 digits"
validates_uniqueness_of :digits
class << self
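# Strips everything except digits, e.g. "(555) 123-4567" => "5551234567".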
def clean_digits(digits)
digits.gsub(/\D/, '')
end
end
def scam_count
comments.where(vote: 1).count
end
def ok_count
comments.where(vote: 0).count
end
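# Used in URLs: formats the 10 stored digits as "XXX-XXX-XXXX".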
def to_param
digits.gsub(/(\d{0,3})(\d{3})(\d{4})$/,"\\1-\\2-\\3")
end
end
| 18.592593 | 67 | 0.64741 |
b98678e7c3c96079fa30a9b7cc3a59360997277e | 127 | # frozen_string_literal: true
RSpec.describe Fixably::User do
include_examples("a resource", "user", "users", %i[show])
end
| 21.166667 | 59 | 0.732283 |
033861012a3ee001b831a8f274e7e038b061e4c5 | 493 | module OData
class CollectionType < Type
attr_reader :member_type
def initialize(options = {})
super
@member_type = options[:member_type]
end
def valid_value?(value)
member_type.valid_value?(value)
end
def primitive_type?
OData::PrimitiveType === member_type
end
def enum_type?
OData::EnumType === member_type
end
def collection?
true
end
def properties
member_type.properties
end
end
end
| 15.903226 | 42 | 0.634888 |
797412853497f0d621ec6172356d8371820960d6 | 1,232 | class Rclone < Formula
desc "Rsync for cloud storage"
homepage "https://rclone.org/"
url "https://github.com/rclone/rclone/archive/v1.53.2.tar.gz"
sha256 "63c499cef3b216aa657b70ac8217b69f6b1925781d4d8881054194664462d4f1"
license "MIT"
head "https://github.com/rclone/rclone.git"
deprecate! because: "requires FUSE"
bottle do
cellar :any_skip_relocation
sha256 "3b50cc675b356f43b62f4f5a8acef082a56163ddf1dfdf03a4f360ee6969cdfe" => :catalina
sha256 "a59703acea34bb4bff3fcd878af83d425a993fc1012b35b7a4210066e6279eab" => :mojave
sha256 "cea1a5bf6e0346731ba8357e313ae6eb3633e14400c2e9e67bd5a3f8524721f6" => :high_sierra
end
depends_on "go" => :build
def install
system "go", "build", "-tags", "cmount", *std_go_args
man1.install "rclone.1"
system bin/"rclone", "genautocomplete", "bash", "rclone.bash"
system bin/"rclone", "genautocomplete", "zsh", "_rclone"
bash_completion.install "rclone.bash" => "rclone"
zsh_completion.install "_rclone"
end
test do
(testpath/"file1.txt").write "Test!"
system "#{bin}/rclone", "copy", testpath/"file1.txt", testpath/"dist"
assert_match File.read(testpath/"file1.txt"), File.read(testpath/"dist/file1.txt")
end
end
| 35.2 | 93 | 0.732143 |
ab498a697b76d9ed3e79a69a258cfb775631c2a0 | 588 | cask 'universal-media-server' do
version '7.0.1'
sha256 'd1007d93c711d3c1788946add48792b83664bc41c6eea975604204c4a89c489d'
# sourceforge.net/unimediaserver was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/unimediaserver/Official%20Releases/OS%20X/UMS-#{version}.dmg"
appcast 'https://sourceforge.net/projects/unimediaserver/rss?path=/Official%20Releases'
name 'Universal Media Server'
homepage 'http://www.universalmediaserver.com/'
app 'Universal Media Server.app'
zap trash: '~/Library/Application Support/UMS/'
end
| 39.2 | 102 | 0.789116 |
62e02d00b51ff35783b4c99888c03366e22b2ce9 | 4,451 | class SamplesController < ApplicationController
respond_to :html
include Seek::PreviewHandling
include Seek::AssetsCommon
include Seek::IndexPager
before_filter :samples_enabled?
before_filter :find_index_assets, only: :index
before_filter :find_and_authorize_requested_item, except: [:index, :new, :create, :preview]
before_filter :auth_to_create, only: [:new, :create]
include Seek::IsaGraphExtensions
include Seek::BreadCrumbs
def index
# There must be better ways of coding this
if @data_file || @sample_type
respond_to do |format|
format.html
format.json {render json: :not_implemented, status: :not_implemented }
end
#respond_with(@samples)
else
respond_to do |format|
format.html {super}
format.json {render json: :not_implemented, status: :not_implemented }
end
end
end
def new
if params[:sample_type_id]
@sample = Sample.new(sample_type_id: params[:sample_type_id])
respond_with(@sample)
else
redirect_to select_sample_types_path
end
end
def create
@sample = Sample.new(sample_type_id: params[:sample][:sample_type_id], title: params[:sample][:title])
update_sample_with_params
flash[:notice] = 'The sample was successfully created.' if @sample.save
respond_with(@sample)
end
def show
@sample = Sample.find(params[:id])
respond_to do |format|
format.html
format.json {render json: :not_implemented, status: :not_implemented }
end
end
def edit
@sample = Sample.find(params[:id])
respond_with(@sample)
end
def update
@sample = Sample.find(params[:id])
update_sample_with_params
flash[:notice] = 'The sample was successfully updated.' if @sample.save
respond_with(@sample)
end
def destroy
@sample = Sample.find(params[:id])
if @sample.can_delete? && @sample.destroy
flash[:notice] = 'The sample was successfully deleted.'
else
flash[:error] = 'It was not possible to delete the sample.'
end
respond_with(@sample, location: root_path)
end
# called from AJAX, returns the form containing the attributes for the sample_type_id
def attribute_form
sample_type_id = params[:sample_type_id]
sample = Sample.new(sample_type_id: sample_type_id)
respond_with do |format|
format.js do
render json: {
form: (render_to_string(partial: 'samples/sample_attributes_form', locals: { sample: sample }))
}
end
end
end
def filter
@associated_samples = params[:assay_id].blank? ? [] : Assay.find(params[:assay_id]).samples
@samples = Sample.where('title LIKE ?', "%#{params[:filter]}%").limit(20)
respond_with do |format|
format.html do
render partial: 'samples/association_preview', collection: @samples,
locals: { existing: @associated_samples }
end
end
end
private
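# Builds the permitted params dynamically: every attribute defined on the sample
# type is allowed both inside the nested :data hash and as a top-level key
# (hash_key and method_name variants), alongside the fixed sample fields.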
def sample_params(sample_type)
sample_type_param_keys = sample_type ? sample_type.sample_attributes.map(&:hash_key).collect(&:to_sym) | sample_type.sample_attributes.map(&:method_name).collect(&:to_sym) : []
params.require(:sample).permit(:sample_type_id, :other_creators, { project_ids: [] },
{ data: sample_type_param_keys }, { creator_ids: [] },
{ special_auth_codes_attributes: [:code, :expiration_date, :id, :_destroy] }, sample_type_param_keys)
end
def update_sample_with_params
@sample.attributes = sample_params(@sample.sample_type)
update_sharing_policies @sample
update_annotations(params[:tag_list], @sample)
update_relationships(@sample, params)
@sample.save
end
def find_index_assets
if params[:data_file_id]
@data_file = DataFile.find(params[:data_file_id])
unless @data_file.can_view?
flash[:error] = 'You are not authorized to view samples from this data file'
respond_to do |format|
format.html { redirect_to data_file_path(@data_file) }
end
end
@samples = Sample.authorize_asset_collection(@data_file.extracted_samples.includes(sample_type: :sample_attributes), 'view')
elsif params[:sample_type_id]
@sample_type = SampleType.includes(:sample_attributes).find(params[:sample_type_id])
@samples = Sample.authorize_asset_collection(@sample_type.samples, 'view')
else
find_assets
end
end
end
| 31.34507 | 180 | 0.685464 |
21a028e351cc5c2cc17648942c0d349b379720e4 | 6,445 | class Environment < ActiveRecord::Base
# Used to generate random suffixes for the slug
LETTERS = 'a'..'z'
NUMBERS = '0'..'9'
SUFFIX_CHARS = LETTERS.to_a + NUMBERS.to_a
belongs_to :project, required: true, validate: true
has_many :deployments, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :last_deployment, -> { order('deployments.id DESC') }, class_name: 'Deployment'
before_validation :nullify_external_url
before_validation :generate_slug, if: ->(env) { env.slug.blank? }
before_save :set_environment_type
validates :name,
presence: true,
uniqueness: { scope: :project_id },
length: { maximum: 255 },
format: { with: Gitlab::Regex.environment_name_regex,
message: Gitlab::Regex.environment_name_regex_message }
validates :slug,
presence: true,
uniqueness: { scope: :project_id },
length: { maximum: 24 },
format: { with: Gitlab::Regex.environment_slug_regex,
message: Gitlab::Regex.environment_slug_regex_message }
validates :external_url,
length: { maximum: 255 },
allow_nil: true,
addressable_url: true
delegate :stop_action, :manual_actions, to: :last_deployment, allow_nil: true
scope :available, -> { with_state(:available) }
scope :stopped, -> { with_state(:stopped) }
scope :order_by_last_deployed_at, -> do
max_deployment_id_sql =
Deployment.select(Deployment.arel_table[:id].maximum)
.where(Deployment.arel_table[:environment_id].eq(arel_table[:id]))
.to_sql
order(Gitlab::Database.nulls_first_order("(#{max_deployment_id_sql})", 'ASC'))
end
scope :in_review_folder, -> { where(environment_type: "review") }
state_machine :state, initial: :available do
event :start do
transition stopped: :available
end
event :stop do
transition available: :stopped
end
state :available
state :stopped
after_transition do |environment|
environment.expire_etag_cache
end
end
def predefined_variables
[
{ key: 'CI_ENVIRONMENT_NAME', value: name, public: true },
{ key: 'CI_ENVIRONMENT_SLUG', value: slug, public: true }
]
end
def recently_updated_on_branch?(ref)
ref.to_s == last_deployment.try(:ref)
end
def nullify_external_url
self.external_url = nil if self.external_url.blank?
end
def set_environment_type
names = name.split('/')
self.environment_type = names.many? ? names.first : nil
end
def includes_commit?(commit)
return false unless last_deployment
last_deployment.includes_commit?(commit)
end
def last_deployed_at
last_deployment.try(:created_at)
end
def update_merge_request_metrics?
folder_name == "production"
end
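# Resolves the per-environment ref pointing at +commit+ (see #ref_path); the last
# segment of the ref name is the deployment iid used for the lookup.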
def first_deployment_for(commit)
ref = project.repository.ref_name_for_sha(ref_path, commit.sha)
return nil unless ref
deployment_iid = ref.split('/').last
deployments.find_by(iid: deployment_iid)
end
def ref_path
"refs/#{Repository::REF_ENVIRONMENTS}/#{slug}"
end
def formatted_external_url
return nil unless external_url
external_url.gsub(/\A.*?:\/\//, '')
end
def stop_action?
available? && stop_action.present?
end
def stop_with_action!(current_user)
return unless available?
stop!
stop_action&.play(current_user)
end
def actions_for(environment)
return [] unless manual_actions
manual_actions.select do |action|
action.expanded_environment_name == environment
end
end
def has_terminals?
project.deployment_service.present? && available? && last_deployment.present?
end
def terminals
project.deployment_service.terminals(self) if has_terminals?
end
def has_metrics?
project.monitoring_service.present? && available? && last_deployment.present?
end
def metrics
project.monitoring_service.environment_metrics(self) if has_metrics?
end
def has_additional_metrics?
project.prometheus_service.present? && available? && last_deployment.present?
end
def additional_metrics
if has_additional_metrics?
project.prometheus_service.additional_environment_metrics(self)
end
end
def slug
super.presence || generate_slug
end
# An environment name is not necessarily suitable for use in URLs, DNS
# or other third-party contexts, so provide a slugified version. A slug has
# the following properties:
# * contains only lowercase letters (a-z), numbers (0-9), and '-'
# * begins with a letter
# * has a maximum length of 24 bytes (OpenShift limitation)
# * cannot end with `-`
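#
# A rough sketch of how these rules combine (hypothetical names; the six-character
# suffixes are placeholders for the random value appended whenever slugification
# changed the name):
#   "production"       => "production"              (already a valid slug, kept as-is)
#   "review/feature-1" => "review-feature-1-a1b2c3" (slash replaced, suffix added)
#   "42-staging"       => "env-42-staging-a1b2c3"   ("env-" prefix so it starts with a letter)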
def generate_slug
# Lowercase letters and numbers only
slugified = name.to_s.downcase.gsub(/[^a-z0-9]/, '-')
# Must start with a letter
slugified = 'env-' + slugified unless LETTERS.cover?(slugified[0])
# Repeated dashes are invalid (OpenShift limitation)
slugified.gsub!(/\-+/, '-')
# Maximum length: 24 characters (OpenShift limitation)
slugified = slugified[0..23]
# Cannot end with a dash (Kubernetes label limitation)
slugified.chop! if slugified.end_with?('-')
# Add a random suffix, shortening the current string if necessary, if it
# has been slugified. This ensures uniqueness.
if slugified != name
slugified = slugified[0..16]
slugified << '-' unless slugified.end_with?('-')
slugified << random_suffix
end
self.slug = slugified
end
def external_url_for(path, commit_sha)
return unless self.external_url
public_path = project.public_path_for_source_path(path, commit_sha)
return unless public_path
[external_url, public_path].join('/')
end
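# Touching the ETag of the environments index JSON (see #etag_cache_key) forces
# polling clients to refetch after the state transitions hooked above.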
def expire_etag_cache
Gitlab::EtagCaching::Store.new.tap do |store|
store.touch(etag_cache_key)
end
end
def etag_cache_key
Gitlab::Routing.url_helpers.project_environments_path(
project,
format: :json)
end
def folder_name
self.environment_type || self.name
end
private
# Slugifying a name may remove the uniqueness guarantee afforded by it being
# based on name (which must be unique). To compensate, we add a random
# 6-byte suffix in those circumstances. This is not *guaranteed* uniqueness,
# but with 36 possible characters in each of the 6 positions (36**6, roughly
# 2.2 billion combinations) the chance of collisions is vanishingly small.
def random_suffix
(0..5).map { SUFFIX_CHARS.sample }.join
end
end
| 26.966527 | 89 | 0.69294 |
f8b9712c36a1b9f9c8e0537418974e58bbf9f164 | 1,932 | require "spec_helper"
module Pumi
RSpec.describe District do
describe ".all" do
it "returns all districts" do
results = District.all
expect(results.size).to eq(204)
expect(results.first).to be_a(District)
end
end
describe ".where" do
it "filters by id" do
results = District.where(id: "0102")
district = results.first
expect(results.size).to eq(1)
expect(district.id).to eq("0102")
expect(district.name_km).to eq("មង្គលបូរី")
expect(district.full_name_km).to eq("ស្រុកមង្គលបូរី")
expect(district.name_latin).to eq("Mongkol Borei")
expect(district.full_name_latin).to eq("Srok Mongkol Borei")
expect(district.name_en).to eq("Mongkol Borei")
expect(district.full_name_en).to eq("Mongkol Borei District")
expect(district.province.name_en).to eq("Banteay Meanchey")
end
it "filters by province_id" do
results = District.where(province_id: "01")
expect(results.size).to eq(9)
expect(results.map(&:province_id).uniq).to eq(["01"])
end
it "filters by name_latin" do
results = District.where(name_latin: "Chamkar Mon")
district = results.first
expect(results.size).to eq(1)
expect(district.full_name_km).to eq("ខណ្ឌចំការមន")
expect(district.full_name_latin).to eq("Khan Chamkar Mon")
expect(district.full_name_en).to eq("Chamkar Mon Section")
end
it "filters by name_km" do
results = District.where(name_km: "ល្វាឯម")
district = results.first
expect(results.size).to eq(1)
expect(district.name_en).to eq("Lvea Aem")
end
end
describe ".find_by_id" do
it "finds the district by id" do
expect(District.find_by_id("0102")).not_to eq(nil)
expect(District.find_by_id("0199")).to eq(nil)
end
end
end
end
| 30.1875 | 69 | 0.624224 |
f8a61bc042a447a5a080f35ba06104c14ca33cd2 | 1,561 | #
# Be sure to run `pod lib lint FIDynamicViewController.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = "FIDynamicViewControllerNew"
s.version = "1.7.4"
s.summary = "Framework for creating dynamic, complex view controllers"
s.description = <<-DESC
FIDynamicViewController makes it easy to build a dynamic, flexible view controller together with its contents.
For example, use it when you want a view controller whose components are loaded depending on a particular configuration.
DESC
s.homepage = "https://github.com/congncif/FIDynamicViewControllerNew"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "NGUYEN CHI CONG" => "[email protected]" }
s.source = { :git => "https://github.com/congncif/FIDynamicViewControllerNew.git", :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/congncif'
s.platform = :ios, '7.0'
s.requires_arc = true
#s.source_files = 'Pod/Classes/**/*'
#s.public_header_files = 'Pod/Classes/**/*.h'
s.frameworks = 'UIKit', 'CoreGraphics'
# s.dependency 'AFNetworking', '~> 2.3'
s.ios.vendored_frameworks = 'Pod/FIDynamicViewController.framework'
end
| 44.6 | 134 | 0.670724 |
ab217078abd3811141bc5235caf4e2454aff8f86 | 945 | class Exploitdb < Formula
desc "The official Exploit Database"
homepage "https://www.exploit-db.com/"
url "https://github.com/offensive-security/exploitdb.git",
:tag => "2019-07-18",
:revision => "40febc17ca38a0394c89786c2913546ab50f1f14"
version "2019-07-18"
head "https://github.com/offensive-security/exploitdb.git"
bottle :unneeded
def install
inreplace "searchsploit",
"rc_file=\"\"", "rc_file=\"#{etc}/searchsploit_rc\""
optpath = opt_share/"exploitdb"
inreplace ".searchsploit_rc" do |s|
s.gsub! "\"/opt/exploitdb\"", optpath
s.gsub! "\"/opt/exploitdb-papers\"", "#{optpath}-papers"
end
bin.install "searchsploit"
etc.install ".searchsploit_rc" => "searchsploit_rc"
pkgshare.install %w[.git exploits files_exploits.csv files_shellcodes.csv
shellcodes]
end
test do
system "#{bin}/searchsploit", "sendpage"
end
end
| 28.636364 | 77 | 0.65291 |
4a8eb9129621940948391e4e75e2ba59e54bcf3a | 299 | require "bcrypt"
class Login
def initialize(user_model)
@user_model = user_model || User
end
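  # Usage sketch (model and credentials below are assumed for illustration only):
  #   Login.new(User).user('[email protected]', 'secret')
  # Returns the matching user when the stored BCrypt hash verifies the given
  # password, otherwise nil.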
def user(email, password)
user = user_model.first(email: email)
user && BCrypt::Password.new(user.password_hash) == password ? user : nil
end
private
attr_reader :user_model
end
| 17.588235 | 77 | 0.698997 |
ed63ec0f00b85153c6bdb5e5d5956e52dbcc1fee | 130 | json.extract! group, :id, :name, :description, :icon, :user_id, :created_at, :updated_at
json.url group_url(group, format: :json)
| 43.333333 | 88 | 0.730769 |
1adf101744e1eabe871ae45283ed8decae964026 | 7,658 | require 'spec_helper'
describe Sidekiq::Status do
let!(:redis) { Sidekiq.redis { |conn| conn } }
let!(:job_id) { SecureRandom.hex(12) }
let!(:job_id_1) { SecureRandom.hex(12) }
let!(:unused_id) { SecureRandom.hex(12) }
let!(:plain_sidekiq_job_id) { SecureRandom.hex(12) }
let!(:retried_job_id) { SecureRandom.hex(12) }
# Clean Redis before each test
# Seems like flushall has no effect on recently published messages,
# so we should wait till they expire
before { redis.flushall; sleep 0.1 }
describe ".status, .working?, .complete?" do
it "gets job status by id as symbol" do
allow(SecureRandom).to receive(:hex).once.and_return(job_id)
start_server do
expect(capture_status_updates(2) {
expect(LongJob.perform_async(1)).to eq(job_id)
}).to eq([job_id]*2)
expect(Sidekiq::Status.status(job_id)).to eq(:working)
expect(Sidekiq::Status.working?(job_id)).to be_truthy
expect(Sidekiq::Status::queued?(job_id)).to be_falsey
expect(Sidekiq::Status::failed?(job_id)).to be_falsey
expect(Sidekiq::Status::complete?(job_id)).to be_falsey
expect(Sidekiq::Status::stopped?(job_id)).to be_falsey
end
expect(Sidekiq::Status.status(job_id)).to eq(:complete)
expect(Sidekiq::Status.complete?(job_id)).to be_truthy
end
end
describe ".get" do
it "gets a single value from data hash as string" do
allow(SecureRandom).to receive(:hex).once.and_return(job_id)
start_server do
expect(capture_status_updates(3) {
expect(DataJob.perform_async).to eq(job_id)
}).to eq([job_id]*3)
expect(Sidekiq::Status.get(job_id, :status)).to eq('working')
end
expect(Sidekiq::Status.get(job_id, :data)).to eq('meow')
end
end
describe ".at, .total, .pct_complete, .message" do
it "should return job progress with correct type to it" do
allow(SecureRandom).to receive(:hex).once.and_return(job_id)
start_server do
expect(capture_status_updates(3) {
expect(ProgressJob.perform_async).to eq(job_id)
}).to eq([job_id]*3)
end
expect(Sidekiq::Status.at(job_id)).to be(100)
expect(Sidekiq::Status.total(job_id)).to be(500)
      # It returns a float, therefore we need eq()
expect(Sidekiq::Status.pct_complete(job_id)).to eq(20)
expect(Sidekiq::Status.message(job_id)).to eq('howdy, partner?')
end
end
describe ".get_all" do
it "gets the job hash by id" do
allow(SecureRandom).to receive(:hex).once.and_return(job_id)
start_server do
expect(capture_status_updates(2) {
expect(LongJob.perform_async(1)).to eq(job_id)
}).to eq([job_id]*2)
expect(hash = Sidekiq::Status.get_all(job_id)).to include 'status' => 'working'
expect(hash).to include 'update_time'
end
expect(hash = Sidekiq::Status.get_all(job_id)).to include 'status' => 'complete'
expect(hash).to include 'update_time'
end
end
describe ".cancel" do
it "cancels a job by id" do
allow(SecureRandom).to receive(:hex).twice.and_return(job_id, job_id_1)
start_server do
job = LongJob.perform_in(3600)
expect(job).to eq(job_id)
second_job = LongJob.perform_in(3600)
expect(second_job).to eq(job_id_1)
initial_schedule = redis.zrange "schedule", 0, -1, {withscores: true}
expect(initial_schedule.size).to be(2)
expect(initial_schedule.select {|scheduled_job| JSON.parse(scheduled_job[0])["jid"] == job_id }.size).to be(1)
expect(Sidekiq::Status.unschedule(job_id)).to be_truthy
# Unused, therefore unfound => false
expect(Sidekiq::Status.cancel(unused_id)).to be_falsey
remaining_schedule = redis.zrange "schedule", 0, -1, {withscores: true}
expect(remaining_schedule.size).to be(initial_schedule.size - 1)
expect(remaining_schedule.select {|scheduled_job| JSON.parse(scheduled_job[0])["jid"] == job_id }.size).to be(0)
end
end
it "does not cancel a job with correct id but wrong time" do
allow(SecureRandom).to receive(:hex).once.and_return(job_id)
start_server do
scheduled_time = Time.now.to_i + 3600
returned_job_id = LongJob.perform_at(scheduled_time)
expect(returned_job_id).to eq(job_id)
initial_schedule = redis.zrange "schedule", 0, -1, {withscores: true}
expect(initial_schedule.size).to be(1)
# wrong time, therefore unfound => false
expect(Sidekiq::Status.cancel(returned_job_id, (scheduled_time + 1))).to be_falsey
expect((redis.zrange "schedule", 0, -1, {withscores: true}).size).to be(1)
# same id, same time, deletes
expect(Sidekiq::Status.cancel(returned_job_id, (scheduled_time))).to be_truthy
expect(redis.zrange "schedule", 0, -1, {withscores: true}).to be_empty
end
end
end
context "keeps normal Sidekiq functionality" do
let(:expiration_param) { nil }
it "does jobs with and without included worker module" do
seed_secure_random_with_job_ids
run_2_jobs!
expect_2_jobs_are_done_and_status_eq :complete
expect_2_jobs_ttl_covers 1..Sidekiq::Status::DEFAULT_EXPIRY
end
it "retries failed jobs" do
allow(SecureRandom).to receive(:hex).once.and_return(retried_job_id)
start_server do
expect(capture_status_updates(5) {
expect(RetriedJob.perform_async()).to eq(retried_job_id)
}).to eq([retried_job_id] * 5)
end
expect(Sidekiq::Status.status(retried_job_id)).to eq(:complete)
end
context ":expiration param" do
before { seed_secure_random_with_job_ids }
let(:expiration_param) { Sidekiq::Status::DEFAULT_EXPIRY * 100 }
it "allow to overwrite :expiration parameter" do
run_2_jobs!
expect_2_jobs_are_done_and_status_eq :complete
expect_2_jobs_ttl_covers (Sidekiq::Status::DEFAULT_EXPIRY+1)..expiration_param
end
it "allow to overwrite :expiration parameter by .expiration method from worker" do
overwritten_expiration = expiration_param * 100
allow_any_instance_of(NoStatusConfirmationJob).to receive(:expiration).
and_return(overwritten_expiration)
allow_any_instance_of(StubJob).to receive(:expiration).
and_return(overwritten_expiration)
run_2_jobs!
expect_2_jobs_are_done_and_status_eq :complete
expect_2_jobs_ttl_covers (expiration_param+1)..overwritten_expiration
end
end
def seed_secure_random_with_job_ids
allow(SecureRandom).to receive(:hex).exactly(4).times.
and_return(plain_sidekiq_job_id, plain_sidekiq_job_id, job_id_1, job_id_1)
end
def run_2_jobs!
start_server(:expiration => expiration_param) do
expect(capture_status_updates(12) {
expect(StubJob.perform_async).to eq(plain_sidekiq_job_id)
NoStatusConfirmationJob.perform_async(1)
expect(StubJob.perform_async).to eq(job_id_1)
NoStatusConfirmationJob.perform_async(2)
}).to match_array([plain_sidekiq_job_id, job_id_1] * 6)
end
end
def expect_2_jobs_ttl_covers(range)
expect(range).to cover redis.ttl(plain_sidekiq_job_id)
expect(range).to cover redis.ttl(job_id_1)
end
def expect_2_jobs_are_done_and_status_eq(status)
expect(redis.mget('NoStatusConfirmationJob_1', 'NoStatusConfirmationJob_2')).to eq(%w(done)*2)
expect(Sidekiq::Status.status(plain_sidekiq_job_id)).to eq(status)
expect(Sidekiq::Status.status(job_id_1)).to eq(status)
end
end
end
| 38.873096 | 120 | 0.681118 |
33c6653cb46fa227f086087ffa4aac256165ec6d | 236 |
# Check that the installation was successful
describe file('/usr/bin/blastn') do
it { should be_file }
it { should be_executable }
end
describe command('/usr/bin/blastn -version') do
its(:stdout) { should match(/2.4.0+/) }
end
| 21.454545 | 47 | 0.699153 |
ff721a598ea868ec967c4730e125c40af23d812b | 48 | require 'dependencies'
require 'uri_translator'
| 16 | 24 | 0.833333 |
26d9844c5df8c8658fd8f88cbcf9390b40e6281d | 289 | class CreatePaidTimeOffs < ActiveRecord::Migration
def change
create_table :paid_time_offs do |t|
t.integer :user_id
t.integer :sick_days_taken
t.integer :sick_days_earned
t.integer :pto_taken
t.integer :pto_earned
t.timestamps
end
end
end
| 20.642857 | 50 | 0.688581 |
9117f4ecf60f37977db80bb226dd9777e4cdb389 | 169 | class Appointment < ActiveRecord::Base
validates :appointment_time, :user, presence: true
belongs_to :user
end
#<%=appointment.date.strftime('%a, %b %d %Y')%> | 28.166667 | 54 | 0.692308 |
7a65eec942d7ff3c65335184c77e53f7dfd050f9 | 302 | service 'tomcat' do
service_name node['opsworks_java']['tomcat']['service_name']
case node[:platform_family]
when 'debian'
supports :restart => true, :reload => false, :status => true
when 'rhel'
supports :restart => true, :reload => true, :status => true
end
action :nothing
end
| 23.230769 | 64 | 0.665563 |
3826cd14f31045e961c11b44165d3b5df224b2e8 | 381 | class UnknownMedia < Media
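  # Null-object-style media: returned when a requested media type is not
  # recognised. It never validates, reports a single descriptive error about
  # the unknown type, and exposes nil for the values callers normally read.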
attr_accessor :media_type
def initialize(media_type:)
@media_type = media_type
end
def valid?
false
end
def errors
{
error: [
{
name: "media_type",
description: "Unknown media type '#{media_type}'"
}
]
}
end
def uuid
nil
end
def json_response
nil
end
end
| 12.290323 | 59 | 0.559055 |
d5fc8336f03c28f29f208d37067cce216e94b543 | 999 | require 'spec_helper'
describe IssueTrackers::GitlabTracker do
it "should create an issue on Gitlab with problem params" do
notice = Fabricate :notice
tracker = Fabricate :gitlab_tracker, :app => notice.app
problem = notice.problem
number = 5
@issue_link = "#{tracker.account}/#{tracker.project_id}/issues/#{number}/#{tracker.api_token}"
body = <<EOF
{
"title": "Title"
}
EOF
stub_request(:post, "#{tracker.account}/#{tracker.project_id}/issues/#{tracker.api_token}").
to_return(:status => 201, :headers => {'Location' => @issue_link}, :body => body )
problem.app.issue_tracker.create_issue(problem)
problem.reload
requested = have_requested(:post, "#{tracker.account}/#{tracker.project_id}/issues/#{tracker.api_token}")
    WebMock.should requested.with(:body => /\[production\]\[foo#bar\] FooError: Too Much Bar/)
WebMock.should requested.with(:body => /See this exception on Errbit/)
problem.issue_link.should == @issue_link
end
end
| 32.225806 | 109 | 0.693694 |
08cc79a1e6e1aa23658124c21aaddef6a452a929 | 1,496 | # frozen_string_literal: true
module SolargraphTestCoverage
# Some helper functions for the diagnostics
module ReporterHelpers
# @return [Hash]
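    # Rough shape of the returned hash (illustrative only; the lines/branches
    # keys come from Ruby's Coverage API, the rest are merged in below):
    #   { lines: [nil, 1, 0, ...], branches: { ... },
    #     test_status: true, failed_examples: [] }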
def run_test(test_file)
ForkProcess.call do
Coverage.start(lines: true, branches: true)
runner = TestRunner.with(test_file).run!
extra = { test_status: runner.passed?, failed_examples: runner.failed_examples }
Coverage.result.fetch(@filename, {}).merge(extra)
end
end
def branch_warnings
Branch.build_from(@results)
.reject(&:covered?)
.map { |branch| branch_coverage_warning(branch.report) }
end
def test_passing_error
@results[:test_status] ? [] : [test_failing_error]
end
def example_failing_errors
@results.fetch(:failed_examples, [])
.map { |example| example_failing_error(example) }
end
def line_warnings
uncovered_lines.map { |line| line_coverage_warning(line) }
end
# Adapted from SingleCov
    # Coverage returns nil for untestable lines (like 'do', 'end', 'if' keywords),
    # otherwise it returns an int showing how many times a line was called.
    #
    # [nil, 1, 0, 1, 0] -> [2, 4]
    # Returns an array of zero-based line indexes with 0 coverage
def uncovered_lines
return [] unless @results[:lines]
@results[:lines].each_with_index
.select { |c, _| c&.zero? }
.map { |_, i| i }
.compact
end
end
end
| 28.769231 | 88 | 0.620321 |
4acfa47d30bd339e026903e15b55960621a61426 | 2,021 | require "spec_helper"
describe Tabulo::Column do
subject do
Tabulo::Column.new(
align_body: :left,
align_header: :left,
extractor: extractor,
formatter: -> (n) { "%.2f" % n },
header: "X10",
header_styler: nil,
left_padding: 1,
index: 3,
padding_character: " ",
right_padding: 1,
styler: nil,
truncation_indicator: "~",
wrap_preserve: :rune,
width: 10)
end
let(:extractor) { -> (n) { n * 10 } }
describe "#initialize" do
it "create a Column" do
is_expected.to be_a(Tabulo::Column)
end
end
describe "#header_cell" do
it "returns a new Cell initialized with the header content" do
expect(subject.header_cell.instance_variable_get(:@value)).to eq("X10")
end
end
describe "#body_cell" do
let(:row_index) { 2 }
let(:column_index) { 5 }
it "returns a new Cell initialized with the value returned by calling the extractor on the passed source" do
cell = subject.body_cell(3, row_index: row_index, column_index: column_index)
expect(cell.instance_variable_get(:@value)).to eq(30)
end
it "returns a new Cell which formats its content using the formatter with which the Column was initialized" do
expect(subject.body_cell(3, row_index: row_index, column_index: column_index).formatted_content).to eq("30.00")
end
end
describe "#body_cell_value" do
context "when the extractor takes 1 parameter" do
let(:extractor) { -> (n) { n * 10 } }
it "returns the underlying value in this column for the passed source item" do
expect(subject.body_cell_value(3, row_index: 1, column_index: 5)).to eq(30)
end
end
context "when the extractor takes 2 parameters" do
let(:extractor) { -> (n, row_index) { row_index } }
it "returns the underlying value in this column for the passed source item" do
expect(subject.body_cell_value(3, row_index: 1, column_index: 5)).to eq(1)
end
end
end
end
| 29.720588 | 117 | 0.653142 |
6132d7a21edd99e84b39e69ae3a9aee6f3f9c666 | 1,878 | =begin
Copyright 2012-2013 inBloom, Inc. and its affiliates.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'test_helper'
class EulasControllerTest < ActionController::TestCase
setup do
@request.env['SERVER_PROTOCOL']='HTTP/1.1'
end
test "should respond to post for accept/reject" do
Eula.stubs(:accepted?).returns(true)
ApplicationHelper.stubs(:send_user_verification_email).returns(true)
get :create
assert_template :finish
ApplicationHelper.stubs(:send_user_verification_email).returns(false)
get :create
assert_template :account_error
Eula.stubs(:accepted?).returns(false)
ApplicationHelper.stubs(:remove_user_account)
get :create
assert_redirected_to APP_CONFIG['redirect_slc_url']
end
test "should show eula" do
assert_response :success
end
test "should check for valid session before rendering eula" do
Session.stubs(:valid?).returns(false)
assert_raise(ActionController::RoutingError) { get :show}
Session.stubs(:valid?).returns(true)
assert_nothing_raised { get :show }
end
test "should delete user upon email failure" do
Eula.stubs(:accepted?).returns(true)
ApplicationHelper.expects(:send_user_verification_email).once().raises(ApplicationHelper::EmailException)
APP_LDAP_CLIENT.expects(:delete_user).once()
get :create
assert_template :invalid_email
end
end
| 29.34375 | 109 | 0.761448 |
1a30d35dbeef699d84fcf694a0986c7bedde5a2a | 4,132 | Rabel::Application.routes.draw do
devise_for :users, :controllers => {:sessions => "sessions", :registrations => "registrations"}
get 'settings' => 'users#edit'
get 'member/:nickname' => 'users#show', :as => :member
get 'member/:nickname/topics' => 'users#topics', :as => :member_topics
post 'member/:nickname/follow' => 'users#follow', :as => :follow_user
post 'member/:nickname/unfollow' => 'users#unfollow', :as => :unfollow_user
put 'users/update_account' => 'users#update_account', :as => :update_account
put 'users/update_password' => 'users#update_password', :as => :update_password
put 'users/update_avatar' => 'users#update_avatar', :as => :update_avatar
get 'go/:key' => 'nodes#show', :as => :go
get 't/:id' => 'topics#show', :as => :t
match '/topics/:id' => redirect('/t/%{id}'), :constraints => { :id => /\d+/ }
get 'my/topics' => 'users#my_topics', :as => :my_topics
get 'my/following' => 'users#my_following', :as => :my_following
get 'page/:key' => 'pages#show', :as => :page
get 'goodbye' => 'welcome#goodbye'
get 'captcha' => 'welcome#captcha'
get 'sitemap' => 'welcome#sitemap'
resources :nodes do
resources :topics do
member do
get :move
get :edit_title
put :update_title
end
end
end
resources :topics do
resources :comments
resources :bookmarks
post :preview, :on => :collection
put :toggle_comments_closed
put :toggle_sticky
end
resources :comments, :bookmarks, :upyun_images
resources :notifications do
get :read, :on => :member
end
namespace :admin do
resources :planes do
resources :nodes
post :sort, :on => :collection
get :sort, :on => :collection
end
resources :nodes do
post :sort, :on => :collection
get :move, :on => :member
put :move_to, :on => :member
end
resources :users do
member do
put :toggle_admin
put :toggle_blocked
end
resources :rewards
end
resources :pages do
post :sort, :on => :collection
end
resource :site_settings
resources :topics, :advertisements, :cloud_files, :rewards
resources :notifications do
delete :clear, :on => :collection
end
get 'appearance' => 'site_settings#appearance'
root :to => 'welcome_admin#index'
end
root :to => 'welcome#index'
# The priority is based upon order of creation:
# first created -> highest priority.
# Sample of regular route:
# match 'products/:id' => 'catalog#view'
# Keep in mind you can assign values other than :controller and :action
# Sample of named route:
# match 'products/:id/purchase' => 'catalog#purchase', :as => :purchase
# This route can be invoked with purchase_url(:id => product.id)
# Sample resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Sample resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Sample resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Sample resource route with more complex sub-resources
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', :on => :collection
# end
# end
# Sample resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
# You can have the root of your site routed with "root"
# just remember to delete public/index.html.
# root :to => 'welcome#index'
# See how all your routes lay out with "rake routes"
# This is a legacy wild controller route that's not recommended for RESTful applications.
# Note: This route will make all actions in every controller accessible via GET requests.
# match ':controller(/:action(/:id(.:format)))'
end
| 29.726619 | 97 | 0.63819 |
03c3ff25a195a8b2c3c20267760e2149ca40bb9c | 401 | module TelephoneNumber
class TimeZoneDataImporter
def self.load_data!
master_data = {}
File.open('data/timezones/map_data.txt', 'rb').each do |row|
number_prefix, timezone = row.split('|').map(&:strip)
master_data[number_prefix] = timezone
end
File.open('data/timezones/map_data.dat', 'wb') { |file| file << Marshal.dump(master_data) }
end
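    # A minimal read-side sketch (not part of the original importer; the file
    # path reuse and longest-prefix lookup are assumptions for illustration):
    #
    #   data = Marshal.load(File.binread('data/timezones/map_data.dat'))
    #   number = '14155551234'
    #   prefix = number.length.downto(1)
    #                  .map { |i| number[0, i] }
    #                  .find { |p| data.key?(p) }
    #   timezone = data[prefix]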
end
end
| 28.642857 | 97 | 0.650873 |
87f67cd17d5b7037b0a1b07af8c6778780e9098b | 7,758 | =begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.5.1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXT
class SIPacketsDroppedBySecurity
# The packets dropped by \"Spoof Guard\"; supported packet types are IPv4, IPv6, ARP, ND, non-IP.
attr_accessor :spoof_guard_dropped
# The number of IPv4 packets dropped by \"DHCP server block\".
attr_accessor :dhcp_server_dropped_ipv4
# The number of IPv6 packets dropped by \"DHCP server block\".
attr_accessor :dhcp_server_dropped_ipv6
# The number of IPv4 packets dropped by \"DHCP client block\".
attr_accessor :dhcp_client_dropped_ipv4
# The number of packets dropped by \"BPDU filter\".
attr_accessor :bpdu_filter_dropped
# The number of IPv6 packets dropped by \"DHCP client block\".
attr_accessor :dhcp_client_dropped_ipv6
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'spoof_guard_dropped' => :'spoof_guard_dropped',
:'dhcp_server_dropped_ipv4' => :'dhcp_server_dropped_ipv4',
:'dhcp_server_dropped_ipv6' => :'dhcp_server_dropped_ipv6',
:'dhcp_client_dropped_ipv4' => :'dhcp_client_dropped_ipv4',
:'bpdu_filter_dropped' => :'bpdu_filter_dropped',
:'dhcp_client_dropped_ipv6' => :'dhcp_client_dropped_ipv6'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'spoof_guard_dropped' => :'Array<SIPacketTypeAndCounter>',
:'dhcp_server_dropped_ipv4' => :'Integer',
:'dhcp_server_dropped_ipv6' => :'Integer',
:'dhcp_client_dropped_ipv4' => :'Integer',
:'bpdu_filter_dropped' => :'Integer',
:'dhcp_client_dropped_ipv6' => :'Integer'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'spoof_guard_dropped')
if (value = attributes[:'spoof_guard_dropped']).is_a?(Array)
self.spoof_guard_dropped = value
end
end
if attributes.has_key?(:'dhcp_server_dropped_ipv4')
self.dhcp_server_dropped_ipv4 = attributes[:'dhcp_server_dropped_ipv4']
end
if attributes.has_key?(:'dhcp_server_dropped_ipv6')
self.dhcp_server_dropped_ipv6 = attributes[:'dhcp_server_dropped_ipv6']
end
if attributes.has_key?(:'dhcp_client_dropped_ipv4')
self.dhcp_client_dropped_ipv4 = attributes[:'dhcp_client_dropped_ipv4']
end
if attributes.has_key?(:'bpdu_filter_dropped')
self.bpdu_filter_dropped = attributes[:'bpdu_filter_dropped']
end
if attributes.has_key?(:'dhcp_client_dropped_ipv6')
self.dhcp_client_dropped_ipv6 = attributes[:'dhcp_client_dropped_ipv6']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
spoof_guard_dropped == o.spoof_guard_dropped &&
dhcp_server_dropped_ipv4 == o.dhcp_server_dropped_ipv4 &&
dhcp_server_dropped_ipv6 == o.dhcp_server_dropped_ipv6 &&
dhcp_client_dropped_ipv4 == o.dhcp_client_dropped_ipv4 &&
bpdu_filter_dropped == o.bpdu_filter_dropped &&
dhcp_client_dropped_ipv6 == o.dhcp_client_dropped_ipv6
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[spoof_guard_dropped, dhcp_server_dropped_ipv4, dhcp_server_dropped_ipv6, dhcp_client_dropped_ipv4, bpdu_filter_dropped, dhcp_client_dropped_ipv6].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXT.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.596639 | 157 | 0.651843 |
87950c396d79ba67920e7f54060733cb056ceaa6 | 1,751 | # Copyright (C) 2013-2015 Ruby-GNOME2 Project Team
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
require "gobject-introspection"
require "gtk2"
base_dir = Pathname.new(__FILE__).dirname.dirname.expand_path
vendor_dir = base_dir + "vendor" + "local"
vendor_bin_dir = vendor_dir + "bin"
GLib.prepend_dll_path(vendor_bin_dir)
module WebKitGtk2
class << self
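    # Lazy initialization: the first reference to any WebKitGtk2 constant goes
    # through const_missing below, which triggers init to load the GObject
    # Introspection namespace and then removes both hooks so later lookups hit
    # the real constants directly.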
def const_missing(name)
init
if const_defined?(name)
const_get(name)
else
super
end
end
def init
class << self
remove_method(:init)
remove_method(:const_missing)
end
loader = Loader.new(self)
loader.load
end
end
class Loader < GObjectIntrospection::Loader
NAMESPACE = "WebKit"
VERSION = "1.0"
def initialize(base_module)
super
@version = VERSION
end
def load
super(NAMESPACE)
end
private
def initialize_post(object)
super
return unless object.is_a?(GLib::Object)
self.class.reference_gobject(object, :sink => true)
end
end
end
| 26.134328 | 80 | 0.697887 |
39027260fe898ca2eac0db6a6a2f798cad4ecfd5 | 9,065 | require 'spec_helper'
module Spree
describe Order do
let(:user) { create(:user) }
let!(:store_credit) { create(:store_credit, user: user, amount: 45.00, remaining_amount: 45.00)}
let(:line_item) { mock_model(LineItem, :variant => double('variant'), :quantity => 5, :price => 10) }
let(:order) { create(:order, user: user) }
before do
reset_spree_preferences { |config| config.use_store_credit_minimum = 0 }
end
context "process_store_credit" do
before do
order.stub(:user => user, :total => 50 )
end
it "should create store credit adjustment when user has sufficient credit" do
order.store_credit_amount = 5.0
order.save
order.adjustments.store_credits.size.should == 1
order.store_credit_amount.should == 5.0
end
it "should only create adjustment with amount equal to users total credit" do
order.store_credit_amount = 50.0
order.save
order.store_credit_amount.should == 45.00
end
it "should only create adjustment with amount equal to order total" do
user.stub(:store_credits_total => 100.0)
order.store_credit_amount = 90.0
order.save
order.store_credit_amount.should == 50.00
end
it "should not create adjustment when user does not have any credit" do
user.stub(:store_credits_total => 0.0)
order.store_credit_amount = 5.0
order.save
order.adjustments.store_credits.size.should == 0
order.store_credit_amount.should == 0.0
end
it "should update order totals if credit is applied" do
pending
order.should_receive(:update_totals).twice
order.store_credit_amount = 5.0
order.save
end
it "should update payment amount if credit is applied" do
order.stub_chain(:pending_payments, :first => double('payment', :payment_method => double('payment method', :payment_profiles_supported? => true)))
order.pending_payments.first.should_receive(:amount=)
order.store_credit_amount = 5.0
order.save
end
it "should create negative adjustment" do
order.store_credit_amount = 5.0
order.save
order.adjustments[0].amount.should == -5.0
end
it "should process credits if order total is already zero" do
order.stub(:total => 0)
order.store_credit_amount = 5.0
order.should_receive(:process_store_credit)
order.save
order.adjustments.store_credits.size.should == 0
order.store_credit_amount.should == 0.0
end
context "with an existing adjustment" do
before { order.adjustments.store_credits.create(:label => I18n.t(:store_credit) , :amount => -10) }
it "should decrease existing adjustment if specific amount is less than adjustment amount" do
order.store_credit_amount = 5.0
order.save
order.adjustments.store_credits.size.should == 1
order.store_credit_amount.should == 5.0
end
it "should increase existing adjustment if specified amount is greater than adjustment amount" do
order.store_credit_amount = 25.0
order.save
order.adjustments.store_credits.size.should == 1
order.store_credit_amount.should == 25.0
end
it "should destroy the adjustment if specified amount is zero" do
order.store_credit_amount = 0.0
order.save
order.adjustments.store_credits.size.should == 0
order.store_credit_amount.should == 0.0
end
it "should decrease existing adjustment when existing credit amount is equal to the order total" do
order.stub(:total => 10)
order.store_credit_amount = 5.0
order.save
order.adjustments.store_credits.size.should == 1
order.store_credit_amount.should == 5.0
end
end
end
context "store_credit_amount" do
it "should return total for all store credit adjustments applied to order" do
order.adjustments.store_credits.create(:label => I18n.t(:store_credit) , :amount => -10)
order.adjustments.store_credits.create(:label => I18n.t(:store_credit) , :amount => -5)
order.store_credit_amount.should == BigDecimal.new('15')
end
end
context "consume_users_credit" do
let(:store_credit_1) { mock_model(StoreCredit, :amount => 100, :remaining_amount => 100) }
let(:store_credit_2) { mock_model(StoreCredit, :amount => 10, :remaining_amount => 5) }
let(:store_credit_3) { mock_model(StoreCredit, :amount => 60, :remaining_amount => 50 ) }
before { order.stub(:completed? => true, :store_credit_amount => 35, :total => 50) }
it "should reduce remaining amount on a single credit when that credit satisfies the entire amount" do
user.stub(:store_credits => [store_credit_1])
store_credit_1.should_receive(:remaining_amount=).with(65)
store_credit_1.should_receive(:save)
order.send(:consume_users_credit)
end
it "should reduce remaining amount on a multiple credits when a single credit does not satisfy the entire amount" do
order.stub(:store_credit_amount => 55)
user.stub(:store_credits => [store_credit_2, store_credit_3])
store_credit_2.should_receive(:update_attribute).with(:remaining_amount, 0)
store_credit_3.should_receive(:update_attribute).with(:remaining_amount, 0)
order.send(:consume_users_credit)
end
it "should call consume_users_credit after transition to complete" do
pending
new_order = Order.new()
new_order.state = :confirm
new_order.should_receive(:consume_users_credit).at_least(1).times
new_order.next!
new_order.state.should == 'complete'
end
# regression
it 'should do nothing on guest checkout' do
order.stub(:user => nil)
expect {
order.send(:consume_users_credit)
}.to_not raise_error
end
end
context "ensure_sufficient_credit" do
let(:order) { create(:completed_order_with_totals, store_credit_amount: 35, user: user)}
let!(:payment) { create(:payment, order: order, amount: 40, state: 'completed')}
before do
order.adjustments.store_credits.create(label: I18n.t(:store_credit) , amount: -10, eligible: true)
order.update!
end
it "should do nothing when user has credits" do
order.adjustments.store_credits.should_not_receive(:destroy_all)
order.should_not_receive(:update!)
order.send(:ensure_sufficient_credit)
end
context "when user no longer has sufficient credit to cover entire credit amount" do
before do
store_credit.remaining_amount = 0.0
store_credit.save!
user.reload
end
it "should destroy all store credit adjustments" do
order.adjustment_total.should eq(-10)
order.total.should eq(40)
order.send(:ensure_sufficient_credit)
order.adjustments.store_credits.size.should == 0
order.reload
order.adjustment_total.should eq(0)
end
it "should update the order's payment state" do
order.payment_state.should eq('paid')
order.send(:ensure_sufficient_credit)
order.reload
order.payment_state.should eq('balance_due')
end
end
end
context "process_payments!" do
it "should return false when total is greater than zero and payments are empty" do
order.stub(:pending_payments => [])
order.process_payments!.should be_false
end
it "should process payment when total is zero and payments is not empty" do
order.stub(:pending_payments => [mock_model(Payment)])
order.should_receive(:process_payments_without_credits!)
order.process_payments!
end
end
context "when minimum item total is set" do
before do
order.stub(:item_total => 50)
order.instance_variable_set(:@store_credit_amount, 25)
end
context "when item total is less than limit" do
before { reset_spree_preferences { |config| config.use_store_credit_minimum = 100 } }
it "should be invalid" do
order.valid?.should be_false
order.errors.should_not be_nil
end
it "should be valid when store_credit_amount is 0" do
order.instance_variable_set(:@store_credit_amount, 0)
order.stub(:item_total => 50)
order.valid?.should be_true
order.errors.count.should == 0
end
end
describe "when item total is greater than limit" do
before { reset_spree_preferences { |config| config.use_store_credit_minimum = 10 } }
it "should be valid when item total is greater than limit" do
order.valid?.should be_true
order.errors.count.should == 0
end
end
end
end
end
| 35.83004 | 155 | 0.655157 |
4abaf1aec7d814e7f577877ec2e91c3dc10d0076 | 341 | class CreateServerProfileUserAccesses < ActiveRecord::Migration
def self.up
create_table :server_profile_user_accesses do |t|
t.integer :server_profile_id
t.integer :user_id
t.string :role, :default => 'reader'
t.timestamps
end
end
def self.down
drop_table :server_profile_user_accesses
end
end
| 21.3125 | 63 | 0.718475 |
3863bbeb41d844896bdde6492484786daf91b476 | 501 | class Users::SessionsController < Devise::SessionsController
# before_action :configure_sign_in_params, only: [:create]
# GET /resource/sign_in
# def new
# super
# end
# POST /resource/sign_in
# def create
# super
# end
# DELETE /resource/sign_out
# def destroy
# super
# end
# protected
# If you have extra params to permit, append them to the sanitizer.
def configure_sign_in_params
devise_parameter_sanitizer.permit(:sign_in, keys: [:name])
end
end
| 19.269231 | 69 | 0.694611 |
b9c8d54ec45e7635f1b1a92880a713158c475e00 | 593 | # Ruby Sample program from www.sapphiresteel.com
module MagicThing
def m_power
    return @m_power
end
def m_power=(aPower)
@m_power=aPower
end
end
module Treasure
attr_accessor :value
attr_accessor :insurance_cost
end
class Weapon
attr_accessor :deadliness
attr_accessor :power
end
class Sword < Weapon
include Treasure
include MagicThing
attr_accessor :name
end
s = Sword.new
s.name = "Excalibur"
s.deadliness = 10
s.power = 20
s.m_power = "Glows when Orcs Appear"
puts(s.name)
puts(s.deadliness)
puts(s.power)
puts(s.m_power)
| 14.825 | 50 | 0.703204 |
f8c363ad44c2fde025d87d3dcd60050625facc1f | 9,733 | # typed: false
# frozen_string_literal: true
require "test/support/fixtures/testball"
require "cleanup"
require "cask/cache"
require "fileutils"
using Homebrew::Cleanup::CleanupRefinement
describe Homebrew::Cleanup::CleanupRefinement do
describe "::prune?" do
alias_matcher :be_pruned, :be_prune
subject(:path) { HOMEBREW_CACHE/"foo" }
before do
path.mkpath
end
it "returns true when ctime and mtime < days_default" do
allow_any_instance_of(Pathname).to receive(:ctime).and_return(2.days.ago)
allow_any_instance_of(Pathname).to receive(:mtime).and_return(2.days.ago)
expect(path.prune?(1)).to be true
end
it "returns false when ctime and mtime >= days_default" do
expect(path.prune?(2)).to be false
end
end
end
describe Homebrew::Cleanup do
let(:ds_store) { Pathname.new("#{HOMEBREW_CELLAR}/.DS_Store") }
let(:lock_file) { Pathname.new("#{HOMEBREW_LOCKS}/foo") }
around do |example|
FileUtils.touch ds_store
FileUtils.touch lock_file
FileUtils.mkdir_p HOMEBREW_LIBRARY/"Homebrew/vendor"
FileUtils.touch HOMEBREW_LIBRARY/"Homebrew/vendor/portable-ruby-version"
example.run
ensure
FileUtils.rm_f ds_store
FileUtils.rm_f lock_file
FileUtils.rm_rf HOMEBREW_LIBRARY/"Homebrew"
end
describe "::cleanup" do
it "removes .DS_Store and lock files" do
subject.clean!
expect(ds_store).not_to exist
expect(lock_file).not_to exist
end
it "doesn't remove anything if `dry_run` is true" do
described_class.new(dry_run: true).clean!
expect(ds_store).to exist
expect(lock_file).to exist
end
it "doesn't remove the lock file if it is locked" do
lock_file.open(File::RDWR | File::CREAT).flock(File::LOCK_EX | File::LOCK_NB)
subject.clean!
expect(lock_file).to exist
end
context "when it can't remove a keg" do
let(:f1) { Class.new(Testball) { version "0.1" }.new }
let(:f2) { Class.new(Testball) { version "0.2" }.new }
before do
[f1, f2].each do |f|
f.brew do
f.install
end
Tab.create(f, DevelopmentTools.default_compiler, :libcxx).write
end
allow_any_instance_of(Keg)
.to receive(:uninstall)
.and_raise(Errno::EACCES)
end
it "doesn't remove any kegs" do
subject.cleanup_formula f2
expect(f1.installed_kegs.size).to eq(2)
end
it "lists the unremovable kegs" do
subject.cleanup_formula f2
expect(subject.unremovable_kegs).to contain_exactly(f1.installed_kegs[0])
end
end
end
specify "::cleanup_formula" do
f1 = Class.new(Testball) do
version "1.0"
end.new
f2 = Class.new(Testball) do
version "0.2"
version_scheme 1
end.new
f3 = Class.new(Testball) do
version "0.3"
version_scheme 1
end.new
f4 = Class.new(Testball) do
version "0.1"
version_scheme 2
end.new
[f1, f2, f3, f4].each do |f|
f.brew do
f.install
end
Tab.create(f, DevelopmentTools.default_compiler, :libcxx).write
end
expect(f1).to be_latest_version_installed
expect(f2).to be_latest_version_installed
expect(f3).to be_latest_version_installed
expect(f4).to be_latest_version_installed
subject.cleanup_formula f3
expect(f1).not_to be_latest_version_installed
expect(f2).not_to be_latest_version_installed
expect(f3).to be_latest_version_installed
expect(f4).to be_latest_version_installed
end
describe "#cleanup_cask", :cask do
before do
Cask::Cache.path.mkpath
end
context "when given a versioned cask" do
let(:cask) { Cask::CaskLoader.load("local-transmission") }
it "removes the download if it is not for the latest version" do
download = Cask::Cache.path/"#{cask.token}--7.8.9"
FileUtils.touch download
subject.cleanup_cask(cask)
expect(download).not_to exist
end
it "does not remove downloads for the latest version" do
download = Cask::Cache.path/"#{cask.token}--#{cask.version}"
FileUtils.touch download
subject.cleanup_cask(cask)
expect(download).to exist
end
end
context "when given a `:latest` cask" do
let(:cask) { Cask::CaskLoader.load("latest-with-appcast") }
it "does not remove the download for the latest version" do
download = Cask::Cache.path/"#{cask.token}--#{cask.version}"
FileUtils.touch download
subject.cleanup_cask(cask)
expect(download).to exist
end
it "removes the download for the latest version after 30 days" do
download = Cask::Cache.path/"#{cask.token}--#{cask.version}"
allow(download).to receive(:ctime).and_return(30.days.ago - 1.hour)
allow(download).to receive(:mtime).and_return(30.days.ago - 1.hour)
subject.cleanup_cask(cask)
expect(download).not_to exist
end
end
end
describe "::cleanup_logs" do
let(:path) { (HOMEBREW_LOGS/"delete_me") }
before do
path.mkpath
end
it "cleans all logs if prune is 0" do
described_class.new(days: 0).cleanup_logs
expect(path).not_to exist
end
it "cleans up logs if older than 30 days" do
allow_any_instance_of(Pathname).to receive(:ctime).and_return(31.days.ago)
allow_any_instance_of(Pathname).to receive(:mtime).and_return(31.days.ago)
subject.cleanup_logs
expect(path).not_to exist
end
it "does not clean up logs less than 30 days old" do
allow_any_instance_of(Pathname).to receive(:ctime).and_return(15.days.ago)
allow_any_instance_of(Pathname).to receive(:mtime).and_return(15.days.ago)
subject.cleanup_logs
expect(path).to exist
end
end
describe "::cleanup_cache" do
it "cleans up incomplete downloads" do
incomplete = (HOMEBREW_CACHE/"something.incomplete")
incomplete.mkpath
subject.cleanup_cache
expect(incomplete).not_to exist
end
it "cleans up 'cargo_cache'" do
cargo_cache = (HOMEBREW_CACHE/"cargo_cache")
cargo_cache.mkpath
subject.cleanup_cache
expect(cargo_cache).not_to exist
end
it "cleans up 'go_cache'" do
go_cache = (HOMEBREW_CACHE/"go_cache")
go_cache.mkpath
subject.cleanup_cache
expect(go_cache).not_to exist
end
it "cleans up 'glide_home'" do
glide_home = (HOMEBREW_CACHE/"glide_home")
glide_home.mkpath
subject.cleanup_cache
expect(glide_home).not_to exist
end
it "cleans up 'java_cache'" do
java_cache = (HOMEBREW_CACHE/"java_cache")
java_cache.mkpath
subject.cleanup_cache
expect(java_cache).not_to exist
end
it "cleans up 'npm_cache'" do
npm_cache = (HOMEBREW_CACHE/"npm_cache")
npm_cache.mkpath
subject.cleanup_cache
expect(npm_cache).not_to exist
end
it "cleans up 'gclient_cache'" do
gclient_cache = (HOMEBREW_CACHE/"gclient_cache")
gclient_cache.mkpath
subject.cleanup_cache
expect(gclient_cache).not_to exist
end
it "cleans up all files and directories" do
git = (HOMEBREW_CACHE/"gist--git")
gist = (HOMEBREW_CACHE/"gist")
svn = (HOMEBREW_CACHE/"gist--svn")
git.mkpath
gist.mkpath
FileUtils.touch svn
described_class.new(days: 0).cleanup_cache
expect(git).not_to exist
expect(gist).to exist
expect(svn).not_to exist
end
it "does not clean up directories that are not VCS checkouts" do
git = (HOMEBREW_CACHE/"git")
git.mkpath
described_class.new(days: 0).cleanup_cache
expect(git).to exist
end
it "cleans up VCS checkout directories with modified time < prune time" do
foo = (HOMEBREW_CACHE/"--foo")
foo.mkpath
allow_any_instance_of(Pathname).to receive(:ctime).and_return(Time.now - 2 * 60 * 60 * 24)
allow_any_instance_of(Pathname).to receive(:mtime).and_return(Time.now - 2 * 60 * 60 * 24)
described_class.new(days: 1).cleanup_cache
expect(foo).not_to exist
end
it "does not clean up VCS checkout directories with modified time >= prune time" do
foo = (HOMEBREW_CACHE/"--foo")
foo.mkpath
described_class.new(days: 1).cleanup_cache
expect(foo).to exist
end
context "cleans old files in HOMEBREW_CACHE" do
let(:bottle) { (HOMEBREW_CACHE/"testball--0.0.1.tag.bottle.tar.gz") }
let(:testball) { (HOMEBREW_CACHE/"testball--0.0.1") }
let(:testball_resource) { (HOMEBREW_CACHE/"testball--rsrc--0.0.1.txt") }
before do
FileUtils.touch bottle
FileUtils.touch testball
FileUtils.touch testball_resource
(HOMEBREW_CELLAR/"testball"/"0.0.1").mkpath
FileUtils.touch(CoreTap.instance.formula_dir/"testball.rb")
end
it "cleans up file if outdated" do
allow(Utils::Bottles).to receive(:file_outdated?).with(any_args).and_return(true)
subject.cleanup_cache
expect(bottle).not_to exist
expect(testball).not_to exist
expect(testball_resource).not_to exist
end
it "cleans up file if `scrub` is true and formula not installed" do
described_class.new(scrub: true).cleanup_cache
expect(bottle).not_to exist
expect(testball).not_to exist
expect(testball_resource).not_to exist
end
it "cleans up file if stale" do
subject.cleanup_cache
expect(bottle).not_to exist
expect(testball).not_to exist
expect(testball_resource).not_to exist
end
end
end
end
| 26.44837 | 96 | 0.660536 |
bf5dba2e57b21a6b1c21c269be852e933a32454c | 1,578 | require 'spec_helper_acceptance'
describe 'Identity - Group' do
let(:acl_manifest) do
<<-MANIFEST
file { '#{target_parent}':
ensure => directory
}
file { '#{target_parent}/#{target_file}':
ensure => file,
content => '#{file_content}',
require => File['#{target_parent}']
}
group { '#{group_id}':
ensure => present,
}
acl { '#{target_parent}/#{target_file}':
permissions => [
{ identity => '#{group_id}', rights => ['full'] },
],
}
MANIFEST
end
let(:verify_acl_command) { "icacls #{target_parent}/#{target_file}" }
let(:verify_content_path) { "#{target_parent}/#{target_file}" }
let(:acl_regex) { %r{.*\\#{group_id}:\(F\)} }
context 'Specify Group Identity' do
let(:target_file) { 'specify_group_ident.txt' }
let(:group_id) { 'bobs' }
let(:file_content) { 'Cat barf.' }
include_examples 'execute manifest and verify file'
end
context 'Specify Group with Long Name for Identity' do
let(:target_file) { 'specify_long_group_ident.txt' }
# 256 Characters
let(:group_id) { 'nzxncvkjnzxjkcnvkjzxncvkjznxckjvnzxkjncvzxnvckjnzxkjcnvkjzxncvkjzxncvkjzxncvkjnzxkjcnvkzjxncvkjzxnvckjnzxkjcvnzxkncjvjkzxncvkjzxnvckjnzxjkcvnzxkjncvkjzxncvjkzxncvkjzxnkvcjnzxjkcvnkzxjncvkjzxncvkzckjvnzxkcvnjzxjkcnvzjxkncvkjzxnvkjsdnjkvnzxkjcnvkjznvkjxcbvzs' } # rubocop:disable Metrics/LineLength
let(:file_content) { 'Pretty little poodle dressed in noodles.' }
include_examples 'execute manifest and verify file'
end
end
| 32.204082 | 318 | 0.667934 |
911daab0949f2882d4961569260cea33793c97fd | 7,127 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Geo::ContainerRepositorySync, :geo do
let_it_be(:group) { create(:group, name: 'group') }
let_it_be(:project) { create(:project, path: 'test', group: group) }
let_it_be(:container_repository) { create(:container_repository, name: 'my_image', project: project) }
let(:primary_api_url) { 'http://primary.registry.gitlab' }
let(:secondary_api_url) { 'http://registry.gitlab' }
let(:primary_repository_url) { "#{primary_api_url}/v2/#{container_repository.path}" }
let(:secondary_repository_url ) { "#{secondary_api_url}/v2/#{container_repository.path}" }
  # The line-break characters would be removed by a JSON encode/decode round trip, so we use them
  # to prove that no such round trip happens and the original human-readable JSON is preserved
let(:manifest) do
"{" \
"\n\"schemaVersion\":2," \
"\n\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\"," \
"\n\"layers\":[" \
"{\n\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\n\"size\":3333,\n\"digest\":\"sha256:3333\"}," \
"{\n\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\n\"size\":4444,\n\"digest\":\"sha256:4444\"}," \
"{\n\"mediaType\":\"application/vnd.docker.image.rootfs.foreign.diff.tar.gzip\",\n\"size\":5555,\n\"digest\":\"sha256:5555\",\n\"urls\":[\"https://foo.bar/v2/zoo/blobs/sha256:5555\"]}" \
"]" \
"}"
end
let(:manifest_list) do
%Q(
{
"schemaVersion":2,
"mediaType":"application/vnd.docker.distribution.manifest.list.v2+json",
"manifests":[
{
"mediaType":"application/vnd.docker.distribution.manifest.v2+json",
"size":6666,
"digest":"sha256:6666",
"platform":
{
"architecture":"arm64","os":"linux"
}
}
]
}
)
end
before do
stub_container_registry_config(enabled: true, api_url: secondary_api_url)
stub_registry_replication_config(enabled: true, primary_api_url: primary_api_url)
end
def stub_primary_repository_tags_requests(repository_url, tags)
stub_request(:get, "#{repository_url}/tags/list")
.to_return(
status: 200,
body: Gitlab::Json.dump(tags: tags.keys),
headers: { 'Content-Type' => 'application/json' })
tags.each do |tag, digest|
stub_request(:head, "#{repository_url}/manifests/#{tag}")
.to_return(status: 200, body: "", headers: { 'docker-content-digest' => digest })
end
end
def stub_secondary_repository_tags_requests(repository_url, tags)
stub_request(:get, "#{repository_url}/tags/list")
.to_return(
status: 200,
body: Gitlab::Json.dump(tags: tags.keys),
headers: { 'Content-Type' => 'application/json' })
tags.each do |tag, digest|
stub_request(:head, "#{repository_url}/manifests/#{tag}")
.to_return(status: 200, body: "", headers: { 'docker-content-digest' => digest })
end
end
def stub_primary_raw_manifest_request(repository_url, tag, manifest)
stub_request(:get, "#{repository_url}/manifests/#{tag}")
.to_return(status: 200, body: manifest, headers: {})
end
def stub_secondary_raw_manifest_request(repository_url, tag, manifest)
stub_request(:get, "#{repository_url}/manifests/#{tag}")
.to_return(status: 200, body: manifest, headers: {})
end
def stub_primary_raw_manifest_list_request(repository_url, tag, manifest)
stub_request(:get, "#{repository_url}/manifests/#{tag}")
.to_return(status: 200, body: manifest_list, headers: {})
end
def stub_secondary_push_manifest_request(repository_url, tag, manifest)
stub_request(:put, "#{repository_url}/manifests/#{tag}")
.with(body: manifest)
.to_return(status: 200, body: "", headers: {})
end
def stub_missing_blobs_requests(primary_repository_url, secondary_repository_url, blobs)
blobs.each do |digest, missing|
stub_request(:head, "#{secondary_repository_url}/blobs/#{digest}")
.to_return(status: (missing ? 404 : 200), body: "", headers: {})
next unless missing
stub_request(:get, "#{primary_repository_url}/blobs/#{digest}")
.to_return(status: 200, body: File.new(Rails.root.join('ee/spec/fixtures/ee_sample_schema.json')), headers: {})
end
end
describe '#execute' do
subject { described_class.new(container_repository) }
context 'single manifest' do
it 'determines list of tags to sync and to remove correctly' do
stub_primary_repository_tags_requests(primary_repository_url, { 'tag-to-sync' => 'sha256:1111' })
stub_secondary_repository_tags_requests(secondary_repository_url, { 'tag-to-remove' => 'sha256:2222' })
stub_primary_raw_manifest_request(primary_repository_url, 'tag-to-sync', manifest)
stub_missing_blobs_requests(primary_repository_url, secondary_repository_url, { 'sha256:3333' => true, 'sha256:4444' => false })
stub_secondary_push_manifest_request(secondary_repository_url, 'tag-to-sync', manifest)
expect(container_repository).to receive(:push_blob).with('sha256:3333', anything)
expect(container_repository).not_to receive(:push_blob).with('sha256:4444', anything)
expect(container_repository).not_to receive(:push_blob).with('sha256:5555', anything)
expect(container_repository).to receive(:delete_tag_by_digest).with('sha256:2222')
subject.execute
end
context 'when primary repository has no tags' do
it 'removes secondary tags and does not fail' do
stub_primary_repository_tags_requests(primary_repository_url, {})
stub_secondary_repository_tags_requests(secondary_repository_url, { 'tag-to-remove' => 'sha256:2222' })
expect(container_repository).to receive(:delete_tag_by_digest).with('sha256:2222')
subject.execute
end
end
end
context 'manifest list' do
it 'pushes the correct blobs and manifests' do
stub_primary_repository_tags_requests(primary_repository_url, { 'tag-to-sync' => 'sha256:1111' })
stub_secondary_repository_tags_requests(secondary_repository_url, {})
stub_primary_raw_manifest_list_request(primary_repository_url, 'tag-to-sync', manifest_list)
stub_primary_raw_manifest_request(primary_repository_url, 'sha256:6666', manifest)
stub_secondary_raw_manifest_request(secondary_repository_url, 'sha256:6666', manifest)
stub_missing_blobs_requests(primary_repository_url, secondary_repository_url, { 'sha256:3333' => true, 'sha256:4444' => false })
expect(container_repository).to receive(:push_blob).with('sha256:3333', anything)
expect(container_repository).to receive(:push_manifest).with('sha256:6666', anything, anything)
expect(container_repository).to receive(:push_manifest).with('tag-to-sync', anything, anything)
expect(container_repository).to receive(:delete_tag_by_digest).with('sha256:2222')
subject.execute
end
end
end
end
| 43.723926 | 194 | 0.68458 |
7a2474dc20e509b4cb9264c03d6af60368eec894 | 4,041 | require 'test_helper'
module ActsAsAuthenticTest
class LoginTest < ActiveSupport::TestCase
def test_login_field_config
assert_equal :login, User.login_field
assert_nil Employee.login_field
User.login_field = :nope
assert_equal :nope, User.login_field
User.login_field :login
assert_equal :login, User.login_field
end
def test_validate_login_field_config
assert User.validate_login_field
assert Employee.validate_login_field
User.validate_login_field = false
assert !User.validate_login_field
User.validate_login_field true
assert User.validate_login_field
end
def test_validates_length_of_login_field_options_config
assert_equal({:within => 3..100}, User.validates_length_of_login_field_options)
assert_equal({:within => 3..100}, Employee.validates_length_of_login_field_options)
User.validates_length_of_login_field_options = {:yes => "no"}
assert_equal({:yes => "no"}, User.validates_length_of_login_field_options)
User.validates_length_of_login_field_options({:within => 3..100})
assert_equal({:within => 3..100}, User.validates_length_of_login_field_options)
end
def test_validates_format_of_login_field_options_config
default = {:with => /\A\w[\w\.+\-_@ ]+\z/, :message => I18n.t('error_messages.login_invalid', :default => "should use only letters, numbers, spaces, and .-_@ please.")}
assert_equal default, User.validates_format_of_login_field_options
assert_equal default, Employee.validates_format_of_login_field_options
User.validates_format_of_login_field_options = {:yes => "no"}
assert_equal({:yes => "no"}, User.validates_format_of_login_field_options)
User.validates_format_of_login_field_options default
assert_equal default, User.validates_format_of_login_field_options
end
def test_validates_uniqueness_of_login_field_options_config
default = {:case_sensitive => false, :scope => User.validations_scope, :if => "#{User.login_field}_changed?".to_sym}
assert_equal default, User.validates_uniqueness_of_login_field_options
User.validates_uniqueness_of_login_field_options = {:yes => "no"}
assert_equal({:yes => "no"}, User.validates_uniqueness_of_login_field_options)
User.validates_uniqueness_of_login_field_options default
assert_equal default, User.validates_uniqueness_of_login_field_options
end
def test_validates_length_of_login_field
u = User.new
u.login = "a"
assert !u.valid?
assert u.errors[:login].size > 0
u.login = "aaaaaaaaaa"
assert !u.valid?
assert u.errors[:login].size == 0
end
def test_validates_format_of_login_field
u = User.new
u.login = "fdsf@^&*"
assert !u.valid?
assert u.errors[:login].size > 0
u.login = "fdsfdsfdsfdsfs"
assert !u.valid?
assert u.errors[:login].size == 0
u.login = "[email protected]"
assert !u.valid?
assert u.errors[:login].size == 0
end
def test_validates_uniqueness_of_login_field
users(:ben)
u = User.new
u.login = "bjohnson"
assert !u.valid?
assert u.errors[:login].size > 0
u.login = "BJOHNSON"
assert !u.valid?
assert u.errors[:login].size > 0
u.login = "fdsfdsf"
assert !u.valid?
assert u.errors[:login].size == 0
end
def test_find_by_smart_case_login_field
ben = users(:ben)
assert_equal ben, User.find_by_smart_case_login_field("bjohnson")
assert_equal ben, User.find_by_smart_case_login_field("BJOHNSON")
assert_equal ben, User.find_by_smart_case_login_field("Bjohnson")
drew = employees(:drew)
assert_equal drew, Employee.find_by_smart_case_login_field("[email protected]")
assert_equal drew, Employee.find_by_smart_case_login_field("[email protected]")
assert_equal drew, Employee.find_by_smart_case_login_field("[email protected]")
end
end
end
| 36.080357 | 174 | 0.715912 |
79a407c6d2ed134f647c66589fb5ffe3b45391f8 | 123 | require 'test_helper'
class CceReportTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.375 | 45 | 0.707317 |
610fa65d62dcfc45057085f5e283d3fc25d4270a | 826 | class Game_BattlerBase
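  # Zombie-state tweak: while any state whose note contains [zombie] is active,
  # the recovery-rate special parameter (sparam index 2) is negated, so healing
  # effects work against the battler.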
def rec; check_for_zombie ? sparam(2) * -1 : sparam(2); end
def check_for_zombie; @states.any? {|st| $data_states[st].zombie}; end
end
class RPG::State < RPG::BaseItem
ZOMB = /\[zombie\]/i
attr_reader :zombie
def set_zombie
    return if @zombie_check
    @zombie_check = true
@zombie = false
self.note.split(/[\r\n]+/).each do |line|
case line
when ZOMB
@zombie = true
end
end
end
end
module DataManager
class << self
alias load_database_cpz load_database unless $@
end
def self.load_database
load_database_cpz
check_zomb
end
def self.check_zomb
groups = [$data_states]
for group in groups
for obj in group
next if obj == nil
obj.set_zombie if obj.is_a?(RPG::State)
end
end
end
end | 19.666667 | 72 | 0.639225 |
1c738bce777179dbd94efc38433a9f3ca78e5fc0 | 4,588 | require 'pathname'
Puppet::Type.newtype(:dsc_xdatabaselogin) do
require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'
@doc = %q{
The DSC xDatabaseLogin resource type.
Automatically generated from
'xDatabase/DSCResources/MSFT_xDatabaseLogin/MSFT_xDatabaseLogin.schema.mof'
To learn more about PowerShell Desired State Configuration, please
visit https://technet.microsoft.com/en-us/library/dn249912.aspx.
For more information about built-in DSC Resources, please visit
https://technet.microsoft.com/en-us/library/dn249921.aspx.
For more information about xDsc Resources, please visit
https://github.com/PowerShell/DscResources.
}
validate do
fail('dsc_loginname is a required attribute') if self[:dsc_loginname].nil?
end
def dscmeta_resource_friendly_name; 'xDatabaseLogin' end
def dscmeta_resource_name; 'MSFT_xDatabaseLogin' end
def dscmeta_module_name; 'xDatabase' end
def dscmeta_module_version; '1.4.0.0' end
newparam(:name, :namevar => true ) do
end
ensurable do
newvalue(:exists?) { provider.exists? }
newvalue(:present) { provider.create }
newvalue(:absent) { provider.destroy }
defaultto { :present }
end
# Name: Ensure
# Type: string
# IsMandatory: False
# Values: ["Present", "Absent"]
newparam(:dsc_ensure) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "Ensure - Valid values are Present, Absent."
validate do |value|
resource[:ensure] = value.downcase
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
unless ['Present', 'present', 'Absent', 'absent'].include?(value)
fail("Invalid value '#{value}'. Valid values are Present, Absent")
end
end
end
# Name: LoginName
# Type: string
# IsMandatory: True
# Values: None
newparam(:dsc_loginname) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "LoginName"
isrequired
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
end
end
# Name: LoginPassword
# Type: string
# IsMandatory: False
# Values: None
newparam(:dsc_loginpassword) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "LoginPassword"
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
end
end
# Name: SqlAuthType
# Type: string
# IsMandatory: False
# Values: ["Windows", "SQL"]
newparam(:dsc_sqlauthtype) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "SqlAuthType - Valid values are Windows, SQL."
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
unless ['Windows', 'windows', 'SQL', 'sql'].include?(value)
fail("Invalid value '#{value}'. Valid values are Windows, SQL")
end
end
end
# Name: SqlServer
# Type: string
# IsMandatory: False
# Values: None
newparam(:dsc_sqlserver) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "SqlServer - Sql Server Name"
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
end
end
# Name: SqlConnectionCredential
# Type: MSFT_Credential
# IsMandatory: False
# Values: None
newparam(:dsc_sqlconnectioncredential) do
def mof_type; 'MSFT_Credential' end
def mof_is_embedded?; true end
desc "SqlConnectionCredential"
validate do |value|
unless value.kind_of?(Hash)
fail("Invalid value '#{value}'. Should be a hash")
end
PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("SqlConnectionCredential", value)
end
end
def builddepends
pending_relations = super()
PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
end
end
Puppet::Type.type(:dsc_xdatabaselogin).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10240.16384'))
defaultfor :operatingsystem => :windows
mk_resource_methods
end
| 29.792208 | 124 | 0.661072 |
6210ab624716c67e26fdac219655b8e44d867185 | 173 | class AddUserVoterId < ActiveRecord::Migration
def self.up
add_column :users, :voter_id, :integer
end
def self.down
remove_column :users, :voter_id
end
end
| 17.3 | 46 | 0.722543 |
8723b8981fd4e7cd441414e4462b671bca0d1b76 | 1,899 | module Fastlane
module Actions
module SharedValues
FIV_BUILD_IONIC_ANDROID_CUSTOM_VALUE =
:FIV_BUILD_IONIC_ANDROID_CUSTOM_VALUE
end
class FivBuildIonicAndroidAction < Action
def self.run(params)
isProd = params[:isProd]
if (isProd)
sh 'ionic cordova build android --prod'
else
sh 'ionic cordova build android'
end
end
#####################################################
# @!group Documentation
#####################################################
def self.description
'A short description with <= 80 characters of what this action does'
end
def self.details
# Optional:
# this is your chance to provide a more detailed description of this action
'You can use this action to do cool things...'
end
def self.available_options
# Define all options your action supports.
# Below a few examples
[
FastlaneCore::ConfigItem.new(
key: :isProd,
env_name: 'FIV_BUILD_IONIC_ANDROID_IS_PROD',
description: 'Dev or Prod build',
optional: false,
type: Boolean
)
]
end
def self.output
# Define the shared values you are going to provide
# Example
[
[
'FIV_BUILD_IONIC_ANDROID_CUSTOM_VALUE',
'A description of what this value contains'
]
]
end
def self.return_value
# If your method provides a return value, you can describe here what it does
end
def self.authors
# So no one will ever forget your contribution to fastlane :) You are awesome btw!
['Your GitHub/Twitter Name']
end
def self.is_supported?(platform)
platform == :android
end
end
end
end
| 26.013699 | 90 | 0.551869 |
61bab1c68a00c2913ede8c3d9a12ba6ad10ba7ec | 3,375 | # -*- encoding: utf-8 -*-
# stub: loofah 2.2.0 ruby lib
Gem::Specification.new do |s|
s.name = "loofah".freeze
s.version = "2.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Mike Dalessio".freeze, "Bryan Helmkamp".freeze]
s.date = "2018-02-11"
s.description = "".freeze
s.email = ["[email protected]".freeze, "[email protected]".freeze]
s.extra_rdoc_files = ["CHANGELOG.md".freeze, "MIT-LICENSE.txt".freeze, "Manifest.txt".freeze, "README.md".freeze]
s.files = ["CHANGELOG.md".freeze, "MIT-LICENSE.txt".freeze, "Manifest.txt".freeze, "README.md".freeze]
s.licenses = ["MIT".freeze]
s.rdoc_options = ["--main".freeze, "README.rdoc".freeze]
s.rubygems_version = "2.5.2.2".freeze
s.summary = "".freeze
s.installed_by_version = "2.5.2.2" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<nokogiri>.freeze, [">= 1.5.9"])
s.add_runtime_dependency(%q<crass>.freeze, ["~> 1.0.2"])
s.add_development_dependency(%q<rake>.freeze, [">= 0.8"])
s.add_development_dependency(%q<minitest>.freeze, ["~> 2.2"])
s.add_development_dependency(%q<rr>.freeze, ["~> 1.2.0"])
s.add_development_dependency(%q<json>.freeze, [">= 0"])
s.add_development_dependency(%q<hoe-gemspec>.freeze, [">= 0"])
s.add_development_dependency(%q<hoe-debugging>.freeze, [">= 0"])
s.add_development_dependency(%q<hoe-bundler>.freeze, [">= 0"])
s.add_development_dependency(%q<hoe-git>.freeze, [">= 0"])
s.add_development_dependency(%q<concourse>.freeze, [">= 0.15.0"])
s.add_development_dependency(%q<rdoc>.freeze, ["~> 4.0"])
s.add_development_dependency(%q<hoe>.freeze, ["~> 3.16"])
else
s.add_dependency(%q<nokogiri>.freeze, [">= 1.5.9"])
s.add_dependency(%q<crass>.freeze, ["~> 1.0.2"])
s.add_dependency(%q<rake>.freeze, [">= 0.8"])
s.add_dependency(%q<minitest>.freeze, ["~> 2.2"])
s.add_dependency(%q<rr>.freeze, ["~> 1.2.0"])
s.add_dependency(%q<json>.freeze, [">= 0"])
s.add_dependency(%q<hoe-gemspec>.freeze, [">= 0"])
s.add_dependency(%q<hoe-debugging>.freeze, [">= 0"])
s.add_dependency(%q<hoe-bundler>.freeze, [">= 0"])
s.add_dependency(%q<hoe-git>.freeze, [">= 0"])
s.add_dependency(%q<concourse>.freeze, [">= 0.15.0"])
s.add_dependency(%q<rdoc>.freeze, ["~> 4.0"])
s.add_dependency(%q<hoe>.freeze, ["~> 3.16"])
end
else
s.add_dependency(%q<nokogiri>.freeze, [">= 1.5.9"])
s.add_dependency(%q<crass>.freeze, ["~> 1.0.2"])
s.add_dependency(%q<rake>.freeze, [">= 0.8"])
s.add_dependency(%q<minitest>.freeze, ["~> 2.2"])
s.add_dependency(%q<rr>.freeze, ["~> 1.2.0"])
s.add_dependency(%q<json>.freeze, [">= 0"])
s.add_dependency(%q<hoe-gemspec>.freeze, [">= 0"])
s.add_dependency(%q<hoe-debugging>.freeze, [">= 0"])
s.add_dependency(%q<hoe-bundler>.freeze, [">= 0"])
s.add_dependency(%q<hoe-git>.freeze, [">= 0"])
s.add_dependency(%q<concourse>.freeze, [">= 0.15.0"])
s.add_dependency(%q<rdoc>.freeze, ["~> 4.0"])
s.add_dependency(%q<hoe>.freeze, ["~> 3.16"])
end
end
| 47.535211 | 115 | 0.622222 |
6afa6c1264c4cec5381668ed0a0ff114550e0c95 | 980 | module DockerCloud
class Stack < DockerCloud::Type
def name
info[:name]
end
def state
info[:state]
end
def synchronized
info[:synchronized]
end
# def services; info[:services]; end
def services
if @services.nil?
@services = []
info[:services].each do |uri|
@services.push(client.services.get_from_uri(uri))
end
end
@services
end
def deployed_date
info[:deployed_datetime]
end
def destroyed_date
info[:destroyed_datetime]
end
def nickname
info[:nickname]
end
def start
api.start(uuid)
end
def redeploy
api.redeploy(uuid)
end
def terminate
api.terminate(uuid)
end
def stop
api.stop(uuid)
end
def update(params)
api.update(uuid, params)
end
def export
api.export(uuid)
end
private
def api
client.stacks
end
end
end
| 14.202899 | 59 | 0.570408 |
01434c78e127676fd5d93c740e9d78569086eb82 | 2,957 | # frozen_string_literal: true
describe Facter::FactManager do
describe '#resolve_facts' do
it 'resolved all facts' do
ubuntu_os_name = double(Facts::Debian::Os::Name)
user_query = []
loaded_fact_os_name = double(Facter::LoadedFact, name: 'os.name', klass: ubuntu_os_name, type: :core)
loaded_fact_custom_fact = double(Facter::LoadedFact, name: 'custom_fact', klass: nil, type: :custom)
loaded_facts = [loaded_fact_os_name, loaded_fact_custom_fact]
allow_any_instance_of(Facter::FactLoader).to receive(:load).and_return(loaded_facts)
searched_fact1 = double(Facter::SearchedFact, name: 'os', fact_class: ubuntu_os_name, filter_tokens: [],
user_query: '', type: :core)
searched_fact2 = double(Facter::SearchedFact, name: 'my_custom_fact', fact_class: nil, filter_tokens: [],
user_query: '', type: :custom)
resolved_fact = mock_resolved_fact('os', 'Ubuntu', '', [])
allow(Facter::QueryParser)
.to receive(:parse)
.with(user_query, loaded_facts)
.and_return([searched_fact1, searched_fact2])
allow_any_instance_of(Facter::InternalFactManager)
.to receive(:resolve_facts)
.with([searched_fact1, searched_fact2])
.and_return([resolved_fact])
allow_any_instance_of(Facter::ExternalFactManager)
.to receive(:resolve_facts)
.with([searched_fact1, searched_fact2])
resolved_facts = Facter::FactManager.instance.resolve_facts(user_query)
expect(resolved_facts).to eq([resolved_fact])
end
end
describe '#resolve_core' do
it 'resolves all core facts' do
ubuntu_os_name = double(Facts::Debian::Os::Name)
user_query = []
loaded_fact_os_name = double(Facter::LoadedFact, name: 'os.name', klass: ubuntu_os_name, type: :core)
loaded_facts = [loaded_fact_os_name]
# allow_any_instance_of(Facter::InternalFactLoader).to receive(:core_facts).and_return(loaded_facts)
allow_any_instance_of(Facter::FactLoader).to receive(:load).and_return(loaded_facts)
allow_any_instance_of(Facter::FactLoader).to receive(:internal_facts).and_return(loaded_facts)
searched_fact = double(Facter::SearchedFact, name: 'os', fact_class: ubuntu_os_name, filter_tokens: [],
user_query: '', type: :core)
resolved_fact = mock_resolved_fact('os', 'Ubuntu', '', [])
allow(Facter::QueryParser)
.to receive(:parse)
.with(user_query, loaded_facts)
.and_return([searched_fact])
allow_any_instance_of(Facter::InternalFactManager)
.to receive(:resolve_facts)
.with([searched_fact])
.and_return([resolved_fact])
resolved_facts = Facter::FactManager.instance.resolve_core(user_query)
expect(resolved_facts).to eq([resolved_fact])
end
end
end
| 39.959459 | 111 | 0.666554 |