hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
91b22e3fdbf096f8df4bf92f5aac818e3d115367 | 3,938 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "univ_app_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.452632 | 102 | 0.758253 |
f81f6dc7324e4079d89824e7043bdb472f0bdf84 | 1,130 | # [PATCH] Deep merge in Rails 2.3.8 fails on HashWithIndifferentAccess
# https://rails.lighthouseapp.com/projects/8994/tickets/2732-deep_merge-does-not-work-on-hashwithindifferentaccess
# https://rails.lighthouseapp.com/projects/8994/tickets/2732/a/239457/deep_merge_replace_master.diff
# this should probably be in config/initializers, but it's not being loaded in time for the APP_CONFIG merge in linktv_platform.rb
module ActiveSupport #:nodoc:
  module CoreExtensions #:nodoc:
    module Hash #:nodoc:
      # Allows for deep merging
      module DeepMerge
        # Returns a new hash with +self+ and +other_hash+ merged recursively.
        # When both sides hold a Hash for the same key, they are merged
        # recursively; otherwise the value from +other_hash+ wins.
        def deep_merge(other_hash)
          merged = dup
          other_hash.each_pair do |key, value|
            merged[key] =
              if merged[key].is_a?(::Hash) && value.is_a?(::Hash)
                merged[key].deep_merge(value)
              else
                value
              end
          end
          merged
        end

        # Returns a new hash with +self+ and +other_hash+ merged recursively.
        # Modifies the receiver in place.
        def deep_merge!(other_hash)
          replace(deep_merge(other_hash))
        end
      end
    end
  end
end
| 37.666667 | 130 | 0.676106 |
6216dca1b0b3b9d64d65ad2c98daa86b65e8ae1a | 3,997 | require 'test_helper'
class Hash
  # override Hash#to_query to prevent sorting of params
  # Each key/value pair is serialized in insertion order; empty nested
  # hashes/arrays are dropped entirely rather than serialized as "".
  def to_query(namespace = nil)
    pairs = map do |key, value|
      next if (value.is_a?(Hash) || value.is_a?(Array)) && value.empty?
      value.to_query(namespace ? "#{namespace}[#{key}]" : key)
    end
    pairs.compact.join("&")
  end
end
# Functional tests for PJAX-aware behavior: layout suppression when the
# X-PJAX header is set, 406 rejection for non-PJAX-able actions, and
# stripping of the `_pjax` query parameter from params, rack env and paths.
class DefaultLayoutControllerTest < ActionController::TestCase
  test 'renders without layout' do
    request.env['HTTP_X_PJAX'] = true
    get :index
    # NOTE(review): plain string comparison of version numbers misorders
    # multi-digit majors (e.g. '10.0.0' < '4.0.0' lexicographically);
    # Gem::Version would be safer — harmless for the versions targeted here.
    if Rails::VERSION::STRING >= '4.0.0'
      assert_match 'default_layout#index', response.body
    else
      # The behavior for ~> 3.0 varies from 4.0. If there is a layout for parent
      # controller and `layout` in parent controller is set to false it will be
      # rendered anyway with a warning in a log file. It should be set explicit
      # in child controller.
      assert_match 'layouts/application default_layout#index', response.body
    end
  end
  test 'renders with default layout' do
    get :index
    assert_match 'layouts/application default_layout#index', response.body
  end
  test 'prevents pjax' do
    request.env['HTTP_X_PJAX'] = true
    get :prevent_pjax
    assert_equal 406, response.status
  end
  test 'strips pjax params' do
    request.env['HTTP_X_PJAX'] = true
    get :index, '_pjax' => true
    assert_equal({ 'controller' => 'default_layout', 'action' => 'index' }, Hash[@controller.params])
    assert_nil request.env['rack.request.query_string']
    assert_nil request.env['rack.request.query_hash']
    assert_nil request.env['action_dispatch.request.query_parameters']
    assert_equal '/default_layout', request.original_fullpath
    assert_equal '/default_layout', request.fullpath
  end
  # The next three tests verify that only `_pjax` is removed regardless of its
  # position among other query parameters, and that the remaining parameter
  # order is preserved (relies on the Hash#to_query override at the top of
  # this file, which serializes params without sorting).
  test 'strips pjax params with multiple params at the beginning' do
    request.env['HTTP_X_PJAX'] = true
    get :index, '_pjax' => true, 'first' => '1', 'second' => '2'
    assert_equal({ 'controller' => 'default_layout', 'action' => 'index', 'first' => '1', 'second' => '2' }, Hash[@controller.params])
    assert_nil request.env['rack.request.query_string']
    assert_nil request.env['rack.request.query_hash']
    assert_nil request.env['action_dispatch.request.query_parameters']
    assert_equal '/default_layout?first=1&second=2', request.original_fullpath
    assert_equal '/default_layout?first=1&second=2', request.fullpath
  end
  test 'strips pjax params with multiple params at the middle' do
    request.env['HTTP_X_PJAX'] = true
    get :index, 'first' => '1', '_pjax' => true, 'second' => '2'
    assert_equal({ 'controller' => 'default_layout', 'action' => 'index', 'first' => '1', 'second' => '2' }, Hash[@controller.params])
    assert_nil request.env['rack.request.query_string']
    assert_nil request.env['rack.request.query_hash']
    assert_nil request.env['action_dispatch.request.query_parameters']
    assert_equal '/default_layout?first=1&second=2', request.original_fullpath
    assert_equal '/default_layout?first=1&second=2', request.fullpath
  end
  test 'strips pjax params with multiple params at the end' do
    request.env['HTTP_X_PJAX'] = true
    get :index, 'first' => '1', 'second' => '2', '_pjax' => true
    assert_equal({ 'controller' => 'default_layout', 'action' => 'index', 'first' => '1', 'second' => '2' }, Hash[@controller.params])
    assert_nil request.env['rack.request.query_string']
    assert_nil request.env['rack.request.query_hash']
    assert_nil request.env['action_dispatch.request.query_parameters']
    assert_equal '/default_layout?first=1&second=2', request.original_fullpath
    assert_equal '/default_layout?first=1&second=2', request.fullpath
  end
  test 'sets pjax url' do
    request.env['HTTP_X_PJAX'] = true
    get :index
    assert_equal 'http://test.host/default_layout', response.headers['X-PJAX-URL']
  end
  # Rails 5 changed the functional-test API to keyword arguments; this shim
  # lets the positional-params call sites above work on both API versions.
  def get(action, params = {})
    if Rails::VERSION::STRING >= '5.0.0'
      super(action, { params: params })
    else
      super(action, params)
    end
  end
end
| 35.371681 | 134 | 0.689767 |
1c6627e3c2dd1deecce635b1243028ae14e6f312 | 2,443 | # frozen_string_literal: true
require 'omniauth-oauth2'
module OmniAuth
  module Strategies
    # OmniAuth strategy for "Sign in with Apple".
    #
    # Fixes over the previous revision:
    # * removed leftover debug logging that wrote the raw id_token and
    #   access_token to the log at :info level (credential leak) and issued
    #   extra HTTP GETs (`access_token.try(:get, ...)`) purely to build log
    #   lines on every authentication;
    # * `id_info` now memoizes properly instead of re-reading the token on
    #   every call before the memo check;
    # * `client_secret` reads the clock once so iat/exp cannot straddle a
    #   second boundary.
    class Apple < OmniAuth::Strategies::OAuth2
      option :name, 'apple'
      option :client_options,
             site: 'https://appleid.apple.com',
             authorize_url: '/auth/authorize',
             token_url: '/auth/token'
      # Apple delivers the authorization response via a form POST.
      option :authorize_params,
             response_mode: 'form_post'

      uid { id_info['sub'] }

      info do
        {
          sub: id_info['sub'],
          email: email,
          first_name: first_name,
          last_name: last_name
        }
      end

      extra do
        {
          raw_info: id_info.merge(user_info)
        }
      end

      # The client secret is a short-lived JWT, so the OAuth2 client must be
      # built per request rather than configured statically.
      def client
        ::OAuth2::Client.new(options.client_id, client_secret, deep_symbolize(options.client_options))
      end

      def callback_url
        options[:redirect_uri] || (full_host + script_name + callback_path)
      end

      private

      # Claims decoded from Apple's id_token.
      # NOTE(security): the third argument `false` disables signature
      # verification. The token arrives over TLS directly from Apple, but
      # verifying against Apple's published JWKS would be more robust —
      # confirm before relying on these claims for anything sensitive.
      def id_info
        @id_info ||= begin
          id_token = request.params['id_token'] || access_token.params['id_token']
          ::JWT.decode(id_token, nil, false)[0]
        end
      end

      # Apple only sends the `user` param on the user's first authorization;
      # returns an empty hash otherwise.
      def user_info
        return {} unless request.params['user'].present?
        @user_info ||= JSON.parse(request.params['user'])
      end

      def email
        user_info['email'] || id_info['email']
      end

      def first_name
        user_info.dig('name', 'firstName')
      end

      def last_name
        user_info.dig('name', 'lastName')
      end

      # Builds the ES256-signed JWT Apple requires as the OAuth client
      # secret, valid for 60 seconds from issuance.
      def client_secret
        now = Time.now.to_i
        payload = {
          iss: options.team_id,
          aud: 'https://appleid.apple.com',
          sub: options.client_id,
          iat: now,
          exp: now + 60
        }
        headers = { kid: options.key_id }
        ::JWT.encode(payload, private_key, 'ES256', headers)
      end

      def private_key
        ::OpenSSL::PKey::EC.new(options.pem)
      end
    end
  end
end
| 25.989362 | 102 | 0.573475 |
61b8d22530d5d68440df23c0772b32be3193c504 | 1,416 | require_relative '../../gen/ConstGenerator'
def gen_interfaceinfo_java(options)
ConstGenerator.new 'platform.interfaceinfo', options do |cg|
cg.include IS_WINDOWS ? "Ws2tcpip.h" : "net/if.h"
%w[
IFF_802_1Q_VLAN
IFF_ALLMULTI
IFF_ALTPHYS
IFF_AUTOMEDIA
IFF_BONDING
IFF_BRIDGE_PORT
IFF_BROADCAST
IFF_CANTCONFIG
IFF_DEBUG
IFF_DISABLE_NETPOLL
IFF_DONT_BRIDGE
IFF_DORMANT
IFF_DRV_OACTIVE
IFF_DRV_RUNNING
IFF_DYING
IFF_DYNAMIC
IFF_EBRIDGE
IFF_ECHO
IFF_ISATAP
IFF_LINK0
IFF_LINK1
IFF_LINK2
IFF_LIVE_ADDR_CHANGE
IFF_LOOPBACK
IFF_LOWER_UP
IFF_MACVLAN_PORT
IFF_MASTER
IFF_MASTER_8023AD
IFF_MASTER_ALB
IFF_MASTER_ARPMON
IFF_MONITOR
IFF_MULTICAST
IFF_NOARP
IFF_NOTRAILERS
IFF_OACTIVE
IFF_OVS_DATAPATH
IFF_POINTOPOINT
IFF_PORTSEL
IFF_PPROMISC
IFF_PROMISC
IFF_RENAMING
IFF_ROUTE
IFF_RUNNING
IFF_SIMPLEX
IFF_SLAVE
IFF_SLAVE_INACTIVE
IFF_SLAVE_NEEDARP
IFF_SMART
IFF_STATICARP
IFF_SUPP_NOFCS
IFF_TEAM_PORT
IFF_TX_SKB_SHARING
IFF_UNICAST_FLT
IFF_UP
IFF_WAN_HDLC
IFF_XMIT_DST_RELEASE
IFF_VOLATILE
IFF_CANTCHANGE
].each { |c| cg.const(c, '%ld', '(long int)') }
end
end
| 20.823529 | 62 | 0.646186 |
61c92a78a1ee73472555aeec433cbc2a8a264025 | 579 | # frozen_string_literal: true
module Mutations
  # GraphQL mutation that creates a project under a given brand.
  class CreateProject < BaseMutation
    argument :brand_id, ID, required: true
    argument :project_attributes, Types::ProjectAttributes, required: true
    field :project, Types::ProjectType, null: false
    # Authorization hook: looks up the brand, builds (but does not save) the
    # project, and lets CanCan's `authorize!` raise if the current user may
    # not create it. The built records are memoized for #resolve.
    # NOTE(review): building state in authorized? couples the two phases —
    # this relies on the GraphQL runtime always calling authorized? before
    # resolve; confirm against the graphql-ruby version in use.
    def authorized?(brand_id:, project_attributes:)
      @brand = Brand.find(brand_id)
      @project = @brand.projects.new(project_attributes.to_h)
      context[:current_ability].authorize! :create, @project
      true
    end
    # Persists the project built in #authorized?; save! raises on
    # validation failure rather than returning an error payload.
    def resolve(**_args)
      @project.save!
      { project: @project }
    end
  end
end
| 26.318182 | 74 | 0.696028 |
e92a529ff23e608730eb0d513d73f09cc4d9380b | 5,672 | require 'chef/project'
require 'chef/version'
require 'digest'
class Chef
class ProjectCache
attr_reader :project
attr_reader :metadata_dir
def initialize(project, metadata_dir)
@project = project
@metadata_dir = metadata_dir
end
# Collapses every Windows platform version in +manifest+ into the single
# '2008r2' key. Builds at or above +fix_up_to_version+ keep their real
# architecture split; older builds (or all builds when the default :all is
# used) are published under both 32- and 64-bit keys. Mutates and returns
# +manifest+.
def fix_windows_manifest!(manifest, fix_up_to_version=:all)
  x86_builds = {}
  x64_builds = {}
  manifest['windows'].each_pair do |_platform_version, build_data|
    build_data.each_pair do |architecture, builds|
      builds.each_pair do |version, build|
        # Short-circuit keeps Opscode::Version.parse from running for :all.
        remap_both = :all == fix_up_to_version || Opscode::Version.parse(version) < fix_up_to_version
        if remap_both
          x86_builds[version] = build
          x64_builds[version] = build
        elsif architecture == 'x86_64'
          x64_builds[version] = build
        else
          x86_builds[version] = build
        end
      end
    end
  end
  manifest['windows'] = {
    '2008r2' => {
      'i686' => x86_builds,
      'i386' => x86_builds,
      'x86_64' => x64_builds
    }
  }
  manifest
end
def update(remap_up_to=nil)
update_cache
json_v2 = if remap_up_to
fix_windows_manifest!(generate_combined_manifest, remap_up_to)
else
generate_combined_manifest
end
write_data(build_list_path, json_v2)
File.open(platform_names_path, "w") do |f|
f.puts project.get_platform_names
end
end
def name
project.name
end
def build_list_path
metadata_file("build-#{name}-list.json")
end
def platform_names_path
metadata_file("#{name}-platform-names.json")
end
def self.for_project(project_name, channel, metadata_dir)
project = Chef::Project.new(project_name, channel)
Chef::ProjectCache.new(project, metadata_dir)
end
def timestamp
JSON.parse(File.read(build_list_path))['run_data']['timestamp']
end
private
def write_data(path, data)
data[:run_data] = { :timestamp => Time.now.to_s }
File.open(path, "w") { |f| Yajl::Encoder.encode(data, f, :pretty => true) }
end
def generate_combined_manifest
# after updating cache, we have the same manifests as remote
project.manifests.inject({}) do |combined_manifest_data, manifest|
manifest_file = cache_path_for_manifest(manifest)
manifest_data = Yajl::Parser.parse(File.read(manifest_file))
deep_merge(combined_manifest_data, manifest_data)
end
end
def update_cache
create_cache_dirs
manifests_to_delete = local_manifests - project.manifests
debug("Files to delete:\n#{manifests_to_delete.map{|f| "* #{f}"}.join("\n")}")
debug("Files to fetch:\n#{manifests_to_fetch.map{|f| "* #{f}"}.join("\n")}")
manifests_to_delete.each {|m| delete_manifest(m) }
manifests_to_fetch.each {|f| fetch_manifest(f) }
end
def manifests_to_fetch
@fetch_list ||= project.manifests.select { |manifest| should_fetch_manifest?(manifest) }
end
def should_fetch_manifest?(manifest)
if !local_manifest_exists?(manifest)
true
elsif have_both_md5s_for?(manifest)
!manifest_md5_matches?(manifest)
else
remote_manifest_newer?(manifest)
end
end
def local_manifest_exists?(manifest)
File.exist?(cache_path_for_manifest(manifest))
end
def local_manifest_md5_for(manifest)
return unless local_manifest_exists?(manifest)
Digest::MD5.file(cache_path_for_manifest(manifest))
end
def have_both_md5s_for?(manifest)
!local_manifest_md5_for(manifest).nil? && !project.manifest_md5_for(manifest).nil?
end
def manifest_md5_matches?(manifest)
local_manifest_md5_for(manifest) == project.manifest_md5_for(manifest)
end
def local_manifest_mtime(manifest)
return unless local_manifest_exists?(manifest)
File.mtime(cache_path_for_manifest(manifest))
end
def remote_manifest_newer?(manifest)
local = local_manifest_mtime(manifest)
remote = project.manifest_last_modified_for(manifest)
if !local
true
elsif !remote
false
else
remote > local
end
end
def fetch_manifest(manifest)
local_path = cache_path_for_manifest(manifest)
File.open(local_path, "w+") do |f|
f.print project.download_manifest(manifest)
end
rescue Exception
File.unlink(local_path) if local_path && File.exist?(local_path)
raise
end
def create_cache_dirs
FileUtils.mkdir_p(cache_dir, mode: 0700)
end
def delete_manifest(manifest)
File.unlink(cache_path_for_manifest(manifest))
end
def cache_dir
metadata_file("release-metadata-cache/#{project.release_manifest_name}")
end
def metadata_file(path)
File.join(metadata_dir, project.channel.name, path)
end
def cache_path_for_manifest(manifest_name)
File.join(cache_dir, manifest_name)
end
def local_manifests
Dir["#{cache_dir}/*"].map { |m| File.basename(m) }
end
# Recursively merges +h2+ into a shallow copy of +h1+ and returns the
# result. Values from +h2+ win unless both sides hold a Hash for the same
# key, in which case they are merged recursively. Neither argument is
# mutated.
#
# Fixes/idiom: `and` replaced with `&&` (the low-precedence `and` is a
# well-known precedence hazard), and `h2.keys.each` with per-key re-lookup
# replaced by a direct `each` over key/value pairs.
def deep_merge(h1, h2)
  result = h1.dup
  h2.each do |key, value|
    result[key] = if h1[key].is_a?(Hash) && value.is_a?(Hash)
                    deep_merge(h1[key], value)
                  else
                    value
                  end
  end
  result
end
def debug(message)
# TODO: turn this off for cron
puts message
end
end
end
| 27.009524 | 96 | 0.63452 |
abadd933c69deb270172252812182802e85fea87 | 306 | class RadiantPlayer < Cask
version '1.2.1'
sha256 'dc1ed98170e05c430779f527191717cb412e74f398324878286df4a2bbad79d4'
url 'https://github.com/kbhomes/google-music-mac/releases/download/v1.2.1/Radiant.Player.zip'
homepage 'http://kbhomes.github.io/google-music-mac/'
app 'Radiant Player.app'
end
| 30.6 | 95 | 0.781046 |
e23f69962761001d29b1b4f3112bcb17c5d461ff | 1,082 | cask "reaper" do
version "6.16.0,6.16"
if MacOS.version <= :mojave
sha256 "91edeba84d382159dd9b678052c73947a155c421d0b4c1b610a82d35f788c347"
url "https://www.reaper.fm/files/#{version.major}.x/reaper#{version.after_comma.no_dots}_x86_64.dmg"
else
sha256 "bf0b60552964bb42be6a48aac7afb3355e1fb96f2fcfca963453688706deca08"
url "https://www.reaper.fm/files/#{version.major}.x/reaper#{version.after_comma.no_dots}_x86_64_catalina.dmg"
end
appcast "https://www.cockos.com/reaper/latestversion/?p=osx_64",
must_contain: version.after_comma
name "REAPER"
desc "Digital audio production application"
homepage "https://www.reaper.fm/"
app "REAPER64.app"
app "ReaMote64.app"
zap trash: [
"~/Library/Application Support/REAPER",
"~/Library/Saved Application State/com.cockos.reaper.savedState",
"~/Library/Saved Application State/com.cockos.reaperhosti386.savedState",
"~/Library/Saved Application State/com.cockos.reaperhostx8664.savedState",
"~/Library/Saved Application State/com.cockos.ReaMote.savedState",
]
end
| 34.903226 | 113 | 0.748614 |
accca83232384181c741d1499213062dfaf84b96 | 141 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_HelpQueue_session'
| 35.25 | 79 | 0.808511 |
d56d7d95a83c0edc31cd1402a4d3ad2e1c097f29 | 78 | require 'given_filesystem/spec_helpers'
require_relative '../lib/cli_tester'
| 19.5 | 39 | 0.820513 |
f79d02aa5e86c60295c3b8127dc5170d28c9ceca | 337 | # == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# nickname :string not null
# created_at :datetime not null
# updated_at :datetime not null
#
FactoryGirl.define do
factory :user_with_valid_attributes, class: User do
nickname 'nalabjp'
end
end
| 21.0625 | 53 | 0.62908 |
38a39e072519139b45a239230e1618e49e505428 | 182 | class CreateDistricts < ActiveRecord::Migration[5.0]
def change
create_table :districts do |t|
t.string :name
t.integer :area
t.timestamps
end
end
end
| 16.545455 | 52 | 0.653846 |
1854d3d29a2478dab74663ec3fb73244a2c1218b | 817 | class ZeroEightOhEight
# Prompts on stdout, reads one line from stdin, strips any ':' separator,
# and prints the brightness verdict for that time.
# NOTE(review): `gets` returns nil at EOF, which would make `chomp` raise —
# assumes interactive input; confirm if ever driven non-interactively.
def input
  puts 'How bright a time? (Format: 23:59 or 2359)'
  @t = gets.chomp.tr(':','')
  calc_brightness(@t)
end
# Prints the brightness verdict for every minute of the day (0000-2359),
# zero-padding hours and minutes to two digits each.
def autofind
  24.times { |h| 60.times { |m| calc_brightness("#{"%02d" % h}#{"%02d" % m}") } }
end
# Validates +time+ as a 4-digit 24-hour HHMM string and prints how "bright"
# it is: the sum of lit seven-segment display segments over its four digits
# (via #lookup). A total of 26 prints the "Brightest" message, 8 the
# "DARKNESS" message, any other total the "DIMWIT" message; invalid input
# prints an error instead.
def calc_brightness(time)
  # \A and \z anchor the pattern to the whole string, so a valid time can no
  # longer merely appear somewhere inside a longer input; this also makes
  # the previous `time.length == 4` check redundant. `match?` avoids
  # allocating MatchData since no captures are used.
  if time.match?(/\A([01]\d|2[0-3])[0-5]\d\z/)
    b = time.each_char.sum { |c| lookup(c.to_i) }
    puts "#{time}: Brightest, You are on TIME! #{b}" if b == 26
    puts "#{time}: You time guessing DIMWIT! #{b}" unless b == 26 || b == 8
    puts "#{time}: Thou lovest DARKNESS! #{b}" if b == 8
  else
    puts "Sorry wrong timezone ... DIMWIT!"
  end
end
# Number of lit segments on a seven-segment display for digit +d+;
# returns 0 for anything that is not a digit 0-9.
def lookup(d)
  segment_counts = {
    1 => 2, 7 => 3, 4 => 4,
    2 => 5, 3 => 5, 5 => 5,
    0 => 6, 6 => 6, 9 => 6,
    8 => 7
  }
  segment_counts.fetch(d, 0)
end
end | 22.694444 | 81 | 0.564259 |
f8370df250a178425eb937df94abb519448968cc | 495 | class Group < ApplicationRecord
has_and_belongs_to_many :users
belongs_to :company
# Name must be present and unique. The previous declaration passed
# `case_insensitive: true`, which is NOT a recognized Rails uniqueness
# option (the real option is `case_sensitive: false`), so the intended
# case-insensitive uniqueness check was silently not applied. The two
# separate `validates :name` calls are also merged into one.
validates :name,
          presence: true,
          uniqueness: { case_sensitive: false }
def active?
archived_at.blank?
end
def archived?
archived_at.present?
end
def archive!
write_attribute(:archived_at, Time.now)
save!
end
# True when the group has never been archived (no archived_at timestamp).
# Fixes a bug in the previous revision: a stray space split the attribute
# into `a rchived_at`, which parsed as a call `a(rchived_at.blank?)` and
# raised NameError at runtime.
def unarchived?
  archived_at.blank?
end
def unarchive!
write_attribute(:archived_at, nil)
save!
end
end
| 13.378378 | 43 | 0.672727 |
3933e6a2a89144b32f3ca889a004fcddc41acefd | 10,358 | class SamlAuthenticator < ::Auth::OAuth2Authenticator
attr_reader :user, :attributes, :info
def info=(info)
@info = info.present? ? info.with_indifferent_access : info
end
def initialize(name, opts = {})
opts[:trusted] ||= true
super(name, opts)
end
def attribute_name_format(type = "basic")
"urn:oasis:names:tc:SAML:2.0:attrname-format:#{type}"
end
def setting(key)
GlobalSetting.try("#{name}_#{key}") || GlobalSetting.try("saml_#{key.to_s}")
end
def request_attributes
attrs = "email|name|first_name|last_name"
custom_attrs = GlobalSetting.try(:saml_request_attributes)
attrs = "#{attrs}|#{custom_attrs}" if custom_attrs.present?
attrs.split("|").uniq.map do |name|
{ name: name, name_format: attribute_name_format, friendly_name: name }
end
end
# Builds the omniauth-saml `attribute_statements` mapping from the default
# pipe-delimited spec plus any custom `saml_attribute_statements` global
# setting. Format: "local:saml1,saml2|..." => { "local" => ["saml1", ...] }.
# Malformed entries (not exactly "key:values") are skipped.
#
# Idiom fix: the previous revision used `map` purely for side effects and
# discarded the mapped array; `each` states the intent, and destructuring
# plus `concat` replaces the `<<` + `flatten!` dance.
def attribute_statements
  result = {}
  statements = "name:name|email:email,mail|first_name:first_name,firstname,firstName|last_name:last_name,lastname,lastName|nickname:screenName"
  custom_statements = GlobalSetting.try(:saml_attribute_statements)
  statements = "#{statements}|#{custom_statements}" if custom_statements.present?
  statements.split("|").each do |statement|
    parts = statement.split(":")
    next unless parts.count == 2
    local_name, saml_names = parts
    (result[local_name] ||= []).concat(saml_names.split(","))
  end
  result
end
def register_middleware(omniauth)
omniauth.provider :saml,
name: name,
issuer: SamlAuthenticator.saml_base_url,
idp_sso_target_url: setting(:target_url),
idp_slo_target_url: setting(:slo_target_url),
slo_default_relay_state: SamlAuthenticator.saml_base_url,
idp_cert_fingerprint: GlobalSetting.try(:saml_cert_fingerprint),
idp_cert_fingerprint_algorithm: GlobalSetting.try(:saml_cert_fingerprint_algorithm),
idp_cert: setting(:cert),
request_attributes: request_attributes,
attribute_statements: attribute_statements,
assertion_consumer_service_url: SamlAuthenticator.saml_base_url + "/auth/#{name}/callback",
single_logout_service_url: SamlAuthenticator.saml_base_url + "/auth/#{name}/slo",
name_identifier_format: GlobalSetting.try(:saml_name_identifier_format),
custom_url: (GlobalSetting.try(:saml_request_method) == 'post') ? "/discourse_saml" : nil,
certificate: GlobalSetting.try(:saml_sp_certificate),
private_key: GlobalSetting.try(:saml_sp_private_key),
security: {
authn_requests_signed: !!GlobalSetting.try(:saml_authn_requests_signed),
want_assertions_signed: !!GlobalSetting.try(:saml_want_assertions_signed),
signature_method: XMLSecurity::Document::RSA_SHA1
},
idp_slo_session_destroy: proc { |env, session| @user.user_auth_tokens.destroy_all; @user.logged_out }
end
def attr(key)
info[key] || attributes[key]&.join(",") || ""
end
def after_authenticate(auth)
self.info = auth[:info]
extra_data = auth.extra || {}
raw_info = extra_data[:raw_info]
@attributes = raw_info&.attributes || {}
auth[:uid] = attributes['uid'].try(:first) || auth[:uid] if GlobalSetting.try(:saml_use_attributes_uid)
uid = auth[:uid]
auth[:provider] = name
auth[:info][:email] ||= uid
result = super
if GlobalSetting.try(:saml_log_auth)
::PluginStore.set("saml", "#{name}_last_auth", auth.inspect)
::PluginStore.set("saml", "#{name}_last_auth_raw_info", raw_info.inspect)
::PluginStore.set("saml", "#{name}_last_auth_extra", extra_data.inspect)
end
if GlobalSetting.try(:saml_debug_auth)
data = {
uid: uid,
info: info,
extra: extra_data
}
log("#{name}_auth: #{data.inspect}")
end
result.username = begin
if attributes.present?
username = attributes['screenName'].try(:first)
username = attributes['uid'].try(:first) if GlobalSetting.try(:saml_use_uid)
end
username ||= UserNameSuggester.suggest(result.name) if result.name != uid
username ||= UserNameSuggester.suggest(result.email) if result.email != uid
username ||= uid
username
end
result.name = begin
if attributes.present?
fullname = attributes['fullName'].try(:first)
fullname = "#{attributes['firstName'].try(:first)} #{attributes['lastName'].try(:first)}"
end
fullname ||= result.name
fullname
end
if result.respond_to?(:skip_email_validation) && GlobalSetting.try(:saml_skip_email_validation)
result.skip_email_validation = true
end
if GlobalSetting.try(:saml_validate_email_fields).present? && attributes['memberOf'].present?
unless (GlobalSetting.try(:saml_validate_email_fields).split("|").map(&:downcase) & attributes['memberOf'].map(&:downcase)).empty?
result.email_valid = true
else
result.email_valid = false
end
elsif GlobalSetting.respond_to?(:saml_default_emails_valid) && !GlobalSetting.saml_default_emails_valid.nil?
result.email_valid = GlobalSetting.saml_default_emails_valid
else
result.email_valid = true
end
result.extra_data[:saml_attributes] = attributes
result.extra_data[:saml_info] = info
if result.user.blank?
result.username = '' if GlobalSetting.try(:saml_clear_username)
result.omit_username = true if GlobalSetting.try(:saml_omit_username)
result.user = auto_create_account(result) if GlobalSetting.try(:saml_auto_create_account) && result.email_valid
else
@user = result.user
sync_groups
sync_custom_fields
sync_email(result.email, uid)
sync_moderator
sync_trust_level
end
result
end
def log(info)
Rails.logger.warn("SAML Debugging: #{info}") if GlobalSetting.try(:saml_debug_auth)
end
def after_create_account(user, auth)
super
@user = user
self.info = auth[:extra_data][:saml_info]
@attributes = auth[:extra_data][:saml_attributes]
sync_groups
sync_moderator
sync_trust_level
sync_custom_fields
end
def auto_create_account(result)
email = result.email
return if User.find_by_email(email).present?
# Use a mutex here to counter SAML responses that are sent at the same time and the same email payload
DistributedMutex.synchronize("discourse_saml_#{email}") do
try_name = result.name.presence
try_username = result.username.presence
user_params = {
primary_email: UserEmail.new(email: email, primary: true),
name: try_name || User.suggest_name(try_username || email),
username: UserNameSuggester.suggest(try_username || try_name || email),
active: true
}
user = User.create!(user_params)
after_create_account(user, result.as_json.with_indifferent_access)
user
end
end
def sync_groups
return unless GlobalSetting.try(:saml_sync_groups)
groups_fullsync = GlobalSetting.try(:saml_groups_fullsync) || false
group_attribute = GlobalSetting.try(:saml_groups_attribute) || 'memberOf'
user_group_list = (attributes[group_attribute] || []).map(&:downcase)
if groups_fullsync
user_has_groups = user.groups.where(automatic: false).pluck(:name).map(&:downcase)
if user_has_groups.present?
groups_to_add = user_group_list - user_has_groups
groups_to_remove = user_has_groups - user_group_list
end
else
total_group_list = (GlobalSetting.try(:saml_sync_groups_list) || "").split('|').map(&:downcase)
groups_to_add = user_group_list + attr('groups_to_add').split(",").map(&:downcase)
groups_to_remove = attr('groups_to_remove').split(",").map(&:downcase)
if total_group_list.present?
groups_to_add = total_group_list & groups_to_add
removable_groups = groups_to_remove.dup
groups_to_remove = total_group_list - groups_to_add
groups_to_remove &= removable_groups if removable_groups.present?
end
end
return if user_group_list.blank? && groups_to_add.blank? && groups_to_remove.blank?
Group.where('LOWER(name) IN (?) AND NOT automatic', groups_to_add).each do |group|
group.add user
end
Group.where('LOWER(name) IN (?) AND NOT automatic', groups_to_remove).each do |group|
group.remove user
end
end
def sync_custom_fields
return if user.blank?
request_attributes.each do |attr|
key = attr[:name]
user.custom_fields["#{name}_#{key}"] = attr(key) if attr(key).present?
end
user.save_custom_fields
end
def sync_email(email, uid)
return unless GlobalSetting.try(:saml_sync_email)
email = Email.downcase(email)
return if user.email == email
existing_user = User.find_by_email(email)
if email =~ EmailValidator.email_regex && existing_user.nil?
user.email = email
user.save
user.oauth2_user_infos.where(provider: name, uid: uid).update_all(email: email)
end
end
def sync_moderator
return unless GlobalSetting.try(:saml_sync_moderator)
is_moderator_attribute = GlobalSetting.try(:saml_moderator_attribute) || 'isModerator'
is_moderator = ['1', 'true'].include?(attributes[is_moderator_attribute].try(:first).to_s.downcase)
return if user.moderator == is_moderator
user.moderator = is_moderator
user.save
end
def sync_trust_level
return unless GlobalSetting.try(:saml_sync_trust_level)
trust_level_attribute = GlobalSetting.try(:saml_trust_level_attribute) || 'trustLevel'
level = attributes[trust_level_attribute].try(:first).to_i
return unless level.between?(1,4)
if user.manual_locked_trust_level != level
user.manual_locked_trust_level = level
user.save
end
return if user.trust_level == level
user.change_trust_level!(level, log_action_for: user)
end
def enabled?
true # SAML plugin has no enabled setting
end
def self.saml_base_url
GlobalSetting.try(:saml_base_url) || Discourse.base_url
end
end
| 33.739414 | 145 | 0.674841 |
b9ac00cff1b47a6dc0612335939043f1e2f44b9e | 208 | # This migration comes from spree_annarbortees_twitter (originally 20141212051327)
# Adds an optional `hashtag` string column to spree_products.
# Reversible automatically via `change` (add_column is invertible).
class AddHashtagToProduct < ActiveRecord::Migration
  def change
    add_column :spree_products, :hashtag, :string
  end
end
| 29.714286 | 82 | 0.8125 |
1a044e19a7ad86b55df71626fa94b6dc88b0cddf | 955 | # frozen_string_literal: true
require 'haml/template/options'
# check for a compatible Rails version when Haml is loaded
if (activesupport_spec = Gem.loaded_specs['activesupport'])
if activesupport_spec.version.to_s < '3.2'
raise Exception.new("\n\n** Haml now requires Rails 3.2 and later. Use Haml version 4.0.4\n\n")
end
end
module Haml
  class Railtie < ::Rails::Railtie
    # Hooks Haml into Rails once ActionView has loaded.
    initializer :haml do |app|
      ActiveSupport.on_load(:action_view) do
        require "haml/template"
        # Register the asset-pipeline Sass filter only when sass-rails is
        # present AND the app has the asset pipeline enabled.
        if defined?(::Sass::Rails::SassTemplate) && app.config.assets.enabled
          require "haml/sass_rails_filter"
        end
        # Pick the HTML-safe ERB template engine matching whichever ERB
        # implementation is loaded: prefer Erubi, fall back to Erubis.
        if defined? Erubi
          require "haml/helpers/safe_erubi_template"
          Haml::Filters::Erb.template_class = Haml::SafeErubiTemplate
        else
          require "haml/helpers/safe_erubis_template"
          Haml::Filters::Erb.template_class = Haml::SafeErubisTemplate
        end
      end
    end
  end
end
| 29.84375 | 99 | 0.679581 |
625912b94b10ead1b284849e3bc2e671ffc7d399 | 15,549 | require 'spec_helper'
describe 'cis_hardening::logaudit::accounting' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) { os_facts }
# Check for default class
it { is_expected.to contain_class('cis_hardening::logaudit::accounting') }
# Ensure Auditing is enabled - Section 4.1.1
# Ensure that auditd is installed - Section 4.1.1.1
it {
  is_expected.to contain_package('audit').with(
    'ensure' => 'present',
  )
}
it {
  is_expected.to contain_package('audit-libs').with(
    'ensure' => 'present',
  )
}
# Ensure auditd service is enabled and running - Section 4.1.1.2
# The service must not start before the rules file is managed.
it {
  is_expected.to contain_service('auditd').with(
    'ensure' => 'running',
    'enable' => true,
    'hasstatus' => true,
    'hasrestart' => true,
  ).that_requires('File[/etc/audit/audit.rules]')
}
# restart_auditd is the refresh target notified by the auditd.conf
# file_line resources below, so config changes are picked up.
it {
  is_expected.to contain_exec('restart_auditd').with(
    'path' => '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin',
    'command' => '/bin/systemctl restart auditd',
  ).that_requires('Package[audit]')
}
# Ensure that Ensure auditing for processes that start prior to auditd is enabled - Section 4.1.1.3
it {
  is_expected.to contain_file_line('pre_auditd_settings').with(
    'ensure' => 'present',
    'path' => '/etc/default/grub',
    'line' => 'GRUB_CMDLINE_LINUX="audit=1"',
  ).that_requires('File[/etc/default/grub]')
}
it {
  is_expected.to contain_file('/etc/audit/audit.rules').with(
    'ensure' => 'present',
    'owner' => 'root',
    'group' => 'root',
    'mode' => '0640',
  ).that_requires('Package[audit]')
}
# Ensure AuditD may be restarted via systemd - prerequisite for the
# restart_auditd exec above. The stock unit file ships with
# RefuseManualStop=yes, so without this override systemd refuses the
# restarts this module triggers; the change only takes effect after the
# next daemon-reload/boot following the Puppet run.
#
# (Previously this example was broken: `is_expected.top` instead of
# `is_expected.to`, a truncated 'path' entry, and the raw Puppet
# file_line resource pasted into the spec — invalid Ruby.)
it {
  is_expected.to contain_file_line('auditd_restart_enable').with(
    'ensure' => 'present',
    'path' => '/usr/lib/systemd/system/auditd.service',
    'line' => 'RefuseManualStop=no',
    'match' => '^RefuseManualStop\=',
  )
}
# Configure Data Retention - 4.1.2
# Ensure audit log storage size is configured - Section 4.1.2.1
# Each auditd.conf edit notifies restart_auditd so the daemon reloads.
it {
  is_expected.to contain_file_line('set_auditd_logfile_size').with(
    'ensure' => 'present',
    'path' => '/etc/audit/auditd.conf',
    'line' => 'max_log_file = 1024',
    'match' => '^max_log_file\ \=',
  ).that_notifies('Exec[restart_auditd]')
}
# Ensure that Ensure audit logs are not automatically deleted - Section 4.1.2.2
it {
  is_expected.to contain_file_line('set_max_logfile_action').with(
    'ensure' => 'present',
    'path' => '/etc/audit/auditd.conf',
    'line' => 'max_log_file_action = keep_logs',
    'match' => '^max_log_file_action\ \=',
  )
}
# Ensure system is disabled when audit logs are full - Section 4.1.2.3
it {
  is_expected.to contain_file_line('full_logfile_notify_action').with(
    'ensure' => 'present',
    'path' => '/etc/audit/auditd.conf',
    'line' => 'space_left_action = email',
    'match' => '^space_left_action\ \=',
  ).that_notifies('Exec[restart_auditd]')
}
it {
  is_expected.to contain_file_line('set_action_mail_account').with(
    'ensure' => 'present',
    'path' => '/etc/audit/auditd.conf',
    'line' => 'action_mail_acct = root',
    'match' => '^action_mail_acct\ \=',
  ).that_notifies('Exec[restart_auditd]')
}
it {
  is_expected.to contain_file_line('set_admin_space_left_action').with(
    'ensure' => 'present',
    'path' => '/etc/audit/auditd.conf',
    'line' => 'admin_space_left_action = SYSLOG',
    'match' => '^admin_space_left_action\ \=',
  ).that_notifies('Exec[restart_auditd]')
}
# Ensure audit_backlog_limit is sufficient - Section 4.1.2.4 - PASS
# (no resource managed for 4.1.2.4, hence no expectation here)
# Ensure defaults directory is present for grub settings - Section 4.1.3 prerequisites
it {
  is_expected.to contain_file('/etc/default').with(
    'ensure' => 'directory',
    'owner' => 'root',
    'group' => 'root',
    'mode' => '0755',
  )
}
it {
  is_expected.to contain_file('/etc/default/grub').with(
    'ensure' => 'file',
    'owner' => 'root',
    'group' => 'root',
    'mode' => '0644',
  ).that_requires('File[/etc/default]')
}
# Ensure events that modify date and time information are collected - Section 4.1.3
it {
is_expected.to contain_file_line('time_change_64bit_item1').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S adjtimex -S settimeofday -k time-change',
)
}
it {
is_expected.to contain_file_line('time_change_64bit_item2').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S clock_settime -k time-change',
)
}
it {
is_expected.to contain_file_line('time_change_64bit_item3').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/localtime -p wa -k time-change',
)
}
# Ensure events that modify user/group information are collected - Section 4.1.4
it {
is_expected.to contain_file_line('ownerchange_group').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/group -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_passwd').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/passwd -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_gshadow').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/gshadow -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_shadow').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/shadow -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_opasswd').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/security/opasswd -p wa -k identity',
)
}
# Ensure events that modify the system's network environment are collected - Section 4.1.5
it {
is_expected.to contain_file_line('network_namechanges').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S sethostname -S setdomainname -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_issue').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/issue -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_issuedotnet').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/issue.net -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_network').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sysconfig/network -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_networkscripts').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sysconfig/network-scripts/ -p wa -k system-locale',
)
}
# Ensure events that modify the system's Mandatory Access Controls are collected - Section 4.1.6
it {
is_expected.to contain_file_line('macpolicy_selinux').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/selinux/ -p wa -k MAC-policy',
)
}
it {
is_expected.to contain_file_line('macpolicy_selinuxshare').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /usr/share/selinux/ -p wa -k MAC-policy',
)
}
# Ensure that Ensure login and logout events are collected - Section 4.1.7
it {
is_expected.to contain_file_line('lastlogin').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/lastlog -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('faillog').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/faillog -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('faillock').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/faillock/ -p wa -k logins',
)
}
# Ensure session initiation information is collected - Section 4.1.8
it {
is_expected.to contain_file_line('utmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/utmp -p wa -k session',
)
}
it {
is_expected.to contain_file_line('wtmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/wtmp -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('btmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/btmp -p wa -k logins',
)
}
# Ensure discretionary access control permission modification events are collected - Section 4.1.9
it {
is_expected.to contain_file_line('chmod_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
it {
is_expected.to contain_file_line('chown_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
it {
is_expected.to contain_file_line('xattr_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
# Ensure unsuccessful unauthorized file access attempts are collected - Section 4.1.10
it {
is_expected.to contain_file_line('file_truncate').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=4294967295 -k access',
)
}
# Ensure use of privileged commands is collected - Section 4.1.11 **unused**
# Ensure succesful filesystem mounts are collected - Section 4.1.12
it {
is_expected.to contain_file_line('mount_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts',
)
}
# Ensure that Ensure file deletion events by users are captured - Section 4.1.13
it {
is_expected.to contain_file_line('file_deletions').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete',
)
}
# Ensure that Ensure changes to system administration scope (sudoers) is collected - Section 4.1.14
it {
is_expected.to contain_file_line('sudoers_file').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sudoers -p wa -k scope',
)
}
it {
is_expected.to contain_file_line('sudoers_dir').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sudoers.d/ -p wa -k scope',
)
}
# Ensure that Ensure system administrator actions (sudolog) are collected - Section 4.1.15
it {
is_expected.to contain_file_line('sudolog').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/sudo.log -p wa -k actions',
)
}
# Ensure that Ensure Kernel module loading and unloading are collected - Section 4.1.16
it {
is_expected.to contain_file_line('check_insmod').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/insmod -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_rmmod').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/rmmod -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_modprobe').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/modprobe -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_modulestate').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S init_module -S delete_module -k modules',
)
}
# Ensure the audit configuration is immutable - Section 4.1.17
it {
is_expected.to contain_file_line('make_auditd_immutable').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-e 2',
'match' => '^-e\ ',
'append_on_no_match' => true,
)
}
# Ensure manifest compiles with all dependencies
it {
is_expected.to compile.with_all_deps
}
end
end
end
| 34.324503 | 182 | 0.538748 |
39aee4c82fae9f81ef4a691e7d3e87d329dee180 | 7,597 | module Asciidoctor
# A base module for defining converters that can be used to convert {AbstractNode}
# objects in a parsed AsciiDoc document to a backend format such as HTML or
# DocBook.
#
# Implementing a converter involves:
#
# * including this module in a {Converter} implementation class
# * overriding the {Converter#convert} method
# * optionally associating the converter with one or more backends using
# the {#register_for} DSL method imported by the {Config Converter::Config} module
#
# Examples
#
# class TextConverter
# include Asciidoctor::Converter
# register_for 'text'
# def initialize backend, opts
# super
# outfilesuffix '.txt'
# end
# def convert node, transform = nil
# case (transform ||= node.node_name)
# when 'document'
# node.content
# when 'section'
# [node.title, node.content] * "\n\n"
# when 'paragraph'
# node.content.tr("\n", ' ') << "\n"
# else
# if transform.start_with? 'inline_'
# node.text
# else
# %(<#{transform}>\n)
# end
# end
# end
# end
#
# puts Asciidoctor.convert_file 'sample.adoc', backend: :text
module Converter
# A module that provides the {#register_for} method for statically
# registering a converter with the default {Factory Converter::Factory} instance.
module Config
# Public: Statically registers the current {Converter} class with the default
# {Factory Converter::Factory} to handle conversion to the specified backends.
#
# This method also defines the converts? method on the class which returns whether
# the class is registered to convert a specified backend.
#
# backends - A String Array of backends with which to associate this {Converter} class.
#
# Returns nothing
def register_for *backends
Factory.register self, backends
metaclass = class << self; self; end
if backends == ['*']
metaclass.send :define_method, :converts? do |name|
true
end
else
metaclass.send :define_method, :converts? do |name|
backends.include? name
end
end
nil
end
end
module BackendInfo
def backend_info
@backend_info ||= setup_backend_info
end
def setup_backend_info
raise ::ArgumentError, %(Cannot determine backend for converter: #{self.class}) unless @backend
base = @backend.sub TrailingDigitsRx, ''
if (ext = DEFAULT_EXTENSIONS[base])
type = ext[1..-1]
else
# QUESTION should we be forcing the basebackend to html if unknown?
base = 'html'
ext = '.html'
type = 'html'
syntax = 'html'
end
{
'basebackend' => base,
'outfilesuffix' => ext,
'filetype' => type,
'htmlsyntax' => syntax
}
end
def filetype value = nil
if value
backend_info['filetype'] = value
else
backend_info['filetype']
end
end
def basebackend value = nil
if value
backend_info['basebackend'] = value
else
backend_info['basebackend']
end
end
def outfilesuffix value = nil
if value
backend_info['outfilesuffix'] = value
else
backend_info['outfilesuffix']
end
end
def htmlsyntax value = nil
if value
backend_info['htmlsyntax'] = value
else
backend_info['htmlsyntax']
end
end
end
class << self
# Mixes the {Config Converter::Config} module into any class that includes the {Converter} module.
#
# converter - The Class that includes the {Converter} module
#
# Returns nothing
def included converter
converter.extend Config
end
end
include Config
include BackendInfo
# Public: Creates a new instance of Converter
#
# backend - The String backend format to which this converter converts.
# opts - An options Hash (optional, default: {})
#
# Returns a new instance of [Converter]
def initialize backend, opts = {}
@backend = backend
setup_backend_info
end
=begin
# Public: Invoked when this converter is added to the chain of converters in a {CompositeConverter}.
#
# owner - The CompositeConverter instance
#
# Returns nothing
def composed owner
end
=end
# Public: Converts an {AbstractNode} using the specified transform. If a
# transform is not specified, implementations typically derive one from the
# {AbstractNode#node_name} property.
#
# Implementations are free to decide how to carry out the conversion. In
# the case of the built-in converters, the tranform value is used to
# dispatch to a handler method. The {TemplateConverter} uses the value of
# the transform to select a template to render.
#
# node - The concrete instance of AbstractNode to convert
# transform - An optional String transform that hints at which transformation
# should be applied to this node. If a transform is not specified,
# the transform is typically derived from the value of the
# node's node_name property. (optional, default: nil)
#
# Returns the [String] result
def convert node, transform = nil
raise ::NotImplementedError
end
# Public: Converts an {AbstractNode} using the specified transform along
# with additional options. Delegates to {#convert} without options by default.
# Used by the template-based converter to delegate to the converter for outline.
# (see https://github.com/asciidoctor/asciidoctor-backends/blob/master/slim/html5/block_toc.html.slim#L11)
#
# node - The concrete instance of AbstractNode to convert
# transform - An optional String transform that hints at which transformation
# should be applied to this node. If a transform is not specified,
# the transform is typically derived from the value of the
# node's node_name property. (optional, default: nil)
# opts - An optional Hash of options that provide additional hints about
# how to convert the node.
#
# Returns the [String] result
def convert_with_options node, transform = nil, opts = {}
convert node, transform
end
end
# A module that can be used to mix the {#write} method into a {Converter}
# implementation to allow the converter to control how the output is written
# to disk.
module Writer
# Public: Writes the output to the specified target file name or stream.
#
# output - The output String to write
# target - The String file name or stream object to which the output should
# be written.
#
# Returns nothing
def write output, target
if target.respond_to? :write
target.write output.chomp
# ensure there's a trailing endline to be nice to terminals
target.write EOL
else
::File.open(target, 'w') {|f| f.write output }
end
nil
end
end
module VoidWriter
include Writer
# Public: Does not write output
def write output, target
end
end
end
require 'asciidoctor/converter/base'
require 'asciidoctor/converter/factory'
| 32.32766 | 110 | 0.625115 |
1d5d26492cca5672906582c36591ddd93b60f110 | 1,379 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2018_06_01
module Models
#
# Describes the parameters of differencing disk settings that can be be
# specified for operating system disk. <br><br> NOTE: The differencing disk
# settings can only be specified for managed disk.
#
class DiffDiskSettings
include MsRestAzure
# @return [DiffDiskOptions] Specifies the differencing disk settings for
# operating system disk. Possible values include: 'Local'
attr_accessor :option
#
# Mapper for DiffDiskSettings class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'DiffDiskSettings',
type: {
name: 'Composite',
class_name: 'DiffDiskSettings',
model_properties: {
option: {
client_side_validation: true,
required: false,
serialized_name: 'option',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 27.58 | 79 | 0.586657 |
b907f8b109f9eb8fb7e1ea978c7b257a7b1eaca1 | 366 | ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!
class ActiveSupport::TestCase
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
include ApplicationHelper
def is_logged_in?
!session[:user_id].nil?
end
end
| 22.875 | 82 | 0.754098 |
032f683a992dee0cfd9dbd9632936e7f5d4492cf | 895 | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fluent/output'
module Fluent
class NullOutput < Output
Plugin.register_output('null', self)
def initialize
super
end
def configure(conf)
super
end
def start
end
def shutdown
end
def emit(tag, es, chain)
chain.next
end
end
end
| 21.309524 | 77 | 0.680447 |
d55b7694b5bbe782c63f379e381031b67812e604 | 1,203 | class Range
def bsearch
return to_enum(:bsearch) unless block_given?
from = self.begin
to = self.end
unless from.is_a?(Numeric) && to.is_a?(Numeric)
raise TypeError, "can't do binary search for #{from.class}"
end
midpoint = nil
if from.is_a?(Integer) && to.is_a?(Integer)
convert = Proc.new{ midpoint }
else
map = Proc.new do |pk, unpk, nb|
result, = [nb.abs].pack(pk).unpack(unpk)
nb < 0 ? -result : result
end
from = map['D', 'q', from.to_f]
to = map['D', 'q', to.to_f]
convert = Proc.new{ map['q', 'D', midpoint] }
end
to -= 1 if exclude_end?
satisfied = nil
while from <= to do
midpoint = (from + to).div(2)
result = yield(cur = convert.call)
case result
when Numeric
return cur if result == 0
result = result < 0
when true
satisfied = cur
when nil, false
# nothing to do
else
raise TypeError, "wrong argument type #{result.class} (must be numeric, true, false or nil)"
end
if result
to = midpoint - 1
else
from = midpoint + 1
end
end
satisfied
end
end
| 25.0625 | 100 | 0.551953 |
03e19004e2523e9debfed66f680757c6e9e05209 | 4,445 | # -*- encoding: utf-8 -*-
# stub: github-pages 90 ruby lib
Gem::Specification.new do |s|
s.name = "github-pages".freeze
s.version = "90"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["GitHub, Inc.".freeze]
s.date = "2016-08-05"
s.description = "Bootstrap the GitHub Pages Jekyll environment locally.".freeze
s.email = "[email protected]".freeze
s.executables = ["github-pages".freeze]
s.files = ["bin/github-pages".freeze]
s.homepage = "https://github.com/github/pages-gem".freeze
s.licenses = ["MIT".freeze]
s.post_install_message = "---------------------------------------------------\nThank you for installing github-pages!\nGitHub Pages recently upgraded to Jekyll 3.0, which\nincludes some breaking changes. More information:\nhttps://github.com/blog/2100-github-pages-jekyll-3\n---------------------------------------------------\n".freeze
s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
s.rubygems_version = "3.3.5".freeze
s.summary = "Track GitHub Pages dependencies.".freeze
s.installed_by_version = "3.3.5" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<jekyll>.freeze, ["= 3.1.6"])
s.add_runtime_dependency(%q<jekyll-sass-converter>.freeze, ["= 1.3.0"])
s.add_runtime_dependency(%q<kramdown>.freeze, ["= 1.11.1"])
s.add_runtime_dependency(%q<liquid>.freeze, ["= 3.0.6"])
s.add_runtime_dependency(%q<rouge>.freeze, ["= 1.11.1"])
s.add_runtime_dependency(%q<github-pages-health-check>.freeze, ["= 1.1.2"])
s.add_runtime_dependency(%q<jemoji>.freeze, ["= 0.7.0"])
s.add_runtime_dependency(%q<jekyll-mentions>.freeze, ["= 1.1.3"])
s.add_runtime_dependency(%q<jekyll-redirect-from>.freeze, ["= 0.11.0"])
s.add_runtime_dependency(%q<jekyll-sitemap>.freeze, ["= 0.10.0"])
s.add_runtime_dependency(%q<jekyll-feed>.freeze, ["= 0.5.1"])
s.add_runtime_dependency(%q<jekyll-gist>.freeze, ["= 1.4.0"])
s.add_runtime_dependency(%q<jekyll-paginate>.freeze, ["= 1.1.0"])
s.add_runtime_dependency(%q<jekyll-coffeescript>.freeze, ["= 1.0.1"])
s.add_runtime_dependency(%q<jekyll-seo-tag>.freeze, ["= 2.0.0"])
s.add_runtime_dependency(%q<jekyll-github-metadata>.freeze, ["= 2.0.2"])
s.add_runtime_dependency(%q<listen>.freeze, ["= 3.0.6"])
s.add_runtime_dependency(%q<activesupport>.freeze, ["= 4.2.7"])
s.add_runtime_dependency(%q<mercenary>.freeze, ["~> 0.3"])
s.add_runtime_dependency(%q<terminal-table>.freeze, ["~> 1.4"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 3.3"])
s.add_development_dependency(%q<rubocop>.freeze, ["~> 0.35"])
s.add_development_dependency(%q<pry>.freeze, ["~> 0.10"])
s.add_development_dependency(%q<jekyll_test_plugin_malicious>.freeze, ["~> 0.2"])
else
s.add_dependency(%q<jekyll>.freeze, ["= 3.1.6"])
s.add_dependency(%q<jekyll-sass-converter>.freeze, ["= 1.3.0"])
s.add_dependency(%q<kramdown>.freeze, ["= 1.11.1"])
s.add_dependency(%q<liquid>.freeze, ["= 3.0.6"])
s.add_dependency(%q<rouge>.freeze, ["= 1.11.1"])
s.add_dependency(%q<github-pages-health-check>.freeze, ["= 1.1.2"])
s.add_dependency(%q<jemoji>.freeze, ["= 0.7.0"])
s.add_dependency(%q<jekyll-mentions>.freeze, ["= 1.1.3"])
s.add_dependency(%q<jekyll-redirect-from>.freeze, ["= 0.11.0"])
s.add_dependency(%q<jekyll-sitemap>.freeze, ["= 0.10.0"])
s.add_dependency(%q<jekyll-feed>.freeze, ["= 0.5.1"])
s.add_dependency(%q<jekyll-gist>.freeze, ["= 1.4.0"])
s.add_dependency(%q<jekyll-paginate>.freeze, ["= 1.1.0"])
s.add_dependency(%q<jekyll-coffeescript>.freeze, ["= 1.0.1"])
s.add_dependency(%q<jekyll-seo-tag>.freeze, ["= 2.0.0"])
s.add_dependency(%q<jekyll-github-metadata>.freeze, ["= 2.0.2"])
s.add_dependency(%q<listen>.freeze, ["= 3.0.6"])
s.add_dependency(%q<activesupport>.freeze, ["= 4.2.7"])
s.add_dependency(%q<mercenary>.freeze, ["~> 0.3"])
s.add_dependency(%q<terminal-table>.freeze, ["~> 1.4"])
s.add_dependency(%q<rspec>.freeze, ["~> 3.3"])
s.add_dependency(%q<rubocop>.freeze, ["~> 0.35"])
s.add_dependency(%q<pry>.freeze, ["~> 0.10"])
s.add_dependency(%q<jekyll_test_plugin_malicious>.freeze, ["~> 0.2"])
end
end
| 54.876543 | 338 | 0.652193 |
1d5b981f3d408be5ad0332ff0209ef81b3bd36d6 | 89 | if defined?(ActiveRecord::Migration)
ActiveRecord::Migration.maintain_test_schema!
end
| 22.25 | 47 | 0.831461 |
08a9e2878ffdc87cc9f1b1945b95eb548f590fdb | 990 | require "emque/stats/version"
require "emque/stats/configuration"
require "emque/stats/client"
module Emque
module Stats
class << self
attr_accessor :client
attr_writer :configuration
def logger
self.configuration.logger
end
def configure
yield(configuration)
self.client = Client.new(configuration)
end
def configuration
@configuration ||= Configuration.new
end
def track(event_name, props = {})
Emque::Stats.client.produce_track_event(event_name, props)
end
def increment(event_name)
count(event_name, 1)
end
def count(event_name, count=1)
Emque::Stats.client.produce_count(event_name, count)
end
def timer(event_name, duration)
Emque::Stats.client.produce_timer(event_name, duration)
end
def gauge(event_name, value)
Emque::Stats.client.produce_gauge(event_name, value)
end
end
end
end
| 21.521739 | 66 | 0.648485 |
399c187d7a7b9f69af486c58768064602ca419eb | 4,240 | Huginn::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = false
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
if ENV["RAILS_LOG_TO_STDOUT"].present? ||
ENV['ON_HEROKU'] ||
ENV['HEROKU_POSTGRESQL_ROSE_URL'] ||
ENV['HEROKU_POSTGRESQL_GOLD_URL'] ||
File.read(File.join(File.dirname(__FILE__), '../../Procfile')) =~ /intended for Heroku/
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Compress JavaScripts and CSS
config.assets.js_compressor = :uglifier
config.assets.css_compressor = :sass
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = ENV['FORCE_SSL'] == 'true'
# See everything in the log (default is will be :debug in Rails 5.0)
config.log_level = :info
# Prepend all log lines with the following tags
config.log_tags = [ :request_id ] # :subdomain
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
config.cache_store = :memory_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
if ENV['ASSET_HOST'].present?
config.action_controller.asset_host = ENV['ASSET_HOST']
end
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.action_mailer.default_url_options = { :host => ENV['DOMAIN'] }
config.action_mailer.asset_host = ENV['DOMAIN']
if ENV['ASSET_HOST'].present?
config.action_mailer.asset_host = ENV['ASSET_HOST']
end
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = ENV.fetch('SMTP_DELIVERY_METHOD', 'smtp').to_sym
config.action_mailer.perform_caching = false
# smtp_settings moved to config/initializers/action_mailer.rb
end
| 41.568627 | 102 | 0.75283 |
793e3e698efa316c51c6bf60902034155da343e2 | 14,585 | # frozen_string_literal: true
module Homebrew
module EnvConfig
module_function
ENVS = {
HOMEBREW_ARCH: {
description: "Linux only: Pass the set value to a type name representing the compiler's `-march` option.",
default: "native",
},
HOMEBREW_ARTIFACT_DOMAIN: {
description: "Prefix all download URLs, including those for bottles, with this " \
"variable. For example, `HOMEBREW_ARTIFACT_DOMAIN=http://localhost:8080` will cause a " \
"formula with the URL `https://example.com/foo.tar.gz` to instead download from " \
"`http://localhost:8080/example.com/foo.tar.gz`.",
},
HOMEBREW_AUTO_UPDATE_SECS: {
description: "Automatically check for updates once per this seconds interval.",
default: 300,
},
HOMEBREW_BAT: {
description: "If set, use `bat` for the `brew cat` command.",
boolean: true,
},
HOMEBREW_BINTRAY_KEY: {
description: "Use this API key when accessing the Bintray API (where bottles are stored).",
},
HOMEBREW_BINTRAY_USER: {
description: "Use this username when accessing the Bintray API (where bottles are stored).",
},
HOMEBREW_BOTTLE_DOMAIN: {
description: "Use the specified URL as the download mirror for bottles. " \
"For example, `HOMEBREW_BOTTLE_DOMAIN=http://localhost:8080` will cause all bottles to " \
"download from the prefix `http://localhost:8080/`.",
default_text: "macOS: `https://homebrew.bintray.com/`, Linux: `https://linuxbrew.bintray.com/`.",
default: HOMEBREW_BOTTLE_DEFAULT_DOMAIN,
},
HOMEBREW_BREW_GIT_REMOTE: {
description: "Use the specified URL as the Homebrew/brew `git`(1) remote.",
default: HOMEBREW_BREW_DEFAULT_GIT_REMOTE,
},
HOMEBREW_BROWSER: {
description: "Use this as the browser when opening project homepages.",
default_text: "`$BROWSER` or the OS's default browser.",
},
HOMEBREW_CACHE: {
description: "Use the specified directory as the download cache.",
default_text: "macOS: `$HOME/Library/Caches/Homebrew`, " \
"Linux: `$XDG_CACHE_HOME/Homebrew` or `$HOME/.cache/Homebrew`.",
default: HOMEBREW_DEFAULT_CACHE,
},
HOMEBREW_COLOR: {
description: "If set, force colour output on non-TTY outputs.",
boolean: true,
},
HOMEBREW_CORE_GIT_REMOTE: {
description: "Use the specified URL as the Homebrew/homebrew-core `git`(1) remote.",
default_text: "macOS: `https://github.com/Homebrew/homebrew-core`, " \
"Linux: `https://github.com/Homebrew/linuxbrew-core`.",
default: HOMEBREW_CORE_DEFAULT_GIT_REMOTE,
},
HOMEBREW_CURLRC: {
description: "If set, do not pass `--disable` when invoking `curl`(1), which disables the " \
"use of `curlrc`.",
boolean: true,
},
HOMEBREW_CURL_RETRIES: {
description: "Pass the given retry count to `--retry` when invoking `curl`(1).",
default: 3,
},
HOMEBREW_CURL_VERBOSE: {
description: "If set, pass `--verbose` when invoking `curl`(1).",
boolean: true,
},
HOMEBREW_DEVELOPER: {
description: "If set, tweak behaviour to be more relevant for Homebrew developers (active or " \
"budding) by e.g. turning warnings into errors.",
boolean: true,
},
HOMEBREW_DISABLE_LOAD_FORMULA: {
description: "If set, refuse to load formulae. This is useful when formulae are not trusted (such " \
"as in pull requests).",
boolean: true,
},
HOMEBREW_DISPLAY: {
description: "Use this X11 display when opening a page in a browser, for example with " \
"`brew home`. Primarily useful on Linux.",
default_text: "`$DISPLAY`.",
},
HOMEBREW_DISPLAY_INSTALL_TIMES: {
description: "If set, print install times for each formula at the end of the run.",
boolean: true,
},
HOMEBREW_EDITOR: {
description: "Use this editor when editing a single formula, or several formulae in the " \
"same directory.\n\n *Note:* `brew edit` will open all of Homebrew as discontinuous files " \
"and directories. Visual Studio Code can handle this correctly in project mode, but many " \
"editors will do strange things in this case.",
default_text: "`$EDITOR` or `$VISUAL`.",
},
HOMEBREW_FAIL_LOG_LINES: {
description: "Output this many lines of output on formula `system` failures.",
default: 15,
},
HOMEBREW_FORCE_BREWED_CURL: {
description: "If set, always use a Homebrew-installed `curl`(1) rather than the system version. " \
"Automatically set if the system version of `curl` is too old.",
boolean: true,
},
HOMEBREW_FORCE_BREWED_GIT: {
description: "If set, always use a Homebrew-installed `git`(1) rather than the system version. " \
"Automatically set if the system version of `git` is too old.",
boolean: true,
},
HOMEBREW_FORCE_HOMEBREW_ON_LINUX: {
description: "If set, running Homebrew on Linux will use URLs for macOS. This is useful when merging " \
"pull requests for macOS while on Linux.",
boolean: true,
},
HOMEBREW_FORCE_VENDOR_RUBY: {
description: "If set, always use Homebrew's vendored, relocatable Ruby version even if the system version " \
"of Ruby is new enough.",
boolean: true,
},
HOMEBREW_GITHUB_API_PASSWORD: {
description: "Use this password for authentication with the GitHub API, for features " \
"such as `brew search`. We strongly recommend using `HOMEBREW_GITHUB_API_TOKEN` instead.",
},
HOMEBREW_GITHUB_API_TOKEN: {
description: "Use this personal access token for the GitHub API, for features such as " \
"`brew search`. You can create one at <https://github.com/settings/tokens>. If set, " \
"GitHub will allow you a greater number of API requests. For more information, see: " \
"<https://developer.github.com/v3/#rate-limiting>\n\n *Note:* Homebrew doesn't " \
"require permissions for any of the scopes, but some developer commands may require " \
"additional permissions.",
},
HOMEBREW_GITHUB_API_USERNAME: {
description: "Use this username for authentication with the GitHub API, for features " \
"such as `brew search`. We strongly recommend using `HOMEBREW_GITHUB_API_TOKEN` instead.",
},
HOMEBREW_GIT_EMAIL: {
description: "Set the Git author and committer name to this value.",
},
HOMEBREW_GIT_NAME: {
description: "Set the Git author and committer email to this value.",
},
HOMEBREW_INSTALL_BADGE: {
description: "Print this text before the installation summary of each successful build.",
default_text: 'The "Beer Mug" emoji.',
default: "🍺",
},
HOMEBREW_LOGS: {
description: "Use the specified directory to store log files.",
default_text: "macOS: `$HOME/Library/Logs/Homebrew`, "\
"Linux: `$XDG_CACHE_HOME/Homebrew/Logs` or `$HOME/.cache/Homebrew/Logs`.",
default: HOMEBREW_DEFAULT_LOGS,
},
HOMEBREW_MAKE_JOBS: {
description: "Use this value as the number of parallel jobs to run when building with `make`(1).",
default_text: "The number of available CPU cores.",
default: lambda {
require "os"
require "hardware"
Hardware::CPU.cores
},
},
HOMEBREW_NO_ANALYTICS: {
description: "If set, do not send analytics. See: <https://docs.brew.sh/Analytics>.",
boolean: true,
},
HOMEBREW_NO_AUTO_UPDATE: {
description: "If set, do not automatically update before running " \
"`brew install`, `brew upgrade` or `brew tap`.",
boolean: true,
},
HOMEBREW_NO_BOTTLE_SOURCE_FALLBACK: {
description: "If set, fail on the failure of installation from a bottle rather than " \
"falling back to building from source.",
boolean: true,
},
HOMEBREW_NO_COLOR: {
description: "If set, do not print text with colour added.",
default_text: "`$NO_COLOR`.",
boolean: true,
},
HOMEBREW_NO_COMPAT: {
description: "If set, disable all use of legacy compatibility code.",
boolean: true,
},
HOMEBREW_NO_EMOJI: {
description: "If set, do not print `HOMEBREW_INSTALL_BADGE` on a successful build." \
"\n\n *Note:* Only tries to print emoji on OS X Lion or newer.",
boolean: true,
},
HOMEBREW_NO_GITHUB_API: {
description: "If set, do not use the GitHub API, e.g. for searches or fetching relevant issues " \
"on a failed install.",
boolean: true,
},
HOMEBREW_NO_INSECURE_REDIRECT: {
description: "If set, forbid redirects from secure HTTPS to insecure HTTP." \
"\n\n *Note:* While ensuring your downloads are fully secure, this is likely to cause " \
"from-source SourceForge, some GNU & GNOME based formulae to fail to download.",
boolean: true,
},
HOMEBREW_NO_INSTALL_CLEANUP: {
description: "If set, `brew install`, `brew upgrade` and `brew reinstall` will never automatically " \
"cleanup installed/upgraded/reinstalled formulae or all formulae every 30 days.",
boolean: true,
},
HOMEBREW_PRY: {
description: "If set, use Pry for the `brew irb` command.",
boolean: true,
},
HOMEBREW_SKIP_OR_LATER_BOTTLES: {
description: "If set with `HOMEBREW_DEVELOPER`, do not use bottles from older versions " \
"of macOS. This is useful in development on new macOS versions.",
boolean: true,
},
HOMEBREW_SVN: {
description: "Use this as the `svn`(1) binary.",
default_text: "A Homebrew-built Subversion (if installed), or the system-provided binary.",
},
HOMEBREW_TEMP: {
description: "Use this path as the temporary directory for building packages. Changing " \
"this may be needed if your system temporary directory and Homebrew prefix are on " \
"different volumes, as macOS has trouble moving symlinks across volumes when the target " \
"does not yet exist. This issue typically occurs when using FileVault or custom SSD " \
"configurations.",
default_text: "macOS: `/private/tmp`, Linux: `/tmp`.",
default: HOMEBREW_DEFAULT_TEMP,
},
HOMEBREW_UPDATE_TO_TAG: {
description: "If set, always use the latest stable tag (even if developer commands " \
"have been run).",
boolean: true,
},
HOMEBREW_VERBOSE: {
description: "If set, always assume `--verbose` when running commands.",
boolean: true,
},
HOMEBREW_VERBOSE_USING_DOTS: {
description: "If set, verbose output will print a `.` no more than once a minute. This can be " \
"useful to avoid long-running Homebrew commands being killed due to no output.",
boolean: true,
},
all_proxy: {
description: "Use this SOCKS5 proxy for `curl`(1), `git`(1) and `svn`(1) when downloading through Homebrew.",
},
ftp_proxy: {
description: "Use this FTP proxy for `curl`(1), `git`(1) and `svn`(1) when downloading through Homebrew.",
},
http_proxy: {
description: "Use this HTTP proxy for `curl`(1), `git`(1) and `svn`(1) when downloading through Homebrew.",
},
https_proxy: {
description: "Use this HTTPS proxy for `curl`(1), `git`(1) and `svn`(1) when downloading through Homebrew.",
},
no_proxy: {
description: "A comma-separated list of hostnames and domain names excluded " \
"from proxying by `curl`(1), `git`(1) and `svn`(1) when downloading through Homebrew.",
},
}.freeze
def env_method_name(env, hash)
method_name = env.to_s
.sub(/^HOMEBREW_/, "")
.downcase
method_name = "#{method_name}?" if hash[:boolean]
method_name
end
ENVS.each do |env, hash|
method_name = env_method_name(env, hash)
env = env.to_s
if hash[:boolean]
define_method(method_name) do
ENV[env].present?
end
elsif hash[:default].present?
# Needs a custom implementation.
next if env == "HOMEBREW_MAKE_JOBS"
define_method(method_name) do
ENV[env].presence || hash.fetch(:default).to_s
end
else
define_method(method_name) do
ENV[env].presence
end
end
end
# Needs a custom implementation.
def make_jobs
jobs = ENV["HOMEBREW_MAKE_JOBS"].to_i
return jobs.to_s if jobs.positive?
ENVS.fetch(:HOMEBREW_MAKE_JOBS)
.fetch(:default)
.call
.to_s
end
end
end
| 47.353896 | 117 | 0.56373 |
bb70df872b41ada30ff9847299ed8d40367e1ce2 | 1,068 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'smartystreets_ruby_sdk/version'
# Gem packaging metadata for the SmartyStreets Ruby SDK.
Gem::Specification.new do |spec|
  spec.name = 'smartystreets_ruby_sdk'
  spec.version = SmartyStreets::VERSION
  spec.authors = ['SmartyStreets SDK Team']
  spec.license = 'Apache-2.0'
  spec.email = ['[email protected]']
  spec.summary = 'An official library for the SmartyStreets APIs'
  spec.homepage = 'https://github.com/smartystreets/smartystreets-ruby-sdk'
  # Package every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  # Development-only dependencies; the runtime gem has no hard dependencies.
  spec.add_development_dependency 'bundler', '~> 2.2.27'
  spec.add_development_dependency 'rake', '~> 12.3.3'
  spec.add_development_dependency 'minitest', '~> 5.8', '>= 5.8.3'
  spec.add_development_dependency 'simplecov', '~> 0.12.0'
end
| 39.555556 | 80 | 0.661049 |
039435897b3e391ff915c9f415f219a32efd8bf4 | 176 | class PrintedThirdPartyConsentFormActivity < Activity
def self.from(appointment)
create!(appointment_id: appointment.id)
end
def owner_required?
false
end
end
| 17.6 | 53 | 0.767045 |
ac5cfabf9aaca5265d86be2bc487c1ea68965d04 | 923 | cask 'astro' do
version '3.0.15,4135'
sha256 '2a7ae8f9f88df720ee17f097c81f07a8cdb3f345e1783f36b5e5af7d7c146018'
# pexlabs-updates-xvuif5mcicazzducz2j2xy3lki.s3-us-west-2.amazonaws.com was verified as official when first introduced to the cask
url "https://pexlabs-updates-xvuif5mcicazzducz2j2xy3lki.s3-us-west-2.amazonaws.com/Astro-#{version.after_comma}.dmg"
appcast 'https://pexlabs-updates-xvuif5mcicazzducz2j2xy3lki.s3-us-west-2.amazonaws.com/pexappcast.xml'
name 'Astro'
homepage 'https://www.helloastro.com/'
auto_updates true
app 'Astro.app'
uninstall launchctl: 'com.pexlabs.Huskymail.mac.helper',
quit: 'com.pexlabs.Huskymail.mac'
zap trash: [
'~/Library/Application Support/com.pexlabs.Huskymail.mac',
'~/Library/Caches/com.pexlabs.Huskymail.mac',
'~/Library/Preferences/com.pexlabs.Huskymail.mac.plist',
]
end
| 38.458333 | 132 | 0.717226 |
284256cbd742e988a7891c8ce4c534831c6d695f | 5,250 | class ProjectUser < ActiveRecord::Base
belongs_to :project
belongs_to :user
auto_subscribes :user, :to => :project
after_save :check_role, :remove_updates, :subscribe_to_assessment_sections_later
after_destroy :remove_updates
validates_uniqueness_of :user_id, :scope => :project_id, :message => "already a member of this project"
validates_presence_of :project, :user
validates_rules_from :project, :rule_methods => [:has_time_zone?]
validate :user_invited?
CURATOR_CHANGE_NOTIFICATION = "curator_change"
ROLES = %w(curator manager)
ROLES.each do |role|
const_set role.upcase, role
scope role.pluralize, where(:role => role)
end
notifies_subscribers_of :project, :on => :save, :notification => CURATOR_CHANGE_NOTIFICATION,
:include_notifier => true,
# don't bother queuing this if there's no relevant role change
:queue_if => Proc.new {|pu|
pu.role_changed? && (ROLES.include?(pu.role) || pu.user_id == pu.project.user_id)
},
# check to make sure role status hasn't changed since queuing
:if => Proc.new {|pu| ROLES.include?(pu.role) || pu.user_id == pu.project.user_id}
def to_s
"<ProjectUser #{id} project: #{project_id} user: #{user_id} role: #{role}>"
end
def project_observations
project.project_observations.includes(:observation).where("observations.user_id = ?", user_id).scoped
end
def remove_updates
return true unless role_changed? && role.blank?
Update.where(
:notifier_type => "ProjectUser",
:notifier_id => id,
:resource_type => "Project",
:resource_id => project_id).destroy_all
true
end
def subscribe_to_assessment_sections_later
return true unless role_changed? && !role.blank?
delay(:priority => USER_INTEGRITY_PRIORITY).subscribe_to_assessment_sections
true
end
def subscribe_to_assessment_sections
AssessmentSection.includes(:assessment).where("assessments.project_id = ?", project).find_each do |as|
Subscription.create(:resource => as, :user => user)
end
end
def has_time_zone?
user.time_zone?
end
def is_curator?
role == 'curator' || is_manager? || is_admin?
end
def is_manager?
role == 'manager' || is_admin?
end
def is_admin?
user_id == project.user_id
end
def user_invited?
return true unless project
return true if project.preferred_membership_model == Project::MEMBERSHIP_OPEN
uid = user_id || user.try(:id)
pid = project_id || project.try(:id)
unless ProjectUserInvitation.where(:invited_user_id => uid, :project_id => pid).exists?
errors.add(:user, "hasn't been invited to this project")
end
end
def update_observations_counter_cache
update_attributes(:observations_count => project_observations.count)
end
# set taxa_count on project user, which is the number of taxa observed by this user, favoring the curator ident
def update_taxa_counter_cache
sql = <<-SQL
SELECT count(DISTINCT COALESCE(i.taxon_id, o.taxon_id))
FROM project_observations po
JOIN observations o ON po.observation_id = o.id
LEFT OUTER JOIN taxa ot ON ot.id = o.taxon_id
LEFT OUTER JOIN identifications i ON po.curator_identification_id = i.id
LEFT OUTER JOIN taxa it ON it.id = i.taxon_id
WHERE
po.project_id = #{project_id}
AND o.user_id = #{user_id}
AND (
-- observer's ident taxon is species or lower
ot.rank_level <= #{Taxon::SPECIES_LEVEL}
-- curator's ident taxon is species or lower
OR it.rank_level <= #{Taxon::SPECIES_LEVEL}
)
SQL
update_attributes(:taxa_count => ProjectUser.connection.execute(sql)[0]['count'].to_i)
end
def check_role
return true unless role_changed?
if role_was.blank?
Project.delay(:priority => USER_INTEGRITY_PRIORITY).update_curator_idents_on_make_curator(project_id, id)
elsif role.blank?
Project.delay(:priority => USER_INTEGRITY_PRIORITY).update_curator_idents_on_remove_curator(project_id, id)
end
true
end
def self.update_observations_counter_cache_from_project_and_user(project_id, user_id)
return unless project_user = ProjectUser.first(:conditions => {
:project_id => project_id,
:user_id => user_id
})
project_user.update_observations_counter_cache
end
def self.update_taxa_counter_cache_from_project_and_user(project_id, user_id)
return unless project_user = ProjectUser.first(:conditions => {
:project_id => project_id,
:user_id => user_id
})
project_user.update_taxa_counter_cache
end
def self.update_taxa_obs_and_observed_taxa_count_after_update_observation(observation_id, user_id)
unless obs = Observation.find_by_id(observation_id)
return
end
unless usr = User.find_by_id(user_id)
return
end
obs.project_observations.each do |po|
if project_user = ProjectUser.first(:conditions => {
:project_id => po.project_id,
:user_id => user_id
})
project_user.update_taxa_counter_cache
project_user.update_observations_counter_cache
Project.update_observed_taxa_count(po.project_id)
end
end
end
end
| 33.43949 | 113 | 0.70381 |
79cc37df005379adf18f501b8bb727b2e3b5bee2 | 2,204 | # frozen_string_literal: true
module Crossbeams
module Layout
# Display one or more contact methods
class ContactMethod
include PageNode
attr_reader :contact_methods, :lookup_icon
def initialize(page_config, contact_methods, options = {})
@page_config = page_config
@nodes = []
@contact_methods = Array(contact_methods)
@lookup_icon = {
'tel' => 'tel',
'cell' => 'cell',
'fax' => 'fax',
'email' => 'email'
}
(options[:icon_lookups] || {}).each do |method_type, icon|
@lookup_icon[method_type.to_s.downcase] = icon
end
end
# Is this node invisible?
#
# @return [boolean] - true if it should not be rendered at all, else false.
def invisible?
false
end
# Is this node hidden?
#
# @return [boolean] - true if it should be rendered as hidden, else false.
def hidden?
false
end
# Render this node as HTML link.
#
# @return [string] - HTML representation of this node.
def render
@contact_methods.map { |contact_method| render_contact_method(contact_method) }.join("\n")
end
private
def render_contact_method(contact_method)
<<~HTML
<div class="center mw5 mw6-ns hidden ba mv3">
<h1 class="f4 bg-light-purple white mv0 pv2 ph3" style="text-transform:lowercase">#{icon(contact_method)}#{contact_method.contact_method_type}</h1>
<div class="f6 f5-ns lh-copy measure mv0 pa2">#{contact_method.contact_method_code}
</div>
</div>
HTML
end
def icon(contact_method)
case lookup_icon[contact_method.contact_method_type.downcase]
when 'tel'
Icon.render(:phone, css_class: 'mr1')
when 'cell'
Icon.render(:cell, css_class: 'mr1')
when 'fax'
# TODO: get a fax svg...
Icon.render(:printer, css_class: 'mr1')
when 'social'
Icon.render(:at, css_class: 'mr1')
else
Icon.render(:star, css_class: 'mr1')
end
end
end
end
end
| 29 | 159 | 0.573956 |
1a38c9631eaded341109b2e60844cd3b8ede8b05 | 869 | require_relative 'adapters'
module Moltrio
module Config
class ChainContainer
def initialize(chains)
@chains = chains
end
delegate(*Adapter.instance_methods(false), to: :default_chain)
def default_chain
chain(:default)
end
def available_namespaces(chain_name = :default)
unless chain = chains[chain_name]
raise "No chain named #{chain_name} chain configured!"
end
chain.available_namespaces
end
def chain(name)
chain = chains[name]
if chain.nil?
raise "No chain named #{name.inspect} configured!"
elsif chain.missing_namespace?
raise "Chain #{name.inspect} requires namespace, but no namespace provided"
else
chain
end
end
private
attr_reader :chains
end
end
end
| 20.209302 | 85 | 0.614499 |
bb27f7ee494483f9276879e19fb31f03751e051a | 1,198 | require 'spec_helper'
# Unit tests for the private helpers of CertCheck::CLI that reshape
# OpenSSL certificate subject data.
describe CertCheck::CLI do
  let(:cli) { CertCheck::CLI.new }
  describe '#array_to_hash' do
    # Triples in the shape returned by OpenSSL::X509::Name#to_a:
    # [short name, value, ASN.1 type].
    arr = [
      ['C', 'JP', 19],
      ['ST', 'TOKYO', 19],
      ['L', 'SHIBUYA', 19],
      ['O', 'EXAMPLE, Inc.,', 19],
      ['OU', '', 19],
      ['CN', 'www.example.com', 19]
    ]
    expected = {
      'C' => 'JP',
      'ST' => 'TOKYO',
      'L' => 'SHIBUYA',
      'O' => 'EXAMPLE, Inc.,',
      'OU' => '',
      'CN' => 'www.example.com'
    }
    it 'returns hash with contents of certificate files' do
      expect(cli.send(:array_to_hash, arr)).to be_a Hash
      expect(cli.send(:array_to_hash, arr)).to match(expected)
    end
  end
  describe '#sans_to_array' do
    # Line as it appears in `openssl x509 -text` output.
    text = "Subject Alternative Name: DNS:www.example.com, DNS:example.com\n"
    expected = ['www.example.com', 'example.com']
    it 'returns array with contents of certificate files' do
      # Stub the certificate so no real file/OpenSSL parsing is needed.
      openssl_mock = double('OpenSSL::X509::Certificate')
      allow(openssl_mock).to receive(:to_text).and_return(text)
      cli.instance_variable_set(:@cert, openssl_mock)
      expect(cli.send(:sans_to_array)).to be_a Array
      expect(cli.send(:sans_to_array)).to match(expected)
    end
  end
end
| 27.860465 | 77 | 0.586811 |
7a0199dafc7d16897e78b1ba25d54a1e3e2a8d51 | 646 | module Searchlight::Options
def self.empty?(value)
return true if value.nil?
return true if value.respond_to?(:empty?) && value.empty?
return true if /\A[[:space:]]*\z/ === value
false
end
def self.checked?(value)
!(['0', 'false', ''].include?(value.to_s.strip))
end
def self.excluding_empties(input)
output = input.dup
output.each do |key, value|
if value.is_a?(Hash)
output[key] = value.reject { |_, v| empty?(v) }
end
if value.instance_of?(Array)
output[key] = value.reject { |v| empty?(v) }
end
end
output.reject { |_, value| empty?(value) }
end
end
| 23.071429 | 61 | 0.592879 |
116ea33b3a5528e4804cd02167918ec1c16144db | 984 | # Requires
require 'rails/generators'
require 'rails/generators/migration'
# Installs the purgatory gem into a Rails app: copies its migrations and
# writes a default initializer.
class PurgatoryGenerator < Rails::Generators::Base
  include Rails::Generators::Migration

  def self.source_root
    @source_root ||= File.join(File.dirname(__FILE__), 'templates')
  end

  # Required hook for generators that create migrations: produce the next
  # migration number (timestamped or sequential, matching the app's setting).
  def self.next_migration_number(dirname)
    if ActiveRecord::Base.timestamped_migrations
      Time.new.utc.strftime("%Y%m%d%H%M%S")
    else
      "%.3d" % (current_migration_number(dirname) + 1)
    end
  end

  # Copy each bundled migration template unless an identically named
  # migration already exists in the app.
  # Fix: the template/migration names were garbled placeholders ("#(unknown)");
  # restored the #{filename} interpolations so the generator actually works.
  def create_migration_file
    ['create_purgatories', 'add_performable_method_to_purgatories'].each do |filename|
      unless self.class.migration_exists?("db/migrate", filename).present?
        migration_template "#{filename}.rb", "db/migrate/#{filename}.rb"
      end
    end
  end

  # Write the default initializer; edit user_class_name if your user model
  # is not `User`.
  def create_initializer_file
    create_file 'config/initializers/purgatory.rb', <<-eos
PurgatoryModule.configure do |config|
  config.user_class_name = 'User'
end
require 'purgatory/purgatory'
    eos
  end
end
38ea795bd76cd3a3e1a1453edbef74cdf4159642 | 6,733 | require 'set'
module Audited
# Audit saves the changes to ActiveRecord models. It has the following attributes:
#
# * <tt>auditable</tt>: the ActiveRecord model that was changed
# * <tt>user</tt>: the user that performed the change; a string or an ActiveRecord model
# * <tt>action</tt>: one of create, update, or delete
# * <tt>audited_changes</tt>: a hash of all the changes
# * <tt>comment</tt>: a comment set with the audit
# * <tt>version</tt>: the version of the model
# * <tt>request_uuid</tt>: a uuid based that allows audits from the same controller request
# * <tt>created_at</tt>: Time that the change was performed
#
class YAMLIfTextColumnType
class << self
def load(obj)
if Audited.audit_class.columns_hash["audited_changes"].type.to_s == "text"
ActiveRecord::Coders::YAMLColumn.new(Object).load(obj)
else
obj
end
end
def dump(obj)
if Audited.audit_class.columns_hash["audited_changes"].type.to_s == "text"
ActiveRecord::Coders::YAMLColumn.new(Object).dump(obj)
else
obj
end
end
end
end
class Audit < ::ActiveRecord::Base
belongs_to :auditable, polymorphic: true
belongs_to :user, polymorphic: true
belongs_to :associated, polymorphic: true
before_create :set_version_number, :set_audit_user, :set_request_uuid, :set_remote_address
before_create do
self.assign_attributes(::Audited.namespace_conditions)
end
cattr_accessor :audited_class_names
self.audited_class_names = Set.new
serialize :audited_changes, YAMLIfTextColumnType
scope :ascending, ->{ reorder(version: :asc) }
scope :descending, ->{ reorder(version: :desc)}
scope :creates, ->{ where(action: 'create')}
scope :updates, ->{ where(action: 'update')}
scope :destroys, ->{ where(action: 'destroy')}
scope :namespaced, ->{ where(Audited.namespace_conditions)}
scope :not_before_created_at, ->(audited_record) do
where(created_at: Range.new(
((audited_record.try(:created_at) || Time.now) - 1.day),
(Time.now + 1.day)
))
end
scope :up_until, ->(date_or_time){ where("created_at <= ?", date_or_time) }
scope :from_version, ->(version){ where('version >= ?', version) }
scope :to_version, ->(version){ where('version <= ?', version) }
scope :auditable_finder, ->(auditable_id, auditable_type){ namespaced.where(auditable_id: auditable_id, auditable_type: auditable_type)}
# Return all audits older than the current one.
def ancestors
self.class.ascending.auditable_finder(auditable_id, auditable_type).to_version(version)
end
# Return an instance of what the object looked like at this revision. If
# the object has been destroyed, this will be a new record.
def revision
clazz = auditable_type.constantize
(clazz.find_by_id(auditable_id) || clazz.new).tap do |m|
self.class.assign_revision_attributes(m, self.class.reconstruct_attributes(ancestors).merge(audit_version: version))
end
end
# Returns a hash of the changed attributes with the new values
def new_attributes
(audited_changes || {}).inject({}.with_indifferent_access) do |attrs, (attr, values)|
attrs[attr] = values.is_a?(Array) ? values.last : values
attrs
end
end
# Returns a hash of the changed attributes with the old values
def old_attributes
(audited_changes || {}).inject({}.with_indifferent_access) do |attrs, (attr, values)|
attrs[attr] = Array(values).first
attrs
end
end
# Allows user to undo changes
def undo
case action
when 'create'
# destroys a newly created record
auditable.destroy!
when 'destroy'
# creates a new record with the destroyed record attributes
auditable_type.constantize.create!(audited_changes)
when 'update'
# changes back attributes
auditable.update!(audited_changes.transform_values(&:first))
else
raise StandardError, "invalid action given #{action}"
end
end
# Allows user to be set to either a string or an ActiveRecord object
# @private
def user_as_string=(user)
# reset both either way
self.user_as_model = self.username = nil
user.is_a?(::ActiveRecord::Base) ?
self.user_as_model = user :
self.username = user
end
alias_method :user_as_model=, :user=
alias_method :user=, :user_as_string=
# @private
def user_as_string
user_as_model || username
end
alias_method :user_as_model, :user
alias_method :user, :user_as_string
# Returns the list of classes that are being audited
def self.audited_classes
audited_class_names.map(&:constantize)
end
# All audits made during the block called will be recorded as made
# by +user+. This method is hopefully threadsafe, making it ideal
# for background operations that require audit information.
def self.as_user(user)
last_audited_user = ::Audited.store[:audited_user]
::Audited.store[:audited_user] = user
yield
ensure
::Audited.store[:audited_user] = last_audited_user
end
# @private
def self.reconstruct_attributes(audits)
audits.each_with_object({}) do |audit, all|
all.merge!(audit.new_attributes)
all[:audit_version] = audit.version
end
end
# @private
def self.assign_revision_attributes(record, attributes)
attributes.each do |attr, val|
record = record.dup if record.frozen?
if record.respond_to?("#{attr}=")
record.attributes.key?(attr.to_s) ?
record[attr] = val :
record.send("#{attr}=", val)
end
end
record
end
# use created_at as timestamp cache key
def self.collection_cache_key(collection = all, *)
super(collection, :created_at)
end
private
def set_version_number
max = self.class.namespaced.not_before_created_at(auditable).auditable_finder(auditable_id, auditable_type).maximum(:version) || 0
self.version = max + 1
end
def set_audit_user
self.user ||= ::Audited.store[:audited_user] # from .as_user
self.user ||= ::Audited.store[:current_user].try!(:call) # from Sweeper
nil # prevent stopping callback chains
end
def set_request_uuid
self.request_uuid ||= ::Audited.store[:current_request_uuid]
self.request_uuid ||= SecureRandom.uuid
end
def set_remote_address
self.remote_address ||= ::Audited.store[:current_remote_address]
end
end
end
| 33.497512 | 140 | 0.662706 |
5d8fadcb5ce8452238d3feb056b81bb8eb56e916 | 581 | cask :v1 => 'phpstorm7' do
version '7.1.4'
sha256 '618d05d39d8565677cddc63afee0fc9b50431a7efc6da930a18c54831ea102f5'
url "http://download.jetbrains.com/webide/PhpStorm-#{version}.dmg"
homepage 'http://www.jetbrains.com/phpstorm/'
license :commercial
app 'PhpStorm.app'
postflight do
plist_set(':JVMOptions:JVMVersion', '1.6+')
end
zap :delete => [
'~/Library/Application Support/WebIde70',
'~/Library/Preferences/WebIde70',
'~/Library/Preferences/com.jetbrains.PhpStorm.plist',
]
end
| 27.666667 | 75 | 0.643718 |
1d78e91cd582b05827efddef8e88e99c3f781c1f | 217 | class User < ActiveRecord::Base
has_secure_password
has_many :tweets
def slug
self.username.downcase.gsub(" ", "-")
end
def self.find_by_slug(slug)
self.all.find{|user| user.slug == slug}
end
end | 18.083333 | 43 | 0.682028 |
abefb264146dde7f0a25f8ebaa8451fc89853266 | 322 | class ContactMailer < ActionMailer::Base
default to: "[email protected]",
subject: "Message From Website Contact Form"
def contact(email)
@name = email.name
@email = email.email
@message = email.message
from = "#{@name} <#{@email}>"
mail(from: from, reply_to: from)
end
end
| 24.769231 | 54 | 0.649068 |
e940b506a6e25f783f97bd986af036f9c363996c | 159 | class OauthController < ApplicationController
before_filter :require_access_token, only: [:new]
def new
head :ok
end
def callback
end
end | 15.9 | 51 | 0.710692 |
0301261d7758fadf1c2a9532ae9c61f36b43b2c9 | 620 | cask 'pocketcast' do
version '1.25'
sha256 '01091dfc5f6819a800c5da51230e272dbecfdbac8a93d12124820a0cd0bb74b7'
url "https://github.com/mortenjust/PocketCastsOSX/releases/download/#{version}/PocketCast#{version.no_dots}.zip"
appcast 'https://github.com/mortenjust/PocketCastsOSX/releases.atom',
checkpoint: '6f976b988d5d821d25489abdcaa28adc5193d8fec545f95821838a458f1e195e'
name 'Pocket Casts for Mac'
homepage 'https://github.com/mortenjust/PocketCastsOSX'
license :unknown # TODO: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'PocketCast.app'
end
| 44.285714 | 114 | 0.793548 |
1812fddbbb79ac55eb6b726aedee6c633f40e7bf | 104 | # LANGUAGE: Ruby
# AUTHOR: Hannah Zulueta
# GITHUB: https://github.com/hanapotski
# Prints the canonical greeting to standard output.
puts "Hello, World!"
| 17.333333 | 39 | 0.721154 |
require "rails_helper"

# View spec for the Administrate belongs_to "show" partial: the associated
# record is rendered as a link only when the view reports a valid show route.
describe "fields/belongs_to/_show", type: :view do
  let(:product) { create(:product) }
  let(:product_path) { polymorphic_path([:admin, product]) }
  let(:link) { "<a href=\"#{product_path}\">#{product.name}</a>" }
  let(:associated_class) { "test_associated_class" }

  # Stand-in for Administrate::Field::BelongsTo, stubbed with only the
  # methods the partial calls.
  let(:belongs_to) do
    instance_double(
      "Administrate::Field::BelongsTo",
      associated_class: associated_class,
      display_associated_resource: product.name,
      data: product,
    )
  end

  context "if associated resource has a show route" do
    it "displays link" do
      allow(view).to receive(:valid_action?).and_return(true)
      render_belongs_to_show
      expect(rendered.strip).to include(link)
    end
  end

  context "if associated resource has no show route" do
    it "displays link" do
      allow(view).to receive(:valid_action?).and_return(false)
      render_belongs_to_show
      expect(rendered.strip).to_not include(link)
    end
  end

  # Renders the partial under test with the stubbed field.
  def render_belongs_to_show
    render(
      partial: "fields/belongs_to/show.html.erb",
      locals: { field: belongs_to, namespace: "admin" },
    )
  end
end
| 28.425 | 66 | 0.682498 |
e9764bd764f373f803790d20742178feaaa7c054 | 140 | class OrderSerializer < ActiveModel::Serializer
attributes :id, :address, :city, :state, :user_id, :total, :created_at, :order_number
end
| 35 | 87 | 0.757143 |
# frozen_string_literal: true
# Lamel:
# Spec for Funcky::ATransform::Lamel — its #parse must tolerate nil input.
RSpec.describe Funcky::ATransform::Lamel do
  let(:instance) { described_class.new }

  describe 'initialize' do
    subject { instance }
    it { is_expected.not_to be_nil }
  end

  describe '#parse' do
    subject { instance.parse(value) }
    let(:value) { nil }

    # nil must not raise; it parses to an empty string.
    context 'safely handle nil' do
      it { is_expected.to eq('') }
    end
  end
end
| 17.521739 | 43 | 0.64268 |
module Yuriita
  # A filter whose behaviour is supplied by the caller as a block.
  #
  # The block receives a relation and the matched input and returns the
  # (possibly narrowed) relation. The qualifier identifies which query
  # expressions this filter responds to.
  class DynamicFilter
    attr_reader :qualifier

    # qualifier - identifier this filter is registered under.
    # block     - the filtering logic, invoked by #apply.
    def initialize(qualifier:, &block)
      @qualifier = qualifier
      @block = block
    end

    # Delegates to the stored block; returns whatever the block returns.
    def apply(relation, input)
      @block.call(relation, input)
    end
  end
end
| 14.894737 | 38 | 0.650177 |
#
# Cookbook:: end_to_end
# Recipe:: macos
#
# Copyright:: Copyright (c) Chef Software Inc.
#
# Exercises a cross-section of Chef Infra resources on macOS as an
# end-to-end smoke test.

# Basic sleep/execute resources, including live_stream and sensitive output.
chef_sleep "2"

execute "sleep 1"

execute "sleep 1 second" do
  command "sleep 1"
  live_stream true
end

execute "sensitive sleep" do
  command "sleep 1"
  sensitive true
end

timezone "America/Los_Angeles"

include_recipe "ntp"
include_recipe "resolver"

# Remove then recreate the same group to exercise both users_manage actions.
users_manage "remove sysadmin" do
  group_name "sysadmin"
  group_id 2300
  action [:remove]
end

users_manage "create sysadmin" do
  group_name "sysadmin"
  group_id 2300
  action [:create]
end

ssh_known_hosts_entry "github.com"

include_recipe "::_chef_client_config"
include_recipe "::_chef_client_trusted_certificate"

chef_client_launchd "Every 30 mins Infra Client run" do
  interval 30
  action :enable
end

include_recipe "git"

# test various archive formats in the archive_file resource
%w{tourism.tar.gz tourism.tar.xz tourism.zip}.each do |archive|
  cookbook_file File.join(Chef::Config[:file_cache_path], archive) do
    source archive
  end

  archive_file archive do
    path File.join(Chef::Config[:file_cache_path], archive)
    # e.g. "tourism.tar.gz" extracts into ".../tourism_tar_gz"
    extract_to File.join(Chef::Config[:file_cache_path], archive.tr(".", "_"))
  end
end

osx_profile "Remove screensaver profile" do
  identifier "com.company.screensaver"
  action :remove
end

build_essential

launchd "io.chef.testing.fake" do
  source "io.chef.testing.fake.plist"
  action "enable"
end

homebrew_update "update" do
  action :update
end

# Install then purge the same formula to exercise both package actions.
homebrew_package "vim"

homebrew_package "vim" do
  action :purge
end

include_recipe "::_dmg_package"
include_recipe "::_macos_userdefaults"
include_recipe "::_ohai_hint"
include_recipe "::_openssl"
| 18.388889 | 78 | 0.758912 |
# frozen-string-literal: true
#
class Roda
  module RodaPlugins
    # The drop_body plugin automatically drops the body and
    # Content-Type/Content-Length headers from the response if
    # the response status indicates that the response should
    # not include a body (response statuses 100, 101, 102, 204,
    # and 304). For response status 205, the body and Content-Type
    # headers are dropped, but the Content-length header is set to
    # '0' instead of being dropped.
    module DropBody
      module ResponseMethods
        # Kept only for backwards compatibility; referencing it emits a
        # deprecation warning via RodaPlugins.deprecate_constant.
        DROP_BODY_STATUSES = [100, 101, 102, 204, 205, 304].freeze
        RodaPlugins.deprecate_constant(self, :DROP_BODY_STATUSES)

        # If the response status indicates a body should not be
        # returned, use an empty body and remove the Content-Length
        # and Content-Type headers.
        def finish
          r = super
          case r[0]
          when 100, 101, 102, 204, 304
            r[2] = EMPTY_ARRAY
            h = r[1]
            h.delete("Content-Length")
            h.delete("Content-Type")
          when 205
            # 205 Reset Content keeps an explicit zero Content-Length
            # instead of deleting the header.
            r[2] = EMPTY_ARRAY
            h = r[1]
            h["Content-Length"] = '0'
            h.delete("Content-Type")
          end
          r
        end
      end
    end

    register_plugin(:drop_body, DropBody)
  end
end
| 30.604651 | 67 | 0.601824 |
# Creates the buy_requests table: a request keyed by netid with a status
# and a reference to a show, enforced by a foreign key.
class CreateBuyRequests < ActiveRecord::Migration[5.0]
  def change
    create_table :buy_requests do |table|
      table.string :netid
      table.string :status
      table.integer :show_id
      table.timestamps
    end

    add_foreign_key :buy_requests, :shows
  end
end
| 21.5 | 54 | 0.670543 |
# frozen_string_literal: true

# Production environment configuration. Loaded only when RAILS_ENV=production.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "sigoe_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT (e.g. for containerized deploys) when requested via env var.
  if ENV['RAILS_LOG_TO_STDOUT'].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 43.782609 | 114 | 0.762661 |
require File.expand_path(File.dirname(__FILE__) + "/../test_helper")

# Exercises Whenever::Output::Cron#time_in_cron_syntax: converting durations,
# day names, :symbol shortcuts and raw cron strings into cron expressions.
# (Some test-description strings contain the typo "sytax"; they are left
# as-is since descriptions are runtime strings.)
class CronTest < Test::Unit::TestCase

  context "When parsing time in minutes" do
    should "raise if less than 1 minute" do
      assert_raises ArgumentError do
        parse_time(59.seconds)
      end

      assert_raises ArgumentError do
        parse_time(0.minutes)
      end
    end

    # For sanity, do some tests on straight String
    should "parse correctly" do
      assert_equal '* * * * *', parse_time(1.minute)
      assert_equal '0,5,10,15,20,25,30,35,40,45,50,55 * * * *', parse_time(5.minutes)
      assert_equal '7,14,21,28,35,42,49,56 * * * *', parse_time(7.minutes)
      assert_equal '0,30 * * * *', parse_time(30.minutes)
      assert_equal '32 * * * *', parse_time(32.minutes)
      assert_not_equal '60 * * * *', parse_time(60.minutes) # 60 minutes bumps up into the hour range
    end

    # Test all minutes
    (2..59).each do |num|
      should "parse correctly for #{num} minutes" do
        # When num does not divide 60 evenly the schedule starts at num,
        # not 0, so consecutive runs are always num minutes apart.
        start = 0
        start += num unless 60.modulo(num).zero?
        minutes = (start..59).step(num).to_a

        assert_equal "#{minutes.join(',')} * * * *", parse_time(num.minutes)
      end
    end
  end

  context "When parsing time in hours" do
    should "parse correctly" do
      assert_equal '0 * * * *', parse_time(1.hour)
      assert_equal '0 0,2,4,6,8,10,12,14,16,18,20,22 * * *', parse_time(2.hours)
      assert_equal '0 0,3,6,9,12,15,18,21 * * *', parse_time(3.hours)
      assert_equal '0 5,10,15,20 * * *', parse_time(5.hours)
      assert_equal '0 17 * * *', parse_time(17.hours)
      assert_not_equal '0 24 * * *', parse_time(24.hours) # 24 hours bumps up into the day range
    end

    (2..23).each do |num|
      should "parse correctly for #{num} hours" do
        start = 0
        start += num unless 24.modulo(num).zero?
        hours = (start..23).step(num).to_a

        assert_equal "0 #{hours.join(',')} * * *", parse_time(num.hours)
      end
    end

    should "parse correctly when given an 'at' with minutes as an Integer" do
      assert_minutes_equals "1",  1
      assert_minutes_equals "14", 14
      assert_minutes_equals "27", 27
      assert_minutes_equals "55", 55
    end

    should "parse correctly when given an 'at' with minutes as a Time" do
      # Basically just testing that Chronic parses some times and we get the minutes out of it
      assert_minutes_equals "1",  '3:01am'
      assert_minutes_equals "1",  'January 21 2:01 PM'
      assert_minutes_equals "0",  'midnight'
      assert_minutes_equals "59", '13:59'
    end
  end

  context "When parsing time in days (of month)" do
    should "parse correctly" do
      assert_equal '0 0 * * *', parse_time(1.days)
      assert_equal '0 0 1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31 * *', parse_time(2.days)
      assert_equal '0 0 1,5,9,13,17,21,25,29 * *', parse_time(4.days)
      assert_equal '0 0 1,8,15,22 * *', parse_time(7.days)
      assert_equal '0 0 1,17 * *', parse_time(16.days)
      assert_equal '0 0 17 * *', parse_time(17.days)
      assert_equal '0 0 29 * *', parse_time(29.days)
      assert_not_equal '0 0 30 * *', parse_time(30.days) # 30 days bumps into the month range
    end

    should "parse correctly when given an 'at' with hours, minutes as a Time" do
      # first param is an array with [hours, minutes]
      assert_hours_and_minutes_equals %w(3 45),  '3:45am'
      assert_hours_and_minutes_equals %w(20 1),  '8:01pm'
      assert_hours_and_minutes_equals %w(0 0),   'midnight'
      assert_hours_and_minutes_equals %w(1 23),  '1:23 AM'
      assert_hours_and_minutes_equals %w(23 59), 'March 21 11:59 pM'
    end

    should "parse correctly when given an 'at' with hours as an Integer" do
      # first param is an array with [hours, minutes]
      assert_hours_and_minutes_equals %w(1 0),  1
      assert_hours_and_minutes_equals %w(3 0),  3
      assert_hours_and_minutes_equals %w(15 0), 15
      assert_hours_and_minutes_equals %w(19 0), 19
      assert_hours_and_minutes_equals %w(23 0), 23
    end
  end

  context "When parsing time in months" do
    should "parse correctly" do
      assert_equal '0 0 1 * *', parse_time(1.month)
      assert_equal '0 0 1 1,3,5,7,9,11 *', parse_time(2.months)
      assert_equal '0 0 1 1,4,7,10 *', parse_time(3.months)
      assert_equal '0 0 1 1,5,9 *', parse_time(4.months)
      assert_equal '0 0 1 1,6 *', parse_time(5.months)
      assert_equal '0 0 1 7 *', parse_time(7.months)
      assert_equal '0 0 1 8 *', parse_time(8.months)
      assert_equal '0 0 1 9 *', parse_time(9.months)
      assert_equal '0 0 1 10 *', parse_time(10.months)
      assert_equal '0 0 1 11 *', parse_time(11.months)
      assert_equal '0 0 1 12 *', parse_time(12.months)
    end

    should "parse months with a date and/or time" do
      # should set the day to 1 if no date is given
      assert_equal '0 17 1 * *', parse_time(1.month, nil, "5pm")
      # should use the date if one is given
      assert_equal '0 2 23 * *', parse_time(1.month, nil, "February 23rd at 2am")
      # should use an integer as the day
      assert_equal '0 0 5 * *', parse_time(1.month, nil, 5)
    end

    should "parse correctly when given an 'at' with days, hours, minutes as a Time" do
      # first param is an array with [days, hours, minutes]
      assert_days_and_hours_and_minutes_equals %w(1 3 45), 'January 1st 3:45am'
      assert_days_and_hours_and_minutes_equals %w(11 23 0), 'Feb 11 11PM'
      assert_days_and_hours_and_minutes_equals %w(22 1 1), 'march 22nd at 1:01 am'
      assert_days_and_hours_and_minutes_equals %w(23 0 0), 'march 22nd at midnight' # looks like midnight means the next day
    end

    should "parse correctly when given an 'at' with days as an Integer" do
      # first param is an array with [days, hours, minutes]
      assert_days_and_hours_and_minutes_equals %w(1 0 0),  1
      assert_days_and_hours_and_minutes_equals %w(15 0 0), 15
      assert_days_and_hours_and_minutes_equals %w(29 0 0), 29
    end
  end

  context "When parsing time in days (of week)" do
    should "parse days of the week correctly" do
      {
        '0' => %w(sun Sunday SUNDAY SUN),
        '1' => %w(mon Monday MONDAY MON),
        '2' => %w(tue tues Tuesday TUESDAY TUE),
        '3' => %w(wed Wednesday WEDNESDAY WED),
        '4' => %w(thu thurs thur Thursday THURSDAY THU),
        '5' => %w(fri Friday FRIDAY FRI),
        '6' => %w(sat Saturday SATURDAY SAT)
      }.each do |day, day_tests|
        day_tests.each do |day_test|
          assert_equal "0 0 * * #{day}", parse_time(day_test)
        end
      end
    end

    should "allow additional directives" do
      assert_equal '30 13 * * 5', parse_time('friday', nil, "1:30 pm")
      assert_equal '22 2 * * 1', parse_time('Monday', nil, "2:22am")
      assert_equal '55 17 * * 4', parse_time('THU', nil, "5:55PM")
    end

    should "parse weekday correctly" do
      assert_equal '0 0 * * 1-5', parse_time('weekday')
      assert_equal '0 0 * * 1-5', parse_time('Weekdays')
      assert_equal '0 1 * * 1-5', parse_time('Weekdays', nil, "1:00 am")
      assert_equal '59 5 * * 1-5', parse_time('Weekdays', nil, "5:59 am")
    end

    should "parse weekend correctly" do
      assert_equal '0 0 * * 6,0', parse_time('weekend')
      assert_equal '0 0 * * 6,0', parse_time('Weekends')
      assert_equal '0 7 * * 6,0', parse_time('Weekends', nil, "7am")
      assert_equal '2 18 * * 6,0', parse_time('Weekends', nil, "6:02PM")
    end
  end

  context "When parsing time using the cron shortcuts" do
    should "parse a :symbol into the correct shortcut" do
      assert_equal '@reboot',   parse_time(:reboot)
      assert_equal '@annually', parse_time(:annually)
      assert_equal '@annually', parse_time(:yearly)
      assert_equal '@daily',    parse_time(:daily)
      assert_equal '@midnight', parse_time(:midnight)
      assert_equal '@monthly',  parse_time(:monthly)
      assert_equal '@weekly',   parse_time(:weekly)
      assert_equal '@hourly',   parse_time(:hourly)
    end

    should "convert time-based shortcuts to times" do
      assert_equal '0 0 1 * *', parse_time(:month)
      assert_equal '0 0 * * *', parse_time(:day)
      assert_equal '0 * * * *', parse_time(:hour)
      assert_equal '0 0 1 12 *', parse_time(:year)
      assert_equal '0 0 1,8,15,22 * *', parse_time(:week)
    end

    should "raise an exception if a valid shortcut is given but also an :at" do
      assert_raises ArgumentError do
        parse_time(:hourly, nil, "1:00 am")
      end

      assert_raises ArgumentError do
        parse_time(:reboot, nil, 5)
      end

      assert_raises ArgumentError do
        parse_time(:daily, nil, '4:20pm')
      end
    end
  end

  context "When given raw cron sytax" do
    should "return the same cron sytax" do
      crons = ['0 0 27-31 * *', '* * * * *', '2/3 1,9,22 11-26 1-6 *']
      crons.each do |cron|
        assert_equal cron, parse_time(cron)
      end
    end
  end

private

  # Asserts the [day, hour, minute] fields of the cron line produced for a
  # monthly schedule with the given :at value.
  def assert_days_and_hours_and_minutes_equals(expected, time)
    cron = parse_time(2.months, 'some task', time)
    minutes, hours, days, *garbage = cron.split(' ')
    assert_equal expected, [days, hours, minutes]
  end

  # Asserts the [hour, minute] fields of the cron line produced for a daily
  # schedule with the given :at value.
  def assert_hours_and_minutes_equals(expected, time)
    cron = parse_time(2.days, 'some task', time)
    minutes, hours, *garbage = cron.split(' ')
    assert_equal expected, [hours, minutes]
  end

  # Asserts the minute field of the cron line produced for an hourly
  # schedule with the given :at value.
  def assert_minutes_equals(expected, time)
    cron = parse_time(2.hours, 'some task', time)
    assert_equal expected, cron.split(' ')[0]
  end

  # Thin wrapper around the class under test.
  def parse_time(time = nil, task = nil, at = nil)
    Whenever::Output::Cron.new(time, task, at).time_in_cron_syntax
  end
end
# encoding: UTF-8
require 'helper'

# Smoke tests for FFaker::Name: generated values are random, so only their
# shape is asserted via regexes.
class TestFakerName < Test::Unit::TestCase
  def setup
    @tester = FFaker::Name
  end

  # A full name is two or three words, optionally dotted/space-separated.
  def test_name
    assert @tester.name.match(/(\w+\.? ?){2,3}/)
  end

  # e.g. "Mr." / "Mrs" — capitalised word with optional trailing dot.
  def test_prefix
    assert @tester.prefix.match(/[A-Z][a-z]+\.?/)
  end

  # e.g. "Jr." / "IV" — allows a bare capital, hence [a-z]* not [a-z]+.
  def test_suffix
    assert @tester.suffix.match(/[A-Z][a-z]*\.?/)
  end
end
| 15.954545 | 49 | 0.615385 |
# encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
#   /spec/fixtures/responses/whois.ripe.net/va/status_available.expected
#
# and regenerate the tests with the following rake task
#
#   $ rake genspec:parsers
#
require 'spec_helper'
require 'whois/record/parser/whois.ripe.net.rb'

# Verifies the parser against a captured "available" WHOIS response:
# status properties answer, date properties are unsupported, and the
# nameserver list is empty.
describe Whois::Record::Parser::WhoisRipeNet, "status_available.expected" do

  before(:each) do
    file = fixture("responses", "whois.ripe.net/va/status_available.txt")
    part = Whois::Record::Part.new(:body => File.read(file))
    @parser = klass.new(part)
  end

  context "#status" do
    it do
      @parser.status.should == :available
    end
  end
  context "#available?" do
    it do
      @parser.available?.should == true
    end
  end
  context "#registered?" do
    it do
      @parser.registered?.should == false
    end
  end
  # Date properties are not supported by this parser and must raise.
  context "#created_on" do
    it do
      lambda { @parser.created_on }.should raise_error(Whois::PropertyNotSupported)
    end
  end
  context "#updated_on" do
    it do
      lambda { @parser.updated_on }.should raise_error(Whois::PropertyNotSupported)
    end
  end
  context "#expires_on" do
    it do
      lambda { @parser.expires_on }.should raise_error(Whois::PropertyNotSupported)
    end
  end
  context "#nameservers" do
    it do
      @parser.nameservers.should be_a(Array)
      @parser.nameservers.should == []
    end
  end
end
| 23.770492 | 83 | 0.68 |
require "spec_helper"

# Endpoint specs for two-factor authentication. `describe_method`, the
# `stub_pattern` HTTP fixtures, `meta`, and the shared examples all come
# from this gem's spec support code.
RSpec.describe Scalingo::Auth::TwoFactorAuth do
  describe_method "status" do
    let(:stub_pattern) { "status" }

    it_behaves_like "a singular object response"
  end

  describe_method "initiate" do
    context "success" do
      let(:arguments) { Scalingo::Auth::TwoFactorAuth::DEFAULT_PROVIDER }
      let(:stub_pattern) { "initiate-success" }

      it_behaves_like "a singular object response", 201
    end

    context "wrong provider" do
      let(:arguments) { meta[:initiate][:invalid] }
      let(:stub_pattern) { "initiate-wrong-provider" }

      it_behaves_like "a client error"
    end

    context "already enabled" do
      let(:stub_pattern) { "initiate-already-enabled" }

      it_behaves_like "a client error"
    end
  end

  describe_method "validate" do
    context "success" do
      let(:arguments) { meta[:validate][:valid] }
      let(:stub_pattern) { "validate-success" }
      # Successful validation returns recovery codes alongside the user.
      let(:expected_keys) { %i[codes user] }

      it_behaves_like "a singular object response", 201
    end

    context "wrong provider" do
      let(:arguments) { meta[:validate][:invalid] }
      let(:stub_pattern) { "validate-wrong" }

      it_behaves_like "a client error"
    end

    context "already enabled" do
      let(:arguments) { meta[:validate][:invalid] }
      let(:stub_pattern) { "validate-not-initiated" }

      it_behaves_like "a client error"
    end
  end

  describe_method "disable" do
    context "success" do
      let(:stub_pattern) { "disable-success" }

      it_behaves_like "a singular object response"
    end

    context "not enabled" do
      let(:stub_pattern) { "disable-not-initiated" }

      it_behaves_like "a client error"
    end
  end
end
| 24.485714 | 73 | 0.656359 |
# Test bootstrap for the Rails test suite.
#
# FIX(review): the original file required config/environment and
# rails/test_help twice and re-opened ActiveSupport::TestCase *inside* its
# own body, duplicating the `fixtures :all` call; it also set RAILS_ENV
# after the environment had already been loaded, which has no effect on the
# booted app. Each setup step now runs exactly once, in the right order.
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!

class ActiveSupport::TestCase
  # Setup all fixtures in test/fixtures/*.yml for all tests
  # in alphabetical order.
  fixtures :all

  # Make application view helpers available in all tests.
  include ApplicationHelper

  # Add more helper methods to be used by all tests here...
end
| 29.4 | 82 | 0.693878 |
require 'spec_helper'

# Specs for OrderBid#compute_locked — the funds that must be locked when a
# bid is placed.
describe OrderBid do
  subject { create(:order_bid) }

  # A limit bid locks exactly price * volume.
  its(:compute_locked) { should == subject.volume*subject.price }

  context "compute locked for market order" do
    # Stubbed ask side of the order book as [price, volume] levels; a market
    # bid's locked amount is estimated by walking these levels.
    let(:price_levels) do
      [ ['100'.to_d, '10.0'.to_d],
        ['101'.to_d, '10.0'.to_d],
        ['102'.to_d, '10.0'.to_d],
        ['200'.to_d, '10.0'.to_d] ]
    end

    before do
      global = Global.new('btccny')
      global.stubs(:asks).returns(price_levels)
      Global.stubs(:[]).returns(global)
    end

    it "should require a little" do
      OrderBid.new(volume: '5'.to_d, ord_type: 'market').compute_locked.should == '500'.to_d * OrderBid::LOCKING_BUFFER_FACTOR
    end

    it "should require more" do
      OrderBid.new(volume: '25'.to_d, ord_type: 'market').compute_locked.should == '2520'.to_d * OrderBid::LOCKING_BUFFER_FACTOR
    end

    it "should raise error if the market is not deep enough" do
      expect { OrderBid.new(volume: '50'.to_d, ord_type: 'market').compute_locked }.to raise_error
    end

    # 40 units of depth exist; 30 still fits, 31 does not.
    it "should raise error if volume is too large" do
      expect { OrderBid.new(volume: '30'.to_d, ord_type: 'market').compute_locked }.not_to raise_error
      expect { OrderBid.new(volume: '31'.to_d, ord_type: 'market').compute_locked }.to raise_error
    end
  end
end
| 31.071429 | 128 | 0.655172 |
18bf77e1645818ce63333ad54cc355b949746089 | 707 | describe "/lines/new", :type => :view do
let(:context) { Chouette.create { line_provider } }
let!(:workbench) { assign :workbench, context.workbench }
let!(:line_referential) { assign :line_referential, context.line_referential }
let!(:line) { assign :line, context.line_provider.lines.build(name: 'Test') }
describe "form" do
before :each do
allow(view).to receive(:resource_class).and_return(Chouette::Line)
allow(view).to receive(:referential).and_return(line_referential)
end
it "should render input for name" do
render
expect(rendered).to have_selector("form") do
with_selector "input[type=text][name=?]", line.name
end
end
end
end
| 30.73913 | 80 | 0.681754 |
# Standard scaffold CRUD controller for Comment4 records, responding to
# both HTML and JSON.
class Comment4sController < ApplicationController
  before_action :set_comment4, only: [:show, :edit, :update, :destroy]

  # GET /comment4s
  # GET /comment4s.json
  def index
    @comment4s = Comment4.all
  end

  # GET /comment4s/1
  # GET /comment4s/1.json
  def show
  end

  # GET /comment4s/new
  def new
    @comment4 = Comment4.new
  end

  # GET /comment4s/1/edit
  def edit
  end

  # POST /comment4s
  # POST /comment4s.json
  def create
    @comment4 = Comment4.new(comment4_params)

    respond_to do |format|
      if @comment4.save
        format.html { redirect_to @comment4, notice: 'Comment4 was successfully created.' }
        format.json { render :show, status: :created, location: @comment4 }
      else
        format.html { render :new }
        format.json { render json: @comment4.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /comment4s/1
  # PATCH/PUT /comment4s/1.json
  def update
    respond_to do |format|
      if @comment4.update(comment4_params)
        format.html { redirect_to @comment4, notice: 'Comment4 was successfully updated.' }
        format.json { render :show, status: :ok, location: @comment4 }
      else
        format.html { render :edit }
        format.json { render json: @comment4.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /comment4s/1
  # DELETE /comment4s/1.json
  def destroy
    @comment4.destroy
    respond_to do |format|
      format.html { redirect_to comment4s_url, notice: 'Comment4 was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_comment4
    @comment4 = Comment4.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  # FIX(review): the original list permitted :something twice; the redundant
  # duplicate has been removed (permitted keys are unchanged).
  def comment4_params
    params.require(:comment4).permit(:content, :post_id, :something, :somethingelse, :more)
  end
end
| 26.56 | 105 | 0.669679 |
# Homebrew Cask for Google's Featured Photos macOS screen saver.
cask "google-featured-photos" do
  version "1.0.0.208"
  sha256 "4d4abc378b38278b0cd247e99a008c730a3b6b77824437b01db0bf8beaf24bfb"

  url "https://dl.google.com/featuredphotosscreensaver/GoogleFeaturedPhotos-#{version}.dmg"
  name "Google Featured Photos"
  homepage "https://plus.google.com/featuredphotos"

  # Installs as a screen saver rather than an app bundle.
  screen_saver "Google Featured Photos.saver"
end
| 32.636364 | 91 | 0.799443 |
bb03b2b1eae22631de8bc3b2d5e16965193f425f | 117 | class UnitCategory < ActiveRecord::Base
has_many :units, :dependent => :destroy
validates_presence_of :name
end
| 19.5 | 41 | 0.769231 |
require 'rails_helper'

# Feature specs for signing in on an account's subdomain: the happy path,
# wrong password, unknown email, and a user belonging to a different
# account's subdomain.
feature "User Sign In" do
  extend SubdomainHelpers

  let!(:account) {FactoryGirl.create(:account)}
  let!(:sign_in_url) {"http://#{account.subdomain}.example.com/sign_in"}
  let!(:root_url) {"http://#{account.subdomain}.example.com/"}

  # `within_account_subdomain` scopes Capybara to the account's subdomain.
  within_account_subdomain do
    scenario "signs in as an account owner successfully" do
      visit root_url
      # Unauthenticated visitors are redirected to the sign-in page.
      expect(page.current_url).to eq(sign_in_url)
      fill_in "Email", with: account.owner.email
      fill_in "Password", with: "password"
      click_button "Sign In"
      expect(page).to have_content("You are now signed in.")
      expect(page.current_url).to eq(root_url)
    end

    scenario "attempts sign in with an invalid password and fails" do
      visit subscribem.root_url(:subdomain => account.subdomain)
      expect(page.current_url).to eq(sign_in_url)
      expect(page).to have_content("Please sign in.")
      fill_in "Email", with: account.owner.email
      fill_in "Password", with: "drowssap"
      click_button "Sign In"
      expect(page).to have_content("Invalid email or password")
      expect(page.current_url).to eq(sign_in_url)
    end

    scenario "attempts sign in with an invalid email address and fails" do
      visit subscribem.root_url(:subdomain => account.subdomain)
      expect(page.current_url).to eq(sign_in_url)
      expect(page).to have_content("Please sign in.")
      fill_in "Email", with: "[email protected]"
      fill_in "Password", with: "password"
      click_button "Sign In"
      expect(page).to have_content("Invalid email or password")
      expect(page.current_url).to eq(sign_in_url)
    end

    # A valid user from another account must be rejected on this subdomain.
    scenario "cant sign in if not a part of this subdomain" do
      other_account = FactoryGirl.create(:account)
      visit subscribem.root_url(:subdomain => account.subdomain)
      expect(page.current_url).to eq(sign_in_url)
      expect(page).to have_content("Please sign in.")
      fill_in "Email", with: other_account.owner.email
      fill_in "Password", with: "password"
      click_button "Sign In"
      expect(page).to have_content("Invalid email or password")
      expect(page.current_url).to eq(sign_in_url)
    end
  end
end
| 35.704918 | 74 | 0.693756 |
d5d9074f40ab145355aa347bf8723cd9655a8a4c | 439 | # == Schema Information
#
# Table name: users
#
# id :bigint(8) not null, primary key
# name :text
# email :text
# password_digest :text
# admin :boolean
# created_at :datetime not null
# updated_at :datetime not null
#
# A registered user account.
#
# Passwords are stored as a bcrypt digest via +has_secure_password+; every
# user must have a unique, present email address.
class User < ApplicationRecord
  has_secure_password

  has_many :likes

  # Email is mandatory and must not already be taken.
  validates :email, presence: true, uniqueness: true
end
| 23.105263 | 58 | 0.583144 |
# Integration coverage for the stub/scenario listing routes of a running
# http_stub server (GET /http_stub/stubs, /http_stub/scenarios and the
# per-scenario detail page).
describe HttpStub::Server::Application::Routes::Stub, "when a server is running" do
  include_context "server integration"

  # Parsed HTML document of the most recent response, for CSS-based assertions.
  let(:response_document) { Nokogiri::HTML(response.body) }

  describe "that has multiple scenarios configured" do

    let(:configurator) { HttpStub::Examples::ConfiguratorWithExhaustiveScenarios }

    describe "GET /http_stub/stubs" do

      describe "when multiple stubs are configured" do

        # Activating each scenario registers its stubs (and their triggers) with the server.
        before(:example) do
          (1..3).each do |i|
            HTTParty.post("#{server_uri}/http_stub/scenarios/activate", :body => { name: "Scenario #{i}" })
          end
        end

        let(:response) { HTTParty.get("#{server_uri}/http_stub/stubs") }

        it "returns a 200 response code" do
          expect(response.code).to eql(200)
        end

        it "returns a response whose body contains the uri of each stub" do
          (1..3).each do |stub_number|
            expect(response.body).to match(/#{escape_html("/path_#{stub_number}")}/)
          end
        end

        it "returns a response whose body contains the uri of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expect(response.body).to match(/#{escape_html("/path_#{stub_number}_trigger_#{trigger_number}")}/)
            end
          end
        end

        it "returns a response whose body contains the request headers of each stub" do
          (1..3).each do |stub_number|
            expect(response.body).to match(/request_header_#{stub_number}:request_header_value_#{stub_number}/)
          end
        end

        it "returns a response whose body contains the request headers of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expected_header_key = "request_header_#{stub_number}_trigger_#{trigger_number}"
              expected_header_value = "request_header_value_#{stub_number}_trigger_#{trigger_number}"
              expect(response.body).to match(/#{expected_header_key}:#{expected_header_value}/)
            end
          end
        end

        it "returns a response whose body contains the parameters of each stub" do
          (1..3).each do |stub_number|
            expect(response.body).to match(/parameter_#{stub_number}=parameter_value_#{stub_number}/)
          end
        end

        it "returns a response whose body contains the parameters of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expected_parameter_key = "parameter_#{stub_number}_trigger_#{trigger_number}"
              expected_parameter_value = "parameter_value_#{stub_number}_trigger_#{trigger_number}"
              expect(response.body).to match(/#{expected_parameter_key}=#{expected_parameter_value}/)
            end
          end
        end

        it "returns a response whose body contains the bodies of each stub" do
          (1..3).each do |stub_number|
            expect(response.body).to(
              match(/#{escape_html("\"property_#{stub_number}\":{\"type\":\"property_#{stub_number}_type\"")}/)
            )
          end
        end

        it "returns a response whose body contains the bodies of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expected_property_name = "property_#{stub_number}_trigger_#{trigger_number}"
              expected_property_type = "property_#{stub_number}_trigger_#{trigger_number}_type"
              expect(response.body).to(
                match(/#{escape_html("\"#{expected_property_name}\":{\"type\":\"#{expected_property_type}\"")}/)
              )
            end
          end
        end

        # Stubs are configured with statuses 201..203.
        it "returns a response whose body contains the response status of each stub" do
          (1..3).each { |stub_number| expect(response.body).to match(/20#{stub_number}/) }
        end

        # Triggers are configured with statuses 30(stub*trigger).
        it "returns a response whose body contains the response status of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expect(response.body).to match(/30#{stub_number * trigger_number}/)
            end
          end
        end

        it "returns a response whose body contains the response headers of each stub" do
          (1..3).each do |stub_number|
            expected_header_key = "response_header_#{stub_number}"
            expected_header_value = "response_header_value_#{stub_number}"
            expect(response.body).to match(/#{expected_header_key}:#{expected_header_value}/)
          end
        end

        it "returns a response whose body contains the response headers of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expected_header_key = "response_header_#{stub_number}_trigger_#{trigger_number}"
              expected_header_value = "response_header_value_#{stub_number}_trigger_#{trigger_number}"
              expect(response.body).to match(/#{expected_header_key}:#{expected_header_value}/)
            end
          end
        end

        it "returns a response whose body contains the response body of stub returning JSON" do
          expect(response.body).to match(/#{encode_whitespace(JSON.pretty_generate({ key: "JSON body" }))}/)
        end

        it "returns a response whose body contains the response body of stub returning HTML" do
          expect(response.body).to match(/#{encode_whitespace("<html><body>HTML body</body></html>")}/)
        end

        it "returns a response whose body contains the response body of a stub returning a file" do
          file_link = response_document.css("a.file").first
          expect(file_link["href"]).to match(/^file:\/\/[^']+\.pdf$/)
        end

        it "returns a response whose body contains the response body of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expect(response.body).to match(/Body of scenario stub #{stub_number}_trigger_#{trigger_number}/)
            end
          end
        end

        # Stub delays are 8 * stub number; trigger delays are 3 * stub * trigger.
        it "returns a response whose body contains the response delay of each stub" do
          (1..3).each { |stub_number| expect(response.body).to include("#{8 * stub_number}") }
        end

        it "returns a response whose body contains the response delay of each stub trigger" do
          (1..3).each do |stub_number|
            (1..3).each do |trigger_number|
              expect(response.body).to include("#{3 * stub_number * trigger_number}")
            end
          end
        end

      end

    end

    describe "GET /http_stub/scenarios" do

      let(:response) { HTTParty.get("#{server_uri}/http_stub/scenarios") }
      # Session id appended to links so activation happens in the transactional session.
      let(:transactional_session_parameter) { "http_stub_session_id=#{transactional_session_id}" }

      it "returns a response whose body contains the name of each scenario in alphabetical order" do
        expected_scenario_names = [ "Nested scenario", "Scenario" ].map do |scenario_name_prefix|
          (1..3).map { |i| "#{scenario_name_prefix} #{i}" }
        end.flatten
        scenario_names = response_document.css(".scenario_name").map { |element| element.text }
        expect(scenario_names).to eql(expected_scenario_names)
      end

      it "returns a response whose body contains links to activate each scenario in the transactional session" do
        expected_activation_links = [ "/http_stub/scenarios/activate?#{transactional_session_parameter}" ] * 6
        activation_links = response_document.css("a.activate_scenario").map { |link| link["href"] }
        expect(activation_links).to eql(expected_activation_links)
      end

      it "returns a response whose body contains links to the details of each scenario for the default session" do
        expected_detail_links = %w{ Nested+scenario Scenario }.map do |scenario_name_prefix|
          (1..3).map { |i| "/http_stub/scenarios?name=#{scenario_name_prefix}+#{i}&#{transactional_session_parameter}" }
        end.flatten
        detail_links = response_document.css("a.view_scenario").map { |link| link["href"] }
        expect(detail_links).to eql(expected_detail_links)
      end

    end

    describe "GET /http/scenario?name" do

      # One context per configured scenario; expectations mirror the listing
      # page assertions above but for a single scenario's detail page.
      (1..3).each do |stub_number|
        scenario_name = "Scenario #{stub_number}"

        context "when the scenario with the name '#{scenario_name}' is requested" do

          let(:response) do
            HTTParty.get("#{server_uri}/http_stub/scenarios?#{URI.encode_www_form(:name => scenario_name)}")
          end

          it "should have a detail page for the scenario" do
            expect(response.code).to eql(200)
          end

          it "returns a response whose body contains the uri of the scenario" do
            expect(response.body).to match(/#{escape_html("/path_#{stub_number}")}/)
          end

          it "returns a response whose body contains the uri of each stub trigger" do
            (1..3).each do |trigger_number|
              expect(response.body).to match(/#{escape_html("/path_#{stub_number}_trigger_#{trigger_number}")}/)
            end
          end

          it "returns a response whose body contains the request headers of each stub" do
            expect(response.body).to match(/request_header_#{stub_number}:request_header_value_#{stub_number}/)
          end

          it "returns a response whose body contains the request headers of each stub trigger" do
            (1..3).each do |trigger_number|
              expected_header_key = "request_header_#{stub_number}_trigger_#{trigger_number}"
              expected_header_value = "request_header_value_#{stub_number}_trigger_#{trigger_number}"
              expect(response.body).to match(/#{expected_header_key}:#{expected_header_value}/)
            end
          end

          it "returns a response whose body contains the parameters of each stub" do
            expect(response.body).to match(/parameter_#{stub_number}=parameter_value_#{stub_number}/)
          end

          it "returns a response whose body contains the parameters of each stub trigger" do
            (1..3).each do |trigger_number|
              expected_parameter_key = "parameter_#{stub_number}_trigger_#{trigger_number}"
              expected_parameter_value = "parameter_value_#{stub_number}_trigger_#{trigger_number}"
              expect(response.body).to match(/#{expected_parameter_key}=#{expected_parameter_value}/)
            end
          end

          it "returns a response whose body contains the bodies of each stub" do
            expect(response.body).to(
              match(/#{escape_html("\"property_#{stub_number}\":{\"type\":\"property_#{stub_number}_type\"")}/)
            )
          end

          it "returns a response whose body contains the bodies of each stub trigger" do
            (1..3).each do |trigger_number|
              expected_property_name = "property_#{stub_number}_trigger_#{trigger_number}"
              expected_property_type = "property_#{stub_number}_trigger_#{trigger_number}_type"
              expect(response.body).to(
                match(/#{escape_html("\"#{expected_property_name}\":{\"type\":\"#{expected_property_type}\"")}/)
              )
            end
          end

          it "returns a response whose body contains the response status of each stub" do
            expect(response.body).to match(/20#{stub_number}/)
          end

          it "returns a response whose body contains the response status of each stub trigger" do
            (1..3).each do |trigger_number|
              expect(response.body).to match(/30#{stub_number * trigger_number}/)
            end
          end

          it "returns a response whose body contains the response headers of each stub" do
            expected_header_key = "response_header_#{stub_number}"
            expected_header_value = "response_header_value_#{stub_number}"
            expect(response.body).to match(/#{expected_header_key}:#{expected_header_value}/)
          end

          it "returns a response whose body contains the response headers of each stub trigger" do
            (1..3).each do |trigger_number|
              expected_header_key = "response_header_#{stub_number}_trigger_#{trigger_number}"
              expected_header_value = "response_header_value_#{stub_number}_trigger_#{trigger_number}"
              expect(response.body).to match(/#{expected_header_key}:#{expected_header_value}/)
            end
          end

          # Scenario 1 responds with JSON, 2 with HTML, 3 with a file; hence
          # the trailing `if stub_number == N` guards on the next three examples.
          it "returns a response whose body supports JSON responses" do
            expect(response.body).to match(/#{encode_whitespace(JSON.pretty_generate({ "key" => "JSON body" }))}/)
          end if stub_number == 1

          it "returns a response whose body supports HTML responses" do
            expect(response.body).to match(/#{encode_whitespace("<html><body>HTML body</body></html>")}/)
          end if stub_number == 2

          it "returns a response whose body supports file responses" do
            file_link = response_document.css("a.file").first
            expect(file_link["href"]).to match(/^file:\/\/[^']+\.pdf$/)
          end if stub_number == 3

          it "returns a response whose body contains the response body of each stub trigger" do
            (1..3).each do |trigger_number|
              expect(response.body).to match(/Body of scenario stub #{stub_number}_trigger_#{trigger_number}/)
            end
          end

          it "returns a response whose body contains the response delay of each stub" do
            expect(response.body).to include("#{8 * stub_number}")
          end

          it "returns a response whose body contains the response delay of each stub trigger" do
            (1..3).each do |trigger_number|
              expect(response.body).to include("#{3 * stub_number * trigger_number}")
            end
          end

        end
      end
    end
  end
end
| 43.4613 | 120 | 0.628793 |
# Shared examples for Fiber resumption methods. The concrete method under
# test is supplied by the including spec through the @method instance
# variable (e.g. :resume).
#
# FIX: the shared-group option must be the boolean +true+; the original
# passed the symbol +:true+, which only worked by accident of truthiness.
describe :fiber_resume, :shared => true do
  it "can be invoked from the root Fiber" do
    fiber = Fiber.new { :fiber }
    fiber.send(@method).should == :fiber
  end

  it "passes control to the beginning of the block on first invocation" do
    invoked = false
    fiber = Fiber.new { invoked = true }
    fiber.send(@method)
    invoked.should be_true
  end

  it "returns the last value encountered on first invocation" do
    fiber = Fiber.new { false; true }
    fiber.send(@method).should be_true
  end

  it "runs until the end of the block" do
    obj = mock('obj')
    obj.should_receive(:do).once
    fiber = Fiber.new { 1 + 2; a = "glark"; obj.do }
    fiber.send(@method)
  end

  it "runs until Fiber.yield" do
    obj = mock('obj')
    obj.should_not_receive(:do)
    fiber = Fiber.new { 1 + 2; Fiber.yield; obj.do }
    fiber.send(@method)
  end

  it "resumes from the last call to Fiber.yield on subsequent invocations" do
    fiber = Fiber.new { Fiber.yield :first; :second }
    fiber.send(@method).should == :first
    fiber.send(@method).should == :second
  end

  it "accepts any number of arguments" do
    fiber = Fiber.new { |a| }
    lambda { fiber.send(@method, *(1..10).to_a) }.should_not raise_error
  end

  it "sets the block parameters to its arguments on the first invocation" do
    first = mock('first')
    first.should_receive(:arg).with(:first).twice
    # The second invocation's argument is ignored; the block parameter keeps
    # its value from the first invocation.
    fiber = Fiber.new { |arg| first.arg arg; Fiber.yield; first.arg arg; }
    fiber.send(@method, :first)
    fiber.send(@method, :second)
  end

  it "raises a FiberError if the Fiber is dead" do
    fiber = Fiber.new { true }
    fiber.send(@method)
    lambda { fiber.send(@method) }.should raise_error(FiberError)
  end

  it "raises a LocalJumpError if the block includes a return statement" do
    fiber = Fiber.new { return; }
    lambda { fiber.send(@method) }.should raise_error(LocalJumpError)
  end

  it "raises a LocalJumpError if the block includes a break statement" do
    fiber = Fiber.new { break; }
    lambda { fiber.send(@method) }.should raise_error(LocalJumpError)
  end
end
| 30.720588 | 77 | 0.667305 |
# Read about factories at https://github.com/thoughtbot/factory_girl
# Factory producing a placeholder Address record; every column defaults to
# the literal string "MyString". Values are declared with lazy blocks, the
# form recommended by FactoryGirl over bare static attributes.
FactoryGirl.define do
  factory :address do
    street  { "MyString" }
    street2 { "MyString" }
    city    { "MyString" }
    state   { "MyString" }
    zip     { "MyString" }
  end
end
| 19.083333 | 68 | 0.707424 |
# Thin client for the Deployit task REST endpoints. All HTTP traffic goes
# through the injected communicator, which must respond to #do_get,
# #do_post and #do_delete.
class Tasks
  # Seconds between polls while waiting for a task to reach a terminal state.
  POLL_INTERVAL = 1

  # @param communicator [#do_get, #do_post, #do_delete] transport object
  def initialize(communicator)
    @communicator = communicator
  end

  # Starts execution of the task identified by +taskid+.
  def start(taskid)
    @communicator.do_post("/deployit/task/#{taskid}/start", '')
  end

  # Fetches the current state of the task.
  # @return [TaskInfo]
  def get(taskid)
    TaskInfo.new(@communicator.do_get("/deployit/task/#{taskid}"))
  end

  # Starts the task and blocks until it reaches a terminal state
  # (:EXECUTED or :STOPPED), polling once per POLL_INTERVAL seconds.
  def start_and_wait(taskid)
    start(taskid)
    # `until` replaces the former non-idiomatic `while not`.
    sleep POLL_INTERVAL until [:EXECUTED, :STOPPED].include?(get(taskid).state)
  end

  # Cancels the task.
  def cancel(taskid)
    @communicator.do_delete("/deployit/task/#{taskid}")
  end

  # Archives a finished task.
  def archive(taskid)
    @communicator.do_post("/deployit/task/#{taskid}/archive", '')
  end
end
| 18.774194 | 64 | 0.680412 |
require 'spec_helper'
# Smoke test: the gem must expose a non-nil VERSION constant.
RSpec.describe ScenicSqlserverAdapter do
  it "has a version number" do
    expect(ScenicSqlserverAdapter::VERSION).not_to be_nil
  end
end
| 20.375 | 57 | 0.785276 |
# frozen_string_literal: true
require 'slack/version'
require 'slack/field'
require 'slack/message'
require 'slack/attachment'
require 'json'
require 'faraday'
module Slack
  # Posts messages to a Slack incoming-webhook URL.
  class Poster
    # Default payload options (username, channel, icon_url, icon_emoji, ...)
    # merged into every message sent by #send_message.
    attr_accessor :options

    # Define getters and setters for the options hash keys. This will make assign of the options
    # more flexible.
    %i[username channel icon_url icon_emoji].each do |option_attr|
      define_method(option_attr) { @options[option_attr] }
      define_method("#{option_attr}=") { |value| @options[option_attr] = value }
    end

    # Initializes a Poster instance to post messages with an incoming webhook URL.
    # It also accepts an options hash. If no options are given then the poster uses the default
    # configuration from Slack integration.
    #
    # @raise [ArgumentError] if webhook_url is nil.
    #
    # ==== Examples
    #
    #   # Without options
    #   Slack::Poster.new('https://hooks.slack.com/services/T044G6VBA//TCIzZQQd7IKhQzCKc6W310va')
    #
    #   # With options using a custom username and icon avatar
    #   Slack::Poster.new('https://hooks.slack.com/services/T044G6VBA//TCIzZQQd7IKhQzCKc6W310va',
    #     username: 'Ricardo',
    #     icon_url: 'http://www.gravatar.com/avatar/92e00fd27c64c94d04140cef88039468.png')
    #
    #   # You can also use an emoji as avatar
    #   Slack::Poster.new('https://hooks.slack.com/services/T044G6VBA//TCIzZQQd7IKhQzCKc6W310va',
    #     username: 'Ricardo',
    #     icon_emoji: 'ghost')
    def initialize(webhook_url, options = {})
      # Fail fast before any instance state is assigned (previously the check
      # ran after assignment).
      raise ArgumentError, 'Webhook URL is required' if webhook_url.nil?

      @base_uri = webhook_url
      @options = options
    end

    # Sends a message to Slack. The message can be either plain text or a Slack::Message object.
    #
    # ==== Examples
    #
    #   # Plain text message
    #   poster.send_message('hello world')
    #
    #   # Using a message object
    #   poster.send_message(Slack::Message.new(text: 'hello world'))
    #
    # You can have messages with attachments if you build your message with a Slack::Message object
    # and add Slack::Attachment objects.
    #
    # @return [Faraday::Response] the webhook HTTP response.
    def send_message(message)
      body = message.is_a?(String) ? options.merge(text: message) : options.merge(message.as_json)
      # Build the connection and return the response directly (no redundant local).
      Faraday.new(url: @base_uri).post('', payload: body.to_json)
    end
  end
end
| 33.328571 | 99 | 0.67724 |
# A form submission captured on an {Mdm::WebSite}, recording the HTTP verb,
# path, query string and submitted parameters.
class Mdm::WebForm < ApplicationRecord
  #
  # Associations
  #

  # {Mdm::WebSite Web site} that served this form.
  belongs_to :web_site, class_name: 'Mdm::WebSite', inverse_of: :web_forms

  #
  # Attributes
  #

  # @!attribute created_at
  #   Timestamp of when this record was first persisted.
  #
  #   @return [DateTime]

  # @!attribute method
  #   HTTP verb (e.g. GET, POST) the form was submitted with.
  #
  #   @return [String]

  # @!attribute path
  #   Path component of the URL the form was submitted to.
  #
  #   @return [String]

  # @!attribute query
  #   Query string sent along with the submission.
  #
  #   @return [String]

  # @!attribute updated_at
  #   Timestamp of the most recent update to this record.
  #
  #   @return [DateTime]

  #
  # Serializations
  #

  # Submitted form parameters, persisted Base64-serialized.
  #
  # @return [Array<Array(String, String)>>]
  serialize :params, MetasploitDataModels::Base64Serializer.new

  Metasploit::Concern.run(self)
end
| 19.111111 | 77 | 0.630814 |
# Keep SSH open through the firewall so the node stays reachable after convergence.
default['firewall']['allow_ssh'] = true
# HTTP and HTTPS ports exposed by the web server.
default['awesome_customers_delivery']['open_ports'] = [80, 443]
# System user and group that own the deployed site files.
default['awesome_customers_delivery']['user'] = 'web_admin'
default['awesome_customers_delivery']['group'] = 'web_admin'
# Web server document root for the customers app.
default['awesome_customers_delivery']['document_root'] = '/var/www/customers/public_html'
# Static assets copied into the document root.
default['awesome_customers_delivery']['content_files'] = %w(customer.php index.php styles.css vis.js world-110m.json)
| 48.333333 | 117 | 0.767816 |
# Cosmos-specific account subclass for the Prime staking dashboard.
class Prime::Accounts::Cosmos < Prime::Account
  # Decorated on-chain account details, memoized per instance.
  # NOTE(review): assumes `network.primary.slug` resolves to an existing
  # ::Cosmos::Chain (find_by may return nil) — confirm against callers.
  def details
    @details ||= ::Cosmos::AccountDecorator.new(::Cosmos::Chain.find_by(slug: network.primary.slug), address)
  end

  # Staking rewards fetched from the primary network's client, memoized.
  def rewards
    @rewards ||= network.primary.client.prime_rewards(self)
  end
end
| 26.2 | 109 | 0.717557 |
e90f983b49c2ddff04d9e29d3b302c69ffa7ae89 | 893 | json.total_count @end_user_used_deals.total_count
json.deals @end_user_used_deals do |end_user_deal|
deal = end_user_deal.deal
json.id end_user_deal.deal_id
json.store_name deal.store.name
json.title deal.title
json.is_expired deal.expired?
json.booking_code end_user_deal.booking_code.coupon_code
json.booking_code_id end_user_deal.booking_code_id
json.booked_date ("Expires On: " + "#{end_user_deal.deal.expiry_date.strftime('%a, %d %b %Y')}") rescue nil
json.expiry_date end_user_deal.deal.expiry_date.strftime('%a, %d %b %Y') rescue nil
json.deal_type deal.deal_type # NOTE : Changed deal_type_to_s to deal_type
if end_user_deal.transaction_detail.present?
json.paid_amount end_user_deal.transaction_detail.total_amount.to_s
json.total_quantity end_user_deal.transaction_detail.quantity
end
json.redeem_status end_user_deal.status
end
json.status status
| 42.52381 | 109 | 0.806271 |
# Copyright (c) 2008 [Sur http://expressica.com]
module SimpleCaptcha #:nodoc
  module ViewHelpers #:nodoc
    include ConfigTasks

    # Simple Captcha is a very simplified captcha.
    #
    # It can be used as a *Model* or a *Controller* based Captcha depending on what options
    # we are passing to the method show_simple_captcha.
    #
    # *show_simple_captcha* method will return the image, the label and the text box.
    # This method should be called from the view within your form as...
    #
    # <%= show_simple_captcha %>
    #
    # The available options to pass to this method are
    # * label
    # * image_syle
    # * object
    # * distortion
    #
    # <b>Label:</b>
    #
    # default label is "type the text from the image", it can be modified by passing :label as
    #
    # <%= show_simple_captcha(:label => "new captcha label") %>.
    #
    # <b>Image Style:</b>
    #
    # There are eight different styles of images available as...
    # * embosed_silver
    # * simply_red
    # * simply_green
    # * simply_blue
    # * distorted_black
    # * all_black
    # * charcoal_grey
    # * almost_invisible
    #
    # The default image is simply_blue and can be modified by passing any of the above style as...
    #
    # <%= show_simple_captcha(:image_style => "simply_red") %>
    #
    # The images can also be selected randomly by using *random* in the image_style as
    #
    # <%= show_simple_captcha(:image_style => "random") %>
    #
    # *Object*
    #
    # This option is needed to create a model based captcha.
    # If this option is not provided, the captcha will be controller based and
    # should be checked in controller's action just by calling the method simple_captcha_valid?
    #
    # To make a model based captcha give this option as...
    #
    # <%= show_simple_captcha(:object => "user") %>
    # and also call the method apply_simple_captcha in the model
    # this will consider "user" as the object of the model class.
    #
    # *Examples*
    # * controller based
    # <%= show_simple_captcha(:image_style => "embosed_silver", :label => "Human Authentication: type the text from image above") %>
    # * model based
    # <%= show_simple_captcha(:object => "person", :image_style => "simply_blue", :label => "Human Authentication: type the text from image above") %>
    #
    # Find more detailed examples with sample images here on my blog http://EXPRESSICA.com
    #
    # All Feedbacks/CommentS/Issues/Queries are welcome.
    def show_simple_captcha(options={})
      # Always generates fresh captcha data for each render.
      options[:field_value] = set_simple_captcha_data(options[:code_type])
      @simple_captcha_options =
        {:image => simple_captcha_image(options),
         :label => options[:label] || "(type the code from the image)",
         :field => simple_captcha_field(options)}
      render :partial => 'simple_captcha/simple_captcha'
    end

    # Form-builder flavour of show_simple_captcha: reuses an already
    # generated field value when present (||=) so redisplayed forms keep
    # their captcha key.
    def show_simple_captcha_for(options={})
      options[:field_value] ||= set_simple_captcha_data(options[:code_type])
      @simple_captcha_options =
        {:image => simple_captcha_image(options),
         :label => options[:label] || "(type the code from the image)",
         :field_for => simple_captcha_field_for(options)}
      render :partial => 'simple_captcha/simple_captcha'
    end

    private

    # Builds the <img> tag pointing at the captcha image action; the
    # timestamp query parameter defeats browser caching.
    def simple_captcha_image(options={})
      url =
        simple_captcha_url(
          :action => 'simple_captcha',
          :simple_captcha_key => simple_captcha_key,
          :image_style => options[:image_style] || '',
          :distortion => options[:distortion] || '',
          :time => Time.now.to_i)
      "<img src='#{url}' alt='simple_captcha.jpg' />"
    end

    # Text input for the captcha answer, plus a hidden key field when the
    # captcha is model based (:object given).
    def simple_captcha_field(options={})
      options[:object] ?
        text_field(options[:object], :captcha, :value => '') +
        hidden_field(options[:object], :captcha_key, {:value => options[:field_value]}) :
        text_field_tag(:captcha)
    end

    # Same as simple_captcha_field but driven through a form-builder object.
    def simple_captcha_field_for(options={})
      options[:object] ?
        options[:object].text_field(:captcha, :value => '') +
        options[:object].hidden_field(:captcha_key, {:value => options[:field_value]}) :
        options[:object].text_field_tag(:captcha)
    end

    # Generates a new captcha code, stores it against the session key and
    # returns that key for embedding in the form.
    def set_simple_captcha_data(code_type)
      key, value = simple_captcha_key, generate_simple_captcha_data(code_type)
      data = SimpleCaptchaData.get_data(key)
      data.value = value
      data.save
      key
    end

    # Builds a random 6-character code: digits ('0'-'9') when code is
    # 'numeric', uppercase ASCII letters ('A'-'Z') otherwise. Replaces the
    # previous manual accumulator loop with a redundant explicit `return`.
    def generate_simple_captcha_data(code)
      if code == 'numeric'
        Array.new(6) { rand(10).to_s }.join
      else
        Array.new(6) { (65 + rand(26)).chr }.join
      end
    end
  end
end
# Mix the captcha helpers into every view so templates can call
# show_simple_captcha directly.
ActionView::Base.module_eval do
  include SimpleCaptcha::ViewHelpers
end
| 33.843972 | 150 | 0.639983 |
require 'test_helper'
module DanvanthiriCore
  # Placeholder test case for the City model; no assertions implemented yet.
  class CityTest < ActiveSupport::TestCase
    # test "the truth" do
    #   assert true
    # end
  end
end
| 15.5 | 42 | 0.683871 |
# The MIT License (MIT)
#
# Copyright (c) 2021 Losant IoT, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require "json"
module LosantRest

  # Class containing all the actions for the Experience Slugs Resource
  class ExperienceSlugs

    # @param client [#request] configured API client used for all HTTP calls
    def initialize(client)
      @client = client
    end

    # Returns the experience slugs for an application
    #
    # Authentication:
    # The client must be configured with a valid api
    # access token to call this action. The token
    # must include at least one of the following scopes:
    # all.Application, all.Application.cli, all.Application.read, all.Organization, all.Organization.read, all.User, all.User.cli, all.User.read, experienceSlugs.*, or experienceSlugs.get.
    #
    # Parameters:
    # * {string} applicationId - ID associated with the application
    # * {string} losantdomain - Domain scope of request (rarely needed)
    # * {boolean} _actions - Return resource actions in response
    # * {boolean} _links - Return resource link in response
    # * {boolean} _embedded - Return embedded resources in response
    #
    # Responses:
    # * 200 - Collection of experience slugs (https://api.losant.com/#/definitions/experienceSlugs)
    #
    # Errors:
    # * 400 - Error if malformed request (https://api.losant.com/#/definitions/error)
    # * 404 - Error if application was not found (https://api.losant.com/#/definitions/error)
    def get(params = {})
      params = Utils.symbolize_hash_keys(params)
      query_params = { _actions: false, _links: true, _embedded: true }
      headers = {}
      body = nil

      # Fail fast when the required path parameter is missing
      # (idiomatic raise: class plus message, not ArgumentError.new).
      raise ArgumentError, "applicationId is required" unless params.key?(:applicationId)

      headers[:losantdomain] = params[:losantdomain] if params.key?(:losantdomain)
      query_params[:_actions] = params[:_actions] if params.key?(:_actions)
      query_params[:_links] = params[:_links] if params.key?(:_links)
      query_params[:_embedded] = params[:_embedded] if params.key?(:_embedded)

      path = "/applications/#{params[:applicationId]}/experience/slugs"

      @client.request(
        method: :get,
        path: path,
        query: query_params,
        headers: headers,
        body: body)
    end

    # Create a new experience slug for an application
    #
    # Authentication:
    # The client must be configured with a valid api
    # access token to call this action. The token
    # must include at least one of the following scopes:
    # all.Application, all.Organization, all.User, experienceSlugs.*, or experienceSlugs.post.
    #
    # Parameters:
    # * {string} applicationId - ID associated with the application
    # * {hash} experienceSlug - New experience slug information (https://api.losant.com/#/definitions/experienceSlugPost)
    # * {string} losantdomain - Domain scope of request (rarely needed)
    # * {boolean} _actions - Return resource actions in response
    # * {boolean} _links - Return resource link in response
    # * {boolean} _embedded - Return embedded resources in response
    #
    # Responses:
    # * 201 - Successfully created experience slug (https://api.losant.com/#/definitions/experienceSlug)
    #
    # Errors:
    # * 400 - Error if malformed request (https://api.losant.com/#/definitions/error)
    # * 404 - Error if application was not found (https://api.losant.com/#/definitions/error)
    def post(params = {})
      params = Utils.symbolize_hash_keys(params)
      query_params = { _actions: false, _links: true, _embedded: true }
      headers = {}
      body = nil

      raise ArgumentError, "applicationId is required" unless params.key?(:applicationId)
      raise ArgumentError, "experienceSlug is required" unless params.key?(:experienceSlug)

      body = params[:experienceSlug] if params.key?(:experienceSlug)
      headers[:losantdomain] = params[:losantdomain] if params.key?(:losantdomain)
      query_params[:_actions] = params[:_actions] if params.key?(:_actions)
      query_params[:_links] = params[:_links] if params.key?(:_links)
      query_params[:_embedded] = params[:_embedded] if params.key?(:_embedded)

      path = "/applications/#{params[:applicationId]}/experience/slugs"

      @client.request(
        method: :post,
        path: path,
        query: query_params,
        headers: headers,
        body: body)
    end

  end
end
| 42.732283 | 188 | 0.698544 |
# frozen_string_literal: true
class Release < ActiveRecord::Base
NUMBER_REGEX = /\A#{Samson::RELEASE_NUMBER}\z/.freeze
VERSION_REGEX = /\Av(#{Samson::RELEASE_NUMBER})\z/.freeze
belongs_to :project, touch: true, inverse_of: :releases
belongs_to :author, class_name: "User", inverse_of: nil
before_validation :assign_release_number
before_validation :covert_ref_to_sha
validates :number, format: {with: NUMBER_REGEX, message: "may only contain numbers and decimals."}
validates :commit, format: {with: Build::SHA1_REGEX, message: "can only be a full sha"}, on: :create
# DEFAULT_RELEASE_NUMBER is the default value assigned to release#number by the database.
# This constant is here for convenience - the value that the database uses is in db/schema.rb.
DEFAULT_RELEASE_NUMBER = "1"
def changeset
@changeset ||= Changeset.new(project, previous_release&.commit, commit)
end
def previous_release
project.release_prior_to(self)
end
def author
super || NullUser.new(author_id)
end
def to_param
version
end
def self.find_by_param!(version)
if number = version[VERSION_REGEX, 1]
find_by_number!(number)
else
raise ActiveRecord::RecordNotFound
end
end
def version
"v#{number}"
end
def assign_release_number
# Detect whether the number has been overwritten by params, e.g. using the
# release-number-from-ci plugin.
return if number != DEFAULT_RELEASE_NUMBER && number.present?
errors.add :number, "Unable to auto bump version" unless self.number = next_release_number
end
def contains_commit?(other_commit)
return true if other_commit == commit
# status values documented here: http://stackoverflow.com/questions/23943855/github-api-to-compare-commits-response-status-is-diverged
['behind', 'identical'].include?(GITHUB.compare(project.repository_path, commit, other_commit).status)
rescue Octokit::NotFound
false
rescue Octokit::Error => e
Samson::ErrorNotifier.notify(
e, parameters: {
repository_path: project.repository_path, commit: commit, other_commit: other_commit
}
)
false # Err on side of caution and cause a new release to be created.
end
private
# If Github already has a version tagged for this commit, use it unless it is smaller.
# If the commit is after a known tag, bump it once.
# Othervise bump latest release number.
def next_release_number
latest_samson_number = project.releases.last&.number || "0"
next_samson_number = next_number(latest_samson_number)
return next_samson_number if commit.blank?
return next_samson_number unless fuzzy_tag = project.repository.fuzzy_tag_from_ref(commit)&.split('-', 2)
return next_samson_number unless latest_github_number = fuzzy_tag.first[VERSION_REGEX, 1]
next_github_number = (fuzzy_tag.size == 1 ? latest_github_number : next_number(latest_github_number))
if Gem::Version.new(next_samson_number) > Gem::Version.new(next_github_number)
next_samson_number
else
next_github_number
end
end
# Returns current_version with its trailing digit run incremented,
# e.g. "41" -> "42", "v1.2.9" -> "v1.2.10".
# Uses non-destructive #sub: the previous `dup.sub!` returned nil when the
# pattern did not match (no trailing digits), which would crash callers
# that feed the result to Gem::Version.new. Inputs without trailing digits
# are now returned unchanged.
def next_number(current_version)
  current_version.to_s.sub(/\d+$/) { |d| d.to_i + 1 }
end
# Resolves a symbolic ref (branch/tag name) stored in `commit` to its full
# sha1 via the local repository; no-op when commit is blank or already a sha.
# NOTE(review): name looks like a typo for "convert_ref_to_sha" — kept as-is
# since callbacks/callers reference it by this name.
def covert_ref_to_sha
  return if commit.blank? || commit =~ Build::SHA1_REGEX
  self.commit = project.repository.commit_from_ref(commit)
end
end
| 33.4 | 138 | 0.735329 |
335e0905b7dbda14f654bfaee06626bdec96c0c1 | 1,925 | class Ipfs < Formula
desc "Peer-to-peer hypermedia protocol"
homepage "https://ipfs.io/"
url "https://github.com/ipfs/go-ipfs.git",
tag: "v0.8.0",
revision: "ce693d7e81e0206b3afbce30333c21a36a9f094b"
license all_of: [
"MIT",
any_of: ["MIT", "Apache-2.0"],
]
head "https://github.com/ipfs/go-ipfs.git"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "26b96618554fa924bf04a04b0f3c68714b92b1493f9391c973b7eb7b3409a744"
sha256 cellar: :any_skip_relocation, catalina: "f75c5837fecebbeeb36be86091e719d85b0922654d406753bfe3c4f7f4a2e1a9"
sha256 cellar: :any_skip_relocation, mojave: "868371961578f442159865ff5111d778dbc730cda71058f942cbb354e6a46029"
sha256 cellar: :any_skip_relocation, x86_64_linux: "d0e0787ea04badd41ee9f03e96386847f168a9df73ba343056b0074650a0eaff"
end
depends_on "[email protected]" => :build
# Builds go-ipfs inside a GOPATH-shaped staging directory (pre-Go-modules
# layout requires the source under src/github.com/...), then installs the
# binary and the bash completion script.
def install
  ENV["GOPATH"] = buildpath
  (buildpath/"src/github.com/ipfs/go-ipfs").install buildpath.children
  cd("src/github.com/ipfs/go-ipfs") { system "make", "install" }
  bin.install "bin/ipfs"
  cd("src/github.com/ipfs/go-ipfs") { bash_completion.install "misc/completion/ipfs-completion.bash" }
end
plist_options manual: "ipfs daemon"
# launchd property list so `brew services` can run `ipfs daemon` at load.
def plist
  <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>Label</key>
    <string>#{plist_name}</string>
    <key>ProgramArguments</key>
    <array>
    <string>#{opt_bin}/ipfs</string>
    <string>daemon</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    </dict>
    </plist>
  EOS
end
# Smoke test: `ipfs init` should report node initialization.
test do
  assert_match "initializing IPFS node", shell_output(bin/"ipfs init")
end
end
| 31.048387 | 121 | 0.661818 |
26bcfd36bb730c3ff32f5309822b92afc6b7866d | 859 | #
# Cookbook Name:: mongodb3-test
# Recipe:: custom
#
# Copyright 2016, Sunggun Yu
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Override default data/log locations before converging the main recipe.
# NOTE(review): node.set is deprecated (removed in Chef 14) — consider
# node.override/node.default; confirm the Chef version this cookbook targets.
node.set['mongodb3']['config']['mongod']['storage']['dbPath'] = '/var/lib/mongodb/custom'
node.set['mongodb3']['config']['mongod']['systemLog']['path'] = '/var/log/mongodb/custom/mongod.log'
include_recipe 'mongodb3::default'
| 35.791667 | 100 | 0.733411 |
5d4d94f128a553791b1692f3dcc3599a9473a83f | 388 | # frozen_string_literal: true
# ypserv_service.rb
# Check if ypserv services is enabled
# Custom fact reporting the ypserv service state: "disabled" when systemd
# does not know the unit (or the query fails), otherwise whatever
# `systemctl is-enabled` prints (e.g. "enabled", "masked").
Facter.add('srv_ypserv') do
  confine :osfamily => 'RedHat'
  setcode do
    ypserv = Facter::Core::Execution.exec('systemctl is-enabled ypserv')
    # Guard against nil output as well as systemd "Failed ..." error text.
    # The original used `or`/`then` and a dead `ret` accumulator, and would
    # raise NoMethodError on nil output from exec.
    if ypserv.nil? || ypserv.match?(/^Failed/) || ypserv.empty?
      'disabled'
    else
      ypserv
    end
  end
end
| 19.4 | 72 | 0.634021 |
0382c87c74fc77f642b687ce25a67846a1c4bc3c | 13,594 | require 'chunky_png/canvas/png_encoding'
require 'chunky_png/canvas/png_decoding'
require 'chunky_png/canvas/adam7_interlacing'
require 'chunky_png/canvas/stream_exporting'
require 'chunky_png/canvas/stream_importing'
require 'chunky_png/canvas/data_url_exporting'
require 'chunky_png/canvas/data_url_importing'
require 'chunky_png/canvas/operations'
require 'chunky_png/canvas/drawing'
require 'chunky_png/canvas/resampling'
require 'chunky_png/canvas/masking'
module ChunkyPNG
# The ChunkyPNG::Canvas class represents a raster image as a matrix of
# pixels.
#
# This class supports loading a Canvas from a PNG datastream, and creating a
# {ChunkyPNG::Datastream PNG datastream} based on this matrix. ChunkyPNG
# only supports 8-bit color depth, otherwise all of the PNG format's
# variations are supported for both reading and writing.
#
# This class offers per-pixel access to the matrix by using x,y coordinates.
# It uses a palette (see {ChunkyPNG::Palette}) to keep track of the
# different colors used in this matrix.
#
# The pixels in the canvas are stored as 4-byte fixnum, representing 32-bit
# RGBA colors (8 bit per channel). The module {ChunkyPNG::Color} is provided
# to work more easily with these number as color values.
#
# The module {ChunkyPNG::Canvas::Operations} is imported for operations on
# the whole canvas, like cropping and alpha compositing. Simple drawing
# functions are imported from the {ChunkyPNG::Canvas::Drawing} module.
class Canvas
include PNGEncoding
extend PNGDecoding
extend Adam7Interlacing
include StreamExporting
extend StreamImporting
include DataUrlExporting
extend DataUrlImporting
include Operations
include Drawing
include Resampling
include Masking
# @return [Integer] The number of columns in this canvas
attr_reader :width
# @return [Integer] The number of rows in this canvas
attr_reader :height
# @return [Array<ChunkyPNG::Color>] The list of pixels in this canvas.
# This array always should have +width * height+ elements.
attr_reader :pixels
#################################################################
# CONSTRUCTORS
#################################################################
# Initializes a new Canvas instance.
#
# @overload initialize(width, height, background_color)
# @param [Integer] width The width in pixels of this canvas
# @param [Integer] height The height in pixels of this canvas
# @param [Integer, ...] background_color The initial background color of
# this canvas. This can be a color value or any value that
# {ChunkyPNG::Color.parse} can handle.
#
# @overload initialize(width, height, initial)
# @param [Integer] width The width in pixels of this canvas
# @param [Integer] height The height in pixels of this canvas
# @param [Array<Integer>] initial The initial pizel values. Must be an
# array with <tt>width * height</tt> elements.
def initialize(width, height, initial = ChunkyPNG::Color::TRANSPARENT)
@width, @height = width, height
if initial.kind_of?(Array)
unless initial.length == width * height
raise ArgumentError, "The initial array should have #{width}x#{height} = #{width*height} elements!"
end
@pixels = initial
else
@pixels = Array.new(width * height, ChunkyPNG::Color.parse(initial))
end
end
# Initializes a new Canvas instances when being cloned.
# @param [ChunkyPNG::Canvas] other The canvas to duplicate
# @return [void]
# @private
def initialize_copy(other)
@width, @height = other.width, other.height
@pixels = other.pixels.dup
end
# Creates a new canvas instance by duplicating another instance.
# @param [ChunkyPNG::Canvas] canvas The canvas to duplicate
# @return [ChunkyPNG::Canvas] The newly constructed canvas instance.
def self.from_canvas(canvas)
new(canvas.width, canvas.height, canvas.pixels.dup)
end
#################################################################
# PROPERTIES
#################################################################
# Returns the dimension (width x height) for this canvas.
# @return [ChunkyPNG::Dimension] A dimension instance with the width and
# height set for this canvas.
def dimension
ChunkyPNG::Dimension.new(width, height)
end
# Returns the area of this canvas in number of pixels.
# @return [Integer] The number of pixels in this canvas
def area
pixels.length
end
# Replaces a single pixel in this canvas.
# @param [Integer] x The x-coordinate of the pixel (column)
# @param [Integer] y The y-coordinate of the pixel (row)
# @param [Integer] color The new color for the provided coordinates.
# @return [Integer] The new color value for this pixel, i.e.
# <tt>color</tt>.
# @raise [ChunkyPNG::OutOfBounds] when the coordinates are outside of the
# image's dimensions.
# @see #set_pixel
def []=(x, y, color)
assert_xy!(x, y)
@pixels[y * width + x] = ChunkyPNG::Color.parse(color)
end
# Replaces a single pixel in this canvas, without bounds checking.
#
# This method return value and effects are undefined for coordinates
# out of bounds of the canvas.
#
# @param [Integer] x The x-coordinate of the pixel (column)
# @param [Integer] y The y-coordinate of the pixel (row)
# @param [Integer] pixel The new color for the provided coordinates.
# @return [Integer] The new color value for this pixel, i.e.
# <tt>color</tt>.
def set_pixel(x, y, color)
@pixels[y * width + x] = color
end
# Replaces a single pixel in this canvas, with bounds checking. It will do
# noting if the provided coordinates are out of bounds.
#
# @param [Integer] x The x-coordinate of the pixel (column)
# @param [Integer] y The y-coordinate of the pixel (row)
# @param [Integer] pixel The new color value for the provided coordinates.
# @return [Integer] The new color value for this pixel, i.e.
# <tt>color</tt>, or <tt>nil</tt> if the coordinates are out of bounds.
def set_pixel_if_within_bounds(x, y, color)
return unless include_xy?(x, y)
@pixels[y * width + x] = color
end
# Returns a single pixel's color value from this canvas.
# @param [Integer] x The x-coordinate of the pixel (column)
# @param [Integer] y The y-coordinate of the pixel (row)
# @return [Integer] The current color value at the provided coordinates.
# @raise [ChunkyPNG::OutOfBounds] when the coordinates are outside of the
# image's dimensions.
# @see #get_pixel
def [](x, y)
assert_xy!(x, y)
@pixels[y * width + x]
end
# Returns a single pixel from this canvas, without checking bounds. The
# return value for this method is undefined if the coordinates are out of
# bounds.
#
# @param (see #[])
# @return [Integer] The current pixel at the provided coordinates.
def get_pixel(x, y)
@pixels[y * width + x]
end
# Returns an extracted row as vector of pixels
# @param [Integer] y The 0-based row index
# @return [Array<Integer>] The vector of pixels in the requested row
def row(y)
assert_y!(y)
pixels.slice(y * width, width)
end
# Returns an extracted column as vector of pixels.
# @param [Integer] x The 0-based column index.
# @return [Array<Integer>] The vector of pixels in the requested column.
def column(x)
  assert_x!(x)
  # map expresses the transformation directly; the old inject/<< form was
  # an accumulator anti-pattern with identical behavior.
  (0...height).map { |y| get_pixel(x, y) }
end
# Replaces a row of pixels on this canvas.
# @param [Integer] y The 0-based row index.
# @param [Array<Integer>] vector The vector of pixels to replace the row
# with.
# @return [void]
def replace_row!(y, vector)
assert_y!(y) && assert_width!(vector.length)
pixels[y * width, width] = vector
end
# Replaces a column of pixels on this canvas.
# @param [Integer] x The 0-based column index.
# @param [Array<Integer>] vector The vector of pixels to replace the column
#   with.
# @return [void]
def replace_column!(x, vector)
  assert_x!(x) && assert_height!(vector.length)
  # each over a range instead of `for`, which leaks its loop variable into
  # the enclosing scope.
  (0...height).each do |y|
    set_pixel(x, y, vector[y])
  end
end
# Checks whether the given coordinates are in the range of the canvas
# @param [ChunkyPNG::Point, Array, Hash, String] point_like The point to
# check.
# @return [true, false] True if the x and y coordinates of the point are
# within the limits of this canvas.
# @see ChunkyPNG.Point
def include_point?(*point_like)
dimension.include?(ChunkyPNG::Point(*point_like))
end
alias_method :include?, :include_point?
# Checks whether the given x- and y-coordinate are in the range of the
# canvas
#
# @param [Integer] x The x-coordinate of the pixel (column)
# @param [Integer] y The y-coordinate of the pixel (row)
# @return [true, false] True if the x- and y-coordinate is in the range of
# this canvas.
def include_xy?(x, y)
y >= 0 && y < height && x >= 0 && x < width
end
# Checks whether the given y-coordinate is in the range of the canvas
# @param [Integer] y The y-coordinate of the pixel (row)
# @return [true, false] True if the y-coordinate is in the range of this
# canvas.
def include_y?(y)
y >= 0 && y < height
end
# Checks whether the given x-coordinate is in the range of the canvas
# @param [Integer] x The y-coordinate of the pixel (column)
# @return [true, false] True if the x-coordinate is in the range of this
# canvas.
def include_x?(x)
x >= 0 && x < width
end
# Returns the palette used for this canvas.
# @return [ChunkyPNG::Palette] A palette which contains all the colors that
# are being used for this image.
def palette
ChunkyPNG::Palette.from_canvas(self)
end
# Equality check to compare this canvas with other matrices.
# @param other The object to compare this Matrix to.
# @return [true, false] True if the size and pixel values of the other
# canvas are exactly the same as this canvas's size and pixel values.
def eql?(other)
other.kind_of?(self.class) && other.pixels == self.pixels &&
other.width == self.width && other.height == self.height
end
alias :== :eql?
#################################################################
# EXPORTING
#################################################################
# Creates an ChunkyPNG::Image object from this canvas.
# @return [ChunkyPNG::Image] This canvas wrapped in an Image instance.
def to_image
ChunkyPNG::Image.from_canvas(self)
end
# Alternative implementation of the inspect method.
# @return [String] A nicely formatted string representation of this canvas.
# @private
def inspect
  inspected = "<#{self.class.name} #{width}x#{height} ["
  # each over a range instead of `for` (same iteration, no leaked variable).
  (0...height).each do |y|
    inspected << "\n\t[" << row(y).map { |p| ChunkyPNG::Color.to_hex(p) }.join(' ') << ']'
  end
  inspected << "\n]>"
end
protected
# Replaces the image, given a new width, new height, and a new pixel array.
def replace_canvas!(new_width, new_height, new_pixels)
unless new_pixels.length == new_width * new_height
raise ArgumentError, "The provided pixel array should have #{new_width * new_height} items"
end
@width, @height, @pixels = new_width, new_height, new_pixels
self
end
# Throws an exception if the x-coordinate is out of bounds.
def assert_x!(x)
unless include_x?(x)
raise ChunkyPNG::OutOfBounds, "Column index #{x} out of bounds!"
end
true
end
# Throws an exception if the y-coordinate is out of bounds.
def assert_y!(y)
unless include_y?(y)
raise ChunkyPNG::OutOfBounds, "Row index #{y} out of bounds!"
end
true
end
# Throws an exception if the x- or y-coordinate is out of bounds.
def assert_xy!(x, y)
unless include_xy?(x, y)
raise ChunkyPNG::OutOfBounds, "Coordinates (#{x},#{y}) out of bounds!"
end
true
end
# Throws an exception if the vector_length does not match this canvas'
# height.
def assert_height!(vector_length)
if height != vector_length
raise ChunkyPNG::ExpectationFailed,
"The length of the vector (#{vector_length}) does not match the canvas height (#{height})!"
end
true
end
# Throws an exception if the vector_length does not match this canvas'
# width.
def assert_width!(vector_length)
if width != vector_length
raise ChunkyPNG::ExpectationFailed,
"The length of the vector (#{vector_length}) does not match the canvas width (#{width})!"
end
true
end
# Throws an exception if the matrix width and height does not match this canvas' dimensions.
def assert_size!(matrix_width, matrix_height)
if width != matrix_width
raise ChunkyPNG::ExpectationFailed,
'The width of the matrix does not match the canvas width!'
end
if height != matrix_height
raise ChunkyPNG::ExpectationFailed,
'The height of the matrix does not match the canvas height!'
end
true
end
end
end
| 36.44504 | 109 | 0.643225 |
e85a57abfcb132fabb59a498d3fc49b5df767797 | 1,346 | require_relative 'lib/potter_world/version'
Gem::Specification.new do |spec|
spec.name = "potter_world"
spec.version = PotterWorld::VERSION
spec.authors = ["Olivia Cubela"]
spec.email = ["[email protected]"]
spec.summary = %q{TODO: Write a short summary, because RubyGems requires one.}
spec.description = %q{TODO: Write a longer description or delete this line.}
spec.homepage = "TODO: Put your gem's website or public repo URL here."
spec.license = "MIT"
spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
end
| 44.866667 | 87 | 0.665676 |
f81bcec332f4b82dc8db4e38eb3858cb5ea62450 | 471 | require 'httparty'
# Movie record; class-level helper pulls a random page of search results
# from The Movie DB via HTTParty.
class Movie < ApplicationRecord
  include HTTParty

  has_many :user_queues
  has_many :users, through: :user_queues

  default_params :output => 'json'
  format :json

  # Single-letter queries used to sample a broad, random slice of the catalog.
  SEARCH_LETTERS = %w[a b d e h i l m n o q s t v w y z].freeze

  # Fetches one random page (1..60) of TMDB search results for a random letter.
  # SECURITY(review): the TMDB API key is hard-coded and committed — move it
  # to Rails credentials or an ENV variable.
  def self.get_movies
    get("https://api.themoviedb.org/3/search/movie?api_key=3d6fab529007c80701a5d4ed2a0df61e&language=en-US&query=#{SEARCH_LETTERS.sample}&page=#{rand(1..60)}&include_adult=false")
  end
end
| 31.4 | 250 | 0.653928 |
d538fbf2231479250447575fbc0e06a5688593bc | 7,927 | require 'test_helper'
class HTML::Pipeline::ExtendedMarkdownFilterTest < Minitest::Test
# Reads a fixture file from the fixtures directory next to this test file.
# Uses File.read instead of File.open(...).read, which leaked an open file
# handle (no block, never closed); also drops the pointless string
# interpolation around File.expand_path.
def fixture(name)
  File.read(File.join(File.expand_path(File.dirname(__FILE__)), 'fixtures', name))
end
def test_command_line
doc = ExtendedMarkdownFilter.to_document(fixture('command_line.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('pre.command-line').size
assert_equal 2, doc.css('span.command').size
assert_equal 1, doc.css('span.comment').size
assert_equal 2, doc.css('em').size
assert_equal 1, doc.css('span.output').size
assert_equal 0, doc.css('code').size
assert_equal 0, doc.css('.command-line a').size
assert_equal 8, doc.to_html.lines.count
refute_equal 0, doc.css('pre').inner_text.length
end
def test_command_line_indented
doc = ExtendedMarkdownFilter.to_document(fixture('command_line_indented.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('pre.command-line').size
assert_equal 2, doc.css('span.command').size
assert_equal 1, doc.css('span.comment').size
assert_equal 2, doc.css('em').size
assert_equal 1, doc.css('span.output').size
assert_equal 0, doc.css('.command-line a').size
refute_equal 0, doc.css('pre').inner_text.length
end
def test_nested_command_line
doc = ExtendedMarkdownFilter.to_document(fixture('command_line_nested.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('ol').size
assert_equal 2, doc.css('li').size
assert_equal 2, doc.css('pre').size
list = doc.css('ol')[0]
first_list_item = doc.css('li')[0]
first_command_line_block = doc.css('pre')[0]
second_list_item = doc.css('li')[1]
second_command_line_block = doc.css('pre')[1]
assert list.children.include?(first_list_item)
assert list.children.include?(second_list_item)
assert_equal first_command_line_block.parent, first_list_item
assert_equal second_command_line_block.parent, second_list_item
end
def test_helper_works_and_requires_unsafe
doc = ExtendedMarkdownFilter.to_document(fixture('helper.md'), { unsafe: false })
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 0, doc.css('div.helper').size
doc = ExtendedMarkdownFilter.to_document(fixture('helper.md'), { unsafe: true })
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.helper').size
assert_equal 1, doc.css('h4.header').size
assert_equal 1, doc.css('a').size
assert_equal 1, doc.css('div.content').size
end
def test_intro
doc = ExtendedMarkdownFilter.to_document(fixture('intro.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.intro').size
assert_equal 1, doc.css('a').size # the inner Markdown converted!
end
def test_block_intro
doc = ExtendedMarkdownFilter.to_document(fixture('block_intro.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.intro').size
assert_equal 1, doc.css('a').size # the inner Markdown converted!
end
def test_intro_conversion
doc = ExtendedMarkdownFilter.to_document(fixture('intro.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.intro').size
assert_equal 1, doc.css('a').size # the inner Markdown converted!
end
def test_os_blocks
doc = ExtendedMarkdownFilter.to_document(fixture('os_blocks.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.platform-mac').size
assert_equal 1, doc.css('div.platform-windows').size
assert_equal 1, doc.css('div.platform-linux').size
assert_equal 1, doc.css('div.platform-all').size
# the inner Markdown converted!
assert_equal 3, doc.css('ol').size
assert_equal 2, doc.css('a').size
assert_equal 1, doc.css('em').size
end
def test_block_os_blocks
doc = ExtendedMarkdownFilter.to_document(fixture('block_os_blocks.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.platform-mac').size
assert_equal 1, doc.css('div.platform-windows').size
assert_equal 1, doc.css('div.platform-linux').size
assert_equal 1, doc.css('div.platform-all').size
# the inner Markdown converted!
assert_equal 3, doc.css('ol').size
assert_equal 2, doc.css('a').size
assert_equal 1, doc.css('em').size
end
def test_block_conversion
doc = ExtendedMarkdownFilter.to_document(fixture('os_blocks.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.platform-mac').size
assert_equal 1, doc.css('div.platform-windows').size
assert_equal 1, doc.css('div.platform-linux').size
assert_equal 1, doc.css('div.platform-all').size
# the inner Markdown converted!
assert_equal 3, doc.css('ol').size
assert_equal 2, doc.css('a').size
assert_equal 1, doc.css('em').size
end
def test_admonition
doc = ExtendedMarkdownFilter.to_document(fixture('admonition.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.tip').size
assert_equal 1, doc.css('div.note').size
assert_equal 1, doc.css('div.warning').size
assert_equal 1, doc.css('div.danger').size
# the inner Markdown converted!
assert_equal 1, doc.css('strong').size
assert_equal 1, doc.css('del').size
end
def test_block_admonition
doc = ExtendedMarkdownFilter.to_document(fixture('block_admonition.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.tip').size
assert_equal 1, doc.css('div.note').size
assert_equal 1, doc.css('div.warning').size
assert_equal 1, doc.css('div.danger').size
# the inner Markdown converted!
assert_equal 1, doc.css('strong').size
assert_equal 1, doc.css('del').size
end
def test_admonition_conversion
doc = ExtendedMarkdownFilter.to_document(fixture('admonition.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('div.tip').size
assert_equal 1, doc.css('div.note').size
assert_equal 1, doc.css('div.warning').size
assert_equal 1, doc.css('div.danger').size
# the inner Markdown converted!
assert_equal 1, doc.css('strong').size
assert_equal 1, doc.css('del').size
end
def test_octicon
doc = ExtendedMarkdownFilter.to_document(fixture('octicon.md'), {})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('span.octicon-cat').size
assert_match '{{ octicon dog', doc.to_s
assert_match '<p><a href="http://alink.com">Click <span class="octicon octicon-gear" aria-label="Settings " title="Settings "></span></a></p>', doc.to_s
end
def test_block_octicon
doc = ExtendedMarkdownFilter.to_document(fixture('block_octicon.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('span.octicon-cat').size
assert_match '[[ octicon dog', doc.to_s
assert_match '<p><a href="http://alink.com">Click <span class="octicon octicon-gear" aria-label="Settings " title="Settings "></span></a></p>', doc.to_s
end
def test_oction_conversion
doc = ExtendedMarkdownFilter.to_document(fixture('octicon.md'), {emf_use_blocks: true})
assert doc.kind_of?(HTML::Pipeline::DocumentFragment)
assert_equal 1, doc.css('span.octicon-cat').size
assert_match '{{ octicon dog', doc.to_s
assert_match '<p><a href="http://alink.com">Click <span class="octicon octicon-gear" aria-label="Settings" title="Settings"></span></a></p>', doc.to_s
end
end
| 38.480583 | 156 | 0.712249 |
e90ffe0cb033708b0d238f79685665993f1d5f4e | 2,522 | # frozen_string_literal: true
require "active_record_doctor/detectors/base"
module ActiveRecordDoctor
module Detectors
# Detect indexes whose function can be overtaken by other indexes. For example, an index on columns A, B, and C
# can also serve as an index on A and A, B.
class ExtraneousIndexes < Base
@description = "Detect extraneous indexes"
private
# Builds the human-readable problem message for one extraneous index.
# replacement_indexes is an array of index names that can take over, or nil
# when the index merely duplicates the primary key.
def message(extraneous_index:, replacement_indexes:)
  reason =
    if replacement_indexes
      "can be replaced by #{replacement_indexes.join(' or ')}"
    else
      "coincides with the primary key on the table"
    end
  "remove #{extraneous_index} - #{reason}"
end
def detect
subindexes_of_multi_column_indexes
indexed_primary_keys
end
def subindexes_of_multi_column_indexes
tables.each do |table|
next if table == "schema_migrations"
indexes = indexes(table)
maximal_indexes = indexes.select do |index|
maximal?(indexes, index)
end
indexes.each do |index|
next if maximal_indexes.include?(index)
replacement_indexes = maximal_indexes.select do |maximum_index|
cover?(maximum_index, index)
end.map(&:name).sort
problem!(
extraneous_index: index.name,
replacement_indexes: replacement_indexes
)
end
end
end
def indexed_primary_keys
tables.each do |table|
next if table == "schema_migrations"
indexes(table).each do |index|
next if index.columns != ["id"]
problem!(
extraneous_index: index.name,
replacement_indexes: nil
)
end
end
end
def maximal?(indexes, index)
indexes.all? do |another_index|
index == another_index || !cover?(another_index, index)
end
end
# Does lhs cover rhs?
def cover?(lhs, rhs)
  if lhs.unique && rhs.unique
    # Two unique indexes are interchangeable only when identical.
    lhs.columns == rhs.columns
  elsif rhs.unique
    # A non-unique index can never stand in for a unique one.
    false
  else
    # Otherwise lhs covers rhs when rhs's columns are a leading prefix of lhs's.
    prefix?(rhs, lhs)
  end
end

# Is lhs's column list a leading prefix of rhs's column list?
def prefix?(lhs, rhs)
  rhs.columns.first(lhs.columns.count) == lhs.columns
end
def indexes(table_name)
super.select { |index| index.columns.is_a?(Array) }
end
end
end
end
| 26.547368 | 115 | 0.584853 |
6266c51e3c242fd09555309e8a0778aec8fcf5d4 | 2,146 | # server-based syntax
# ======================
# Defines a single server with a list of roles and multiple properties.
# You can define all roles on a single server, or split them:
# server "example.com", user: "deploy", roles: %w{app db web}, my_property: :my_value
# server "example.com", user: "deploy", roles: %w{app web}, other_property: :other_value
# server "db.example.com", user: "deploy", roles: %w{db}
server "space.fail", user: "rails", roles: %w(app db web)
# role-based syntax
# ==================
# Defines a role with one or multiple servers. The primary server in each
# group is considered to be the first unless any hosts have the primary
# property set. Specify the username and a domain or IP for the server.
# Don't use `:all`, it's a meta role.
# role :app, %w{[email protected]}, my_property: :my_value
# role :web, %w{[email protected] [email protected]}, other_property: :other_value
# role :db, %w{[email protected]}
# Configuration
# =============
# You can set any configuration variable like in config/deploy.rb
# These variables are then only loaded and set in this stage.
# For available Capistrano configuration variables see the documentation page.
# http://capistranorb.com/documentation/getting-started/configuration/
# Feel free to add new variables to customise your setup.
# Custom SSH Options
# ==================
# You may pass any option but keep in mind that net/ssh understands a
# limited set of options, consult the Net::SSH documentation.
# http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start
#
# Global options
# --------------
# set :ssh_options, {
# keys: %w(/home/rlisowski/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(password)
# }
#
# The server-based syntax can be used to override options:
# ------------------------------------
# server "example.com",
# user: "user_name",
# roles: %w{web app},
# ssh_options: {
# user: "user_name", # overrides user setting above
# keys: %w(/home/user_name/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(publickey password)
# # password: "please use keys"
# }
| 34.063492 | 88 | 0.669152 |
1af1ebb63d7faa5571917956271cf2cda3659853 | 447 | #
# Cookbook Name:: cookbook-openshift3
# Recipe:: adhoc_reset
#
# Copyright (c) 2015 The Authors, All Rights Reserved.
# Reset this host unless it belongs to the control plane, then reinstall
# docker and clear the one-shot "adhoc reset" flag file.
server_info = OpenShiftHelper::NodeHelper.new(node)
is_control_plane_server = server_info.on_control_plane_server?

# Guard: never wipe a control-plane (master) node.
openshift_reset_host node['fqdn'] do
  not_if { is_control_plane_server }
end

include_recipe 'cookbook-openshift3::docker'

# Delete the flag so the reset does not run again on the next converge.
file node['cookbook-openshift3']['adhoc_reset_control_flag'] do
  action :delete
end
| 23.526316 | 63 | 0.794183 |
edb85b55c7fe986479a1542e4728411a7c06caaf | 1,879 |
# Licensed to the Apache Software Foundation (ASF) under one or more contributor
# license agreements. See the NOTICE.txt file distributed with this work for
# additional information regarding copyright ownership. The ASF licenses this
# file to you under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
desc 'query (uses sql-like syntax)'
long_desc 'query may contain "from" clause instead of specifying collection_name'
arg_name '[collection_name] query'
command :query do |c|
c.action do |global_options,options,args|
case args.size
when 2
type = args[0]
query = args[1]
when 1
query = args[0]
else
help_now!
end
parsed_query = parse_sql query
if type == nil && parsed_query['from']
type = parsed_query['from']
query.gsub! /from\s+#{type}/i, ''
end
help_now! 'collection_name or sql from clause is required' unless type
params = {}
if parsed_query['limit']
limit = parsed_query['limit']
query.gsub! /limit\s+#{limit}/i, ''
params[:limit] = limit
end
resource = $application[type]
if $settings.show_curl?
options = options.merge({ql: query}) if query
puts_curl(:get, resource)
else
response = resource.query query, params
format_collection response.collection, parsed_query['select']
save_response response
end
end
end
| 30.803279 | 81 | 0.691325 |
01c92f92a884511f7826065af7a9d49aaa8a55f8 | 1,989 | # frozen_string_literal: true
RSpec.describe Lic::Plugin::API::Source do
let(:uri) { "uri://to/test" }
let(:type) { "spec_type" }
subject(:source) do
klass = Class.new
klass.send :include, Lic::Plugin::API::Source
klass.new("uri" => uri, "type" => type)
end
describe "attributes" do
it "allows access to uri" do
expect(source.uri).to eq("uri://to/test")
end
it "allows access to name" do
expect(source.name).to eq("spec_type at uri://to/test")
end
end
context "post_install" do
let(:installer) { double(:installer) }
before do
allow(Lic::Source::Path::Installer).to receive(:new) { installer }
end
it "calls Path::Installer's post_install" do
expect(installer).to receive(:post_install).once
source.post_install(double(:spec))
end
end
context "install_path" do
let(:uri) { "uri://to/a/repository-name" }
let(:hash) { Digest(:SHA1).hexdigest(uri) }
let(:install_path) { Pathname.new "/lic/install/path" }
before do
allow(Lic).to receive(:install_path) { install_path }
end
it "returns basename with uri_hash" do
expected = Pathname.new "#{install_path}/repository-name-#{hash[0..11]}"
expect(source.install_path).to eq(expected)
end
end
context "to_lock" do
it "returns the string with remote and type" do
expected = strip_whitespace <<-L
PLUGIN SOURCE
remote: #{uri}
type: #{type}
specs:
L
expect(source.to_lock).to eq(expected)
end
context "with additional options to lock" do
before do
allow(source).to receive(:options_to_lock) { { "first" => "option" } }
end
it "includes them" do
expected = strip_whitespace <<-L
PLUGIN SOURCE
remote: #{uri}
type: #{type}
first: option
specs:
L
expect(source.to_lock).to eq(expected)
end
end
end
end
| 23.963855 | 78 | 0.600804 |
bb9d61a6b16a3e77ad13f1caa008655081a1f1e8 | 3,153 | # Play nice with Ruby 3 (and rubocop)
# frozen_string_literal: true
module PafsCore
  # Helpers shared by project steps that deal with standard-of-protection
  # risk levels: the category lists, symbol lookups, and the validations
  # comparing protection before and after the project.
  module StandardOfProtection
    # Flood risk categories; the stored integer is the index into this list.
    STANDARD_OF_PROTECTION_FLOODING = %i[very_significant significant moderate low].freeze

    # Coastal erosion risk categories before the project (stored as index).
    STANDARD_OF_PROTECTION_COASTAL_BEFORE = %i[
      less_than_one_year one_to_four_years five_to_nine_years ten_years_or_more
    ].freeze

    # Coastal erosion risk categories after the project (stored as index).
    STANDARD_OF_PROTECTION_COASTAL_AFTER = %i[
      less_than_ten_years ten_to_nineteen_years twenty_to_fortynine_years fifty_years_or_more
    ].freeze

    # Read and write the protection attributes straight through to :project.
    %i[flood_protection_before
       flood_protection_after
       coastal_protection_before
       coastal_protection_after].each do |attribute|
      delegate attribute, "#{attribute}=", to: :project
    end

    # Flood levels: 0 - Very significant, 1 - Significant, 2 - Moderate, 3 - Low
    def flood_risk_options
      STANDARD_OF_PROTECTION_FLOODING
    end

    def flood_risk_symbol(n)
      STANDARD_OF_PROTECTION_FLOODING[n]
    end

    # Coastal (before): 0 - Less than 1 year, 1 - 1 to 4 years,
    # 2 - 5 to 9 years, 3 - 10 years or more
    def coastal_erosion_before_options
      STANDARD_OF_PROTECTION_COASTAL_BEFORE
    end

    def coastal_risk_before_symbol(n)
      STANDARD_OF_PROTECTION_COASTAL_BEFORE[n]
    end

    # Coastal (after): 0 - Less than 10 years, 1 - 10 to 19 years,
    # 2 - 20 to 49 years, 3 - 50 years or more
    def coastal_erosion_after_options
      STANDARD_OF_PROTECTION_COASTAL_AFTER
    end

    def coastal_risk_after_symbol(n)
      STANDARD_OF_PROTECTION_COASTAL_AFTER[n]
    end

    def standard_of_protection_label(t)
      I18n.t(t, scope: "pafs_core.standard_of_protection")
    end

    # Validation for flood protection: the risk must not get worse.
    def flood_protection_should_be_same_or_better_for(attr)
      return unless flood_protection_before.present? && flood_protection_after.present?
      return unless flood_protection_before > flood_protection_after

      errors.add(attr, "^Once the project is complete, the flood risk "\
                       "must not be greater than it is now")
    end

    # Validation for coastal erosion protection: cannot drop from the best
    # "before" category to the worst "after" category.
    def coastal_erosion_protection_should_be_same_or_better_for(attr)
      return unless coastal_protection_before.present? && coastal_protection_after.present?
      return unless coastal_erosion_before_options[coastal_protection_before] == :ten_years_or_more
      return unless coastal_erosion_after_options[coastal_protection_after] == :less_than_ten_years

      errors.add(attr, "^Once the project is complete, the length of time before"\
                       " coastal erosion affects the area must not be less than it"\
                       " is now")
    end

    # Map a human-readable FCERM1 label back to its symbol.
    def sop_from_string(value)
      I18n.t("pafs_core.fcerm1.standard_of_protection").invert[value]
    end
  end
end
| 29.745283 | 91 | 0.697748 |
abfca768ecad2f51488891e48451a277a6527e2e | 1,983 | require 'json'
# Manifests for this module and for the core app package; the podspec
# mirrors their metadata and version constraints.
package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
app_package = JSON.parse(File.read(File.join('..', 'app', 'package.json')))

core_version_detected = app_package['version']
core_version_required = package['peerDependencies'][app_package['name']]
firebase_sdk_version = app_package['sdkVersions']['ios']['firebase']

# Warn (don't fail) when the installed core package doesn't satisfy the
# declared peer dependency.
if core_version_detected != core_version_required
  Pod::UI.warn "NPM package '#{package['name']}' depends on '#{app_package['name']}' v#{core_version_required} but found v#{core_version_detected}, this might cause build issues or runtime crashes."
end
Pod::Spec.new do |s|
  s.name = "RNFBFunctions"
  s.version = package["version"]
  s.description = package["description"]
  s.summary = <<-DESC
    A well tested feature rich Firebase implementation for React Native, supporting iOS & Android.
  DESC
  s.homepage = "http://invertase.io/oss/react-native-firebase"
  s.license = package['license']
  s.authors = "Invertase Limited"
  s.source = { git: "https://github.com/invertase/react-native-firebase.git", tag: "v#{s.version}" }
  s.social_media_url = 'http://twitter.com/invertaseio'
  s.ios.deployment_target = "9.0"
  s.source_files = 'ios/**/*.{h,m}'

  # React Native dependencies
  s.dependency 'React'
  s.dependency 'RNFBApp'

  # The app's Podfile may pin a specific Firebase iOS SDK version.
  if defined?($FirebaseSDKVersion)
    Pod::UI.puts "#{s.name}: Using user specified Firebase SDK version '#{$FirebaseSDKVersion}'"
    firebase_sdk_version = $FirebaseSDKVersion
  end

  # Firebase dependencies
  s.dependency 'Firebase/Functions', firebase_sdk_version

  # The app's Podfile may also force static-framework builds.
  if defined?($RNFirebaseAsStaticFramework)
    Pod::UI.puts "#{s.name}: Using overridden static_framework value of '#{$RNFirebaseAsStaticFramework}'"
    s.static_framework = $RNFirebaseAsStaticFramework
  else
    s.static_framework = false
  end
end
| 43.108696 | 193 | 0.660615 |
79404691b1ad66cecd3410df0edd9eaad02a81a4 | 820 | module Battle
module Effects
  class Ability
    # Bad Dreams: at the end of each turn, every sleeping foe of the
    # ability holder loses 1/8 of its max HP.
    class BadDreams < Ability
      # Function called at the end of a turn
      # @param logic [Battle::Logic] logic of the battle
      # @param scene [Battle::Scene] battle scene
      # @param battlers [Array<PFM::PokemonBattler>] all alive battlers
      def on_end_turn_event(logic, scene, battlers)
        return unless battlers.include?(@target)

        sleeping_foes = logic.foes_of(@target).select(&:asleep?)
        return if sleeping_foes.empty?

        scene.visual.show_ability(@target)
        sleeping_foes.each do |foe|
          damage = foe.max_hp / 8
          # At least 1 HP is always drained.
          logic.damage_handler.damage_change(damage.clamp(1, Float::INFINITY), @target)
        end
      end
    end
    register(:bad_dreams, BadDreams)
  end
end
end
| 34.166667 | 85 | 0.639024 |
79206c1004b837421c4b96f7395958023b6cbfc2 | 766 | require 'json'
# Mirror the JS package's metadata in the podspec.
package = JSON.parse(File.read(File.join(__dir__, 'package.json')))

Pod::Spec.new do |s|
  s.name = 'RNXMPP'
  s.version = package['version']
  s.summary = package['description']
  s.description = package['description']
  s.homepage = package['homepage']
  s.license = package['license']
  s.author = package['author']
  s.source = { git: 'https://github.com/AssacNetworks/react-native-xmpp-assac.git', tag: s.version }
  s.requires_arc = true
  s.platform = :ios, '8.0'

  s.dependency 'React'
  s.dependency 'XMPPFramework'

  s.preserve_paths = 'package.json', 'index.js'
  s.source_files = 'RNXMPP/*.{h,m}'
end
| 33.304348 | 119 | 0.558747 |
016cdeb94f01acefb775ea707358f991aa628538 | 1,031 | # frozen_string_literal: true
require 'rails_helper'
require 'models/shared_examples/shared_examples_for_loadable'
require 'models/shared_examples/shared_examples_for_exportable'
RSpec.describe IpedsIcPy, type: :model do
  it_behaves_like 'a loadable model', skip_lines: 0
  it_behaves_like 'an exportable model', skip_lines: 0

  describe 'when validating' do
    subject(:ipeds_ic_py) { build(:ipeds_ic_py) }

    it 'has a valid factory' do
      expect(ipeds_ic_py).to be_valid
    end

    it 'requires cross' do
      expect(build(:ipeds_ic_py, cross: nil)).to_not be_valid
    end

    # Both charge fields must be numeric.
    %i[chg1py3 books].each do |field|
      it "requires numeric #{field}" do
        expect(build(:ipeds_ic_py, field => 'abc')).to_not be_valid
      end
    end

    it 'sets tuition_in_state and tuition_out_of_state' do
      expect(ipeds_ic_py.tuition_in_state).to eq(ipeds_ic_py.chg1py3)
      expect(ipeds_ic_py.tuition_out_of_state).to eq(ipeds_ic_py.chg1py3)
    end
  end
end
| 28.638889 | 73 | 0.739088 |
f7a02a72e99d843f401c65b8a6a7a01314345be0 | 1,827 | module SemanticFormHelper
# Wraps a rendered form field in the standard semantic markup: an outer
# "<type>-field" div, a label, and an "input" div around the field itself.
# Radio/check fields get their label after the input; submit gets no label.
# options: :class, :help, :before, :after.
# Returns the markup as an array of HTML fragments (caller joins them).
def wrapping(type, field_name, label, field, options = {})
  help = %Q{<span class="help">#{options[:help]}</span>} if options[:help]
  label_first = !["radio", "check", "submit"].include?(type)
  label_last = ["radio", "check"].include?(type)

  html = []
  html << %Q{<div class="#{type}-field #{options[:class]}">}
  html << %Q{<label for="#{field_name}">#{label}#{help}</label>} if label_first
  html << %Q{<div class="input">}
  html << options[:before] if options[:before]
  html << field
  html << options[:after] if options[:after]
  html << %Q{<label for="#{field_name}">#{label}</label>} if label_last
  html << %Q{</div></div>}
end
# Wraps a set of already-rendered fields in the standard group markup: an
# outer "<type>-fields" div, a shared label (with optional help text from
# options[:help]), and an "input" div containing the joined fields.
# Returns the markup as an array of HTML fragments (caller joins them).
def semantic_group(type, field_name, label, fields, options = {})
  help = %Q{<span class="help">#{options[:help]}</span>} if options[:help]

  [
    %Q{<div class="#{type}-fields #{options[:class]}">},
    %Q{<label for="#{field_name}">#{label}#{help}</label>},
    %Q{<div class="input">},
    fields.join,
    %Q{</div></div>}
  ]
end
# Wraps a boolean input (check box / radio) with its label text, followed
# by an optional help div. Returns an array of HTML fragments.
# NOTE: name and value are unused but kept for interface compatibility.
def boolean_field_wrapper(input, name, value, text, help = nil)
  fragments = [%Q{<label>#{input} #{text}</label>}]
  fragments << %Q{<div class="help">#{help}</div>} if help
  fragments
end
# Renders a group of check boxes from +values+, which may mix plain values
# and hashes of the form { value:, label:, help: }. options[:label] is the
# group label; remaining options are forwarded to semantic_group.
#
# Fix: each item's help text is now forwarded to boolean_field_wrapper
# (it was previously extracted via item.delete(:help) and then dropped),
# and reset between iterations so a hash item's help cannot leak into a
# following plain item.
def check_box_tag_group(name, values, options = {})
  selections = []
  values.each do |item|
    if item.is_a?(Hash)
      value = item[:value]
      text = item[:label]
      help = item.delete(:help) # NOTE: mutates the caller's hash
    else
      value = item
      text = item
      help = nil
    end
    box = check_box_tag(name, value)
    selections << boolean_field_wrapper(box, name, value, text, help)
  end
  label = options[:label]
  semantic_group("check-box", name, label, selections, options)
end
end
| 35.134615 | 121 | 0.592775 |
6ad685524c03f370dea0054e9b8bccbb433fafeb | 6,650 | #
# Cookbook Name:: rabbitmq
# Recipe:: default
#
# Copyright 2009, Benjamin Black
# Copyright 2009-2013, Opscode, Inc.
# Copyright 2012, Kevin Nuckolls <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Mix the cookbook's helper methods (e.g. format_kernel_parameters) into
# every resource so recipes and templates can call them.
Chef::Resource.send(:include, Opscode::RabbitMQ)

include_recipe 'erlang'
## Install the package.
# Picks the install strategy per platform family: distro package vs the
# upstream rabbitmq.com package, then wires up service/job control.
case node['platform_family']
when 'debian'
  # installs the required setsid command -- should be there by default but just in case
  package 'util-linux'

  if node['rabbitmq']['use_distro_version']
    package 'rabbitmq-server'
  else
    # we need to download the package
    deb_package = "https://www.rabbitmq.com/releases/rabbitmq-server/v#{node['rabbitmq']['version']}/rabbitmq-server_#{node['rabbitmq']['version']}-1_all.deb"
    remote_file "#{Chef::Config[:file_cache_path]}/rabbitmq-server_#{node['rabbitmq']['version']}-1_all.deb" do
      source deb_package
      action :create_if_missing
    end
    dpkg_package "#{Chef::Config[:file_cache_path]}/rabbitmq-server_#{node['rabbitmq']['version']}-1_all.deb"
  end

  # Configure job control
  if node['rabbitmq']['job_control'] == 'upstart'
    # We start with stock init.d, remove it if we're not using init.d, otherwise leave it alone
    service node['rabbitmq']['service_name'] do
      action [:stop]
      # FIX: File.exist? -- the File.exists? alias was deprecated and removed in Ruby 3.2.
      only_if { File.exist?('/etc/init.d/rabbitmq-server') }
    end

    execute 'remove rabbitmq init.d command' do
      command 'update-rc.d -f rabbitmq-server remove'
    end

    file '/etc/init.d/rabbitmq-server' do
      action :delete
    end

    template "/etc/init/#{node['rabbitmq']['service_name']}.conf" do
      source 'rabbitmq.upstart.conf.erb'
      owner 'root'
      group 'root'
      mode 0644
      variables(:max_file_descriptors => node['rabbitmq']['max_file_descriptors'])
    end

    service node['rabbitmq']['service_name'] do
      provider Chef::Provider::Service::Upstart
      action [:enable, :start]
      # restart_command "stop #{node['rabbitmq']['service_name']} && start #{node['rabbitmq']['service_name']}"
    end
  end

  ## You'll see setsid used in all the init statements in this cookbook. This
  ## is because there is a problem with the stock init script in the RabbitMQ
  ## debian package (at least in 2.8.2) that makes it not daemonize properly
  ## when called from chef. The setsid command forces the subprocess into a state
  ## where it can daemonize properly. -Kevin (thanks to Daniel DeLeo for the help)
  if node['rabbitmq']['job_control'] == 'initd'
    service node['rabbitmq']['service_name'] do
      start_command 'setsid /etc/init.d/rabbitmq-server start'
      stop_command 'setsid /etc/init.d/rabbitmq-server stop'
      restart_command 'setsid /etc/init.d/rabbitmq-server restart'
      status_command 'setsid /etc/init.d/rabbitmq-server status'
      supports :status => true, :restart => true
      action [:enable, :start]
    end
  end
when 'rhel', 'fedora'
  # This is needed since Erlang Solutions' packages provide "esl-erlang"; this package just requires "esl-erlang" and provides "erlang".
  if node['erlang']['install_method'] == 'esl'
    remote_file "#{Chef::Config[:file_cache_path]}/esl-erlang-compat.rpm" do
      source 'https://github.com/jasonmcintosh/esl-erlang-compat/blob/master/rpmbuild/RPMS/noarch/esl-erlang-compat-R14B-1.el6.noarch.rpm?raw=true'
    end
    rpm_package "#{Chef::Config[:file_cache_path]}/esl-erlang-compat.rpm"
  end

  if node['rabbitmq']['use_distro_version']
    package 'rabbitmq-server'
  else
    # We need to download the rpm
    rpm_package = "https://www.rabbitmq.com/releases/rabbitmq-server/v#{node['rabbitmq']['version']}/rabbitmq-server-#{node['rabbitmq']['version']}-1.noarch.rpm"
    remote_file "#{Chef::Config[:file_cache_path]}/rabbitmq-server-#{node['rabbitmq']['version']}-1.noarch.rpm" do
      source rpm_package
      action :create_if_missing
    end
    rpm_package "#{Chef::Config[:file_cache_path]}/rabbitmq-server-#{node['rabbitmq']['version']}-1.noarch.rpm"
  end

  service node['rabbitmq']['service_name'] do
    action [:enable, :start]
  end
when 'suse'
  # rabbitmq-server-plugins needs to be first so they both get installed
  # from the right repository. Otherwise, zypper will stop and ask for a
  # vendor change.
  package 'rabbitmq-server-plugins'
  package 'rabbitmq-server'

  service node['rabbitmq']['service_name'] do
    action [:enable, :start]
  end
when 'smartos'
  package 'rabbitmq'

  # epmd (Erlang port mapper) must be running before the broker starts.
  service 'epmd' do
    action :start
  end

  service node['rabbitmq']['service_name'] do
    action [:enable, :start]
  end
end
# Ensure the broker's directories exist and are writable by rabbitmq.
# The log directory is only managed when a custom logdir is configured.
managed_dirs = []
managed_dirs << node['rabbitmq']['logdir'] if node['rabbitmq']['logdir']
managed_dirs << node['rabbitmq']['mnesiadir']

managed_dirs.each do |dir_path|
  directory dir_path do
    owner 'rabbitmq'
    group 'rabbitmq'
    mode '775'
    recursive true
  end
end

# Environment file read by the init scripts.
template "#{node['rabbitmq']['config_root']}/rabbitmq-env.conf" do
  source 'rabbitmq-env.conf.erb'
  owner 'root'
  group 'root'
  mode 00644
  notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end

# Main broker configuration; kernel parameters come from the cookbook helper.
template "#{node['rabbitmq']['config_root']}/rabbitmq.config" do
  source 'rabbitmq.config.erb'
  owner 'root'
  group 'root'
  mode 00644
  variables(kernel: format_kernel_parameters)
  notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end
# Read any erlang cookie already on disk so we only rewrite it (and bounce
# the broker) when the configured cookie actually differs.
# FIX: File.exist? -- the File.exists? alias was deprecated and removed in Ruby 3.2.
existing_erlang_key =
  if File.exist?(node['rabbitmq']['erlang_cookie_path'])
    File.read(node['rabbitmq']['erlang_cookie_path']).strip
  else
    ''
  end

if node['rabbitmq']['cluster'] && (node['rabbitmq']['erlang_cookie'] != existing_erlang_key)
  template node['rabbitmq']['erlang_cookie_path'] do
    source 'doterlang.cookie.erb'
    owner 'rabbitmq'
    group 'rabbitmq'
    mode 00400
    # Stop, rewrite cookie, start, then reset so the node can (re)join the cluster.
    notifies :stop, "service[#{node['rabbitmq']['service_name']}]", :immediately
    notifies :start, "service[#{node['rabbitmq']['service_name']}]", :immediately
    notifies :run, 'execute[reset-node]', :immediately
  end

  # Need to reset for clustering #
  execute 'reset-node' do
    command 'rabbitmqctl stop_app && rabbitmqctl reset && rabbitmqctl start_app'
    action :nothing
  end
end
| 33.25 | 161 | 0.699398 |
280b981223017c7ebd562a9d695b2ee11f10ea12 | 10,630 | # frozen_string_literal: true
require 'rails_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to specify the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
#
# Compared to earlier versions of this generator, there is very limited use of
# stubs and message expectations in this spec. Stubs are only used when there
# is no simpler way to get a handle on the object needed for the example.
# Message expectations are only used when there is no simpler way to specify
# that an instance is receiving a specific message.
#
# Also compared to earlier versions of this generator, there are no longer any
# expectations of assigns and templates rendered. These features have been
# removed from Rails core in Rails 5, but can be added back in via the
# `rails-controller-testing` gem.
RSpec.describe RequestsController, type: :controller do
  let(:user) { FactoryBot.create :employee }
  let(:sudo_user) { FactoryBot.create :user }
  let(:sudo_user2) { FactoryBot.create :admin }
  let(:project) { FactoryBot.create :project }

  # This should return the minimal set of attributes required to create a valid
  # Request. As you add validations to Request, be sure to
  # adjust the attributes here as well.
  # NOTE: the leading '' entries in the *_ids arrays mimic the blank value
  # Rails multi-selects submit alongside the selected ids.
  let(:valid_attributes) do
    {
      name: 'myvm',
      cpu_cores: 2,
      ram_gb: 1,
      storage_gb: 2,
      operating_system: 'MyOS',
      description: 'Description',
      comment: 'Comment',
      status: 'pending',
      user: user,
      responsible_user_ids: [user.id],
      sudo_user_ids: ['', sudo_user.id.to_s, sudo_user2.id.to_s],
      user_ids: ['', user.id.to_s],
      project_id: project.id
    }
  end

  # Invalid: blank name/OS/description plus out-of-range resource values.
  let(:invalid_attributes) do
    {
      name: '',
      cpu_cores: 2,
      ram_gb: 1000,
      storage_gb: -2000,
      operating_system: '',
      description: '',
      comment: 'Comment',
      status: 'pending',
      user: user,
      sudo_user_ids: ['', sudo_user.id.to_s], # the first parameter is for some reason always empty
      project_id: project.id
    }
  end

  # one of the attributes should be higher than the max resource of the host list
  let(:to_high_resource_attributes) do
    {
      name: 'myvm',
      cpu_cores: 1_000_000_000_000_000,
      ram_gb: 1,
      storage_gb: 2,
      operating_system: 'MyOS',
      description: 'Description',
      comment: 'Comment',
      status: 'pending',
      user: user,
      responsible_user_ids: [user.id],
      sudo_user_ids: ['', sudo_user.id.to_s, sudo_user2.id.to_s],
      project_id: project.id
    }
  end

  let(:host) do
    v_sphere_host_mock('someHost')
  end

  # Authenticate an user
  before do
    sign_in user
    allow(VSphere::Host).to receive(:all).and_return [host]
  end

  # This should return the minimal set of values that should be in the session
  # in order to pass any filters (e.g. authentication) defined in
  # RequestsController. Be sure to keep this updated too.
  describe 'GET #index' do
    it 'returns a success response' do
      Request.create! valid_attributes
      get :index, params: {}
      expect(response).to be_successful
    end
  end

  describe 'GET #show' do
    it 'returns a success response' do
      request = Request.create! valid_attributes
      get :show, params: { id: request.to_param }
      expect(response).to be_successful
    end
  end

  describe 'GET #new' do
    it 'returns a success response' do
      get :new, params: {}
      expect(response).to be_successful
    end
  end

  describe 'GET #edit' do
    it 'returns a success response' do
      request = Request.create! valid_attributes
      get :edit, params: { id: request.to_param }
      expect(response).to be_successful
    end
  end

  describe 'POST #create' do
    context 'with valid params' do
      it 'creates a new Request' do
        expect do
          post :create, params: { request: valid_attributes }
        end.to change(Request, :count).by(1)
      end

      it 'redirects to the request page index page' do
        post :create, params: { request: valid_attributes }
        expect(response).to redirect_to(requests_path)
      end

      it 'correctly assigns the sudo users' do
        post :create, params: { request: valid_attributes }
        expect(assigns(:request).sudo_users).to match_array([sudo_user, sudo_user2])
      end
    end

    context 'with invalid params' do
      it 'does not create a request if responible users are empty' do
        request_count = Request.all.size
        post :create, params: { request: valid_attributes.except(:responsible_user_ids) }
        expect(request_count).to equal(Request.all.size)
      end

      it 'returns a success response (i.e. to display the "new" template)' do
        post :create, params: { request: invalid_attributes }
        expect(response).to be_successful
      end

      # regression test for #320
      it 'assigns the sudo users to the request' do
        post :create, params: { request: invalid_attributes }
        expect(assigns(:request).sudo_users).to match_array([sudo_user])
      end

      # regression test for #320
      it 'does not persist the sudo users' do
        post :create, params: { request: invalid_attributes }
        expect(assigns(:request).sudo_user_assignments).to all(be_changed)
      end
    end

    context 'with to high resource params' do
      it 'does not create a request' do
        request_count = Request.all.size
        post :create, params: { request: to_high_resource_attributes }
        expect(request_count).to equal(Request.all.size)
      end

      it 'returns a success response' do
        post :create, params: { request: to_high_resource_attributes }
        expect(response).to be_successful
      end
    end
  end

  describe 'PATCH #reject' do
    let(:rejection_information) do
      'I do not want this request'
    end

    let(:rejection_attributes) do
      { rejection_information: rejection_information }
    end

    let(:the_request) do
      Request.create! valid_attributes
    end

    it 'rejects the request' do
      patch :reject, params: { id: the_request.to_param, request: rejection_attributes }
      the_request.reload
      expect(the_request).to be_rejected
    end

    it 'updates the rejection information' do
      patch :reject, params: { id: the_request.to_param, request: rejection_attributes }
      the_request.reload
      expect(the_request.rejection_information).to eql(rejection_information)
    end

    it 'redirects to requests page' do
      patch :reject, params: { id: the_request.to_param, request: rejection_attributes }
      expect(response).to redirect_to(requests_path)
    end
  end

  describe 'PATCH #update' do
    context 'accepts the request' do
      let(:new_attributes) do
        {
          name: 'mynewvm',
          cpu_cores: 3,
          ram_gb: 2,
          storage_gb: 3,
          operating_system: 'MyNewOS',
          comment: 'newComment',
          user: user,
          sudo_user_ids: ['', sudo_user.id.to_s, user.id.to_s],
          user_ids: ['']
        }
      end

      # this variable may not be called request, because it would then override an internal RSpec variable
      let(:the_request) do
        request = Request.create! valid_attributes
        # [1..-1] drops the leading '' placeholder before assigning.
        request.assign_sudo_users valid_attributes[:sudo_user_ids][1..-1]
        request.save!
        request
      end

      it 'redirects to the index page if no cluster is available' do
        allow(VSphere::Cluster).to receive(:all).and_return []
        patch :update, params: { id: the_request.to_param, request: new_attributes }
        expect(response).to redirect_to(requests_path)
      end

      it 'redirects to the index page if the cluster does not have a network' do
        cluster = double
        allow(cluster).to receive(:networks).and_return []
        allow(VSphere::Cluster).to receive(:all).and_return [cluster]
        patch :update, params: { id: the_request.to_param, request: new_attributes }
        expect(response).to redirect_to(requests_path)
      end

      context 'update already performed' do
        before do
          patch :update, params: { id: the_request.to_param, request: new_attributes }
          the_request.reload
        end

        it 'updates the request' do
          expect(the_request.name).to eq('mynewvm')
        end

        it 'redirects to the new VMS config' do
          expect(response).to redirect_to(edit_config_path(the_request.name))
        end

        it 'accepts the request' do
          expect(the_request).to be_accepted
        end

        it 'correctly updates the sudo users' do
          expect(the_request.sudo_users).to match_array([sudo_user, user])
        end
      end
    end

    context 'with invalid params' do
      let(:the_request) do
        Request.create! valid_attributes
      end

      it 'returns a success respond (i.e. to display the "edit" template)' do
        patch :update, params: { id: the_request.to_param, request: invalid_attributes }
        expect(response).to be_successful
      end
    end
  end

  describe 'DELETE #destroy' do
    it 'destroys the requested request' do
      request = Request.create! valid_attributes
      expect do
        delete :destroy, params: { id: request.to_param }
      end.to change(Request, :count).by(-1)
    end

    it 'redirects to the requests list' do
      request = Request.create! valid_attributes
      delete :destroy, params: { id: request.to_param }
      expect(response).to redirect_to(requests_url)
    end
  end

  describe 'POST #push_to_git' do
    # NOTE(review): the `request` local below is scoped to the before block;
    # the `request.to_param` in the example presumably resolves to the
    # controller spec's TestRequest helper instead — works only because
    # Request.find is stubbed. Worth confirming/renaming.
    before do
      request = Request.create! valid_attributes
      allow(request).to receive(:push_to_git).and_return(notice: 'Successfully pushed to git.')
      request_class = class_double('Request')
                      .as_stubbed_const(transfer_nested_constants: true)
      expect(request_class).to receive(:find) { request }
    end

    it 'redirects with a success message' do
      post :push_to_git, params: { id: request.to_param }
      expect(response).to redirect_to(requests_url)
    end
  end
end
| 32.408537 | 106 | 0.667074 |
1ca9979ce0326c45822ec034179bd782489ba9cb | 821 | $:.push File.expand_path("lib", __dir__)
# Pull in the gem's version constant:
require "embed_me/version"

# Gem metadata and dependency declarations:
Gem::Specification.new do |spec|
  spec.name = "embed_me"
  spec.version = EmbedMe::VERSION
  spec.authors = ["Tobias Bohn"]
  spec.email = ["[email protected]"]
  spec.homepage = "https://github.com/tobiasbhn/embed-me"
  spec.summary = "EmbedMe allows you and your users to easily embed your rails application or parts of it on other websites."
  spec.description = "EmbedMe allows you and your users to easily embed your rails application or parts of it on other websites."
  spec.license = "MIT"

  spec.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]

  spec.add_development_dependency "sqlite3"
end
| 39.095238 | 129 | 0.700365 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.