hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e87a5e04d190ed42dba8f366f0970a658584f6a3 | 4,084 | require 'erpify'
require 'locomotive/mounter'
require 'locomotive/wagon/server'
require 'ostruct'
require 'cgi'
require 'delegate'
require 'ooor/rack'
module Erpify
  # Mixed into Ooor::Base (see the include below) so ERP records can stand in
  # for Locomotive content entries inside Wagon templates.
  module ContentEntryHelper
    include Ooor::LocaleMapper

    # Builds a fake Locomotive content type for this ERP record; the slug is
    # the OOOR param key for the model.
    # NOTE(review): assumes Locomotive::Mounter.locale is set by the time this
    # runs — confirm against the Wagon request cycle.
    def content_type
      locale = Locomotive::Mounter.locale.to_s
      context = {'lang' => to_erp_locale(locale)}
      # Memoized per record, so the first locale seen wins.
      @content_type ||= OpenStruct.new(slug: self.class.param_key(context))
    end

    # The ERP record itself plays the role of the content entry.
    def content_entry
      self
    end

    def _slug
      to_param
    end

    def _permalink
      to_param
    end

    # Display label used by Locomotive; delegates to the record's name field.
    def _label
      name
    end
  end
end
# Make every OOOR model behave like a Locomotive content entry.
Ooor::Base.send :include, Erpify::ContentEntryHelper
# Wraps an arbitrary object and overrides only #slug with a fixed value,
# forwarding every other message to the wrapped object via SimpleDelegator.
class SlugDecorator < SimpleDelegator
  # The overriding slug supplied at construction time.
  attr_reader :slug

  def initialize(wrapped, slug)
    super(wrapped)
    @slug = slug
  end
end
module Locomotive
  module Mounter
    module Models
      # Monkey patch: when a page is backed by the generic 'ooor_entries'
      # content type, expose a decorated content type whose slug is taken from
      # the first segment of the page's own path instead.
      class Page < Base
        def content_type_with_erpify
          c_type = content_type_without_erpify
          if c_type && c_type.slug == 'ooor_entries'
            # NOTE(review): self.to_s is assumed to render the page path,
            # e.g. "res-partner/*" — confirm against Mounter::Models::Page#to_s.
            SlugDecorator.new(c_type, self.to_s.split('/')[0])
          else
            c_type
          end
        end
        # Classic alias-method-chain: keep the original reachable as
        # *_without_erpify and route #content_type through the patch.
        # The aliases must run after the method definition above.
        alias_method :content_type_without_erpify, :content_type
        alias_method :content_type, :content_type_with_erpify
      end
    end
  end
end
module Locomotive::Wagon
  class Server
    class Page < Middleware # sadly unlike Rails Wagon offers us no way to place our OOOR middleware at the right position, that's why we monkey patch instead
      # Wraps the original #call: sets up the OOOR Rack context for this
      # request and injects the Erpify Liquid drops into the assigns that
      # Wagon hands to the Liquid renderer.
      def call_with_erpify(env)
        ooor_rack = Ooor::Rack.new(@app)
        ooor_rack.set_ooor!(env)
        erpify_assigns = {
          "ooor_public_model" => Erpify::Liquid::Drops::OoorPublicModel.new(),
          "ooor_model" => Erpify::Liquid::Drops::OoorPublicModel.new(), #no authentication in Wagon
        }
        env["wagon.liquid_assigns"].merge!(erpify_assigns)
        call_without_erpify(env)
      end
      alias_method :call_without_erpify, :call
      alias_method :call, :call_with_erpify

      # Intentionally a no-op: Ooor::Rack#set_ooor! above already established
      # the context for this request.
      def set_ooor_context!(env)
      end
    end

    class TemplatizedPage < Middleware
      # Resolves the content entry backing a templatized page and stores it in
      # env['wagon.content_entry'] (or blanks the page when nothing matches).
      def set_content_entry!(env)
        # Capture the wildcard segment of the page path as the permalink ($1).
        %r(^#{self.page.safe_fullpath.gsub('*', '([^\/]+)')}$) =~ self.path
        permalink = $1
        if page.content_type_without_erpify.slug == 'ooor_entries'
          # ERP-backed page: the first path segment encodes the OOOR model
          # name ('res-partner' => 'res.partner').
          # NOTE(review): self.path.split('/')[0] is '' when the path starts
          # with '/' — presumably paths here are relative; confirm.
          method_or_key = self.path.split('/')[0].gsub('-', '.')
          lang = env['ooor']['ooor_context']['lang'] || 'en_US'
          model = Ooor::Base.connection_handler.retrieve_connection(Ooor.default_config).const_get(method_or_key, lang)
          env['wagon.content_entry'] = model.find_by_param(CGI::unescape(permalink))
        else
          # Regular Locomotive content type: look the entry up by permalink,
          # or clear the page (renders as not found) when no entry matches.
          if content_entry = self.page.content_type.find_entry(permalink)
            env['wagon.content_entry'] = content_entry
          else
            env['wagon.page'] = nil
          end
        end
      end
    end
  end
end
module Locomotive
  module Wagon
    module Liquid
      module Tags
        module PathHelper
          # Monkey patch: lets Liquid path tags (link_to, path_to...) accept an
          # OOOR record as the handle; falls back to the stock lookup otherwise.
          def retrieve_page_from_handle_with_erpify(context)
            mounting_point = context.registers[:mounting_point]
            # Search innermost scope first, mirroring Liquid variable lookup.
            context.scopes.reverse_each do |scope|
              handle = scope[@handle] || @handle
              if handle.is_a?(Ooor::Base)
                return fetch_page(mounting_point, handle, true)
              end
            end
            retrieve_page_from_handle_without_erpify(context)
          end
          alias_method :retrieve_page_from_handle_without_erpify, :retrieve_page_from_handle
          alias_method :retrieve_page_from_handle, :retrieve_page_from_handle_with_erpify
        end
      end
    end
  end
end
# Load the OOOR connection settings for the Wagon (development) environment.
# Failures are reported but non-fatal, so Wagon commands that do not need the
# ERP connection keep working.
begin
  config_file = "#{Dir.pwd}/config/ooor.yml"
  config = YAML.load_file(config_file)['development']
  Ooor.default_config = HashWithIndifferentAccess.new(config).merge(locale_rack_key: 'wagon.locale')
rescue SystemCallError, Psych::SyntaxError, NoMethodError
  # SystemCallError: file missing/unreadable; Psych::SyntaxError: malformed
  # YAML; NoMethodError: file parsed but lacks a 'development' section.
  # (The original used adjacent """ literals, which is just plain string
  # concatenation in Ruby — replaced with an ordinary two-line string.)
  puts "failed to load OOOR yaml configuration file.\n" \
       "make sure your app has a #{config_file} file correctly set up\n\n"
end
| 25.848101 | 158 | 0.644221 |
ed763db979a1fd051754a1a4f1ed378ee102a842 | 9,199 | # frozen_string_literal: true
# Cloud Foundry Java Buildpack
# Copyright 2013-2020 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'fileutils'
require 'net/http'
require 'uri'

require 'java_buildpack/component/versioned_dependency_component'
require 'java_buildpack/framework'
module JavaBuildpack
  module Framework

    # Encapsulates the functionality for enabling zero-touch AppDynamics support.
    class AppDynamicsAgent < JavaBuildpack::Component::VersionedDependencyComponent

      def initialize(context)
        super(context)
        @logger = JavaBuildpack::Logging::LoggerFactory.instance.get_logger AppDynamicsAgent
      end

      # (see JavaBuildpack::Component::BaseComponent#compile)
      def compile
        download_zip(false, @droplet.sandbox, 'AppDynamics Agent')
        # accessor for resources dir through @droplet?
        resources_dir = Pathname.new(File.expand_path('../../../resources', __dir__)).freeze
        default_conf_dir = resources_dir + @droplet.component_id + 'defaults'
        copy_appd_default_configuration(default_conf_dir)
        override_default_config_remote
        override_default_config_local
        @droplet.copy_resources
      end

      # (see JavaBuildpack::Component::BaseComponent#release)
      def release
        credentials = @application.services.find_service(FILTER, 'host-name')['credentials']

        java_opts = @droplet.java_opts
        java_opts.add_javaagent(@droplet.sandbox + 'javaagent.jar')

        application_name java_opts, credentials
        tier_name java_opts, credentials
        node_name java_opts, credentials
        account_access_key java_opts, credentials
        account_name java_opts, credentials
        host_name java_opts, credentials
        port java_opts, credentials
        ssl_enabled java_opts, credentials
        unique_host_name java_opts
      end

      protected

      # (see JavaBuildpack::Component::VersionedDependencyComponent#supports?)
      def supports?
        @application.services.one_service? FILTER, 'host-name'
      end

      private

      CONFIG_FILES = %w[logging/log4j2.xml logging/log4j.xml app-agent-config.xml controller-info.xml
                        service-endpoint.xml transactions.xml custom-interceptors.xml
                        custom-activity-correlation.xml].freeze

      FILTER = /app[-]?dynamics/.freeze

      private_constant :CONFIG_FILES, :FILTER

      def application_name(java_opts, credentials)
        name = credentials['application-name'] || @configuration['default_application_name'] ||
               @application.details['application_name']
        java_opts.add_system_property('appdynamics.agent.applicationName', "\\\"#{name}\\\"")
      end

      def account_access_key(java_opts, credentials)
        account_access_key = credentials['account-access-key'] || credentials.dig('account-access-secret', 'secret')
        java_opts.add_system_property 'appdynamics.agent.accountAccessKey', account_access_key if account_access_key
      end

      def account_name(java_opts, credentials)
        account_name = credentials['account-name']
        java_opts.add_system_property 'appdynamics.agent.accountName', account_name if account_name
      end

      def host_name(java_opts, credentials)
        host_name = credentials['host-name']
        raise "'host-name' credential must be set" unless host_name

        java_opts.add_system_property 'appdynamics.controller.hostName', host_name
      end

      def node_name(java_opts, credentials)
        name = credentials['node-name'] || @configuration['default_node_name']
        java_opts.add_system_property('appdynamics.agent.nodeName', name.to_s)
      end

      def port(java_opts, credentials)
        port = credentials['port']
        java_opts.add_system_property 'appdynamics.controller.port', port if port
      end

      def ssl_enabled(java_opts, credentials)
        ssl_enabled = credentials['ssl-enabled']
        java_opts.add_system_property 'appdynamics.controller.ssl.enabled', ssl_enabled if ssl_enabled
      end

      def tier_name(java_opts, credentials)
        name = credentials['tier-name'] || @configuration['default_tier_name'] ||
               @application.details['application_name']
        java_opts.add_system_property('appdynamics.agent.tierName', name.to_s)
      end

      def unique_host_name(java_opts)
        name = @configuration['default_unique_host_name'] || @application.details['application_name']
        java_opts.add_system_property('appdynamics.agent.uniqueHostId', name.to_s)
      end

      # Copy default configuration present in resources folder of app_dynamics_agent ver* directories present in sandbox
      #
      # @param [Pathname] default_conf_dir the 'defaults' directory present in app_dynamics_agent resources.
      # @return [Void]
      def copy_appd_default_configuration(default_conf_dir)
        return unless default_conf_dir.exist?

        Dir.glob(@droplet.sandbox + 'ver*') do |target_directory|
          FileUtils.cp_r "#{default_conf_dir}/.", target_directory
        end
      end

      # Check if configuration file exists on the server before download
      # @param [URI] resource_uri URI of the remote configuration server
      # @param [String] conf_file Name of the configuration file
      # @param [Integer] redirect_limit maximum number of redirects to follow before giving up
      # @return [Boolean] returns true if files exists on path specified by APPD_CONF_HTTP_URL, false otherwise
      def check_if_resource_exists(resource_uri, conf_file, redirect_limit = 5)
        # Bound redirect chains so a misconfigured server cannot loop forever.
        return false if redirect_limit <= 0

        # check if resource exists on remote server
        begin
          opts = { use_ssl: true } if resource_uri.scheme == 'https'
          response = Net::HTTP.start(resource_uri.host, resource_uri.port, opts) do |http|
            req = Net::HTTP::Head.new(resource_uri)
            # BUG FIX: URI#user/#password are nil (not '') when absent, so the
            # original `user != ''` test always sent a bogus basic-auth header.
            req.basic_auth(resource_uri.user, resource_uri.password) if resource_uri.user || resource_uri.password
            http.request(req)
          end
        rescue StandardError => e
          @logger.error { "Request failure: #{e.message}" }
          return false
        end

        case response
        when Net::HTTPSuccess
          true
        when Net::HTTPRedirection
          location = response['location']
          @logger.info { "redirected to #{location}" }
          # BUG FIX: 'location' is a String, but this method reads
          # .scheme/.host/.port off its argument — parse it into a URI first,
          # otherwise every redirect failed inside the rescue above.
          check_if_resource_exists(URI(location), conf_file, redirect_limit - 1)
        else
          @logger.info { "Could not retrieve #{resource_uri}. Code: #{response.code} Message: #{response.message}" }
          false
        end
      end

      # Check for configuration files on a remote server. If found, copy to conf dir under each ver* dir
      # @return [Void]
      def override_default_config_remote
        return unless @application.environment['APPD_CONF_HTTP_URL']

        JavaBuildpack::Util::Cache::InternetAvailability.instance.available(
          true, 'The AppDynamics remote configuration download location is always accessible'
        ) do
          agent_root = @application.environment['APPD_CONF_HTTP_URL'].chomp('/') + '/java/'
          @logger.info { "Downloading override configuration files from #{agent_root}" }
          CONFIG_FILES.each do |conf_file|
            uri = URI(agent_root + conf_file)
            # `download()` uses retries with exponential backoff which is expensive
            # for situations like 404 File not Found. Also, `download()` doesn't expose
            # an api to disable retries, which makes this check necessary to prevent
            # long install times.
            next unless check_if_resource_exists(uri, conf_file)

            download(false, uri.to_s) do |file|
              Dir.glob(@droplet.sandbox + 'ver*') do |target_directory|
                FileUtils.cp_r file, target_directory + '/conf/' + conf_file
              end
            end
          end
        end
      end

      # Check for configuration files locally. If found, copy to conf dir under each ver* dir
      # @return [Void]
      def override_default_config_local
        return unless @application.environment['APPD_CONF_DIR']

        app_conf_dir = @application.root + @application.environment['APPD_CONF_DIR']
        raise "AppDynamics configuration source dir #{app_conf_dir} does not exist" unless Dir.exist?(app_conf_dir)

        @logger.info { "Copy override configuration files from #{app_conf_dir}" }
        CONFIG_FILES.each do |conf_file|
          conf_file_path = app_conf_dir + conf_file
          next unless File.file?(conf_file_path)

          Dir.glob(@droplet.sandbox + 'ver*') do |target_directory|
            FileUtils.cp_r conf_file_path, target_directory + '/conf/' + conf_file
          end
        end
      end

    end
  end
end
| 40.70354 | 120 | 0.684205 |
# A short user-authored post with an optional attached picture.
class Micropost < ApplicationRecord
  belongs_to :user
  mount_uploader :picture, PictureUploader

  validates :user_id, presence: true
  validates :content, presence: true, length: {maximum: Settings.content_micropost.maximum_length}
  validate :picture_size

  # Newest first.
  scope :order_by_created_at, ->{order created_at: :desc}
  # Posts authored by the given user or anyone they follow.
  scope :find_user_id, ->(following_ids, id){where "user_id IN (?) OR user_id = ?", following_ids, id}

  private

  # Rejects uploads larger than the configured size limit.
  def picture_size
    return if picture.size <= Settings.micropost.picture_size.megabytes

    errors.add :picture, I18n.t(".microposts.model.picture_error")
  end
end
| 33.444444 | 102 | 0.752492 |
33e8ef51c7af2594ce10c31a329eeb7e983d7ef7 | 757 | require 'rubygems'
require 'bundler/setup'
require 'dm-core'
require 'dm-timestamps'
require 'dm-validations'
require 'dm-aggregates'
require 'dm-migrations'
require 'haml'
require 'ostruct'
require 'sinatra' unless defined?(Sinatra)
# One-time application setup run by Sinatra at boot.
configure do
  # Global site settings available throughout the app.
  SiteConfig = OpenStruct.new(
    :title => 'Your Application Name',
    :author => 'Your Name',
    :url_base => 'http://localhost:4567/'
  )

  # load models
  $LOAD_PATH.unshift("#{File.dirname(__FILE__)}/lib")
  Dir.glob("#{File.dirname(__FILE__)}/lib/*.rb") { |lib| require File.basename(lib, '.*') }

  # Use DATABASE_URL when provided (e.g. on a PaaS); otherwise fall back to a
  # per-environment SQLite database file next to this script.
  DataMapper.setup(:default, (ENV["DATABASE_URL"] || "sqlite3:///#{File.expand_path(File.dirname(__FILE__))}/#{Sinatra::Base.environment}.db"))
end
| 29.115385 | 143 | 0.645971 |
61f375999fb863ad7190d59febabf8b57ada9494 | 1,503 | #
# Be sure to run `pod lib lint TTCardView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods specification for TTCardView.
Pod::Spec.new do |s|
  s.name             = 'TTCardView'
  s.version          = '0.1.1'
  s.summary          = 'CardView ported to Objective-C'

# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description      = <<-DESC
This is a port of the CardView UIView subclass from https://github.com/aclissold/CardView
                       DESC

  s.homepage         = 'https://github.com/dhiraj/TTCardView'
  s.screenshots      = 'https://cloud.githubusercontent.com/assets/43901/16893729/aced4a8e-4b5e-11e6-9082-3f2828d1a3d3.png'
  s.license          = { :type => 'Apache', :file => 'LICENSE' }
  s.author           = { 'Dhiraj Gupta' => '[email protected]' }
  s.source           = { :git => 'https://github.com/dhiraj/TTCardView.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/dhiraj'

  s.ios.deployment_target = '8.0'
  s.source_files = 'TTCardView/Classes/**/*'
  # Headers exported to consumers of the pod.
  s.public_header_files = 'TTCardView/Classes/**/*.h'
end
| 44.205882 | 122 | 0.66334 |
ede0ac3c7d9bda7f18533e6bb0ab06811920bd85 | 4,002 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
# Auxiliary scanner: directory traversal in TP-Link Wireless Lite N Access
# Points (CVE-2012-5687). Attempts to read sensitive files via the /help
# endpoint and stores any retrieved content as loot.
class MetasploitModule < Msf::Auxiliary
  include Msf::Exploit::Remote::HttpClient
  include Msf::Auxiliary::Scanner

  def initialize
    super(
      'Name' => 'TP-Link Wireless Lite N Access Point Directory Traversal Vulnerability',
      'Description' => %q{
          This module tests whether a directory traversal vulnerability is present in
        versions of TP-Link Access Point 3.12.16 Build 120228 Rel.37317n.
      },
      'References' =>
        [
          [ 'CVE', '2012-5687' ],
          [ 'OSVDB', '86881' ],
          [ 'BID', '57969' ],
          [ 'EDB', '24504' ],
          [ 'URL', 'http://www.s3cur1ty.de/m1adv2013-011' ]
        ],
      'Author' => [ 'Michael Messner <devnull[at]s3cur1ty.de>' ],
      'License' => MSF_LICENSE
    )

    register_options(
      [
        OptPath.new('SENSITIVE_FILES', [ true, "File containing senstive files, one per line",
          File.join(Msf::Config.data_directory, "wordlists", "sensitive_files.txt") ]),
      ])
  end

  # Reads the wordlist file and returns one candidate path per line.
  # Returns [] when the file is missing or unreadable.
  def extract_words(wordfile)
    return [] unless wordfile && File.readable?(wordfile)
    begin
      words = File.open(wordfile, "rb") do |f|
        f.read
      end
    rescue
      return []
    end
    save_array = words.split(/\r?\n/)
    return save_array
  end

  # Attempts to fetch a single file through the traversal and reports/loots
  # it on success. Silently returns when the target does not look like a
  # TP-LINK router or the file is not reachable.
  def find_files(file)
    traversal = '/../..'

    res = send_request_cgi(
      {
        'method' => 'GET',
        'uri' => '/help' << traversal << file,
      })

    return if res.nil?
    return if (res.headers['Server'].nil? or res.headers['Server'] !~ /TP-LINK Router/)
    return if (res.code == 404)
    return if (res.code == 501)

    # A traversal hit returns raw file content rather than the help HTML page.
    if (res and res.code == 200 and res.body !~ /\<\/HTML/)
      out = false

      print_good("#{rhost}:#{rport} - Request may have succeeded on file #{file}")
      report_web_vuln({
        :host     => rhost,
        :port     => rport,
        :vhost    => datastore['VHOST'],
        :path     => "/",
        :pname    => normalize_uri(traversal, file),
        :risk     => 3,
        :proof    => normalize_uri(traversal, file),
        :name     => self.fullname,
        :category => "web",
        :method   => "GET"
      })

      loot = store_loot("tplink.traversal.data","text/plain",rhost, res.body,file)
      vprint_good("#{rhost}:#{rport} - File #{file} downloaded to: #{loot}")

      if datastore['VERBOSE']
        vprint_good("#{rhost}:#{rport} - Response - File #{file}:")
        res.body.each_line do |line|
          # the following is the last line of the useless response
          if line.to_s =~ /\/\/--><\/SCRIPT>/
            # setting out = true to print all of the following stuff
            out = true
            next
          end
          if out == true
            if line =~ /<META/ or line =~ /<Script/
              # we are finished :)
              # the next line is typical code from the website and nothing from us
              # this means we can skip this stuff ...
              out = false
              next
            else
              #it is our output *h00ray*
              #output our stuff ...
              print_line("#{line}")
            end
          end
        end
        out = false
      end
    elsif res && res.code
      vprint_error("#{rhost}:#{rport} - File->#{file} not found")
    end
  end

  # Per-host entry point: fingerprints the device, then tries every path from
  # the SENSITIVE_FILES wordlist.
  def run_host(ip)
    begin
      vprint_status("#{rhost}:#{rport} - Fingerprinting...")
      res = send_request_cgi(
        {
          'method' => 'GET',
          'uri' => '/',
        })
      return if (res.headers['Server'].nil? or res.headers['Server'] !~ /TP-LINK Router/)
    rescue ::Rex::ConnectionError
      vprint_error("#{rhost}:#{rport} - Failed to connect to the web server")
      return
    end

    extract_words(datastore['SENSITIVE_FILES']).each do |files|
      find_files(files) unless files.empty?
    end
  end
end
| 29.211679 | 96 | 0.53973 |
18d0cb135e0e0f07db93d000f01f8d427c223f54 | 3,924 | require 'json'
module Agents
  # Huginn agent that creates events on a Google Calendar from incoming
  # events, authenticating with a Google service account (not OAuth).
  class GoogleCalendarPublishAgent < Agent
    cannot_be_scheduled!
    no_bulk_receive!

    gem_dependency_check { defined?(Google) && defined?(Google::APIClient) }

    description <<-MD
      The Google Calendar Publish Agent creates events on your Google Calendar.

      #{'## Include `google-api-client` in your Gemfile to use this Agent!' if dependencies_missing?}

      This agent relies on service accounts, rather than oauth.

      Setup:

      1. Visit [the google api console](https://code.google.com/apis/console/b/0/)
      2. New project -> Huginn
      3. APIs & Auth -> Enable google calendar
      4. Credentials -> Create new Client ID -> Service Account
      5. Persist the generated private key to a path, ie: `/home/huginn/a822ccdefac89fac6330f95039c492dfa3ce6843.p12`
      6. Grant access via google calendar UI to the service account email address for each calendar you wish to manage. For a whole google apps domain, you can [delegate authority](https://developers.google.com/+/domains/authentication/delegation)

      Agent Configuration:

      `calendar_id` - The id the calendar you want to publish to. Typically your google account email address. Liquid formatting (e.g. `{{ cal_id }}`) is allowed here in order to extract the calendar_id from the incoming event.

      `google` A hash of configuration options for the agent.

      `google` `service_account_email` - The authorised service account.

      `google` `key_file` OR `google` `key` - The path to the key file or the key itself. Liquid formatting is supported if you want to use a Credential. (E.g., `{% credential google_key %}`)

      `google` `key_secret` - The secret for the key, typically 'notasecret'

      Set `expected_update_period_in_days` to the maximum amount of time that you'd expect to pass between Events being created by this Agent.

      Use it with a trigger agent to shape your payload!

      A hash of event details. See the [Google Calendar API docs](https://developers.google.com/google-apps/calendar/v3/reference/events/insert)

      Example payload for trigger agent:
      <pre><code>{
        "message": {
          "visibility": "default",
          "summary": "Awesome event",
          "description": "An example event with text. Pro tip: DateTimes are in RFC3339",
          "start": {
            "dateTime": "2014-10-02T10:00:00-05:00"
          },
          "end": {
            "dateTime": "2014-10-02T11:00:00-05:00"
          }
        }
      }</code></pre>
    MD

    event_description <<-MD
      {
        'success' => true,
        'published_calendar_event' => {
           ....
        },
        'agent_id' => 1234,
        'event_id' => 3432
      }
    MD

    # Only expected_update_period_in_days is mandatory; the google hash is
    # validated lazily when the calendar client is built.
    def validate_options
      errors.add(:base, "expected_update_period_in_days is required") unless options['expected_update_period_in_days'].present?
    end

    # Healthy when a successful publish happened recently and no errors were
    # logged since.
    def working?
      event_created_within?(options['expected_update_period_in_days']) && most_recent_event && most_recent_event.payload['success'] == true && !recent_error_logs?
    end

    def default_options
      {
        'expected_update_period_in_days' => "10",
        'calendar_id' => '[email protected]',
        'google' => {
          'key_file' => '/path/to/private.key',
          'key_secret' => 'notasecret',
          'service_account_email' => ''
        }
      }
    end

    # Publishes each incoming event's payload["message"] as a calendar event
    # and emits a result event with the API response.
    def receive(incoming_events)
      incoming_events.each do |event|
        calendar = GoogleCalendar.new(interpolate_options(options, event), Rails.logger)
        calendar_event = JSON.parse(calendar.publish_as(interpolated(event)['calendar_id'], event.payload["message"]).response.body)
        create_event :payload => {
          'success' => true,
          'published_calendar_event' => calendar_event,
          'agent_id' => event.agent_id,
          'event_id' => event.id
        }
      end
    end
  end
end
| 35.672727 | 247 | 0.649083 |
2169ec95c41e278d5a9efdceaeb262150c5b59c8 | 561 | # frozen_string_literal: true
require 'simplecov'
SimpleCov.start
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# Require this file using `require "spec_helper.rb"` to ensure that it is only
# loaded once.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'lograge'
require 'action_pack'
require 'support/examples'
RSpec.configure do |config|
  # If a filter (e.g. :focus) matches nothing, run the full suite instead.
  config.run_all_when_everything_filtered = true
  config.filter_run :focus
end
| 28.05 | 78 | 0.773619 |
015a6c7640e335a004246229ac4a22624a1691ce | 2,971 | server ENV["CAPISTRANO_SERVER"],
port: ENV["CAPISTRANO_PORT"],
roles: [:web, :app, :db],
primary: true
set :repo_url, ENV["CAPISTRANO_REPO_URL"]
set :application, ENV["CAPISTRANO_APPLICATION"]
set :user, ENV["CAPISTRANO_USER"]
set :puma_threads, [4, 16]
set :puma_workers, 0
set :pty, true
set :use_sudo, false
set :stage, :production
set :deploy_via, :remote_cache
set :deploy_to, "/home/#{fetch(:user)}/apps/#{fetch(:application)}"
set :puma_bind, "unix://#{shared_path}/tmp/sockets/#{fetch(:application)}-puma.sock"
set :puma_state, "#{shared_path}/tmp/pids/puma.state"
set :puma_pid, "#{shared_path}/tmp/pids/puma.pid"
set :puma_access_log, "#{release_path}/log/puma.error.log"
set :puma_error_log, "#{release_path}/log/puma.access.log"
set :ssh_options, forward_agent: true, user: fetch(:user), keys: %w(~/.ssh/id_rsa.pub)
set :puma_preload_app, true
set :puma_worker_timeout, nil
set :puma_init_active_record, true
set :tests, []
# For capistrano-db-tasks
set :assets_dir, %w(public/assets)
set :local_assets_dir, %w(public/assets)
set :disallow_pushing, true
set :linked_dirs, %w(
log tmp/pids tmp/cache
tmp/sockets vendor/bundle public/system
public/cards solr
)
namespace :puma do
  desc "Create directories for Puma pids and socket"
  task :make_dirs do
    on roles(:app) do
      execute "mkdir #{shared_path}/tmp/sockets -p"
      execute "mkdir #{shared_path}/tmp/pids -p"
    end
  end

  # Ensure the pid/socket directories exist before Puma starts.
  before :start, :make_dirs
end
namespace :deploy do
  desc "Make sure local git is in sync with remote."
  task :check_revision do
    on roles(:app) do
      # Abort early rather than deploy a revision the remote doesn't have.
      unless `git rev-parse HEAD` == `git rev-parse origin/master`
        puts "WARNING: HEAD is not the same as origin/master"
        puts "Run `git push` to sync changes."
        exit
      end
    end
  end

  desc "Initial deploy"
  task :initial do
    on roles(:app) do
      # First deploy: Puma isn't running yet, so start it instead of restarting.
      before "deploy:restart", "puma:start"
      invoke "deploy"
    end
  end

  desc "Restart application"
  task :restart do
    on roles(:app), in: :sequence, wait: 5 do
      invoke "puma:restart"
    end
  end

  before :starting, :check_revision
  after :finishing, :compile_assets
  after :finishing, :cleanup
  after :finishing, :restart
end
namespace :bower do
  desc "Install Bower"
  task :install do
    on roles(:web) do
      within release_path do
        with rails_env: fetch(:rails_env) do
          # CI=true keeps bower non-interactive on the server.
          execute :rake, "bower:install CI=true"
        end
      end
    end
  end

  desc "Copy bower_components"
  task :copy_from_current_release do
    on roles(:web) do
      within release_path do
        # Reuse the previous release's components instead of re-downloading.
        execute :cp, "-r #{current_path}/vendor/assets/bower_components/ \
          #{release_path}/vendor/assets/"
      end
    end
  end
end
# COPY_BOWER skips the (slow) bower install by reusing the current release's
# components; otherwise run a fresh install before asset compilation.
if ENV["COPY_BOWER"]
  before "deploy:compile_assets", "bower:copy_from_current_release"
else
  before "deploy:compile_assets", "bower:install"
end
| 26.292035 | 90 | 0.664086 |
1134d70550c1fbd90104f3c1107b69a680421829 | 2,008 | ################################################################################
#
# Copyright (C) 2006 Peter J Jones ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
################################################################################
class PDF::Reader
  # An internal PDF::Reader class that represents a single token from a PDF
  # file. Behaves exactly like a Ruby String - it basically exists for
  # convenience.
  class Token < String
    # Creates a new token with the specified value (forwarded to String#new).
    def initialize(val)
      super
    end
  end
end
################################################################################
| 47.809524 | 84 | 0.539841 |
e2134d97bceb9f290b526a062507e676f165c13a | 1,221 | # frozen_string_literal: true
# Specs for Blacklight's autocomplete helper: autocomplete is considered
# enabled only when both the flag and the suggest path are configured.
describe Blacklight::SuggestHelperBehavior do
  before do
    allow(helper).to receive(:blacklight_config).and_return(blacklight_config)
  end

  describe '#autocomplete_enabled?' do
    describe 'with autocomplete config' do
      let(:blacklight_config) do
        Blacklight::Configuration.new.configure do |config|
          config.autocomplete_enabled = true
          config.autocomplete_path = 'suggest'
        end
      end

      it 'is enabled' do
        expect(helper.autocomplete_enabled?).to be true
      end
    end

    describe 'without disabled config' do
      let(:blacklight_config) do
        Blacklight::Configuration.new.configure do |config|
          config.autocomplete_enabled = false
          config.autocomplete_path = 'suggest'
        end
      end

      it 'is disabled' do
        expect(helper.autocomplete_enabled?).to be false
      end
    end

    describe 'without path config' do
      # Flag set but no autocomplete_path: still counts as disabled.
      let(:blacklight_config) do
        Blacklight::Configuration.new.configure do |config|
          config.autocomplete_enabled = true
        end
      end

      it 'is disabled' do
        expect(helper.autocomplete_enabled?).to be false
      end
    end
  end
end
1db4c00738c9d4e799a04bf0a015ebbbe69e37a8 | 629 | $:.push File.join(File.dirname(__FILE__), 'lib')
require 'view_matchers/version'
Gem::Specification.new do |s|
  s.name          = 'view-matchers'
  s.version       = ViewMatchers::VERSION
  # NOTE(review): stamping the build date makes gem builds non-reproducible;
  # consider a fixed date.
  s.date          = Time.now.strftime('%Y-%m-%d')
  s.summary       = 'RSpec matchers for ascii tables and form fields.'
  s.description   = 'Expressive RSpec matchers for ascii tables and form fields.'
  s.authors       = ['Tom König']
  s.email         = '[email protected]'
  s.files         = `git ls-files`.split($/)
  s.require_paths = ['lib']
  s.homepage      = 'https://github.com/TomKnig/view-matchers'
  s.license       = 'MIT'
end
| 37 | 81 | 0.615262 |
1c8e4eaeb291338fff448148962197ee76272c7b | 155 | # frozen_string_literal: true
# Test factory for Task records: unique description, associated user,
# auto-filled timestamps.
Factory.define :task do |f|
  f.sequence(:description) { |i| "description-#{i}" }
  f.association(:user)
  f.timestamps
end
| 19.375 | 53 | 0.696774 |
01967c4e94341b0b902e43da8a0d787ca84c6d21 | 273 | class StaticPagesController < ApplicationController
def home
if logged_in?
@micropost = current_user.microposts.build
@feed_items = current_user.feed.paginate(page: params[:page])
end
end
def help
end
def about
end
def contact
end
end
| 14.368421 | 65 | 0.703297 |
18a1aa1cb47320432fcab212fb00b0110a5df720 | 3,068 | # frozen_string_literal: true
require 'rails_helper'
# Routing specs for the JSON:API decks endpoints: collection/member routes
# plus read-only relationship routes (owner, collaborators, assets,
# conversations).
RSpec.describe 'decks routing', :type => :routing do
  it 'routes decks endpoint' do
    route = '/api/decks'

    expect(:get => route).to route_to 'decks#index'
    expect(:patch => route).not_to be_routable
    expect(:put => route).not_to be_routable
    expect(:post => route).to route_to 'decks#create'
    expect(:delete => route).not_to be_routable
  end

  it 'routes deck endpoint' do
    route = '/api/decks/foo'

    expect(:get => route).to route_to 'decks#show', :id => 'foo'
    expect(:patch => route).to route_to 'decks#update', :id => 'foo'
    expect(:put => route).to route_to 'decks#update', :id => 'foo'
    expect(:post => route).not_to be_routable
    expect(:delete => route).to route_to 'decks#destroy', :id => 'foo'
  end

  it 'routes deck owner relationship endpoint' do
    route = '/api/decks/foo/relationships/owner'
    params = { :deck_id => 'foo', :relationship => 'owner' }

    # Related-resource route is served by the target resource's controller.
    expect(:get => '/api/decks/foo/owner').to route_to 'users#get_related_resource', params.merge(:source => 'decks')
    expect(:get => route).to route_to 'decks#show_relationship', params
    expect(:patch => route).not_to be_routable
    expect(:put => route).not_to be_routable
    expect(:post => route).not_to be_routable
    expect(:delete => route).not_to be_routable
  end

  it 'routes deck collaborators relationship endpoint' do
    route = '/api/decks/foo/relationships/collaborators'
    params = { :deck_id => 'foo', :relationship => 'collaborators' }

    expect(:get => '/api/decks/foo/collaborators').to route_to 'users#get_related_resources', params.merge(:source => 'decks')
    expect(:get => route).to route_to 'decks#show_relationship', params
    expect(:patch => route).not_to be_routable
    expect(:put => route).not_to be_routable
    expect(:post => route).not_to be_routable
    expect(:delete => route).not_to be_routable
  end

  it 'routes deck assets relationship endpoint' do
    route = '/api/decks/foo/relationships/assets'
    params = { :deck_id => 'foo', :relationship => 'assets' }

    expect(:get => '/api/decks/foo/assets').to route_to 'assets#get_related_resources', params.merge(:source => 'decks')
    expect(:get => route).to route_to 'decks#show_relationship', params
    expect(:patch => route).not_to be_routable
    expect(:put => route).not_to be_routable
    expect(:post => route).not_to be_routable
    expect(:delete => route).not_to be_routable
  end

  it 'routes deck conversations relationship endpoint' do
    route = '/api/decks/foo/relationships/conversations'
    params = { :deck_id => 'foo', :relationship => 'conversations' }

    expect(:get => '/api/decks/foo/conversations').to route_to 'conversations#get_related_resources', params.merge(:source => 'decks')
    expect(:get => route).to route_to 'decks#show_relationship', params
    expect(:patch => route).not_to be_routable
    expect(:put => route).not_to be_routable
    expect(:post => route).not_to be_routable
    expect(:delete => route).not_to be_routable
  end
end
| 39.333333 | 134 | 0.684811 |
6201881c92eb14e888a0d8d9fccaa6e10232c34d | 2,690 | # frozen_string_literal: true
require "ipaddr"
module Doorkeeper
  # Backport of IPAddr#loopback? for rubies older than 2.5 (see the
  # conditional include below).
  module IPAddrLoopback
    def loopback?
      case @family
      when Socket::AF_INET
        # 127.0.0.0/8
        @addr & 0xff000000 == 0x7f000000
      when Socket::AF_INET6
        # ::1
        @addr == 1
      else
        raise AddressFamilyError, "unsupported address family"
      end
    end
  end

  # For backward compatibility with old rubies
  if Gem::Version.new(RUBY_VERSION) < Gem::Version.new("2.5.0")
    IPAddr.include Doorkeeper::IPAddrLoopback
  end

  module OAuth
    module Helpers
      # Validates and compares OAuth redirect URIs.
      module URIChecker
        # A URI is valid when it is the OOB sentinel, or parses with a
        # non-localhost scheme, a host (for http/https), and neither a
        # fragment nor an opaque part.
        def self.valid?(url)
          return true if oob_uri?(url)

          uri = as_uri(url)
          [valid_scheme?(uri), iff_host?(uri), uri.fragment.nil?, uri.opaque.nil?].all?
        rescue URI::InvalidURIError
          false
        end

        # True when the requested redirect URI matches the registered one.
        def self.matches?(url, client_url)
          requested = as_uri(url)
          registered = as_uri(client_url)

          if registered.query
            return false unless query_matches?(requested.query, registered.query)

            # Query params may appear in any order; clear them so the rest of
            # the URI can be compared directly.
            registered.query = nil
          end

          # RFC8252, Paragraph 7.3: loopback redirect URIs match on any port.
          # @see https://tools.ietf.org/html/rfc8252#section-7.3
          if loopback_uri?(requested) && loopback_uri?(registered)
            requested.port = nil
            registered.port = nil
          end

          requested.query = nil
          requested == registered
        end

        # True when the URI's host is a loopback IP literal.
        def self.loopback_uri?(uri)
          IPAddr.new(uri.host).loopback?
        rescue IPAddr::Error
          false
        end

        # The client may register several space-separated URIs; any match wins.
        def self.valid_for_authorization?(url, client_url)
          return false unless valid?(url)

          client_url.split.any? { |candidate| matches?(url, candidate) }
        end

        def self.as_uri(url)
          URI.parse(url)
        end

        # Order-independent comparison of the individual key=value pairs.
        def self.query_matches?(query, client_query)
          return true if client_query.blank? && query.blank?
          return false if client_query.nil? || query.nil?

          query.split("&").sort == client_query.split("&").sort
        end

        def self.valid_scheme?(uri)
          return false if uri.scheme.nil?

          !%w[localhost].include?(uri.scheme)
        end

        def self.hypertext_scheme?(uri)
          %w[http https].include?(uri.scheme)
        end

        # http/https URIs must carry a host; other schemes are exempt.
        def self.iff_host?(uri)
          !(hypertext_scheme?(uri) && uri.host.nil?)
        end

        def self.oob_uri?(uri)
          NonStandard::IETF_WG_OAUTH2_OOB_METHODS.include?(uri)
        end
      end
    end
  end
end
e949834d6cdb8e917081dad91abb2d5fe8045f7b | 1,126 | # -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)

# Gem packaging metadata for the Fluentd Elasticsearch output plugin.
Gem::Specification.new do |s|
  s.name    = 'fluent-plugin-elasticsearch'
  s.version = '1.7.0'

  s.authors     = %w[diogo pitr]
  s.email       = %w[[email protected] [email protected]]
  s.description = 'ElasticSearch output plugin for Fluent event collector'
  s.summary     = s.description
  s.homepage    = 'https://github.com/uken/fluent-plugin-elasticsearch'
  s.license     = 'MIT'

  # Package everything tracked by git; expose bin/ scripts as executables.
  s.files         = `git ls-files`.split($/)
  s.executables   = s.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  s.test_files    = s.files.grep(%r{^(test|spec|features)/})
  s.require_paths = ['lib']

  s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)

  s.add_runtime_dependency 'fluentd', '>= 0.10.43'
  s.add_runtime_dependency 'excon', '>= 0'
  s.add_runtime_dependency 'elasticsearch', '< 1.1'

  s.add_development_dependency 'rake', '>= 0'
  s.add_development_dependency 'webmock', '~> 1'
  s.add_development_dependency 'test-unit', '~> 3.1.0'
  s.add_development_dependency 'minitest', '~> 5.8'
end
| 36.322581 | 78 | 0.636767 |
1c3cdce12ce7c36b44fa51d96033abfa6ef282fb | 6,212 | # frozen_string_literal: true
module Epics
  # Applies user-submitted attribute changes to an epic, then fans out the
  # side effects: board repositioning, rolled-up date recalculation,
  # usage-data counters, todo maintenance and parent/child reassignment.
  class UpdateService < Epics::BaseService
    # Columns that feed the rolled-up (composite) date computation.
    EPIC_DATE_FIELDS = %I[
      start_date_fixed
      start_date_is_fixed
      due_date_fixed
      due_date_is_fixed
    ].freeze

    def execute(epic)
      reposition_on_board(epic)

      # start_date and end_date columns are no longer writable by users because those
      # are composite fields managed by the system.
      params.extract!(:start_date, :end_date)

      # A task-list checkbox toggle short-circuits the full attribute update.
      update_task_event(epic) || update(epic)

      if saved_change_to_epic_dates?(epic)
        Epics::UpdateDatesService.new([epic]).execute

        track_start_date_fixed_events(epic)
        track_due_date_fixed_events(epic)
        track_fixed_dates_updated_events(epic)

        # Reload so callers see the recomputed composite dates.
        epic.reset
      end

      track_changes(epic)

      assign_parent_epic_for(epic)
      assign_child_epic_for(epic)

      epic
    end

    # Hook invoked by the base service after save with a snapshot of the
    # pre-update associations (mentions, labels, ...).
    def handle_changes(epic, options)
      old_associations = options.fetch(:old_associations, {})
      old_mentioned_users = old_associations.fetch(:mentioned_users, [])
      old_labels = old_associations.fetch(:labels, [])

      if has_changes?(epic, old_labels: old_labels)
        todo_service.resolve_todos_for_target(epic, current_user)
      end

      todo_service.update_epic(epic, current_user, old_mentioned_users)

      if epic.saved_change_to_attribute?(:confidential)
        handle_confidentiality_change(epic)
      end
    end

    # Tracks the confidentiality flip and, when turning confidential,
    # schedules deletion of todos for users who lose visibility.
    def handle_confidentiality_change(epic)
      if epic.confidential?
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_confidential_action(author: current_user)
        # don't enqueue immediately to prevent todos removal in case of a mistake
        ::TodosDestroyer::ConfidentialEpicWorker.perform_in(::Todo::WAIT_FOR_DELETE, epic.id)
      else
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_visible_action(author: current_user)
      end
    end

    # Invoked when only task-list checkboxes changed.
    def handle_task_changes(epic)
      todo_service.resolve_todos_for_target(epic, current_user)
      todo_service.update_epic(epic, current_user)
    end

    private

    # Counts updates to the *fixed* date values themselves.
    def track_fixed_dates_updated_events(epic)
      fixed_start_date_updated = epic.saved_change_to_attribute?(:start_date_fixed)
      fixed_due_date_updated = epic.saved_change_to_attribute?(:due_date_fixed)
      return unless fixed_start_date_updated || fixed_due_date_updated

      if fixed_start_date_updated
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_fixed_start_date_updated_action(author: current_user)
      end

      if fixed_due_date_updated
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_fixed_due_date_updated_action(author: current_user)
      end
    end

    # Counts toggles between fixed and inherited start date.
    def track_start_date_fixed_events(epic)
      return unless epic.saved_change_to_attribute?(:start_date_is_fixed)

      if epic.start_date_is_fixed?
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_start_date_set_as_fixed_action(author: current_user)
      else
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_start_date_set_as_inherited_action(author: current_user)
      end
    end

    # Counts toggles between fixed and inherited due date.
    def track_due_date_fixed_events(epic)
      return unless epic.saved_change_to_attribute?(:due_date_is_fixed)

      if epic.due_date_is_fixed?
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_due_date_set_as_fixed_action(author: current_user)
      else
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_due_date_set_as_inherited_action(author: current_user)
      end
    end

    # Moves the epic between two neighbours on an epic board, creating any
    # missing position records first.
    def reposition_on_board(epic)
      return unless params[:move_between_ids]
      return unless epic_board_id

      fill_missing_positions_before

      epic_board_position = issuable_for_positioning(epic.id, epic_board_id, create_missing: true)
      handle_move_between_ids(epic_board_position)
      epic_board_position.save!
    end

    # we want to create missing only for the epic being moved
    # other records are handled by PositionCreateService
    def issuable_for_positioning(id, board_id, create_missing: false)
      return unless id

      position = Boards::EpicBoardPosition.find_by_epic_id_and_epic_board_id(id, board_id)
      return position if position

      Boards::EpicBoardPosition.create!(epic_id: id, epic_board_id: board_id) if create_missing
    end

    # Backfills position records for the epics above the drop target so the
    # relative move has something to anchor against.
    def fill_missing_positions_before
      before_id = params[:move_between_ids].compact.max
      list_id = params.delete(:list_id)
      board_group = params.delete(:board_group)

      return unless before_id
      # if position for the epic above exists we don't need to create positioning records
      return if issuable_for_positioning(before_id, epic_board_id)

      service_params = {
        board_id: epic_board_id,
        list_id: list_id, # we need to have positions only for the current list
        from_id: before_id # we need to have positions only for the epics above
      }

      Boards::Epics::PositionCreateService.new(board_group, current_user, service_params).execute
    end

    def epic_board_id
      params[positioning_scope_key]
    end

    def positioning_scope_key
      :board_id
    end

    # True when any of the composite-date source columns changed in the save.
    def saved_change_to_epic_dates?(epic)
      (epic.saved_changes.keys.map(&:to_sym) & EPIC_DATE_FIELDS).present?
    end

    def track_changes(epic)
      if epic.saved_change_to_attribute?(:title)
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_title_changed_action(author: current_user)
      end

      if epic.saved_change_to_attribute?(:description)
        ::Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_description_changed_action(author: current_user)
        track_task_changes(epic)
      end
    end

    # Counts each task checkbox that was checked or unchecked in this update.
    def track_task_changes(epic)
      return if epic.updated_tasks.blank?

      epic.updated_tasks.each do |task|
        if task.complete?
          Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_task_checked(author: current_user)
        else
          Gitlab::UsageDataCounters::EpicActivityUniqueCounter.track_epic_task_unchecked(author: current_user)
        end
      end
    end
  end
end
| 33.76087 | 130 | 0.741951 |
# Production environment configuration (Rails 5.x-era defaults).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options)
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "trase_new_api_#{Rails.env}"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT (e.g. under Docker/Heroku) when requested via env var.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
| 41.505263 | 102 | 0.758559 |
ab2849cd38c600b2330dd42384d727ac3121eab6 | 2,413 | #
# Copyright 2008-2017, Chef Software, Inc.
# Copyright 2017, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'poise_build_essential/build_essential_providers/base'
module PoiseBuildEssential
  module BuildEssentialProviders
    # A provider for `poise_build_essential` to install on macOS platforms.
    #
    # @see PoiseBuildEssential::Resources::PoiseBuildEssential::Resource
    # @provides poise_build_essential
    class MacOSX < Base
      provides(:poise_build_essential, platform_family: 'mac_os_x')

      private

      # (see Base#install_build_essential)
      # Installs the Xcode Command Line Tools via `softwareupdate`. The
      # placeholder file tricks Apple's Software Update Service into
      # listing the CLT package; skipped when pkgutil reports it present.
      def install_build_essential
        # This script was graciously borrowed and modified from Tim Sutton's
        # osx-vm-templates at https://github.com/timsutton/osx-vm-templates/blob/b001475df54a9808d3d56d06e71b8fa3001fff42/scripts/xcode-cli-tools.sh
        execute 'install XCode Command Line tools' do
          command <<-EOH
# create the placeholder file that's checked by CLI updates' .dist code
# in Apple's SUS catalog
touch /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
# find the CLI Tools update
PROD=$(softwareupdate -l | grep "\*.*Command Line" | head -n 1 | awk -F"*" '{print $2}' | sed -e 's/^ *//' | tr -d '\n')
# install it
softwareupdate -i "$PROD" --verbose
# Remove the placeholder to prevent perpetual appearance in the update utility
rm -f /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
EOH
          not_if 'pkgutil --pkgs=com.apple.pkg.CLTools_Executables'
        end
      end

      # (see Base#upgrade_build_essential)
      def upgrade_build_essential
        # Make upgrade the same as install on Mac.
        install_build_essential
      end

      # (see Base#remove_build_essential)
      def remove_build_essential
        # Not sure how to do this, ignoring for now.
        raise NotImplementedError
      end
    end
  end
end
| 36.014925 | 148 | 0.723581 |
e8cb4ec81212613b4ac09f10c94396522dd90216 | 90 | # frozen_string_literal: true
# Register the admin JavaScript bundle with Administrate's asset pipeline.
Administrate::Engine.add_javascript 'admin/application.js'
| 22.5 | 58 | 0.833333 |
# Manages the user's sign-in identity (omniauth-identity backed):
# sign-up form and password changes.
class IdentitiesController < ApplicationController
  # before_action replaces the deprecated before_filter alias (removed in
  # Rails 5.1) and matches the callback style used on the next line.
  before_action :auth_anybody!, only: :new
  before_action :check_accounts_limit

  # Renders the sign-up form, re-using the identity built by the
  # omniauth-identity middleware when the request went through it.
  def new
    @identity = env['omniauth.identity'] || Identity.new
  end

  def edit
    @identity = current_user.identity
  end

  # Changes the password: requires the current password, rejects re-using
  # it, and on success invalidates every existing session for the user.
  def update
    @identity = current_user.identity

    unless @identity.authenticate(params[:identity][:old_password])
      redirect_to edit_identity_path, alert: t('.auth-error') and return
    end

    # Reject setting the new password to the current one.
    if @identity.authenticate(params[:identity][:password])
      redirect_to edit_identity_path, alert: t('.auth-same') and return
    end

    # `update` is the non-deprecated name for update_attributes.
    if @identity.update(identity_params)
      current_user.send_password_changed_notification
      clear_all_sessions current_user.id
      reset_session
      redirect_to signin_path, notice: t('.notice')
    else
      render :edit
    end
  end

  private

  # `require` is the canonical strong-parameters method; `required` was a
  # deprecated alias.
  def identity_params
    params.require(:identity).permit(:password, :password_confirmation)
  end
end
| 24.02439 | 72 | 0.726904 |
ed6c7d40b202285e9ee727fe6b778a38fbfd9200 | 3,353 | ## Copied from Rack 1.4.1 on 19 June 2012.
## see http://github.com/rack/rack/blob/1.4.1/lib/rack/urlmap.rb
# Copyright (c) 2007, 2008, 2009, 2010 Christian Neukirchen <purl.org/net/chneukirchen>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# NOTE(review): vendored copy of Rack 1.4.1's URLMap (see file header).
# Keep it byte-compatible with upstream; do not restyle.
module Rack
  # Rack::URLMap takes a hash mapping urls or paths to apps, and
  # dispatches accordingly. Support for HTTP/1.1 host names exists if
  # the URLs start with <tt>http://</tt> or <tt>https://</tt>.
  #
  # URLMap modifies the SCRIPT_NAME and PATH_INFO such that the part
  # relevant for dispatch is in the SCRIPT_NAME, and the rest in the
  # PATH_INFO.  This should be taken care of when you need to
  # reconstruct the URL in order to create links.
  #
  # URLMap dispatches in such a way that the longest paths are tried
  # first, since they are most specific.
  class URLMap
    # Sort key for host-less mounts so they always sort after hosted ones.
    NEGATIVE_INFINITY = -1.0 / 0.0

    def initialize(map = {})
      remap(map)
    end

    # Compiles the location => app map into [host, path, regexp, app]
    # tuples sorted longest-first (most specific mount wins).
    def remap(map)
      @mapping = map.map { |location, app|
        if location =~ %r{\Ahttps?://(.*?)(/.*)}
          host, location = $1, $2
        else
          host = nil
        end

        unless location[0] == ?/
          raise ArgumentError, "paths need to start with /"
        end

        location = location.chomp('/')
        # Collapse repeated slashes in the request path while matching.
        match = Regexp.new("^#{Regexp.quote(location).gsub('/', '/+')}(.*)", nil, 'n')

        [host, location, match, app]
      }.sort_by do |(host, location, _, _)|
        [host ? -host.size : NEGATIVE_INFINITY, -location.size]
      end
    end

    # Dispatches to the first mount whose host and path prefix match,
    # shifting the matched prefix from PATH_INFO into SCRIPT_NAME. The
    # ensure block restores both keys so callers see env unchanged.
    def call(env)
      path = env["PATH_INFO"]
      script_name = env['SCRIPT_NAME']
      hHost = env['HTTP_HOST']
      sName = env['SERVER_NAME']
      sPort = env['SERVER_PORT']

      @mapping.each do |host, location, match, app|
        unless hHost == host \
          || sName == host \
          || (!host && (hHost == sName || hHost == sName+':'+sPort))
          next
        end

        next unless m = match.match(path.to_s)

        rest = m[1]
        # Only match on a path-segment boundary.
        next unless !rest || rest.empty? || rest[0] == ?/

        env['SCRIPT_NAME'] = (script_name + location)
        env['PATH_INFO'] = rest

        return app.call(env)
      end

      # X-Cascade lets an outer router try other apps on 404.
      [404, {"Content-Type" => "text/plain", "X-Cascade" => "pass"}, ["Not Found: #{path}"]]

    ensure
      env['PATH_INFO'] = path
      env['SCRIPT_NAME'] = script_name
    end
  end
end
| 34.56701 | 92 | 0.640322 |
# Arbol node computing the component-wise modulo of two long[3] operands.
class Modulo < Base
  # Register the Arduino-side C helper that performs the element-wise %.
  Arbol.add_mapped_class(
    'modulo',
    Modulo,
    %{void modulo(long op1[3], long op2[3], long out[3]) {
out[0] = op1[0] % op2[0];
out[1] = op1[1] % op2[1];
out[2] = op1[2] % op2[2];
}}
  )

  attr_accessor :op1
  attr_accessor :op2

  # Parameter slots resolved before code generation.
  def param_keys
    [:op1, :op2]
  end

  # Emitted per frame unless the node was hoisted by frame optimization.
  def arduino_code
    unless @frame_optimized
      [
        "modulo(#{@op1.name}, #{@op2.name}, #{@name});"
      ]
    else
      []
    end
  end

  # Emitted once per cycle when the node IS frame-optimized.
  def cycle_level_arduino_code
    if @frame_optimized
      [
        "modulo(#{@op1.name}, #{@op2.name}, #{@name});"
      ]
    else
      []
    end
  end

  # Declares the result storage at the sketch's top level.
  def top_level_scope_code
    [
      "long #{@name}[3];"
    ]
  end
end
module Arbol
  # Reopens Documentation to add the markdown help entry for `mod`.
  class Documentation
    def mod
      %{--
### mod(operator1, operator2)
* **operator1**
* **operator2**
Modulo of the two operators. Can also be used with the form `operator1 % operator2`.
}
    end
  end
end
# Builds the hash node describing a modulo operation over the two
# (resolved) operands.
def mod(op1, op2)
  ArbolHash.new.tap do |node|
    node[:type] = 'modulo'
    node[:op1]  = resolve(op1)
    node[:op2]  = resolve(op2)
  end
end
6a5381dc6178c7e6afc558b5157c078bf6e9ab97 | 871 | require 'sinatra'
# Fixture pages for fragment-based (DOM) parameter handling: state lives
# after the '#' in the URL and never reaches the server.
get '/' do
  <<-EOHTML
<html>
<body>
<a href='/dom/#/param/some-name'>DOM link</a>
</body>
</html>
EOHTML
end

# Variant exposing two fragment inputs.
get '/inputtable' do
  <<-EOHTML
<html>
<body>
<a href='/dom/#/input1/value1/input2/value2'>DOM link</a>
</body>
</html>
EOHTML
end

# Reads the 'param' value out of location.hash client-side and injects it
# into the page (deliberately unsanitized fixture behavior).
get '/dom/' do
  <<-EOHTML
<html>
<script>
function getQueryVariable(variable) {
var splits = window.location.hash.split('/');
return decodeURI( splits[splits.indexOf( variable ) + 1] );
}
</script>
<body>
<div id="container">
</div>
<script>
document.getElementById('container').innerHTML = getQueryVariable('param');
</script>
</body>
</html>
EOHTML
end
| 19.795455 | 91 | 0.474168 |
7928090282a3ec296a88ea2132237156f0705957 | 97 | # encoding: utf-8
# Fixture module exposing a UTF-8-encoded literal hash.
module HashStringsUTF8
  # Returns a freshly built Hash with a string key and value.
  def self.literal_hash
    { 'foo' => 'bar' }
  end
end
| 12.125 | 23 | 0.659794 |
08ac53c7fa7ce1dd966378c6ca34baee6bb5a974 | 641 | # -*- encoding: utf-8 -*-
# Version constant lives alongside the wrapped JavaScript source.
require './lib/ember/data/version'

# Gem packaging for the ember-data source wrapper (ships the JS dist files).
Gem::Specification.new do |gem|
  gem.name          = "ember-data-source"
  gem.authors       = ["Yehuda Katz"]
  gem.email         = ["[email protected]"]
  gem.date          = Time.now.strftime("%Y-%m-%d")
  gem.summary       = %q{ember-data source code wrapper.}
  gem.description   = %q{ember-data source code wrapper for use with Ruby libs.}
  gem.homepage      = "https://github.com/emberjs/data"
  # Gem versions may not contain '-', so pre-release tags become '.'.
  gem.version       = Ember::Data::VERSION.sub('-', '.')

  gem.add_dependency "ember-source"

  gem.files = %w(VERSION) + Dir['dist/ember-data*.js', 'lib/ember/data/*.rb']
end
| 35.611111 | 80 | 0.609984 |
# CocoaPods spec for TTTAttributedLabel 1.5.0.
Pod::Spec.new do |s|
  s.name     = 'TTTAttributedLabel'
  s.version  = '1.5.0'
  s.authors  = {'Mattt Thompson' => '[email protected]'}
  s.homepage = 'https://github.com/mattt/TTTAttributedLabel/'
  s.summary  = 'A drop-in replacement for UILabel that supports attributes, data detectors, links, and more.'
  s.source   = {:git => 'https://github.com/mattt/TTTAttributedLabel.git', :tag => '1.5.0'}
  s.license  = 'MIT'
  s.platform = :ios
  s.requires_arc = true
  # Silence warnings from CoreText toll-free bridging in non-ARC callers.
  s.compiler_flags = '-Wno-arc-bridge-casts-disallowed-in-nonarc'
  s.frameworks = 'CoreText'
  s.source_files = 'TTTAttributedLabel'
end
| 36.875 | 108 | 0.684746 |
class CreateNodes < ActiveRecord::Migration[5.2]
  # Creates the nodes table: one row per maze cell, holding its grid
  # coordinates and the ids of its four neighboring nodes.
  def change
    create_table :nodes do |t|
      t.integer :row
      t.integer :col
      t.references :maze
      # One integer column per compass direction, in N/E/S/W order.
      %i[north east south west].each do |direction|
        t.integer :"#{direction}_neighbor"
      end
    end
  end
end
| 21.714286 | 48 | 0.657895 |
ab94fd8d0945ed5622e86da6c9da98908f7033af | 2,098 | class Xml2 < Formula
desc "Makes XML and HTML more amenable to classic UNIX text tools"
homepage "https://web.archive.org/web/20160730094113/www.ofb.net/~egnor/xml2/"
url "https://web.archive.org/web/20160427221603/download.ofb.net/gale/xml2-0.5.tar.gz"
sha256 "e3203a5d3e5d4c634374e229acdbbe03fea41e8ccdef6a594a3ea50a50d29705"
license "GPL-2.0"
livecheck do
skip "Upstream is gone and the formula uses archive.org URLs"
end
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_monterey: "2a8bc2144c34a087bd0ec424f1079c0813e3de5e069e1cf23fa5c904125d42be"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "23f1ef27cd811f9b846f80775e4f0981998a7cad1230b0f98261ba42dc85c325"
sha256 cellar: :any_skip_relocation, monterey: "52c859eeda679620c5b02db82a7a7d1353c53bd43fae6e53c9028b99afe48b1d"
sha256 cellar: :any_skip_relocation, big_sur: "c6e91ba5879e8891be5aca28eba77249f18c8860d2d387447da0ca13efbe066c"
sha256 cellar: :any_skip_relocation, catalina: "832aa209cf47c4f18ad512f7eca2acf76aa047522b3a417466722203203bd71e"
sha256 cellar: :any_skip_relocation, mojave: "63b136beee1c47726c6756f3c57bf55fcff4e660cd280d090aa35640138465b6"
sha256 cellar: :any_skip_relocation, high_sierra: "548421fe00487faa136c700e4d18f48b6bc349956044e2aa0f65667c3856883d"
sha256 cellar: :any_skip_relocation, sierra: "d8d4bb9ceb9d97b648d3fd3cffb1e2fad2e4d82aa6aa3397c22f53fe5468ac56"
sha256 cellar: :any_skip_relocation, el_capitan: "85e939873edbb3dd1b072437992a0c404534a5084cccd6f9f76d99b09ddda695"
sha256 cellar: :any_skip_relocation, yosemite: "3883d5997021b3a5bd57d8830906cb9b370da0f6e1927b6c7e9dcd6740e05c5c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "51048810fc19ddd5955049fb8d358b0f4862e9333b056efd6d28a0e6c42c6e4e"
end
depends_on "pkg-config" => :build
uses_from_macos "libxml2"
def install
system "./configure", "--prefix=#{prefix}"
system "make", "install"
end
test do
assert_equal "/test", pipe_output("#{bin}/xml2", "<test/>", 0).chomp
end
end
| 52.45 | 123 | 0.795043 |
# Default user fixture; emails are sequenced so each build stays unique.
FactoryBot.define do
  factory :user do
    name { 'kalu' }
    password { '123456' }
    sequence(:email) { |i| "my-email-#{i}@mail.com" }
  end
end
| 18.75 | 53 | 0.586667 |
4a169cf5f0682ebbf421e9473e00b0f1daa6cc23 | 11,400 | #
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright (c) 2008, 2009 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'rubygems'
require 'json'
require 'chef'
require 'chef/role'
require 'chef/cookbook/metadata'
require 'tempfile'
require 'rake'
# Allow REMOTE options to be overridden on the command line
REMOTE_HOST = ENV["REMOTE_HOST"] if ENV["REMOTE_HOST"] != nil
REMOTE_SUDO = ENV["REMOTE_SUDO"] if ENV["REMOTE_SUDO"] != nil

# When REMOTE_HOST is set, rsync targets and shell commands are prefixed so
# installs run over ssh (optionally under sudo on the remote end);
# otherwise everything runs locally under sudo.
if defined? REMOTE_HOST
  REMOTE_PATH_PREFIX = "#{REMOTE_HOST}:"
  REMOTE_EXEC_PREFIX = "ssh #{REMOTE_HOST}"
  REMOTE_EXEC_PREFIX += " sudo" if defined? REMOTE_SUDO
  LOCAL_EXEC_PREFIX = ""
else
  REMOTE_PATH_PREFIX = ""
  REMOTE_EXEC_PREFIX = ""
  LOCAL_EXEC_PREFIX = "sudo"
end
desc "Update your repository from source control"
task :update do
puts "** Updating your repository"
case $vcs
when :svn
sh %{svn up}
when :git
pull = false
IO.foreach(File.join(TOPDIR, ".git", "config")) do |line|
pull = true if line =~ /\[remote "origin"\]/
end
if pull
sh %{git pull}
else
puts "* Skipping git pull, no origin specified"
end
else
puts "* No SCM configured, skipping update"
end
end
desc "Test your cookbooks for syntax errors"
task :test_recipes do
puts "** Testing your cookbooks for syntax errors"
if File.exists?(TEST_CACHE)
cache = JSON.load(open(TEST_CACHE).read)
trap("INT") { puts "INT received, flushing test cache"; write_cache(cache) }
else
cache = {}
end
recipes = ["*cookbooks"].map { |folder|
Dir[File.join(TOPDIR, folder, "**", "*.rb")]
}.flatten
recipes.each do |recipe|
print "Testing recipe #{recipe}: "
recipe_mtime = File.stat(recipe).mtime.to_s
if cache.has_key?(recipe)
if cache[recipe]["mtime"] == recipe_mtime
puts "No modification since last test."
next
end
else
cache[recipe] = {}
end
sh %{ruby -c #{recipe}} do |ok, res|
if ok
cache[recipe]["mtime"] = recipe_mtime
else
write_cache(cache)
raise "Syntax error in #{recipe}"
end
end
end
write_cache(cache)
end
desc "Test your templates for syntax errors"
task :test_templates do
puts "** Testing your cookbooks for syntax errors"
if File.exists?(TEST_CACHE)
cache = JSON.load(open(TEST_CACHE).read)
trap("INT") { puts "INT received, flushing test cache"; write_cache(cache) }
else
cache = {}
end
templates = ["*cookbooks"].map { |folder|
Dir[File.join(TOPDIR, folder, "**", "*.erb")]
}.flatten
templates.each do |template|
print "Testing template #{template}: "
template_mtime = File.stat(template).mtime.to_s
if cache.has_key?(template)
if cache[template]["mtime"] == template_mtime
puts "No change since last test."
next
end
else
cache[template] = {}
end
sh %{erubis -x #{template} | ruby -c} do |ok, res|
if ok
cache[template]["mtime"] = template_mtime
else
write_cache(cache)
raise "Syntax error in #{template}"
end
end
end
write_cache(cache)
end
desc "Test your cookbooks for syntax errors"
task :test => [ :test_recipes , :test_templates ]
# Persist the syntax-test cache to TEST_CACHE as JSON so later runs can
# skip files whose mtimes have not changed.
def write_cache(cache)
  File.open(TEST_CACHE, "w") do |io|
    JSON.dump(cache, io)
  end
end
desc "Install the latest copy of the repository on this Chef Server"
task :install => [ :update, :test, :metadata, :roles ] do
puts "** Installing your cookbooks"
directories = [
COOKBOOK_PATH,
SITE_COOKBOOK_PATH,
CHEF_CONFIG_PATH
]
puts "* Creating Directories"
directories.each do |dir|
sh "#{LOCAL_EXEC_PREFIX} #{REMOTE_EXEC_PREFIX} mkdir -p #{dir}"
sh "#{LOCAL_EXEC_PREFIX} #{REMOTE_EXEC_PREFIX} chown root #{dir}"
end
puts "* Installing new Cookbooks"
sh "#{LOCAL_EXEC_PREFIX} rsync -rlt --delete --exclude '.svn' --exclude '.git*' cookbooks/ #{REMOTE_PATH_PREFIX}#{COOKBOOK_PATH}"
puts "* Installing new Site Cookbooks"
sh "#{LOCAL_EXEC_PREFIX} rsync -rlt --delete --exclude '.svn' --exclude '.git*' site-cookbooks/ #{REMOTE_PATH_PREFIX}#{SITE_COOKBOOK_PATH}"
puts "* Installing new Node Roles"
sh "#{LOCAL_EXEC_PREFIX} rsync -rlt --delete --exclude '.svn' --exclude '.git*' roles/ #{REMOTE_PATH_PREFIX}#{ROLE_PATH}"
if File.exists?(File.join(TOPDIR, "config", "server.rb"))
puts "* Installing new Chef Server Config"
sh "#{LOCAL_EXEC_PREFIX} rsync -rlt --delete --exclude '.svn' --exclude '.git*' config/server.rb #{REMOTE_PATH_PREFIX}#{CHEF_SERVER_CONFIG}"
end
if File.exists?(File.join(TOPDIR, "config", "client.rb"))
puts "* Installing new Chef Client Config"
sh "#{LOCAL_EXEC_PREFIX} rsync -rlt --delete --exclude '.svn' --exclude '.git*' config/client.rb #{REMOTE_PATH_PREFIX}#{CHEF_CLIENT_CONFIG}"
end
end
desc "By default, run rake test"
task :default => [ :test ]
desc "Create a new cookbook (with COOKBOOK=name, optional CB_PREFIX=site-)"
task :new_cookbook do
create_cookbook(File.join(TOPDIR, "#{ENV["CB_PREFIX"]}cookbooks"))
create_readme(File.join(TOPDIR, "#{ENV["CB_PREFIX"]}cookbooks"))
create_metadata(File.join(TOPDIR, "#{ENV["CB_PREFIX"]}cookbooks"))
end
# Creates the standard cookbook directory skeleton under +dir+ and writes
# a default recipe carrying the configured license header. Requires the
# COOKBOOK environment variable; an existing default.rb is left untouched.
def create_cookbook(dir)
  raise "Must provide a COOKBOOK=" unless ENV["COOKBOOK"]
  puts "** Creating cookbook #{ENV["COOKBOOK"]}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "attributes")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "recipes")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "definitions")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "libraries")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "resources")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "providers")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "files", "default")}"
  sh "mkdir -p #{File.join(dir, ENV["COOKBOOK"], "templates", "default")}"
  # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
  unless File.exist?(File.join(dir, ENV["COOKBOOK"], "recipes", "default.rb"))
    open(File.join(dir, ENV["COOKBOOK"], "recipes", "default.rb"), "w") do |file|
      file.puts <<-EOH
#
# Cookbook Name:: #{ENV["COOKBOOK"]}
# Recipe:: default
#
# Copyright #{Time.now.year}, #{COMPANY_NAME}
#
EOH
      # Append the license block matching NEW_COOKBOOK_LICENSE.
      case NEW_COOKBOOK_LICENSE
      when :apachev2
        file.puts <<-EOH
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
EOH
      when :none
        file.puts <<-EOH
# All rights reserved - Do Not Redistribute
#
EOH
      end
    end
  end
end
# Writes a skeleton README.rdoc into the named cookbook under +dir+ unless
# one already exists. Requires the COOKBOOK environment variable and
# assumes the cookbook directory itself already exists.
def create_readme(dir)
  raise "Must provide a COOKBOOK=" unless ENV["COOKBOOK"]
  puts "** Creating README for cookbook: #{ENV["COOKBOOK"]}"
  readme = File.join(dir, ENV["COOKBOOK"], "README.rdoc")
  # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
  unless File.exist?(readme)
    open(readme, "w") do |file|
      file.puts <<-EOH
= DESCRIPTION:
= REQUIREMENTS:
= ATTRIBUTES:
= USAGE:
EOH
    end
  end
end
# Write a skeleton metadata.rb for ENV["COOKBOOK"] under +dir+ unless one
# already exists. The license string comes from NEW_COOKBOOK_LICENSE and
# the maintainer fields from the COMPANY_NAME / SSL_EMAIL_ADDRESS constants.
def create_metadata(dir)
  raise "Must provide a COOKBOOK=" unless ENV["COOKBOOK"]
  puts "** Creating metadata for cookbook: #{ENV["COOKBOOK"]}"
  case NEW_COOKBOOK_LICENSE
  when :apachev2
    license = "Apache 2.0"
  when :none
    license = "All rights reserved"
  end
  metadata_file = File.join(dir, ENV["COOKBOOK"], "metadata.rb")
  # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
  unless File.exist?(metadata_file)
    File.open(metadata_file, "w") do |file|
      # Reference the README from the metadata only when one is present;
      # otherwise the interpolation below emits an empty line.
      long_description = nil
      if File.exist?(File.join(dir, ENV["COOKBOOK"], 'README.rdoc'))
        long_description = "long_description IO.read(File.join(File.dirname(__FILE__), 'README.rdoc'))"
      end
      file.puts <<-EOH
maintainer "#{COMPANY_NAME}"
maintainer_email "#{SSL_EMAIL_ADDRESS}"
license "#{license}"
description "Installs/Configures #{ENV["COOKBOOK"]}"
#{long_description}
version "0.1"
EOH
    end
  end
end
desc "Create a new self-signed SSL certificate for FQDN=foo.example.com"
task :ssl_cert do
  $expect_verbose = true
  fqdn = ENV["FQDN"]
  # Split the FQDN into host and domain; both parts must be present.
  fqdn =~ /^(.+?)\.(.+)$/
  hostname = $1
  domain = $2
  raise "Must provide FQDN!" unless fqdn && hostname && domain
  puts "** Creating self signed SSL Certificate for #{fqdn}"
  # Generate a 2048-bit RSA key in the CA directory.
  sh("(cd #{CADIR} && openssl genrsa 2048 > #{fqdn}.key)")
  sh("(cd #{CADIR} && chmod 644 #{fqdn}.key)")
  puts "* Generating Self Signed Certificate Request"
  # openssl reads the certificate subject fields from a temporary
  # config file built from the SSL_* Rakefile constants.
  tf = Tempfile.new("#{fqdn}.ssl-conf")
  ssl_config = <<EOH
[ req ]
distinguished_name = req_distinguished_name
prompt = no
[ req_distinguished_name ]
C = #{SSL_COUNTRY_NAME}
ST = #{SSL_STATE_NAME}
L = #{SSL_LOCALITY_NAME}
O = #{COMPANY_NAME}
OU = #{SSL_ORGANIZATIONAL_UNIT_NAME}
CN = #{fqdn}
emailAddress = #{SSL_EMAIL_ADDRESS}
EOH
  tf.puts(ssl_config)
  tf.close
  # Self-signed certificate valid for ten years, then a human-readable
  # info dump and a combined cert+key PEM for convenience.
  sh("(cd #{CADIR} && openssl req -config '#{tf.path}' -new -x509 -nodes -sha1 -days 3650 -key #{fqdn}.key > #{fqdn}.crt)")
  sh("(cd #{CADIR} && openssl x509 -noout -fingerprint -text < #{fqdn}.crt > #{fqdn}.info)")
  sh("(cd #{CADIR} && cat #{fqdn}.crt #{fqdn}.key > #{fqdn}.pem)")
  sh("(cd #{CADIR} && chmod 644 #{fqdn}.pem)")
end
desc "Build cookbook metadata.json from metadata.rb"
task :metadata do
  # Look for cookbooks in both the main and the site-local trees.
  Chef::Config[:cookbook_path] = [ File.join(TOPDIR, 'cookbooks'), File.join(TOPDIR, 'site-cookbooks') ]
  cl = Chef::CookbookLoader.new
  cl.each do |cookbook|
    # Restrict to a single cookbook when COOKBOOK= is given.
    if ENV['COOKBOOK']
      next unless cookbook.name.to_s == ENV['COOKBOOK']
    end
    cook_meta = Chef::Cookbook::Metadata.new(cookbook)
    Chef::Config.cookbook_path.each do |cdir|
      metadata_rb_file = File.join(cdir, cookbook.name.to_s, 'metadata.rb')
      metadata_json_file = File.join(cdir, cookbook.name.to_s, 'metadata.json')
      # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
      if File.exist?(metadata_rb_file)
        puts "Generating metadata for #{cookbook.name}"
        cook_meta.from_file(metadata_rb_file)
        File.open(metadata_json_file, "w") do |f|
          f.write(JSON.pretty_generate(cook_meta))
        end
      end
    end
  end
end
desc "Build roles from roles/role_name.json from role_name.rb"
task :roles do
  Chef::Config[:role_path] = File.join(TOPDIR, 'roles')
  # Convert every Ruby role definition (recursively) into a JSON role
  # file of the same basename, written to the top-level roles/ directory.
  Dir[File.join(TOPDIR, 'roles', '**', '*.rb')].each do |role_file|
    short_name = File.basename(role_file, '.rb')
    puts "Generating role JSON for #{short_name}"
    role = Chef::Role.new
    role.name(short_name)
    role.from_file(role_file)
    File.open(File.join(TOPDIR, 'roles', "#{short_name}.json"), "w") do |f|
      f.write(JSON.pretty_generate(role))
    end
  end
end
| 31.578947 | 144 | 0.65807 |
01e77bdfba999cac70e6d977fd1af0e6536c88c1 | 1,489 | # -*- encoding: utf-8 -*-
# stub: octokit 4.6.2 ruby lib
# Installed-gem specification stub for octokit 4.6.2 (generated by RubyGems).
Gem::Specification.new do |s|
  s.name = "octokit".freeze
  s.version = "4.6.2"
  s.required_rubygems_version = Gem::Requirement.new(">= 1.3.5".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Wynn Netherland".freeze, "Erik Michaels-Ober".freeze, "Clint Shryock".freeze]
  s.date = "2016-11-22"
  s.description = "Simple wrapper for the GitHub API".freeze
  s.email = ["[email protected]".freeze, "[email protected]".freeze, "[email protected]".freeze]
  s.homepage = "https://github.com/octokit/octokit.rb".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
  s.rubygems_version = "2.5.2".freeze
  s.summary = "Ruby toolkit for working with the GitHub API".freeze
  s.installed_by_version = "2.5.2" if s.respond_to? :installed_by_version

  # Declare dependencies through whichever API this RubyGems version supports.
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<bundler>.freeze, ["~> 1.0"])
      s.add_runtime_dependency(%q<sawyer>.freeze, [">= 0.5.3", "~> 0.8.0"])
    else
      s.add_dependency(%q<bundler>.freeze, ["~> 1.0"])
      s.add_dependency(%q<sawyer>.freeze, [">= 0.5.3", "~> 0.8.0"])
    end
  else
    s.add_dependency(%q<bundler>.freeze, ["~> 1.0"])
    s.add_dependency(%q<sawyer>.freeze, [">= 0.5.3", "~> 0.8.0"])
  end
end
| 40.243243 | 116 | 0.660846 |
abc79eefa06768b6ae787408fdff753d3ea91079 | 195 | class CreateTodos < ActiveRecord::Migration[6.1]
def change
  # todos table: free-text title, optional notes, and a status string.
  create_table :todos do |t|
    t.string :todo
    t.text :notes
    t.string :status
    t.timestamps
  end
end
end
| 16.25 | 48 | 0.625641 |
39f75b81fda38e60299f7d4558abf47e1f7141ea | 2,195 | # frozen_string_literal: true
require 'rails_helper'
# Feature coverage for creating, editing and deleting course videos as a
# teaching assistant.
RSpec.feature 'Course: Videos: Management' do
  let(:instance) { create(:instance, :with_video_component_enabled) }

  with_tenant(:instance) do
    let(:course) { create(:course, :with_video_component_enabled) }
    before { login_as(user, scope: :user) }

    context 'As a Course Teaching Assistant' do
      let(:user) { create(:course_teaching_assistant, course: course).user }

      scenario 'I can create a video' do
        video = build_stubbed(:video)
        visit course_videos_path(course)
        click_link(nil, href: new_course_video_path(course, tab: course.default_video_tab))
        expect(page).to have_selector('h1', text: I18n.t('course.video.videos.new.header'))

        # Fill in the new-video form and submit it.
        fill_in 'video_title', with: video.title
        select 'default', from: 'video_tab_id'
        fill_in 'video_description', with: video.description
        fill_in 'video_start_at', with: video.start_at
        fill_in 'video_url', with: video.url
        click_button 'submit'

        expect(current_path).to eq(course_videos_path(course))
        expect(page).to have_selector('div.alert.alert-success')

        video_created = course.videos.last
        expect(page).to have_content_tag_for(video_created)
      end

      scenario 'I can edit or delete a video from the videos page' do
        unpublished_video = create(:video, course: course)
        edit_path = edit_course_video_path(course, unpublished_video)

        # Edit video Page
        visit course_videos_path(course)
        within find(content_tag_selector(unpublished_video)) do
          find('.btn.btn-default.edit').click
        end
        expect(current_path).to eq(edit_path)

        new_title = 'zzz'
        fill_in 'video_title', with: new_title
        click_button 'submit'
        expect(unpublished_video.reload.title).to eq(new_title)
        expect(current_path).to eq(course_videos_path(course))

        # Delete video
        expect do
          within find(content_tag_selector(unpublished_video)) do
            find('.btn.btn-danger.delete').click
          end
        end.to change { course.videos.count }.by(-1)
      end
    end
  end
end
| 34.296875 | 91 | 0.671982 |
bfd0af56450b6af96755c597dd93524c93661118 | 151 | class SomeWorker
include Sidekiq::Worker
sidekiq_options merger: { key: "foo" }
# Logs the IDs this (possibly merged) job was enqueued with.
def perform(*ids)
  puts "Get IDs: #{ids.inspect}"
end
end
| 15.1 | 40 | 0.668874 |
28f5d81774110644761b689f82e1a3b19bce1a78 | 217 | class ChangeResourceFileSize < ActiveRecord::Migration
# Widen resource_file_size to bigint so files larger than ~2GB fit.
def up
  change_column :file_assets, :resource_file_size, :bigint
end
# Revert resource_file_size back to a 4-byte integer column.
def down
  change_column :file_assets, :resource_file_size, :integer
end
end
| 21.7 | 61 | 0.764977 |
61ebdd3e0890ffe81858b53084366f045a4ecddd | 978 | module Aviator
# Cinder v2 "update snapshot" request: renames and/or re-describes an
# existing volume snapshot identified by :snapshot_id.
define_request :update_snapshot, inherit: [:openstack, :common, :v2, :public, :base] do

  meta :service, :volume
  meta :api_version, :v2

  link 'documentation', 'http://docs.openstack.org/api/openstack-block-storage/2.0/content/PUT_updateSnapshot__v2__tenant_id__snapshots__snapshot_id__Snapshots.html'

  param :snapshot_id, required: true, alias: :id
  param :name, required: false
  param :description, required: false

  # Request body: only the optional params actually supplied are sent.
  def body
    p = {
      snapshot: {}
    }

    optional_params.each do |key|
      p[:snapshot][key] = params[key] if params[key]
    end

    p
  end

  def headers
    super
  end

  def http_method
    :put
  end

  # Builds the endpoint URL from the volumev2 admin endpoint found in
  # the session's service catalog.
  def url
    service_spec = session_data[:catalog].find{|s| s[:type] == 'volumev2' }
    v2_url = service_spec[:endpoints].find{|e| e[:interface] == 'admin'}[:url]
    "#{ v2_url }/snapshots/#{ params[:snapshot_id] }"
  end

end
end
| 23.285714 | 167 | 0.616564 |
1c8dff477239a7ede0f57b1dc34ba68ac104fb03 | 1,699 | # :stopdoc:
#
# It happens sometimes that it is very expensive to construct a logging
# message; for example, if a large object structure has to be traversed
# during execution of an `object.to_s` method. It would be convenient to
# delay creation of the message until the log event actually takes place.
#
# For example, with a logger configured only to show WARN messages and higher,
# creating the log message for an INFO message would be wasteful. The INFO log
# event would never be generated in this case.
#
# Log message creation can be performed lazily by wrapping the expensive
# message generation code in a block and passing that to the logging method.
require 'logging'

# Log to STDOUT at INFO and above; DEBUG events are discarded.
Logging.logger.root.appenders = Logging.appenders.stdout
Logging.logger.root.level = :info

# We use this dummy method in order to see if the method gets called; in
# practice, such a method might do complicated string operations to
# construct a log message.
# Stand-in for a costly message builder: prints a marker so the examples
# below can show exactly when it runs, then returns the message text.
def expensive_method
  "Expensive message".tap { puts "Called!" }
end
log = Logging.logger['Lazy']

# Logged the usual way, expensive_method is evaluated before #debug is
# even entered, so the Logging framework cannot prevent the work even
# though the DEBUG event itself is discarded.
log.info("Normal")
log.debug(expensive_method)

# Wrapped in a block, the message code is simply never executed when the
# current log level does not need it.
log.info("Block unused")
log.debug { expensive_method }

# When the level does need the message, the block runs and the message
# appears as expected.
log.info("Block used")
log.warn { expensive_method }
# :startdoc:
| 36.934783 | 88 | 0.749853 |
e9f066f33ca4ba695dbdf89db22b06e955a1cc74 | 2,327 | require 'matrix'
require 'test/unit'
require_relative '../lib/csr_matrix'
require_relative '../lib/csr_matrix_factory'
require_relative '../lib/diagonal_matrix'
require_relative '../lib/diagonal_matrix_factory'
require_relative '../lib/dok_matrix'
require_relative '../lib/dok_matrix_factory'
require_relative '../lib/sparse_matrix_factory'
# Exercises SparseMatrixFactory.build for every concrete factory, feeding
# it both Matrix and Array inputs, plus the unsupported-input error paths.
class SparseMatrixFactoryTest < Test::Unit::TestCase
  # 3x3 identity used as the fixture for every happy-path test.
  IDENTITY = [[1, 0, 0], [0, 1, 0], [0, 0, 1]].freeze

  # Builds +input+ through +factory+ and verifies both the concrete class
  # of the result and the stored non-zero values. Extracted because every
  # happy-path test previously repeated the same fixture and assertions.
  def assert_built(input, factory, expected_class)
    built = SparseMatrixFactory.build(input, factory)
    assert(built.is_a?(expected_class))
    assert_equal([1, 1, 1], built.read_all)
  end

  def test_build_csr_from_matrix
    assert_built(Matrix[*IDENTITY], CSRMatrixFactory.new, CSRMatrix)
  end

  def test_build_dok_from_matrix
    assert_built(Matrix[*IDENTITY], DOKMatrixFactory.new, DOKMatrix)
  end

  def test_build_diagonal_from_matrix
    assert_built(Matrix[*IDENTITY], DiagonalMatrixFactory.new, DiagonalMatrix)
  end

  # Array inputs get fresh (unfrozen) rows in case the factory mutates them.
  def test_build_csr_from_array
    assert_built(IDENTITY.map(&:dup), CSRMatrixFactory.new, CSRMatrix)
  end

  def test_build_dok_from_array
    assert_built(IDENTITY.map(&:dup), DOKMatrixFactory.new, DOKMatrix)
  end

  def test_build_diagonal_from_array
    assert_built(IDENTITY.map(&:dup), DiagonalMatrixFactory.new, DiagonalMatrix)
  end

  def test_build_from_unsupported
    assert_raise(TypeError) { SparseMatrixFactory.build({}) }
    assert_raise(TypeError) { SparseMatrixFactory.build(0) }
    assert_raise(TypeError) { SparseMatrixFactory.build('Hello, world!') }
  end

  def test_build_without_factory
    matrix_input = Matrix[*IDENTITY]
    assert_raise(TypeError) { SparseMatrixFactory.build(matrix_input, {}) }
  end
end
| 35.8 | 78 | 0.707778 |
7991900354363af63ba16ef649001e859a627299 | 209 | class CreateShoppingListsItemsTable < ActiveRecord::Migration
# Join table (no primary key) linking shopping lists and items.
def change
  create_table :items_shopping_lists, id: false do |t|
    t.belongs_to :shopping_list
    t.belongs_to :item
  end
end
end
| 23.222222 | 61 | 0.736842 |
6af82f753fc3eabe704eedac95c55706cea457ae | 1,693 | PRODUCT_COLLSCAN = <<-EOF
{
"queryPlanner" => {
"plannerVersion" => 1,
"namespace" => "awesome_explain.products",
"indexFilterSet" => false,
"parsedQuery" => {
"name" => {
"$eq" => "Coffee Beans - Chocolate"
}
},
"winningPlan" => {
"stage" => "COLLSCAN",
"filter" => {
"name" => {
"$eq" => "Coffee Beans - Chocolate"
}
},
"direction" => "forward"
},
"rejectedPlans" => []
},
"executionStats" => {
"executionSuccess" => true,
"nReturned" => 0,
"executionTimeMillis" => 2,
"totalKeysExamined" => 0,
"totalDocsExamined" => 1000,
"executionStages" => {
"stage" => "COLLSCAN",
"filter" => {
"name" => {
"$eq" => "Coffee Beans - Chocolate"
}
},
"nReturned" => 0,
"executionTimeMillisEstimate" => 0,
"works" => 1002,
"advanced" => 0,
"needTime" => 1001,
"needYield" => 0,
"saveState" => 7,
"restoreState" => 7,
"isEOF" => 1,
"invalidates" => 0,
"direction" => "forward",
"docsExamined" => 1000
},
"allPlansExecution" => []
},
"serverInfo" => {
"host" => "Ahmeds-MacBook-Pro.local",
"port" => 27017,
"version" => "3.4.10",
"gitVersion" => "078f28920cb24de0dd479b5ea6c66c644f6326e9"
},
"ok" => 1.0
}
EOF
| 28.216667 | 62 | 0.391022 |
1a2ea9550e61fda112ae1d0bd714ce13097fe6bc | 771 | # note this example is inplace and destructive
# Solves the tridiagonal system A x = d with the Thomas algorithm.
#
# a - sub-diagonal, indexed from 1 (a[0] is unused)
# b - main diagonal
# c - super-diagonal, indexed from 0 (c[n-1] is unused)
# d - right-hand side; overwritten with the solution
#
# Destructive: c and d are modified in place. Returns d.
# Uses fdiv / a float reciprocal so that Integer inputs are solved
# correctly too (plain `/` would silently truncate to integer division).
def thomas(a, b, c, d)
  # Normalize the first row.
  c[0] = c[0].fdiv(b[0])
  d[0] = d[0].fdiv(b[0])

  n = d.length # number of equations to solve

  # Forward sweep: eliminate the sub-diagonal.
  (1...n).each do |i|
    scale = 1.0 / (b[i] - c[i - 1] * a[i]) # scale factor for c and d
    c[i] *= scale
    d[i] = (d[i] - a[i] * d[i - 1]) * scale
  end

  # Back substitution.
  (n - 2).downto(0) do |j|
    d[j] -= c[j] * d[j + 1]
  end

  d
end
# example for matrix
# [1 4 0][x] [7]
# [2 3 5][y] = [5]
# [0 3 6][z] [3]
# [.8666]
# soln will equal [1.533]
# [-.266]
# note we index a from 1 and c from 0
# Solve the example system; the solution also ends up in d (in-place).
a = [0.0, 2.0, 3.0]
b = [1.0, 3.0, 6.0]
c = [4.0, 5.0, 0.0]
d = [7.0, 5.0, 3.0]

soln = thomas(a, b, c, d)
puts soln
| 19.769231 | 67 | 0.470817 |
d5984f64898a75f304c280f059bc34d0ad20048c | 167 | dir = File.dirname(__FILE__) + '/facets/'
# Require every Ruby file found in the facets/ directory.
Dir.new(dir).each do |entry|
  require('facets/' + entry) if File.extname(entry) == '.rb'
end
| 23.857143 | 49 | 0.646707 |
2658ed9437006da23f845f6162a19b3218637890 | 6,874 |
require 'spec_helper'
# Specs for BoardPref, the schema attribute that reads from / writes to a
# board's nested "prefs" hash under a different remote key.
RSpec.describe 'Trello::Schema::Attribute::BoardPref' do
  let(:attribute) { Trello::Schema::Attribute::BoardPref.new(name: name, options: options, serializer: serializer) }

  # Reading: the raw value may come from params['prefs'][remote_key] or,
  # for already-deserialized input, straight from params[name].
  describe '#build_attributes' do
    let(:name) { :visibility_level }
    let(:options) { { remote_key: :permissionLevel } }
    let(:serializer) { double('serializer') }
    let(:default) { nil }

    before do
      allow(serializer)
        .to receive(:deserialize)
        .with(raw_value, default)
        .and_return(deserialize_result)
    end

    let(:build_attributes) { attribute.build_attributes(params, attributes) }
    let(:attributes) { { name: 'John' } }

    context 'when prefs and name key are both missing' do
      let(:raw_value) { nil }
      let(:deserialize_result) { nil }
      let(:params) { {} }

      it 'deserialize and set the attribute to nil' do
        expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: nil })
      end
    end

    context "when params's key prefs exits" do
      context 'and target key is missing' do
        let(:raw_value) { nil }
        let(:deserialize_result) { nil }
        let(:params) { { 'prefs' => {} } }

        it 'deserialize and set the attribute to nil' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: nil })
        end
      end

      context 'and target value is nil' do
        let(:raw_value) { nil }
        let(:deserialize_result) { nil }
        let(:params) { { 'prefs' => { 'permissionLevel' => nil } } }

        it 'deserialize and set the attribute to nil' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: nil })
        end
      end

      # false must survive deserialization (must not be treated as missing).
      context 'and target value is false' do
        let(:raw_value) { false }
        let(:deserialize_result) { false }
        let(:params) { { 'prefs' => { 'permissionLevel' => false } } }

        it 'deserialize and set the attribute to false' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: false })
        end
      end

      context 'and target value is not nil or false' do
        let(:raw_value) { 'org' }
        let(:deserialize_result) { 'org' }
        let(:params) { { 'prefs' => { 'permissionLevel' => 'org' } } }

        it 'deserialize and set the attribute' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: 'org' })
        end
      end
    end

    context "when params's key prefs does not exits" do
      context 'and target value is nil' do
        let(:raw_value) { nil }
        let(:deserialize_result) { nil }
        let(:params) { { visibility_level: nil } }

        it 'get and deserialize that value and set to attributes with symbolize name' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: nil })
        end
      end

      context 'and target value is false' do
        let(:raw_value) { false }
        let(:deserialize_result) { false }
        let(:params) { { visibility_level: false } }

        it 'get and deserialize that value and set to attributes with symbolize name' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: false })
        end
      end

      context 'and target value is not nil or false ' do
        let(:raw_value) { 'org' }
        let(:deserialize_result) { 'org' }
        let(:params) { { visibility_level: 'org' } }

        it 'get and deserialize that value and set to attributes with symbolize name' do
          expect(build_attributes).to match_hash_with_indifferent_access({ name: 'John', visibility_level: 'org' })
        end
      end
    end
  end

  # Writing for create: serialized under a flattened "prefs_<remote_key>" key.
  describe '#build_payload_for_create' do
    let(:name) { :visibility_level }
    let(:options) { { remote_key: :permissionLevel } }
    let(:serializer) { double('serializer') }
    let(:default) { nil }

    before do
      allow(serializer)
        .to receive(:serialize)
        .with('org')
        .and_return('org')
    end

    let(:build_attributes) { attribute.build_payload_for_create(attributes, payload) }
    let(:payload) { { 'name' => 'John' } }

    context 'when attribute is for action create' do
      context 'when attribute value is nil' do
        let(:attributes) { { visibility_level: nil } }

        it "won't put it in payload" do
          expect(build_attributes).to eq({ 'name' => 'John' })
        end
      end

      context 'when attributes does not contain the attribute key' do
        let(:attributes) { {} }

        it "won't put it in payload" do
          expect(build_attributes).to eq({ 'name' => 'John' })
        end
      end

      context 'when attribute value is normal' do
        let(:attributes) { { visibility_level: 'org' } }

        it 'get and serialize that value and set to payload with stringify name' do
          expect(build_attributes).to eq({ 'name' => 'John', 'prefs_permissionLevel' => 'org' })
        end
      end
    end

    context 'when attribute is not for action create' do
      let(:options) { { remote_key: :permissionLevel, update_only: true } }
      let(:attributes) { { visibility_level: 'org' } }

      it "won't put it in payload" do
        expect(build_attributes).to eq({ 'name' => 'John' })
      end
    end
  end

  # Writing for update: serialized under a slash-separated "prefs/<remote_key>" key.
  describe '#build_payload_for_update' do
    let(:name) { :visibility_level }
    let(:serializer) { double('serializer') }
    let(:default) { nil }

    before do
      allow(serializer)
        .to receive(:serialize)
        .with('org')
        .and_return('org')
    end

    let(:build_attributes) { attribute.build_payload_for_update(attributes, payload) }
    let(:payload) { { 'name' => 'John' } }

    context 'when attribute is for action create' do
      let(:options) { { remote_key: :permissionLevel } }

      context 'when attributes does not contain the attribute key' do
        let(:attributes) { {} }

        it "won't put it in payload" do
          expect(build_attributes).to eq({ 'name' => 'John' })
        end
      end

      context 'when attribute value is normal' do
        let(:attributes) { { visibility_level: 'org' } }

        it 'get and serialize that value and set to payload with stringify name' do
          expect(build_attributes).to eq({ 'name' => 'John', 'prefs/permissionLevel' => 'org' })
        end
      end
    end

    context 'when attribute is not for action update' do
      let(:options) { { remote_key: :permissionLevel, create_only: true } }
      let(:attributes) { { visibility_level: 'org' } }

      it "won't put it in payload" do
        expect(build_attributes).to eq({ 'name' => 'John' })
      end
    end
  end
end
| 32.889952 | 116 | 0.615944 |
eda0bc463e9a2cd9e16a4098e0de5cddc4f1a4df | 1,931 | # frozen_string_literal: true
module Liquid
  # {% tablerow item in collection cols:3 limit:10 offset:2 %} ... {% endtablerow %}
  #
  # Renders the block once per collection element inside HTML table cells,
  # wrapping to a new <tr> every `cols` cells and exposing a `tablerowloop`
  # drop to the block body.
  class TableRow < Block
    Syntax = /(\w+)\s+in\s+(#{QuotedFragment}+)/o

    attr_reader :variable_name, :collection_name, :attributes

    # Parses "var in collection" plus optional key:value attributes
    # (cols, limit, offset); raises SyntaxError on any other markup.
    def initialize(tag_name, markup, options)
      super
      if markup =~ Syntax
        @variable_name = Regexp.last_match(1)
        @collection_name = parse_expression(Regexp.last_match(2))
        @attributes = {}
        markup.scan(TagAttributes) do |key, value|
          @attributes[key] = parse_expression(value)
        end
      else
        raise SyntaxError, options[:locale].t("errors.syntax.table_row")
      end
    end

    def render_to_output_buffer(context, output)
      # Nothing to render when the collection expression evaluates falsy.
      (collection = context.evaluate(@collection_name)) || (return '')

      # Apply offset/limit before rendering.
      from = @attributes.key?('offset') ? context.evaluate(@attributes['offset']).to_i : 0
      to = @attributes.key?('limit') ? from + context.evaluate(@attributes['limit']).to_i : nil

      collection = Utils.slice_collection(collection, from, to)
      length = collection.length

      cols = context.evaluate(@attributes['cols']).to_i

      output << "<tr class=\"row1\">\n"

      context.stack do
        tablerowloop = Liquid::TablerowloopDrop.new(length, cols)
        context['tablerowloop'] = tablerowloop

        collection.each do |item|
          context[@variable_name] = item

          output << "<td class=\"col#{tablerowloop.col}\">"
          super
          output << '</td>'

          # Close the current row and open the next at column boundaries,
          # unless this was the final element.
          if tablerowloop.col_last && !tablerowloop.last
            output << "</tr>\n<tr class=\"row#{tablerowloop.row + 1}\">"
          end

          tablerowloop.send(:increment!)
        end
      end
      output << "</tr>\n"
      output
    end

    class ParseTreeVisitor < Liquid::ParseTreeVisitor
      def children
        super + @node.attributes.values + [@node.collection_name]
      end
    end
  end

  Template.register_tag('tablerow', TableRow)
end
| 28.820896 | 98 | 0.612118 |
ac03cd3785ac12fdd64b7ac97ff0121659d12a06 | 1,361 | require 'test_helper'
# Functional coverage for the DiscountVotes CRUD controller; every request
# authenticates via the `token` query parameter.
class DiscountVotesControllerTest < ActionController::TestCase
  setup do
    @discount_vote = discount_votes(:one)
  end

  test "should get index" do
    get :index, token: 'token_1'
    assert_response :success
    assert_not_nil assigns(:discount_votes)
  end

  test "should get new" do
    get :new, token: 'token_1'
    assert_response :success
  end

  test "should create discount_vote" do
    assert_difference('DiscountVote.count') do
      post :create, discount_vote: { discount_id: 3, user_id: 1 }, token: 'token_1'
    end

    assert_redirected_to discount_vote_path(assigns(:discount_vote))
  end

  test "should show discount_vote" do
    get :show, id: @discount_vote, token: 'token_1'
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @discount_vote, token: 'token_1'
    assert_response :success
  end

  test "should update discount_vote" do
    patch :update, id: @discount_vote, discount_vote: { discount_id: @discount_vote.discount_id, user_id: @discount_vote.user_id }, token: 'token_1'
    assert_redirected_to discount_vote_path(assigns(:discount_vote))
  end

  test "should destroy discount_vote" do
    assert_difference('DiscountVote.count', -1) do
      delete :destroy, id: @discount_vote, token: 'token_1'
    end

    assert_redirected_to discount_votes_path
  end
end
| 27.22 | 148 | 0.726672 |
3327a8a6ef76cfe3f3e30affe8fac1b665b615e3 | 117,139 | require 'test_helper'
module Credits
class CanonicalNamesTest < ActiveSupport::TestCase
include AssertContributorNames
test 'タコ焼き仮面' do
assert_contributor_names '84403ae', 'Takoyaki Kamen'
end
test '松田 明' do
assert_contributor_names 'bb33432', 'Akira Matsuda'
end
test '簡煒航' do
assert_contributor_names 'c32978d', 'Tony Jian'
end
test '簡煒航 (Jian Weihang)' do
assert_contributor_names '4459576', '簡煒航 (Jian Weihang)'
end
test '1334' do
assert_contributor_names '47d95c8', 'Iñigo Solano Pàez'
end
test '90yukke' do
assert_contributor_names 'b289519', 'Alexander Karmes'
end
test '_tiii' do
assert_contributor_names 'a4b02be', 'Titus Ramczykowski'
end
test 'Aaron' do
assert_contributor_names '1477a61', 'Aaron Eisenberger'
end
test 'aarongray' do
assert_contributor_names 'b30805b', 'Aaron Gray'
end
test 'abhay' do
assert_contributor_names '3353b85', 'Abhay Kumar'
end
test 'abonec' do
assert_contributor_names '20519ef', 'Alexander Baronec'
end
test 'acapilleri' do
assert_contributor_names 'c08c468', 'Angelo Capilleri'
end
test 'Accessd' do
assert_contributor_names 'db25ca7', 'Andrey Morskov'
end
test 'acechase' do
assert_contributor_names '331d9c0', 'Andrew Chase'
end
test 'Adam' do
assert_contributor_names '5dc1f09', 'Adam Magan'
end
test "adam\100the-kramers.net" do
assert_contributor_names '01cfd2b', 'Adam Kramer'
end
test 'Adam89' do
assert_contributor_names '52720b4', 'Adam Magan'
end
test 'adamj' do
assert_contributor_names '4d96ece', 'Adam Johnson'
end
test "adamm\100galacticasoftware.com" do
assert_contributor_names '10a86b2', 'Adam Majer'
end
test 'adamwiggins' do
assert_contributor_names 'ee6b607', 'Adam Wiggins'
end
test "adelle\100bullet.net.au" do
assert_contributor_names '101968f', 'Adelle Hartley'
end
test 'Aditya' do
assert_contributor_names 'd67adf1', 'Aditya Chadha'
end
test 'aditya-kapoor' do
assert_contributor_names '426f42c', 'Aditya Kapoor'
end
test 'adman65' do
assert_contributor_names '7dfa8c0', 'Adam Hawkins'
end
test 'adymo' do
assert_contributor_names '9d03813', 'Alexander Dymo'
end
test 'aeden' do
assert_contributor_names 'c9770d8', 'Anthony Eden'
end
test 'Agis-' do
assert_contributor_names '666a248', 'Agis Anastasopoulos'
end
test 'agius' do
assert_contributor_names '1ff67d8', 'Andrew Evans'
end
test 'aguynamedryan' do
assert_contributor_names '4eaa8ba', 'Ryan Duryea'
end
test 'aiwilliams' do
assert_contributor_names 'dd605e9', 'Adam Williams'
end
test 'akaspick' do
assert_contributor_names '0d82b14', 'Andrew Kaspick'
end
test "akaspick\100gmail.com" do
assert_contributor_names 'e30699f', 'Andrew Kaspick'
end
test 'Akshay' do
assert_contributor_names '4d62704', 'Akshay Mohite'
end
test 'Akshat Sharma' do
assert_contributor_names '2438a1c', 'Pramod Sharma'
end
test 'alancfrancis' do
assert_contributor_names '0b45b89', 'Alan Francis'
end
test "alancfrancis\100gmail.com" do
assert_contributor_names 'dfd0bdf', 'Alan Francis'
end
test 'Alberto Almagro Sotelo' do
assert_contributor_names '5c62bd5', 'Gannon McGibbon', 'Alberto Almagro'
end
test 'Aleksey Kondratenko' do
assert_contributor_names 'a9113b8', 'Aliaksey Kandratsenka'
end
test "alex.borovsky\100gmail.com" do
assert_contributor_names 'f1a01c8', 'Alexander Borovsky'
end
test "alex\100byzantine.no" do
assert_contributor_names 'ad63c96', 'Alexander Staubo'
end
test "alex\100msgpad.com" do
assert_contributor_names '4277568', 'Alex Pooley'
end
test "alex\100purefiction.net" do
assert_contributor_names 'd016d9a', 'Alexander Staubo'
end
test 'Alexander' do
assert_contributor_names 'bdcc271', 'Alexander Baronec'
assert_contributor_names '9e39dc4', 'Alexander Baronec'
assert_contributor_names '7c643d9', 'Alexander Quine'
assert_contributor_names 'ca6a12d', 'Dembskiy Alexander'
end
test 'alexbel' do
assert_contributor_names '6aaf4bf', 'Alexander Belaev'
end
test 'alexch' do
assert_contributor_names '2559feb', 'Alex Chaffee'
end
test 'Alexey' do
assert_contributor_names 'd336ca5', 'Alexey Zatsepin'
end
test 'alexey' do
assert_contributor_names '52fe604', 'Alexey Zatsepin'
end
test 'Alexey Markov' do
assert_contributor_names '0c85705', 'Markov Alexey'
end
test "alexkwolfe\100gmail.com" do
assert_contributor_names 'b5c2366', 'Alex Wolfe'
end
test 'alfa-jpn' do
assert_contributor_names '9bd4386', 'Atsushi Nakamura'
end
test "alles\100atomicobject.com" do
assert_contributor_names '68dfe3e', 'Micah Alles'
end
test 'alloy' do
assert_contributor_names '4d1c87a', 'Eloy Duran'
end
test 'ambethia' do
assert_contributor_names '18c663e', 'Jason L Perry'
end
test 'amishyn' do
assert_contributor_names 'e32149a', 'Alex Mishyn'
end
test 'amitkumarsuroliya' do
assert_contributor_names '44e94a3', 'Amit Kumar Suroliya'
end
test 'anamba' do
assert_contributor_names '6ccbef5', 'Aaron Namba'
end
test 'Anand' do
assert_contributor_names '25f60cc', 'Anand Muthukrishnan'
end
test 'Anatoly Makarevich' do
assert_contributor_names 'fce0d08', 'Anatoli Makarevich'
end
test 'andrea longhi' do
assert_contributor_names 'd7f0e43', 'Andrea Longhi'
end
test 'Andrew' do
assert_contributor_names '3d6ed50', 'Andrew Chase'
end
test "andrew.john.peters\100gmail.com" do
assert_contributor_names '03097d3', 'Andrew Peters'
end
test "andrew\100redlinesoftware.com" do
assert_contributor_names 'd3cf2a6', 'Andrew Kaspick'
end
test "andrey.nering\100gmail.com" do
assert_contributor_names '6d59473', 'Andrey Nering'
end
test "andy\100tinnedfruit.org" do
assert_contributor_names 'ab7c7a8', 'Andrew A. Smith'
end
test "andylien\100gmail.com" do
assert_contributor_names '35240ba', 'Andy Lien'
end
test 'Angelo capilleri' do
assert_contributor_names 'b97e0a1', 'Angelo Capilleri'
end
test 'angelo giovanni capilleri' do
assert_contributor_names '64af96b', 'Angelo Capilleri'
end
test 'anilmaurya' do
assert_contributor_names '41722dd', 'Anil Kumar Maurya'
end
test 'Ankit Gupta-FIR' do
assert_contributor_names '6a71d09', 'Ankit Gupta'
end
test 'ankit1910' do
assert_contributor_names '3900671', 'Ankit Bansal'
end
test 'ankit8898' do
assert_contributor_names '46a0eac', 'Ankit Gupta'
end
test 'Ankit gupta' do
assert_contributor_names '72c5b5', 'Ankit Gupta'
end
test 'anna' do
assert_contributor_names '9326222', 'maiha'
end
test "anna\100wota.jp" do
assert_contributor_names 'e72ff35', 'maiha'
end
test 'AnnaErshova' do
assert_contributor_names '0166adc', 'Anna Ershova'
end
test 'anshkakashi' do
assert_contributor_names 'ab09984', 'Jason Ketterman'
end
test 'Anthony' do
assert_contributor_names '78f5874', 'Anthony Alberto'
end
test 'anthonynavarre' do
assert_contributor_names 'bdc5141', 'Anthony Navarre'
end
# Each case resolves one commit-log alias/email to its canonical contributor
# name; single-line brace-block form, one resolution per line.
test('Anton') { assert_contributor_names 'f0ae503', 'Tõnis Simo' }
test('Antonio Tapiador') { assert_contributor_names '5dd80db', 'Antonio Tapiador del Dujo' }
test('antramm') { assert_contributor_names '083b0b7', 'Ant Ramm' }
test('anuj dutta') { assert_contributor_names 'd572bf9', 'Anuj Dutta' }
test('aquajach') { assert_contributor_names 'c0eb542', 'aquajach' }
test('ara.t.howard') { assert_contributor_names '99c08c7', 'Ara T Howard' }
test("arc\100uchicago.edu") { assert_contributor_names '5177333', 'Shu-yu Guo' }
test('ariabov') { assert_contributor_names '34a3d42', 'Alex Riabov' }
test('ariejan') { assert_contributor_names '388e5d3', 'Ariejan de Vroom' }
test('arktisklada') { assert_contributor_names 'd8bd9cf', 'Clayton Liggitt' }
test('Arsen7') { assert_contributor_names 'f756bfb', 'Mariusz Pękala' }
test('artemave') { assert_contributor_names '6c5a3bb', 'Artem Avetisyan' }
test('artemk') { assert_contributor_names 'b386951', 'Artem Kramarenko' }
test('Arthur Nogueira Neves') { assert_contributor_names '5772ffe', 'Arthur Neves' }
test('arthurgeek') { assert_contributor_names '6ddde02', 'Arthur Zapparoli' }
test('arton') { assert_contributor_names 'c11e78c', 'Akio Tajima' }
test('arvida') { assert_contributor_names '2a7230a', 'Arvid Andersson' }
test('arvind') { assert_contributor_names 'dad0c26', 'Arvind Mehra' }
test("ask\100develooper.com") { assert_contributor_names '17ef706', 'Ask Bjørn Hansen' }
test('asmega') { assert_contributor_names '61fa600', 'Phil Lee' }
test('Assaf') { assert_contributor_names '87ef365', 'Assaf Arkin' }
test("assaf.arkin\100gmail.com") { assert_contributor_names '3142502', 'Assaf Arkin' }
test('athonlab') { assert_contributor_names 'ce2eadb', 'AthonLab' }
test("augustz\100augustz.com") { assert_contributor_names '3d99d33', 'August Zajonc' }
test('AvnerCohen') { assert_contributor_names 'd20a529', 'Avner Cohen' }
test('awilliams') { assert_contributor_names 'b045b5c', 'Adam Williams' }
test('Ayose') { assert_contributor_names '6ad8f6e', 'Ayose Cazorla' }
test('Azzurrio') { assert_contributor_names '80e8259', 'Karim El-Husseiny' }
test("babie7a0\100ybb.ne.jp") { assert_contributor_names '9ded584', 'Michiaki Baba' }
test('backspace') { assert_contributor_names '3b795c1', 'Ken Gerrard' }
test('bagwanpankaj') { assert_contributor_names 'c424fb2', 'Bagwan Pankaj' }
test('Bart') { assert_contributor_names 'c2f59f3', 'Bart de Water' }
test('Bas Van Klinkenberg') { assert_contributor_names 'b99914c', 'Bas van Klinkenberg' }
test('Ben A. Morgan') { assert_contributor_names 'bee4c8f', 'Ben A. Morgan' }
test('bastilian') { assert_contributor_names '071f48b', 'Sebastian Graessl' }
test('beerlington') { assert_contributor_names '3da275c', 'Pete Brown' }
test("bellis\100deepthought.org") { assert_contributor_names 'dc87eba', 'Brad Ellis' }
test("ben\100bensinclair.com") { assert_contributor_names '1d9905a', 'Ben Sinclair' }
test("ben\100groovie.org") { assert_contributor_names 'b9c79f1', 'Ben Bangert' }
test('benedikt') { assert_contributor_names 'b17fd25', 'Benedikt Deicke' }
test('Benjamin Klotz') { assert_contributor_names 'd5847f4', 'Benny Klotz' }
test("benji\100silverinsanity.com") { assert_contributor_names 'd08f838', 'Brian Gernhardt' }
test('benmmurphy') { assert_contributor_names 'c8168a7', 'Ben Murphy' }
test('benolee') { assert_contributor_names '008023c', 'Ben Holley' }
test('bermi') { assert_contributor_names '6ca789b', 'Bermi Ferrer' }
test('BertG') { assert_contributor_names '06afb8c', 'Bert Goethals' }
test('bgipsy') { assert_contributor_names '88f2284', 'Serge Balyuk' }
test('bgreenlee') { assert_contributor_names '083b0b7', 'Brad Greenlee' }
test('bitsweat') { assert_contributor_names '253a2bb', 'Jeremy Daer' }
test('Blaine') { assert_contributor_names 'f5977b2', 'Blaine Cook' }
test('blaine') { assert_contributor_names '7d517a1', 'Blaine Cook' }
test("blaine\100odeo.com") { assert_contributor_names 'bf3f920', 'Blaine Cook' }
test("blair\100orcaware.com") { assert_contributor_names '46796e7', 'Blair Zajac' }
test("blake\100near-time.com") { assert_contributor_names '604eb8a', 'Blake Watters' }
test('BlueHotDog') { assert_contributor_names '8642c2a', 'Danni Friedland' }
test('BMorearty') { assert_contributor_names '436da68', 'Brian Morearty' }
test("bob\100sporkmonger.com") { assert_contributor_names 'ce458a7', 'Bob Aman' }
test('bobbus') { assert_contributor_names '7ded3b8', 'Adrien Coquio' }
test('BobSilva') { assert_contributor_names '0c94868', 'Bob Silva' }
test('Bodacious') { assert_contributor_names '39b9c94', 'Gavin Morrice' }
test('bogdan') { assert_contributor_names 'b644964', 'Bogdan Gusiev' }
test('Bogdan') { assert_contributor_names '2686130', 'bogdanvlviv' }
test('boone') { assert_contributor_names '3486d54', 'Mike Boone' }
test('Bounga') { assert_contributor_names '39de84d', 'Nicolas Cavigneaux' }
test("brad\100madriska.com") { assert_contributor_names '785e1fa5', 'Brad Ediger' }
test('bradediger') { assert_contributor_names '6c77370', 'Brad Ediger' }
test('bradrobertson') { assert_contributor_names '0252376', 'Brad Robertson' }
test('brainopia') { assert_contributor_names 'da82b0a', 'Ravil Bayramgalin' }
test('brandon') { assert_contributor_names '35ffc1a', 'Brandon Keepers' }
test("brandon\100opensoul.org") { assert_contributor_names 'fe4d5ea', 'Brandon Keepers' }
test("brandt\100kurowski.net") { assert_contributor_names '6d7175d', 'Brandt Kurowski' }
test('brendan') { assert_contributor_names '88f2284', 'Brendan Baldwin' }
test("brianegge\100yahoo.com") { assert_contributor_names 'a092749', 'Brian Egge' }
test('brianp') { assert_contributor_names '50a7391', 'Brian Pearce' }
test('bronson') { assert_contributor_names 'cb1f569', 'Scott Bronson' }
test('brupm') { assert_contributor_names '4e7d332', 'Bruno Miranda' }
test('brynary') { assert_contributor_names '5dc831f', 'Bryan Helmkamp' }
test('bscofield') { assert_contributor_names '81991d6', 'Ben Scofield' }
test('buddhamagnet') { assert_contributor_names 'a85729c', 'Dave Goodchild' }
test('c.r.mcgrath') { assert_contributor_names '838ae35', 'Chris McGrath' }
test("c.r.mcgrath\100gmail.com") { assert_contributor_names '6a51940', 'Chris McGrath' }
test('caio') { assert_contributor_names 'c089974', 'Caio Chassot' }
test('calavera') { assert_contributor_names '4196616', 'David Calavera' }
test("caleb\100aei-tech.com") { assert_contributor_names 'd5b67ed', 'Caleb Tennis' }
test('canadaduane') { assert_contributor_names 'cab2494', 'Duane Johnson' }
test('careo') { assert_contributor_names '50ee332', 'Dane Jensen' }
test('Carlhuda') { assert_contributor_names 'c102db9', 'Yehuda Katz', 'Carl Lerche' }
test('CassioMarques') { assert_contributor_names '053afbe', 'Cássio Marques' }
test('Catfish') { assert_contributor_names '9679cb4', 'Jonathan del Strother' }
test('catfish') { assert_contributor_names 'eff27ab', 'Jonathan del Strother' }
# Each case resolves one commit-log alias/email to its canonical contributor
# name; single-line brace-block form, one resolution per line.
test('cavalle') { assert_contributor_names 'b96db52', 'Luismi Cavallé' }
test('cavelle') { assert_contributor_names '9e45586', 'Luismi Cavallé' }
test('cch1') { assert_contributor_names '569a78c', 'Chris Hapgood' }
test('cczona') { assert_contributor_names '6ee8e92', 'Carina C. Zona' }
test("cdcarter\100gmail.com") { assert_contributor_names '2139921', 'Chris Carter' }
test('Cédric FABIANSKI') { assert_contributor_names '9f54921', 'Cédric Fabianski' }
test('ceefour') { assert_contributor_names '7e33de4', 'Hendy Irawan' }
test('ch33hau') { assert_contributor_names 'ac85125', 'Lim Chee Hau' }
test('chaitanyav') { assert_contributor_names '449cf50', 'Chaitanya Vellanki' }
test("charles.gerungan\100gmail.com") { assert_contributor_names '3c0e7b1', 'Charles M. Gerungan' }
test('chas') { assert_contributor_names '6f63287', 'Chas Grundy' }
test('chocoby') { assert_contributor_names '04907b6', 'Kenta Okamoto' }
test('choonkeat') { assert_contributor_names '099c206', 'Choon Keat' }
test("choonkeat\100gmail.com") { assert_contributor_names '89840c4', 'Choon Keat' }
test("chris\100chrisbrinker.com") { assert_contributor_names 'a685579', 'Chris Brinker' }
test('chris finne') { assert_contributor_names 'b80fa81', 'Chris Finne' }
test("chris\100octopod.info") { assert_contributor_names '3c0e7b1', 'Chris McGrath' }
test("chris\100ozmm.org") { assert_contributor_names '11c715a', 'Chris Wanstrath' }
test("chris\100seagul.co.uk") { assert_contributor_names '760bcc6', 'Chris Roos' }
test('chrisfinne') { assert_contributor_names '76d2c45', 'Chris Finne' }
test('chrisk') { assert_contributor_names '19a1586', 'Chris Kampmeier' }
test('chriskohlbrenner') { assert_contributor_names '2ec51d0', 'Chris Kohlbrenner' }
test('chrismear') { assert_contributor_names 'afd288c', 'Chris Mear' }
test('chrisroos') { assert_contributor_names '50253ed', 'Chris Roos' }
test("chriztian.steinmeier\100gmail.com") { assert_contributor_names 'd40af24', 'Chriztian Steinmeier' }
test('Chu Yeow') { assert_contributor_names 'dc3e55d', 'Cheah Chu Yeow' }
test('chuyeow') { assert_contributor_names '56e6462', 'Cheah Chu Yeow' }
test('ciastek') { assert_contributor_names '2bcfdec', 'Sebastian Spieszko' }
test('cjheath') { assert_contributor_names '12d8d48', 'Clifford Heath' }
test('Claudio B') { assert_contributor_names '0b0042c', 'Claudio Baccigalupo' }
test('Claudio B.') { assert_contributor_names '2651810', 'Claudio Baccigalupo' }
test('claudiob') { assert_contributor_names '0e56c1d', 'Claudio Baccigalupo' }
test('claudiofullscreen') { assert_contributor_names '0b725aa', 'Claudio Baccigalupo' }
test('cluon') { assert_contributor_names 'deda0ee', 'Phil Orwig' }
test('cnaize') { assert_contributor_names 'bf15169', 'Nikita Loskutov' }
test('codafoo') { assert_contributor_names 'be827f9', 'Cesar Ho' }
test('codahale') { assert_contributor_names '4aabe46', 'Coda Hale' }
test('codeape') { assert_contributor_names '9a42096', 'Dan Cheail' }
test('codebrulee') { assert_contributor_names 'ebe8dd6', 'Kevin Smith' }
test('codesnik') { assert_contributor_names '96d4da1', 'Alexey Trofimenko' }
test("codyfauser\100gmail.com") { assert_contributor_names 'f49ba11', 'Cody Fauser' }
test('coffee2code') { assert_contributor_names 'ab9f324', 'Scott Reilly' }
test("cohen.jeff\100gmail.com") { assert_contributor_names 'e57bd72', 'Jeff Cohen' }
test("colman\100rominato.com") { assert_contributor_names 'b762e01', 'Colman Nady' }
test("contact\100lukeredpath.co.uk") { assert_contributor_names 'e9d4b36', 'Luke Redpath' }
test("contact\100maik-schmidt.de") { assert_contributor_names '2d24bed', 'Maik Schmidt' }
test('coreyhaines') { assert_contributor_names 'df755d4', 'Corey Haines' }
test('court3nay') { assert_contributor_names '891a962', 'Courtenay Gasking' }
test('Court3nay') { assert_contributor_names 'ee87dbe', 'Courtenay Gasking' }
test("court3nay\100gmail.com") { assert_contributor_names 'df97ed5', 'Courtenay Gasking' }
test('courtenay') { assert_contributor_names '14e7c7c', 'Courtenay Gasking' }
test('cpytel') { assert_contributor_names 'f254616', 'Chad Pytel' }
test('Cristi BALAN') { assert_contributor_names '6d566e8', 'Cristi Balan' }
test('ctm') { assert_contributor_names 'c26cca3', 'Clifford T. Matthews' }
test('cyu') { assert_contributor_names '2b68762', 'Calvin Yu' }
test('dacat') { assert_contributor_names 'f854ecd', 'Felix Dominguez' }
test('dancroak') { assert_contributor_names '569a78c', 'Dan Croak' }
test('danger') { assert_contributor_names '1dd0034', 'Jack Danger Canty' }
test('Danger') { assert_contributor_names '2c6e616', 'Jack Danger Canty' }
test('Daniel Burnette') { assert_contributor_names 'b93ae0c', 'Daniel Burnette' }
test("daniel\100nightrunner.com") { assert_contributor_names 'ba309a3', 'Daniel Hobe' }
test("daniel\100nouvelles-solutions.com") { assert_contributor_names '1671609', 'Daniel Wanja' }
test('danielc192') { assert_contributor_names '0fc481d', 'Daniel Cohen' }
test('danielmorrison') { assert_contributor_names 'cb5b8a7', 'Daniel Morrison' }
test("daniels\100pronto.com.au") { assert_contributor_names '6a1a1e5', 'Daniel Sheppard' }
test("daniluk\100yahoo.com") { assert_contributor_names 'c99df46', 'Grzegorz Daniluk' }
test("dansketcher\100gmail.com") { assert_contributor_names 'fb619127', 'Dan Sketcher' }
test("darashi\100gmail.com") { assert_contributor_names '17d2732', 'Yoji Shidara' }
test('dasil003') { assert_contributor_names '2a07886', 'Gabe da Silveira' }
test("dave\100cherryville.org") { assert_contributor_names 'b66b1ff', 'Dave Lee' }
test("dave-ml\100dribin.org") { assert_contributor_names '2fe8610', 'Dave Dribin' }
test("dave\100pragprog.com") { assert_contributor_names 'c80c636', 'Dave Thomas' }
test('davetoxa') { assert_contributor_names 'cc585c8', 'Anton Cherepanov' }
test('david.calavera') { assert_contributor_names '7e1c04d', 'David Calavera' }
test("david.felstead\100gmail.com") { assert_contributor_names '8dda7c5', 'David Felstead' }
test('David FRANCOIS') { assert_contributor_names '18aa1ae', 'David François' }
test('DAVID MOORE') { assert_contributor_names '4c945cc', 'Dave Moore' }
test("david\100ruppconsulting.com") { assert_contributor_names 'c4a3634', 'David Rupp' }
test('davidjrice') { assert_contributor_names '82a85e8', 'David Rice' }
test('davidw') { assert_contributor_names '1f80296', 'David N. Welton' }
test('DawidJanczak') { assert_contributor_names '89a8143', 'Dawid Janczak' }
test('Dawnthorn') { assert_contributor_names 'f999ab0', 'Peter Haight' }
test('dblack') { assert_contributor_names '11a5492', 'David A. Black' }
test("dblack\100wobblini.net") { assert_contributor_names '91247b6', 'David A. Black' }
test('dbussink') { assert_contributor_names '78727dd', 'Dirkjan Bussink' }
test('dchelimsky') { assert_contributor_names '42ebf55', 'David Chelimsky' }
test('dcmanges') { assert_contributor_names '16fde4c', 'Dan Manges' }
test('dcurtis') { assert_contributor_names '248fa70', 'Dustin Curtis' }
test('ddemaree') { assert_contributor_names 'f90160c', 'David Demaree' }
test('ddollar') { assert_contributor_names '8ff9e93', 'David Dollar' }
# Each case resolves one commit-log alias/email to its canonical contributor
# name; single-line brace-block form, one resolution per line.
test('Dee.Zsombor') { assert_contributor_names '2bf2230', 'Dee Zsombor' }
test("Dee.Zsombor\100gmail.com") { assert_contributor_names '26022d8', 'Dee Zsombor' }
test('deepblue') { assert_contributor_names '2a34e08', 'Bryan Kang' }
test('defeated') { assert_contributor_names 'dcaa074', 'Eddie Cianci' }
test('defunkt') { assert_contributor_names '49cb412', 'Chris Wanstrath' }
test('DefV') { assert_contributor_names 'c71de03', 'Jan De Poorter' }
test("deirdre\100deirdre.net") { assert_contributor_names '9105cd1', 'Deirdre Saoirse' }
test('DeLynn') { assert_contributor_names 'aa09c77', 'DeLynn Berry' }
test('DeLynn B') { assert_contributor_names '6cd3bda', 'DeLynn Berry' }
test('DeLynn Barry') { assert_contributor_names 'f2e6945', 'DeLynn Berry' }
test('delynnb') { assert_contributor_names '665ab93', 'DeLynn Berry' }
test('DelynnB') { assert_contributor_names 'ba96827', 'DeLynn Berry' }
test('DeLynnB') { assert_contributor_names 'ed46cc3', 'DeLynn Berry' }
test('demetrius') { assert_contributor_names 'ec6f0a1', 'Demetrius Nunes' }
test('Demetrius') { assert_contributor_names '93ec130', 'Demetrius Nunes' }
test("derrickspell\100cdmplus.com") { assert_contributor_names '416385a', 'Derrick Spell' }
test("dev\100metacasa.net") { assert_contributor_names '9a5b91a', 'John Sheets' }
test('Developer') { assert_contributor_names '179b451', 'John Pignata' }
test('Dmitriy Budnik') { assert_contributor_names 'a209652', 'Dmitriy Budnik' }
test('devrieda') { assert_contributor_names '45d679b', 'Derek DeVries' }
test("devslashnull\100gmail.com") { assert_contributor_names '4bd80f1', 'Brian Donovan' }
test("dfelstead\100site5.com") { assert_contributor_names '5e5b87b', 'David Felstead' }
test('dfens') { assert_contributor_names 'ab9140f', 'Paweł Mikołajewski' }
test('dharmatech') { assert_contributor_names 'f74a4d8', 'Eduardo Cavazos' }
test('DHH') { assert_contributor_names 'bd261ff', 'David Heinemeier Hansson' }
test('diatmpravin') { assert_contributor_names 'a302597', 'Pravin Mishra' }
test('dickeyxxx') { assert_contributor_names '21586d3', 'Jeff Dickey' }
test("dj\100omelia.org") { assert_contributor_names 'f6ec9e3', 'Duff OMelia' }
test('djanowski') { assert_contributor_names '0e6c8e5', 'Damian Janowski' }
test('dkaplan88') { assert_contributor_names 'a0bdf2f', 'Dan Kaplan' }
test('dkubb') { assert_contributor_names '11a92b3', 'Dan Kubb' }
test('dm1try') { assert_contributor_names 'c12024b', 'Dmitry Dedov' }
test('dmathieu') { assert_contributor_names '18bce29', 'Damien Mathieu' }
test('Dmitriy Vorotilin') { assert_contributor_names '705a1d5', 'Dmitry Vorotilin' }
test('Vasin Dmitriy') { assert_contributor_names 'dc8ddea', 'Dmytro Vasin' }
test('doabit') { assert_contributor_names '8094156', 'Sean Dent' }
test('docunext') { assert_contributor_names 'c070cc4', 'Albert Lash' }
test("dom\100sisna.com") { assert_contributor_names 'c81af99', 'Dominic Sisneros' }
test("don.park\100gmail.com") { assert_contributor_names '2ed6d36', 'Don Park' }
test("donald.piret\100synergetek.be") { assert_contributor_names 'd94af9a', 'Donald Piret' }
test("doppler\100gmail.com") { assert_contributor_names 'f4f7e75', 'David Rose' }
test("dpiddy\100gmail.com") { assert_contributor_names 'd9c0a37', 'Dan Peterson' }
test('dpmehta02') { assert_contributor_names 'b9ead0f', 'Dev Mehta' }
test('Dr Nic') { assert_contributor_names '868e6b0', 'Dr Nic Williams' }
test("drbrain\100segment7.net") { assert_contributor_names 'ce0653b', 'Eric Hodel' }
test('Dreamer3') { assert_contributor_names 'c6a1830', 'Josh Goebel' }
test("dreamer3\100gmail.com") { assert_contributor_names 'dfa8aa0', 'Josh Goebel' }
test('dreamfall') { assert_contributor_names '7c3a5ec', 'Vasili Kachalko' }
test('DrMark') { assert_contributor_names '56fec2f', 'Mark Lane' }
test('drnic') { assert_contributor_names '346d36b', 'Dr Nic Williams' }
test('drodriguez') { assert_contributor_names '046a87a', 'Daniel Rodríguez Troitiño' }
test('dtaniwaki') { assert_contributor_names 'c91e1cc', 'Daisuke Taniwaki' }
test("duane.johnson\100gmail.com") { assert_contributor_names '0b92d38', 'Duane Johnson' }
test("duncan\100whomwah.com") { assert_contributor_names 'fd8ee0a', 'Duncan Robertson' }
test('duncanbeevers') { assert_contributor_names '9f1fdcc', 'Duncan Beevers' }
test("dweitzman\100gmail.com") { assert_contributor_names '9ca9f95', 'David Weitzman' }
test('Dylan Smith') { assert_contributor_names 'b4be619', 'Dylan Thacker-Smith' }
test("dymo\100mk.ukrtelecom.ua") { assert_contributor_names '6ce3bf7', 'Alexander Dymo' }
test('Eadz') { assert_contributor_names '6a17151', 'Eaden McKee' }
test('eadz') { assert_contributor_names '9b6207c', 'Eaden McKee' }
test("easleydp\100gmail.com") { assert_contributor_names 'eede40b', 'David Easley' }
test("eddiewould\100paradise.net.nz") { assert_contributor_names '1e7ce13', 'Eddie Stanley' }
test('edibiase') { assert_contributor_names 'cb978ba', 'Evan DiBiase' }
test('edogawaconan') { assert_contributor_names '650a05c', 'Edho Arief' }
test("edward\100debian.org") { assert_contributor_names '7d09b8d', 'Edward Betts' }
test('egilburg') { assert_contributor_names '7c34548', 'Eugene Gilburg' }
test("ehalvorsen+rails\100runbox.com") { assert_contributor_names '49efa02', 'Erlend Halvorsen' }
test('eigentone') { assert_contributor_names 'b62243a', 'Craig Davey' }
test("eigentone\100gmail.com") { assert_contributor_names '5130fc8', 'Craig Davey' }
test('eileencodes') { assert_contributor_names '7caceee', 'Eileen M. Uchitelle' }
test('Eileen Uchitelle') { assert_contributor_names 'aec635d', 'Eileen M. Uchitelle' }
test('ejy') { assert_contributor_names '740e531', 'Elliot Yates' }
test('Elektron1c97') { assert_contributor_names '6bd417d', 'Yves Siegrist' }
test("elan\100bluemandrill.com") { assert_contributor_names 'a46214c', 'Elan Feingold' }
test("elliot\100townx.org") { assert_contributor_names '68e35f1', 'Elliot Smith' }
test('Emili Parreno') { assert_contributor_names '3616141', 'Emili Parreño' }
test("emptysands\100gmail.com") { assert_contributor_names 'da874a4', 'Nicholas Lee' }
test('eparreno') { assert_contributor_names '344a695', 'Emili Parreño' }
test("eric.daspet\100survol.net") { assert_contributor_names '9153137', 'Eric Daspet' }
test("erik\100codefaktor.de") { assert_contributor_names 'eefc22f', 'Erik Abele' }
test("erik\100ruby-lang.nl") { assert_contributor_names 'e54b4d3', 'Erik Terpstra' }
test('ernesto.jimenez') { assert_contributor_names '5932357', 'Ernesto Jimenez' }
test('Ershad K') { assert_contributor_names '6389571', 'Ershad Kunnakkadan' }
test('esad') { assert_contributor_names '4917813', 'Esad Hajdarevic' }
test("esad\100esse.at") { assert_contributor_names '6ef5b74', 'Esad Hajdarevic' }
test("eule\100space.ch") { assert_contributor_names 'bf8b101', 'Kaspar Schiess' }
test('evan') { assert_contributor_names '00cecf8', 'Evan Weaver' }
test("evan\100protest.net") { assert_contributor_names '68608e3', 'Evan Henshaw-Plath' }
test('evansj') { assert_contributor_names '211ffd3', 'Jon Evans' }
test('eventualbuddha') { assert_contributor_names 'debaf20', 'Brian Donovan' }
# Each case resolves one commit-log alias/email to its canonical contributor
# name; single-line brace-block form, one resolution per line.
test("evgeny.zislis\100gmail.com") { assert_contributor_names '842ce34', 'Evgeny Zislis' }
test('f.svehla') { assert_contributor_names '3cf2c63', 'Ferdinand Svehla' }
test("f.svehla\100gmail.com") { assert_contributor_names 'a53372c', 'Ferdinand Svehla' }
test('Fabian Rodriguez') { assert_contributor_names 'f374720', 'Fabián Rodríguez' }
test("fabien\100odilat.com") { assert_contributor_names 'f9103e1', 'Fabien Mannessier' }
test('farleyknight') { assert_contributor_names 'f3b3826', 'Farley Knight' }
test('farzy') { assert_contributor_names 'f18c24c', 'Farzad Farid' }
test('fastred') { assert_contributor_names '4ea9a8d', 'Arkadiusz Holko' }
test("fbeausoleil\100ftml.net") { assert_contributor_names '6d336753', 'François Beausoleil' }
test('fcheung') { assert_contributor_names '083b0b7', 'Frederick Cheung' }
test('fearoffish') { assert_contributor_names 'df27fb8', 'Jamie van Dyke' }
test('fedot') { assert_contributor_names '70d7b37', 'Fedot Praslov' }
test("flash\100vanklinkenbergsoftware.nl") { assert_contributor_names 'de1f231', 'Bas van Klinkenberg' }
test('floehopper') { assert_contributor_names '4f39382', 'James Mead' }
test('flowdelic') { assert_contributor_names 'c59bce8', 'Mason Hale' }
test("foamdino\100gmail.com") { assert_contributor_names '7635fac', 'Kevin Jackson' }
test('foca') { assert_contributor_names '458ef3b', 'Nicolás Sanguinetti' }
test('Foliosus') { assert_contributor_names '0c4ba90', 'Brent Miller' }
test('FooBarWidget') { assert_contributor_names '9e13b96', 'Hongli Lai (Phusion)' }
test('François Beausolei') { assert_contributor_names 'd496db1', 'François Beausoleil' }
test('Francesco Rodriguez') { assert_contributor_names '3326b8b', 'Francesco Rodríguez' }
test('Francois Beausoleil') { assert_contributor_names '07f92e8', 'François Beausoleil' }
test("francois.beausoleil\100gmail.com") { assert_contributor_names '127ee8c', 'François Beausoleil' }
test('Franky W') { assert_contributor_names 'b1d2635', 'Franky Wahl' }
test('Fred Cheung') { assert_contributor_names '220a64e', 'Frederick Cheung' }
test("frederick.cheung\100gmail.com") { assert_contributor_names '078bd05', 'Frederick Cheung' }
test('frederico') { assert_contributor_names '11a75f9', 'Frederico Macedo' }
test('freels') { assert_contributor_names '36b8073', 'Matt Freels' }
test('fxn') { assert_contributor_names '75aef09', 'Xavier Noria' }
test("g.bucher\100teti.ch") { assert_contributor_names '3574ab3', 'Gaspard Bucher' }
test("gabriel.gironda\100gmail.com") { assert_contributor_names '258bacb', 'Gabriel Gironda' }
test("gabriel\100gironda.org") { assert_contributor_names 'ba2619f', 'Gabriel Gironda' }
test('ganesh') { assert_contributor_names 'b3ade0c', 'Ganesh Kumar' }
test('Gaurav Sharam') { assert_contributor_names '434c768', 'Gaurav Sharma' }
test('gbuesing') { assert_contributor_names '41adf87', 'Geoff Buesing' }
test('Girish S') { assert_contributor_names 'bbf6df7', 'Girish Sonawane' }
test('geemus') { assert_contributor_names 'ed2eb6f', 'Wesley Beary' }
test('genlinux') { assert_contributor_names '23a5be7', 'Thillai Arasu' }
test("gensym\100mac.com") { assert_contributor_names '5241b97', 'David Altenburg' }
test('Geoffrey Buesing') { assert_contributor_names 'cfd5688', 'Geoff Buesing' }
test('Geoffrey ROGUELON') { assert_contributor_names '96e483a', 'Geoffrey Roguelon' }
test('german') { assert_contributor_names 'adbae9a', 'Dmitrii Samoilov' }
test('gfriedrich') { assert_contributor_names 'd5e1220', 'Georg Friedrich' }
test('ggarside') { assert_contributor_names '5a3b4cf', 'Geoff Garside' }
test('glorieux') { assert_contributor_names 'c8b7ad1', 'Geoffroy Lorieux' }
test('glv') { assert_contributor_names 'dd665ff', 'Glenn Vanderburg' }
test('gmarik') { assert_contributor_names 'b0be721', 'Marjan Hratson' }
test('GMFlash') { assert_contributor_names '21c75e5', 'Michael Sheakoski' }
test('gmile') { assert_contributor_names '56c162e', 'Pirogov Evgenij' }
test('gnagno') { assert_contributor_names 'fdc0468', 'Ignazio Mostallino' }
test("grant\100antiflux.org") { assert_contributor_names '63a9516', 'Grant Hollingworth' }
test('greenbigfrog') { assert_contributor_names '9233f52', 'greenbigfrog' }
test('Greenie0506') { assert_contributor_names 'a1ae17e', 'Nicholas Greenfield' }
test("greg\100lapcominc.com") { assert_contributor_names '7f00f51', 'Greg Lappen' }
test('gregolsen') { assert_contributor_names '8d83e33', 'Innokenty Mikhailov' }
test('grg') { assert_contributor_names 'fb7807e', 'Glen Gibb' }
test('grosser') { assert_contributor_names '149e3cd', 'Michael Grosser' }
test('gsphanikumar') { assert_contributor_names 'b1a501e', 'G S Phani Kumar' }
test('gspiers') { assert_contributor_names '9300ebd', 'Greg Spiers' }
test('guillaume') { assert_contributor_names 'cfe42ba', 'Guillaume Carbonneau' }
test("guy.naor\100famundo.com") { assert_contributor_names 'd568fb6', 'Guy Naor' }
test('gwcoffey') { assert_contributor_names '9b6207c', 'Geoff Coffey' }
test('h-lame') { assert_contributor_names '295587f', 'Murray Steele' }
test('hakanensari') { assert_contributor_names '9be22bd', 'Hakan Ensari' }
test("hakuja\100hakuja.net") { assert_contributor_names '50103b8', 'Koichi Tsutsumi' }
test('halfbyte') { assert_contributor_names 'c9397e6', 'Jan Krutisch' }
test("hanson\100surgery.wisc.edu") { assert_contributor_names '431e21c', 'Todd Hanson' }
test('hardbap') { assert_contributor_names '4c21026', 'Mike Breen' }
test('haruki_zaemon') { assert_contributor_names 'fcfcc70', 'Simon Harris' }
test('hasmanyjosh') { assert_contributor_names '24abd43', 'Josh Susser' }
test("hcatlin\100gmail.com") { assert_contributor_names 'c5ec16e', 'Hampton Catlin' }
test("headius\100headius.com") { assert_contributor_names 'c53ae00', 'Charles Nutter' }
test('heavysixer') { assert_contributor_names '429ef9c', 'Mark Daggett' }
test("hendrik\100mans.de") { assert_contributor_names 'ac4b470', 'Hendrik Mans' }
test('henrik') { assert_contributor_names '9929cb4', 'Henrik Nyh' }
test('Henrik N') { assert_contributor_names 'c84c043', 'Henrik Nyh' }
test("henrik\100nyh.se") { assert_contributor_names '65a29b3', 'Henrik Nyh' }
test("hensleyl\100papermountain.org") { assert_contributor_names '85c603f', 'Leslie A. Hensley' }
test('hiroshi') { assert_contributor_names '94d6716', 'Hiroshi Saito' }
test('hoffm') { assert_contributor_names '1b12d08', 'Michael Hoffman' }
test('homa') { assert_contributor_names 'b839657', 'Egor Homakov' }
test("\100homakov") { assert_contributor_names '973490a', 'Egor Homakov' }
test('homakov') { assert_contributor_names 'f35c93f', 'Egor Homakov' }
test('Hongli Lai') { assert_contributor_names 'f1d6a0e', 'Hongli Lai (Phusion)' }
test('Hongli Lai (Phusion') { assert_contributor_names '011cbbc', 'Hongli Lai (Phusion)' }
test('HPNeo') { assert_contributor_names '96f7ec4', 'Gustavo Leon' }
test('iaddict') { assert_contributor_names 'e1b3a44', 'Tom Stone' }
test("ian.w.white\100gmail.com") { assert_contributor_names '59c8c63', 'Ian White' }
test('iGEL') { assert_contributor_names 'c3ff04b', 'Johannes Barre' }
test('Igor') { assert_contributor_names 'dd0bbd2', 'Igor Fedoronchuk' }
# Each case resolves one commit-log alias/email to its canonical contributor
# name; single-line brace-block form, one resolution per line.
test('igor04') { assert_contributor_names '0e50b7b', 'Igor Guzak' }
test("ikeda\100dream.big.or.jp") { assert_contributor_names 'f0448f5', 'Akira Ikeda' }
test('imajes') { assert_contributor_names '553c23d', 'James Cox' }
test("info\100rhalff.com") { assert_contributor_names '73ed47d', 'Rob Halff' }
test('innu') { assert_contributor_names 'af005df', 'Indrek Juhkam' }
test('intinig') { assert_contributor_names 'f2e6945', 'Giovanni Intini' }
test('Intrepidd') { assert_contributor_names '9c025ab', 'Adrien Siami' }
test('Irfy') { assert_contributor_names 'a02c607', 'Irfan Adilovic' }
test("isaac\100reuben.com") { assert_contributor_names '2402131', 'Isaac Reuben' }
test('isaacfeliu') { assert_contributor_names '9e76b59', 'Isaac Feliu' }
test('isak') { assert_contributor_names '3b9bcf1', 'Andreas Isaksson' }
test('Ivan') { assert_contributor_names 'e838fa0', 'Ivan Korunkov' }
test('iwiznia') { assert_contributor_names '7b6f2a0', 'Ionatan Wiznia' }
test("jackc\100hylesanderson.com") { assert_contributor_names '76e4c1a', 'Jack Christensen' }
test('jacobat') { assert_contributor_names '4f59aac', 'Jacob Atzen' }
test('jacobstr') { assert_contributor_names 'cc99580', 'Jacob Straszynski' }
test('jacortinas') { assert_contributor_names '6602fb6', 'Jose Angel Cortinas' }
test('jacott') { assert_contributor_names 'db05478', 'Geoff Jacobsen' }
test('jafrog') { assert_contributor_names 'dd0cae3', 'Irina Bednova' }
test("jakob\100mentalized.net") { assert_contributor_names '7aa9eed', 'Jakob Skjerning' }
test('Jakob S') { assert_contributor_names '57098ad', 'Jakob Skjerning' }
test("james.adam\100gmail.com") { assert_contributor_names 'd63f6b9', 'James Adam' }
test("james\100grayproductions.net") { assert_contributor_names '12ff554', 'James Edward Gray II' }
test("james\100slashetc.com") { assert_contributor_names '85bb292', 'James Megquier' }
test('jamesgolick') { assert_contributor_names '083b0b7', 'James Golick' }
test('jamie') { assert_contributor_names '0a79eb7', 'Jamie Hill' }
test("jamie\100bravenet.com") { assert_contributor_names 'db9be58', 'Jamie Macey' }
test("jamie\100dang.com") { assert_contributor_names 'fddd33b', 'Jamie Orchard-Hays' }
test('Jamis') { assert_contributor_names '9d00b0c', 'Jamis Buck' }
test('janovetz') { assert_contributor_names '31b901a', 'Jake Janovetz' }
test("JanPrill\100blauton.de") { assert_contributor_names '36fc181', 'Jan Prill' }
test('jardeon') { assert_contributor_names '43ef688', 'Jared Haworth' }
test('Jarkko') { assert_contributor_names '98306be', 'Jarkko Laine' }
test('jarkko') { assert_contributor_names 'eefe4d0', 'Jarkko Laine' }
test("jarkko\100jlaine.net") { assert_contributor_names 'eede40b', 'Jarkko Laine' }
test('jasl') { assert_contributor_names 'ea881ca', 'Jun Jiang' }
test('Jason') { assert_contributor_names '3880ab0', 'Jason Roth' }
test('Jason Frey (Fryguy)') { assert_contributor_names '1d93464', 'Jason Frey' }
test('jastix') { assert_contributor_names 'a1c289d', 'Nikolay Petrachkov' }
test('jaw6') { assert_contributor_names '011e469', 'Joshua Wehner' }
test("jay\100jay.fm") { assert_contributor_names 'f5f7beb', 'Jay Levitt' }
test('Jay Pignata') { assert_contributor_names 'b372b4c', 'John Pignata' }
test('JayK31') { assert_contributor_names '50555f3', 'Jason Kaye' }
test('jbarnette') { assert_contributor_names 'e47392b', 'John Barnette' }
test('jbasdf') { assert_contributor_names 'dd27c4e', 'Justin Ball' }
test('jbbarth') { assert_contributor_names '24674b3', 'Jean Baptiste Barth' }
test('jbsmith86') { assert_contributor_names '20f32bb', 'Joel Smith' }
test('jbwiv') { assert_contributor_names 'af2ebb2', 'John Wells' }
test('jcf') { assert_contributor_names '7cc27b7', 'James Conroy-Finn' }
test("jcfischer\100gmail.com") { assert_contributor_names '7ad57c4', 'Jens-Christian Fischer' }
test('jchris') { assert_contributor_names '5e677b6', 'Chris Anderson' }
test('jcoglan') { assert_contributor_names 'a7764d8', 'James Coglan' }
test('jean.helou') { assert_contributor_names 'f756bfb', 'Jean Helou' }
test("jean.helou\100gmail.com") { assert_contributor_names 'a3659d5', 'Jean Helou' }
test('jeanmartin') { assert_contributor_names 'c48f744', 'Jan Schwenzien' }
test('jeem') { assert_contributor_names 'cc3183d', 'Jim Hughes' }
test("jeff\100ministrycentered.com") { assert_contributor_names 'a5991d8', 'Jeff Berg' }
test("jeff\100opendbms.com") { assert_contributor_names '85baf07', 'Jeffrey Moss' }
test('JEG2') { assert_contributor_names 'b4337c2', 'James Edward Gray II' }
test('jejacks0n') { assert_contributor_names 'f55ef82', 'Jeremy Jackson' }
test('Jeremy Daer (Kemper)') { assert_contributor_names '1524c01', 'Jeremy Daer' }
test('Jeremy Kemper') { assert_contributor_names 'bd51bbc', 'Jeremy Daer' }
test("jeremy\100jthopple.com") { assert_contributor_names '2e42167', 'Jeremy Hopple' }
test("jeremy\100planetargon.com") { assert_contributor_names '58ebf30', 'Jeremy Voorhis' }
test("jeremydurham\100gmail.com") { assert_contributor_names '0e146d5', 'Jeremy Durham' }
test("jeremye\100bsa.ca.gov") { assert_contributor_names '47b74e6', 'Jeremy Evans' }
test('jeremyevans') { assert_contributor_names '5d1a305', 'Jeremy Evans' }
test("jeremyevans0\100gmail.com") { assert_contributor_names '02d3444', 'Jeremy Evans' }
test('jeremymcanally') { assert_contributor_names 'b564d7e', 'Jeremy McAnally' }
test('jeremymcnally') { assert_contributor_names 'a79ac12', 'Jeremy McAnally' }
test('jeroeningen') { assert_contributor_names '2e379c1', 'Jeroen van Ingen' }
test('jerome') { assert_contributor_names 'aa4af60', 'Jérôme Lipowicz' }
test("jerrett\100bravenet.com") { assert_contributor_names '942132b', 'Jerrett Taylor' }
test('JESii') { assert_contributor_names 'f7626ea', 'Jon Seidel' }
test("jessemerriman\100warpmail.net") { assert_contributor_names '04d2d5f', 'Jesse Merriman' }
test('jferris') { assert_contributor_names 'd8b67f7', 'Joe Ferris' }
test('jgarber') { assert_contributor_names '3c6b7a2', 'Jason Garber' }
test("jhosteny\100mac.com") { assert_contributor_names '9f1b577', 'Joseph Hosteny' }
test("jimw\100mysql.com") { assert_contributor_names '68b9097', 'Jim Winstead' }
test('jkit') { assert_contributor_names '96557eb', 'J Kittiyachavalit' }
test('jlindley') { assert_contributor_names 'd0bc724', 'James Lindley' }
test("jmartin\100desertflood.com") { assert_contributor_names '2876efb', 'Joseph A. Martin' }
test("jmckible\100gmail.com") { assert_contributor_names '12ab93b', 'Jordan McKible' }
test('jmettraux') { assert_contributor_names 'bafd698', 'John Mettraux' }
test("joe\100mjg2.com") { assert_contributor_names 'c4f1979', 'Joe Goldwasser' }
test('joeellis') { assert_contributor_names '8e0f49e', 'Joe Ellis' }
test("joergd\100pobox.com") { assert_contributor_names 'efaf2af', 'Joerg Diekmann' }
test('joerichsen') { assert_contributor_names '083b0b7', 'Jørgen Orehøj Erichsen' }
test("johan\100johansorensen.com") { assert_contributor_names 'c42cd3c', 'Johan Sørensen' }
test('Johan Sorensen') { assert_contributor_names '998ab50', 'Johan Sørensen' }
test 'Johan Sörensen' do
assert_contributor_names '14edaa1', 'Johan Sørensen'
end
test "johan\100textdrive.com" do
assert_contributor_names '3fec943', 'Johan Sørensen'
end
test 'John J Wang' do
assert_contributor_names '4033c50', 'John J. Wang'
end
test 'johnb' do
assert_contributor_names '5e94f05', 'John Barton'
end
test 'johndouthat' do
assert_contributor_names '4250cca', 'John F. Douthat'
end
test 'johnnyb' do
assert_contributor_names '2be3a33', 'Jonathan Bartlett'
end
test 'joker1007' do
assert_contributor_names 'edd93a5', 'Tomohiro Hashidate'
end
test "jon\100blankpad.net" do
assert_contributor_names '35d3ede', 'Jon Wood'
end
test "jon\100burningbush.us" do
assert_contributor_names 'e234115', 'Jon Moses'
end
test "jon.evans\100pobox.com" do
assert_contributor_names '52c9ad4', 'Jon Evans'
end
test "jon\100instance-design.co.uk" do
assert_contributor_names '97adfda', 'Jon Wood'
end
test "jon\100siliconcircus.com" do
assert_contributor_names '4d9ca4d', 'Jon Bright'
end
test "jonathan\100bluewire.net.nz" do
assert_contributor_names '1aff68d', 'Jonathan Viney'
end
test "jonathan\100daikini.com" do
assert_contributor_names '7bfca7e', 'Jonathan Younger'
end
test 'jonnii' do
assert_contributor_names '46ebbcd', 'Jonathan Goldman'
end
test 'joost' do
assert_contributor_names '6b018e3', 'Joost Baaij'
end
test 'jordanstaub' do
assert_contributor_names 'cfe731c', 'Jordan Staub'
end
test 'jordi' do
assert_contributor_names '8a5a9dc', 'Jordi Bunster'
end
test "Jose' Valim" do
assert_contributor_names 'c2e7851', 'José Valim'
end
test 'josevalim' do
assert_contributor_names '8d0b4fa', 'José Valim'
end
test 'josh' do
assert_contributor_names 'd81ac8d', 'Josh Peek'
end
test 'Josh' do
assert_contributor_names '903dcef', 'Josh Peek'
end
test "josh\100hasmanythrough.com" do
assert_contributor_names '70577d0', 'Josh Susser'
end
test "josh\100joshpeek.com" do
assert_contributor_names '8085cbf', 'Josh Peek'
end
test "josh.starcher\100gmail.com" do
assert_contributor_names 'edb4208', 'Josh Starcher'
end
test "joshknowles\100gmail.com" do
assert_contributor_names '7d01005', 'Josh Knowles'
end
test 'joshpeek' do
assert_contributor_names 'c57c721', 'Josh Peek'
end
test 'Joshua Peek' do
assert_contributor_names 'e0846c8', 'Josh Peek'
end
test "jp.bougie\100gmail.com" do
assert_contributor_names '7581193', 'Jean-Philippe Bougie'
end
test 'jqr' do
assert_contributor_names '3028ca5', 'Elijah Miller'
end
test 'jramirez' do
assert_contributor_names 'bcb47a8', 'Javier Ramírez'
end
test 'jsgarvin' do
assert_contributor_names '38deb0e', 'Jonathan Garvin'
end
test 'jsheets' do
assert_contributor_names '83a21f7', 'John Sheets'
end
test 'jsierles' do
assert_contributor_names '3f336ad', 'Joshua Sierles'
end
test "jsolson\100damogran.org" do
assert_contributor_names 'ce0653b', 'Jon Olson'
end
test 'jstewart' do
assert_contributor_names 'cf2ee14', 'Jason Stewart'
end
test "jstirk\100oobleyboo.com" do
assert_contributor_names 'a150687', 'Jason Stirk'
end
test 'jstrachan' do
assert_contributor_names '0c96060', 'James Strachan'
end
test 'juan david pastas' do
assert_contributor_names 'dbf78b0', 'Juan David Pastas'
end
test 'juandebravo' do
assert_contributor_names 'ed4bde5', 'Juan De Bravo'
end
test 'JuanitoFatas' do
assert_contributor_names 'cd0d649', 'Juanito Fatas'
end
test 'juanjo.bazan' do
assert_contributor_names '3280a6e', 'Juanjo Bazán'
end
test 'Juanjo Bazan' do
assert_contributor_names '5e83612', 'Juanjo Bazán'
end
test 'Jude Arasu' do
assert_contributor_names 'c22989d', 'Thillai Arasu'
end
test 'JudeArasu' do
assert_contributor_names 'db20780', 'Thillai Arasu'
end
test 'jugend' do
assert_contributor_names '50253ed', 'Herryanto Siatono'
end
test 'Julia Lopez' do
assert_contributor_names '5e46f4ccb4', 'Julia López'
end
test 'julik' do
assert_contributor_names 'f756bfb', 'Julian Tarkhanov'
end
test "justin\100aspect.net" do
assert_contributor_names '21d50e4', 'Justin Mecham'
end
test 'justinfrench' do
assert_contributor_names 'b2eca73', 'Justin French'
end
test 'jvoorhis' do
assert_contributor_names '6a13376', 'Jeremy Voorhis'
end
test 'jwarwick' do
assert_contributor_names 'a85caaa', 'John Warwick'
end
test 'jweiss' do
assert_contributor_names '9e1d506', 'Jonathan Weiss'
end
test 'jwilger' do
assert_contributor_names 'd0ff4ec', 'John Wilger'
end
test 'jzw' do
assert_contributor_names '5ab94b2', 'Zac Williams'
end
test "k\100v2studio.com" do
assert_contributor_names '426fa08', 'Caio Chassot'
end
test "kajism\100yahoo.com" do
assert_contributor_names '40762a4', 'Karel Miarka'
end
test 'kamal' do
assert_contributor_names '67442cb', 'Kamal Fariz Mahyuddin'
end
test 'kamk' do
assert_contributor_names 'd0df7f2', 'Kamil Kukura'
end
test 'kampers' do
assert_contributor_names 'f000028', 'Chris Kampmeier'
end
test 'Kane' do
assert_contributor_names 'cf9f361', 'Andreas Wurm'
end
test 'kane' do
assert_contributor_names '09a23d2', 'Andreas Wurm'
end
test "kanis\100comcard.de" do
assert_contributor_names '6650da4', 'Lars Kanis'
end
test 'karanarora' do
assert_contributor_names '5415230', 'Karan Arora'
end
test 'Karel Minarik' do
assert_contributor_names 'ee7487e', 'Karel Minařík'
end
test 'kares' do
assert_contributor_names '5dbd6b5', 'Karol Bucek'
end
test 'karmi' do
assert_contributor_names '4185fb1', 'Karel Minařík'
end
test 'kasper' do
assert_contributor_names '42728c7', 'Sergey Karpesh'
end
test 'kaygee' do
assert_contributor_names '6509ceb', 'Keith Gautreaux'
end
test "kazuhiko\100fdiary.net" do
assert_contributor_names '8e78e93', 'Kazuhiko Shiozaki'
end
test 'KD' do
assert_contributor_names '1333994', 'Kuldeep Aggarwal'
end
test "keegan\100thebasement.org" do
assert_contributor_names '491b4a3', 'Keegan Quinn'
end
test 'keepcosmos' do
assert_contributor_names 'ca86c9e', 'Jaehyun Shin'
end
test 'kei' do
assert_contributor_names 'e1f8ec5', 'Kei Shiratsuchi'
end
test "keithm\100infused.org" do
assert_contributor_names '70f4a92', 'Keith Morrison'
end
test "ken.barker\100gmail.com" do
assert_contributor_names '34cbf15', 'Ken Barker'
end
test "kenneth.miller\100bitfield.net" do
assert_contributor_names '9c24899', 'Ken Miller'
end
test "kennethkunz\100gmail.com" do
assert_contributor_names 'cb62f06', 'Ken Kunz'
end
test 'kennyj' do
assert_contributor_names 'bb80a87', 'Toshinori Kajihara'
end
test 'kenta-s' do
assert_contributor_names '85119f5', 'kenta-s'
end
test 'kevcha' do
assert_contributor_names 'b523b5a', 'Kevin Chavanne'
end
test "kevin.clark\100gmail.com" do
assert_contributor_names '767eaa9', 'Kevin Clark'
end
test "kevin.clark\100gmal.com" do
assert_contributor_names 'b98684c', 'Kevin Clark'
end
test "Kevin Clark kevin.clark\100gmail.com" do
assert_contributor_names '7252666', 'Kevin Clark'
end
test "kevwil\100gmail.com" do
assert_contributor_names '080e169', 'Kevin Williams'
end
test 'kgorin' do
assert_contributor_names '10e708c', 'Kirill Gorin'
end
test "kim\100tinker.com" do
assert_contributor_names '060b9b1', 'Kim Shrier'
end
test 'Kir' do
assert_contributor_names '3fb0579', 'Kir Shatrov'
end
test 'kitallis' do
assert_contributor_names 'ea6cc28', 'Akshay Gupta'
end
test 'knapo' do
assert_contributor_names 'e2d90bf', 'Krzysztof Knapik'
end
test 'kohnkecomm' do
assert_contributor_names 'cf6299d', 'kohnkecomm'
end
test 'kommen' do
assert_contributor_names '3cfb894', 'Dieter Komendera'
end
test 'kou' do
assert_contributor_names '24e6cbc', 'Kouhei Sutou'
end
test 'Koz' do
assert_contributor_names '63e9687', 'Michael Koziarski'
end
test 'kris_chambers' do
assert_contributor_names '274ef21', 'Kristopher Chambers'
end
test 'krishna' do
assert_contributor_names 'bf658a9', 'Krishna Kotecha'
end
test 'kuahyeow' do
assert_contributor_names 'caec639', 'Thong Kuah'
end
test 'kubo' do
assert_contributor_names '8437be3', 'Kubo Takehiro'
end
test 'kucaahbe' do
assert_contributor_names 'b4ae94f', 'Dmitrij Mjakotnyi'
end
test 'kishore-mohan' do
assert_contributor_names 'cf82b2e', 'Kishore Mohan'
end
test 'Kym' do
assert_contributor_names '6a14b01', 'Kym McInerney'
end
test "l.francke\100gmx.de" do
assert_contributor_names '620726f', 'Lars Francke'
end
test 'l.guidi' do
assert_contributor_names '083b0b7', 'Luca Guidi'
end
test 'l4u' do
assert_contributor_names 'e5c95b1', 'Leo Lou'
end
test 'Laknath' do
assert_contributor_names '0149a6e', 'Buddhika Laknath'
end
test 'lakshan' do
assert_contributor_names 'e2d0b0e', 'Lakshan Perera'
end
test 'larrylv' do
assert_contributor_names '939b896', 'Larry Lv'
end
test "larrywilliams1\100gmail.com" do
assert_contributor_names '35b4bdc', 'Larry Williams'
end
test 'lars pind' do
assert_contributor_names '2fa5bf0', 'Lars Pind'
end
test 'Lars pind' do
assert_contributor_names 'ff0b766', 'Lars Pind'
end
test "lars\100pind.com" do
assert_contributor_names 'c350291', 'Lars Pind'
end
test "lars\100pinds.com" do
assert_contributor_names '52d4166', 'Lars Pind'
end
test 'laserlemon' do
assert_contributor_names '02039e9', 'Steve Richert'
end
test "laurel\100gorgorg.org" do
assert_contributor_names '302c23d', 'Laurel Fan'
end
test 'laurelfan' do
assert_contributor_names '71a4f71', 'Laurel Fan'
end
test 'laurocaetano' do
assert_contributor_names 'e87c3da', 'Lauro Caetano'
end
test 'lawrence' do
assert_contributor_names '902533e', 'Lawrence Pit'
end
test 'lazyatom' do
assert_contributor_names 'f1b1af8', 'James Adam'
end
test "lee\100omara.ca" do
assert_contributor_names 'b272100', "Lee O'Mara"
end
test 'leikind' do
assert_contributor_names '8b63dd0', 'Yuri Leikind'
end
test 'Leon Bredt' do
assert_contributor_names '7ebdecf', 'Leon Breedt'
end
test 'leprasmurf' do
assert_contributor_names 'fce92fd', 'Tim Forbes'
end
test 'leriksen' do
assert_contributor_names '951bb84', 'Leif Eriksen'
end
test "leroen\100gmail.com" do
assert_contributor_names '6a6df5f', 'Kjetil Lerøen'
end
test 'lest' do
assert_contributor_names 'ba2d850', 'Sergey Nartimov'
end
test 'lholden' do
assert_contributor_names '19f8bb28', 'Lori Holden'
end
test 'libc' do
assert_contributor_names '9d79e06', 'Eugene Pimenov'
end
test 'libin' do
assert_contributor_names '5c9d430', 'Lebin Sebastian F'
end
test 'lifo' do
assert_contributor_names '826f0bd', 'Pratik Naik'
end
test 'lifofifo' do
assert_contributor_names '19dff14', 'Pratik Naik'
end
test 'lihan' do
assert_contributor_names '36261c8', 'Lihan Li'
end
test 'lmarlow' do
assert_contributor_names '63df6eb', 'Lee Marlow'
end
test "lmarlow\100yahoo.com" do
assert_contributor_names '04d9e94', 'Lee Marlow'
end
test 'logylaps' do
assert_contributor_names 'd38b477', 'Logy Laps'
end
test 'loincloth' do
assert_contributor_names 'b4dd1e6', 'Seth Rasmussen'
end
test "lon\100speedymac.com" do
assert_contributor_names '481d579', 'Lon Baker'
end
test 'lorenjohnson' do
assert_contributor_names '69e4cc6', 'Loren Johnson'
end
test 'lotswholetime' do
assert_contributor_names '8b3f831', 'Mark Van Holstyn'
end
test "LouisStAmour\100gmail.com" do
assert_contributor_names 'b30ccef', 'Louis St-Amour'
end
test 'Lourens Naude' do
assert_contributor_names 'b08c968', 'Lourens Naudé'
end
test 'lsylvester' do
assert_contributor_names '6c57c78', 'Lachlan Sylvester'
end
test 'Lucas Caton' do
assert_contributor_names '2b7a621', 'Lucas Catón'
end
test 'Lukasz Sarnacki' do
assert_contributor_names '0c5ca07', 'Łukasz Sarnacki'
end
test 'Lukasz Strzalkowski' do
assert_contributor_names 'f9b6b86', 'Łukasz Strzałkowski'
end
test 'lukeludwig' do
assert_contributor_names '7c147e9', 'Luke Ludwig'
end
test 'lukfugl' do
assert_contributor_names '2c3ca4c', 'Jacob Fugal'
end
test 'lylo' do
assert_contributor_names '3c0fd44', 'Olly Headey'
end
test "m.stienstra\100fngtps.com" do
assert_contributor_names 'b85c535', 'Manfred Stienstra'
end
test 'ma2gedev' do
assert_contributor_names '6f9baae', 'Takayuki Matsubara'
end
test "maceywj\100telus.net" do
assert_contributor_names 'cb4bd89', 'Jamie Macey'
end
test 'maclover7' do
assert_contributor_names 'bbcbe6e', 'Jon Moss'
end
test 'macournoyer' do
assert_contributor_names '002e73a', 'Marc-André Cournoyer'
end
test 'madlep' do
assert_contributor_names '8e50f0f', 'Julian Doherty'
end
test 'madrobby' do
assert_contributor_names 'cfa31c3', 'Thomas Fuchs'
end
test 'maik schmidt' do
assert_contributor_names 'beff664', 'Maik Schmidt'
end
test "mail\100matthewpainter.co.uk" do
assert_contributor_names 'de5b737', 'Matthew Painter'
end
test 'manfred' do
assert_contributor_names 'd4c24b6', 'Manfred Stienstra'
end
test 'Manish shrivastava' do
assert_contributor_names '6252d0a', 'Manish Shrivastava'
end
test 'manish-shrivastava' do
assert_contributor_names '8145880', 'Manish Shrivastava'
end
test 'Manu' do
assert_contributor_names 'e7e4dee', 'Manu J'
end
test 'Marc-Andre Lafortune' do
assert_contributor_names '819c347', 'Marc-André Lafortune'
end
test 'Marcel' do
assert_contributor_names '89e06ed', 'Marcel Molina Jr.'
end
test 'marcel' do
assert_contributor_names 'a995f42', 'Marcel Molina Jr.'
end
test 'Marcel Molina' do
assert_contributor_names 'ba5591f', 'Marcel Molina Jr.'
end
test 'Marcel Molina Jr' do
assert_contributor_names '4874df1', 'Marcel Molina Jr.'
end
test 'Marcel Mollina Jr.' do
assert_contributor_names '3f049b0', 'Marcel Molina Jr.'
end
test 'marcenuc' do
assert_contributor_names '603ab7d', 'Marcello Nuccio'
end
test "marcin\100junkheap.net" do
assert_contributor_names '4e9bc0f', 'Marcin Szczepański'
end
test 'marclove' do
assert_contributor_names 'ce0653b', 'Marc Love'
end
test 'Marcos Tapajos' do
assert_contributor_names 'be794d8', 'Marcos Tapajós'
end
test 'mariozig' do
assert_contributor_names 'bf044a9', 'Mario Zigliotto'
end
test 'Mark Rushakof' do
assert_contributor_names '8cc4159', 'Mark Rushakoff'
end
test 'MarkMT' do
assert_contributor_names '27da612', 'MarkMT'
end
test "mark.imbriaco\100pobox.com" do
assert_contributor_names '229c0f4', 'Mark Imbriaco'
end
test 'markevich' do
assert_contributor_names '56445c9', 'Slava Markevich'
end
test "MarkusQ\100reality.com" do
assert_contributor_names '3441dfd', 'Markus Roberts'
end
test 'Marten' do
assert_contributor_names '91eaeb2', 'Marten Veldthuis'
end
test 'Martin' do
assert_contributor_names 'e807476', 'Martin Linkhorst'
end
test "martin.emde\100gmail.com" do
assert_contributor_names 'b1257d9', 'Martin Emde'
end
test 'Martin Schürrer' do
assert_contributor_names '6bc24d4', 'Martin Schuerrer'
end
test 'masarakki' do
assert_contributor_names '83b7bf4', 'Masaki Yamada'
end
test 'mathie' do
assert_contributor_names '55b6697', 'Graeme Mathieson'
end
test 'mathieuravaux' do
assert_contributor_names 'fadd1fa', 'Mathieu Ravaux'
end
test 'matrix9180' do
assert_contributor_names '8352287', 'Chad Ingram'
end
test "mats\100imediatec.co.uk" do
assert_contributor_names '900afae', 'Mats Persson'
end
test 'matt' do
assert_contributor_names 'e2ce901a', 'Matt Aimonetti'
end
test 'Matt B.' do
assert_contributor_names '97f418c', 'Matt Boehlig'
end
test "matt\100mattmargolis.net" do
assert_contributor_names '883c54a', 'Matt Margolis'
end
test 'mattbauer' do
assert_contributor_names '83c1934', 'Matt Bauer'
end
test "matthew\100walker.wattle.id.au" do
assert_contributor_names '8cdf912', 'Matthew Walker'
end
test 'MatthewRudy' do
assert_contributor_names '7e8aee7', 'Matthew Rudy Jacobs'
end
test 'mattly' do
assert_contributor_names 'd5a93b6', 'Matt Lyon'
end
test "\100mattt" do
assert_contributor_names 'ef91cdd', 'Mattt Thompson'
end
test 'Matt Stopa' do
assert_contributor_names '3c90f7a', 'Matthew Stopa'
end
test 'Maxime RETY' do
assert_contributor_names 'c7e875a', 'Maxime Réty'
end
test 'Mehmet Emin İNAÇ' do
assert_contributor_names 'ca77998', 'Mehmet Emin İNAÇ'
end
test 'maximerety' do
assert_contributor_names 'd0f22a9', 'Maxime Réty'
end
test "maznawak\100nerim.net" do
assert_contributor_names '92473b8', 'Simon Arnaud'
end
test 'mazuhl' do
assert_contributor_names '106b78d', 'Max Mitchell'
end
test 'mcmire' do
assert_contributor_names '0ff0319', 'Elliot Winkler'
end
test "mdabney\100cavoksolutions.com" do
assert_contributor_names '43b6a74fb1', 'Blane Dabney'
end
test 'mdeiters' do
assert_contributor_names 'e59978a', 'Matthew Deiters'
end
test "me\100jonnii.com" do
assert_contributor_names '4793a2f', 'Jonathan Goldman'
end
test "me\100julik.nl" do
assert_contributor_names '2ffc84d', 'Julian Tarkhanov'
end
test "me\100mdaines.com" do
assert_contributor_names '191dfe8', 'Michael Daines'
end
test "meadow.nnick\100gmail.com" do
assert_contributor_names '7c8f3ed', 'Lugovoi Nikolai'
end
test "medlar\100medlar.it" do
assert_contributor_names '5525b79', 'Giovanni Intini'
end
test 'mfazekas' do
assert_contributor_names '341afb6', 'Miklós Fazekas'
end
test 'mfoster' do
assert_contributor_names '5850edf', 'Mark W. Foster'
end
test 'mhuffnagle' do
assert_contributor_names '328ae5b', 'Marc Huffnagle'
end
test 'mhutchin' do
assert_contributor_names '738e906', 'Michael Hutchinson'
end
test 'mhw' do
assert_contributor_names 'c7df5bd6', 'Mark H. Wilkinson'
end
test 'mica eked' do
assert_contributor_names 'd5e3c49', 'Mica Eked'
end
test 'Michael A. Schoen' do
assert_contributor_names '94a1309', 'Michael Schoen'
end
test 'Michael Gunderloy' do
assert_contributor_names '6222ac1', 'Mike Gunderloy'
end
test 'Michael S. Klishin' do
assert_contributor_names 'e21ed3e', 'Michael Klishin'
end
test "michael\100schubert" do
assert_contributor_names '7d6f783', 'Michael Schubert'
end
test "michael\100schubert.cx" do
assert_contributor_names 'fd9c15e', 'Michael Schubert'
end
test "michael\100schuerig.de" do
assert_contributor_names '2abe3d2', 'Michael Schuerig'
end
test 'Michael Shuerig' do
assert_contributor_names '4836c40', 'Michael Schuerig'
end
test 'michaelminter' do
assert_contributor_names '90f7f81', 'Michael Minter'
end
test 'Miguel' do
assert_contributor_names '2e4df2a', 'Miguel Fernández'
end
test "mike\100clarkware.com" do
assert_contributor_names '9f3625d', 'Mike Clark'
end
test "mike\100michaeldewey.org" do
assert_contributor_names '750ca79', 'Michael Dewey'
end
test 'mikel' do
assert_contributor_names '1d5d050', 'Mikel Lindsaar'
end
test 'mikeycgto' do
assert_contributor_names '6df2db3', 'Michael Coyne'
end
test 'Miklos Fazekas' do
assert_contributor_names 'be0875f4', 'Miklós Fazekas'
end
test 'Miklos Fazkeas' do
assert_contributor_names 'fb71fa6', 'Miklós Fazekas'
end
test 'mikong' do
assert_contributor_names '632ab24', 'Michael Galero'
end
test 'miloops' do
assert_contributor_names 'f253e98', 'Emilio Tagua'
end
test "minimudboy\100gmail.com" do
assert_contributor_names '3fb5828', 'Lewis Jubb'
end
test 'Mip Earson' do
assert_contributor_names '4ae1cdd', 'Michael Pearson'
end
test 'misfo' do
assert_contributor_names '7706b57', 'Trent Ogren'
end
test 'mislav' do
assert_contributor_names 'a550d2a', 'Mislav Marohnić'
end
test 'Mislav Marohnic' do
assert_contributor_names '9c0e4de', 'Mislav Marohnić'
end
test "mislav\100nippur.irb.hr" do
assert_contributor_names 'c02d1f7', 'Mislav Marohnić'
end
test 'mislaw' do
assert_contributor_names '73fba4f', 'Mislav Marohnić'
end
test "mlaster\100metavillage.com" do
assert_contributor_names 'de5b737', 'Mike Laster'
end
test "mmangino\100elevatedrails.com" do
assert_contributor_names '3ad1a98', 'Mike Mangino'
end
test 'mnaberez' do
assert_contributor_names '31d3048', 'Mike Naberezny'
end
test 'mntj' do
assert_contributor_names 'cb97312', 'Corey Leveen'
end
test 'mo khan' do
assert_contributor_names '140557e', 'Mo Khan'
end
test 'mojombo' do
assert_contributor_names '928c84b', 'Tom Preston-Werner'
end
test 'Molchanov Andrey' do
assert_contributor_names '37f2674', 'Andrey Molchanov'
end
test 'moriq' do
assert_contributor_names '2876efb', 'Kazuhiro Yoshida'
end
test "moriq\100moriq.com" do
assert_contributor_names 'd4bb22b', 'Kazuhiro Yoshida'
end
test 'moro' do
assert_contributor_names '9a8d583', 'Kyosuke Morohashi'
end
test "mortonda\100dgrmm.net" do
assert_contributor_names '2876efb', 'David Morton'
end
test 'moses' do
assert_contributor_names 'e5b3d4b', 'Moses Hohman'
end
test 'mpalmer' do
assert_contributor_names '2503fd3', 'Matt Palmer'
end
test "mpalmer\100hezmatt.org" do
assert_contributor_names '7767e33', 'Matt Palmer'
end
test "mparrish\100pearware.org" do
assert_contributor_names '5fe0ecc', 'Matt Parrish'
end
test 'Mr A' do
assert_contributor_names '3134605', 'Akshay Vishnoi'
end
test 'mrageh' do
assert_contributor_names '9494855', 'Adam Magan'
end
test 'MrBrdo' do
assert_contributor_names 'bc7c0b5', 'Jan Berdajs'
end
test 'mrichman' do
assert_contributor_names '131acec', 'Mark A. Richman'
end
test 'mrj' do
assert_contributor_names 'ef57b93', 'Mark R. James'
end
test "mrj\100bigpond.net.au" do
assert_contributor_names '95e8740', 'Mark James'
end
test 'mrkjlchvz' do
assert_contributor_names 'e19ce8b', 'mrkjlchvz'
end
test 'mroch' do
assert_contributor_names '81286f8', 'Marshall Roch'
end
test 'mtitorenko' do
assert_contributor_names '74191ed', 'Mark J. Titorenko'
end
test 'murphy' do
assert_contributor_names 'dcc1549', 'Kornelius Kalnbach'
end
test "murphy\100cYcnus.de" do
assert_contributor_names '71ff705', 'Kornelius Kalnbach'
end
test "murpyh\100rubychan.de" do
assert_contributor_names '5326274', 'Kornelius Kalnbach'
end
test 'mutru' do
assert_contributor_names '00326bb', 'Otto Hilska'
end
test 'nachocab' do
assert_contributor_names '2e8f910', 'Nacho Caballero'
end
test 'naffis' do
assert_contributor_names '1117d73', 'Dave Naffis'
end
test 'nate' do
assert_contributor_names '056ddbd', 'Nathan Kontny'
end
test 'NeilW' do
assert_contributor_names '9f26164', 'Neil Wilson'
end
test 'Neodelf' do
assert_contributor_names 'e0e7047', 'Andrey Molchanov'
end
test 'nerdinand' do
assert_contributor_names '38c833c', 'Ferdinand Niedermann'
end
test 'neumayr' do
assert_contributor_names 'd7b6054', 'Matthias Neumayr'
end
test 'Nerian' do
assert_contributor_names '2ebf47a', 'Gonzalo Rodríguez-Baltanás Díaz'
end
test 'neshmi' do
assert_contributor_names 'e91d7ed', 'Matthew Vincent'
end
test 'Nex3' do
assert_contributor_names '4036fcc', 'Nathan Weizenbaum'
end
test 'Nick' do
assert_contributor_names '37b0b36', 'Nick Sieger'
end
test "nick+rails\100ag.arizona.edu" do
assert_contributor_names '06afb8c', 'Nick Murphy'
end
test 'nicksieger' do
assert_contributor_names '2029b8a', 'Nick Sieger'
end
test "nicksieger\100gmail.com" do
assert_contributor_names '81a6a60', 'Nick Sieger'
end
test 'Nicolas Hock Isaza' do
assert_contributor_names 'ee822f2', 'Nicolás Hock Isaza'
end
test "nicolas.pouillard\100gmail.com" do
assert_contributor_names '8910de6', 'Nicolas Pouillard'
end
test 'Nicolas Sanguinetti' do
assert_contributor_names 'd5d7171', 'Nicolás Sanguinetti'
end
test 'nik.kakelin' do
assert_contributor_names '2bc5e6c', 'Nik Wakelin'
end
test 'nik.wakelin' do
assert_contributor_names '5bf40f7', 'Nik Wakelin'
end
test "nils\100alumni.rice.edu" do
assert_contributor_names '64b7c5f', 'Nils Jonsson'
end
test 'nilsga' do
assert_contributor_names 'cb79118', 'Nils-Helge Garli Hegvik'
end
test 'nishant-cyro' do
assert_contributor_names '517caa8', 'Nishant Tuteja'
end
test 'nkallen' do
assert_contributor_names 'c6f2af5', 'Nick Kallen'
end
test 'noam' do
assert_contributor_names 'b64fb30', 'Noam Gagliardi Rabinovich'
end
test 'Nobuhiro IMAI' do
assert_contributor_names 'e535b45', 'Nobuhiro Imai'
end
test 'Noemj' do
assert_contributor_names 'a53935d', 'Olli Rissanen'
end
test 'noradio' do
assert_contributor_names '87b55f5', 'Marcel Molina Jr.'
end
test 'norbauer' do
assert_contributor_names '1caa763', 'Norbauer Inc'
end
test 'norbert' do
assert_contributor_names 'f70b74a', 'Norbert Crombach'
end
test "normelton\100gmail.com" do
assert_contributor_names 'd4c24b6', 'Norman Elton'
end
test 'nosolopau' do
assert_contributor_names '1e1711c', 'Pau'
end
test 'notahat' do
assert_contributor_names '566a369', 'Pete Yandell'
end
test 'nov' do
assert_contributor_names '5476a6a', 'Nobukazu Matake'
end
test "nov\100yo.rim.or.jp" do
assert_contributor_names 'ae0e1a0', 'Nobuhiro Imai'
end
test 'ntalbott' do
assert_contributor_names '5430eb6', 'Nathaniel Talbott'
end
test 'nullstyle' do
assert_contributor_names '79670fb', 'Scott Fleckenstein'
end
test "nunemaker\100gmail.com" do
assert_contributor_names '4defe1a', 'John Nunemaker'
end
test 'nzkoz' do
assert_contributor_names '96557eb', 'Michael Koziarski'
end
test 'ObieFernandez' do
assert_contributor_names 'efaf2af', 'Obie Fernandez'
end
test 'obrie' do
assert_contributor_names '2043513', 'Aaron Pfeifer'
end
test 'octopod' do
assert_contributor_names '0fd9743', 'Chris McGrath'
end
test 'ogawaso' do
assert_contributor_names 'd4d3d05', 'Soichiro Ogawa'
end
test 'ogeidix' do
assert_contributor_names '1ac802f', 'Diego Giorgini'
end
test "okkez000\100gmail.com" do
assert_contributor_names 'a9ed24c', 'okkez'
end
test "oleg.frolov\100gmail.com" do
assert_contributor_names 'a288179', 'Oleg Frolov'
end
test 'oleganza' do
assert_contributor_names 'f95ff8d', 'Oleg Andreev'
end
test "olivier_ansaldi\100yahoo.com" do
assert_contributor_names 'f801cd7', 'Olivier Ansaldi'
end
test 'Olly Legg' do
assert_contributor_names '0349278', 'Oliver Legg'
end
test "ordwaye\100evergreen.edu" do
assert_contributor_names 'c29db9f', 'Erik Ordway'
end
test 'orta' do
assert_contributor_names '4c323bc', 'Orta Therox'
end
test 'oss92' do
assert_contributor_names 'b937c24', 'Mohamed Osama'
end
test 'outerim' do
assert_contributor_names '3b6555a', 'Lee Jensen'
end
test 'Overbryd' do
assert_contributor_names '9fb5ee8', 'Lukas Rieder'
end
test 'oz' do
assert_contributor_names '4d4db4c', 'utilum'
end
test 'Pablo Ifrán' do
assert_contributor_names 'c945da5', 'Pablo Ifrán'
end
test 'ozzyaaron' do
assert_contributor_names '837f0ab', 'Aaron Todd'
end
test 'packagethief' do
assert_contributor_names 'f005587', 'Jeffrey Hardy'
end
test 'padi' do
assert_contributor_names 'f55ecc6', 'Marc Rendl Ignacio'
end
test 'pager' do
assert_contributor_names '6a611e1', 'Dmitriy Timokhin'
end
test 'palkan' do
assert_contributor_names '7785871', 'Vladimir Dementyev'
end
test 'paranoiase Kang' do
assert_contributor_names '2773257', 'Sung-hee Kang'
end
test "patrick\100lenz.sh" do
assert_contributor_names '2876efb', 'Patrick Lenz'
end
test 'patrick.t.joyce' do
assert_contributor_names '5ac3a9b', 'Patrick Joyce'
end
test "paul\100cnt.org" do
assert_contributor_names '33cf8f1', 'Paul Smith'
end
test "paul\100paulbutcher.com" do
assert_contributor_names 'ccd32ad', 'Paul Butcher'
end
test 'paulccarey' do
assert_contributor_names '2b2ee22', 'Paul Carey'
end
test 'pavlos' do
assert_contributor_names '6861a1d', 'Paul Hieromnimon'
end
test 'pawel2015' do
assert_contributor_names 'b2bfb07', 'Pawel Janiak'
end
test 'pbflinn' do
assert_contributor_names '690eae7', 'Perry Flinn'
end
test 'pburleson' do
assert_contributor_names '58bf8f4', 'Patrick Burleson'
end
test "pburleson\100gmail.com" do
assert_contributor_names '1ee780b', 'Patrick Burleson'
end
test "pdcawley\100bofh.org.uk" do
assert_contributor_names '9c5591d', 'Piers Cawley'
end
test 'pedz' do
assert_contributor_names 'e4e3df8', 'Perry Smith'
end
test 'pelargir' do
assert_contributor_names '8c2177c', 'Matthew Bass'
end
test 'Pelle' do
assert_contributor_names '53088c4', 'Pelle Braendgaard'
end
test 'pelle' do
assert_contributor_names 'da7f9f5', 'Pelle Braendgaard'
end
test "pelletierm\100eastmedia.net" do
assert_contributor_names '5a9a93d', 'Matt Pelletier'
end
test "pergesu\100gmail.com" do
assert_contributor_names 'befd62c', 'Pat Maddox'
end
test "perlguy\100gmail.com" do
assert_contributor_names 'd0bd3b5', 'Jim Helm'
end
test "petermichaux\100gmail.com" do
assert_contributor_names '8734da9', 'Peter Michaux'
end
test 'pezra' do
assert_contributor_names '860cf2d', 'Peter Williams'
end
test 'phallstrom' do
assert_contributor_names 'a49e7d5', 'Philip Hallstrom'
end
test "phil.ross\100gmail.com" do
assert_contributor_names 'f4f7e75', 'Philip Ross'
end
test "philbogle\100gmail.com" do
assert_contributor_names 'b8a5d398', 'Phil Bogle'
end
test 'Philipp Weissensteiner' do
assert_contributor_names '8102c0d', 'Philipp Weißensteiner'
end
test "phillip\100pjbsoftware.com" do
assert_contributor_names '6cae2b7', 'Phillip J. Birmingham'
end
test 'phinze' do
assert_contributor_names 'd2d7492', 'Paul Hinze'
end
test 'phoet' do
assert_contributor_names 'b6aa70c', 'Peter Schröder'
end
test "phurley\100gmail.com" do
assert_contributor_names '09751e5', 'Patrick Hurley'
end
test 'PikachuEXE' do
assert_contributor_names '7fc3390', 'Leung Ho Kuen'
end
test 'piotr' do
assert_contributor_names '1708a86', 'Piotr Banasik'
end
test "piotr\100t-p-l.com" do
assert_contributor_names '620d3a3', 'Piotr Banasik'
end
test 'pivotal' do
assert_contributor_names 'e710902', 'Pivotal Labs'
end
test 'Pivotal Labs' do
assert_contributor_names '487758b', 'Pivotal Labs'
end
test 'pixeltrix' do
assert_contributor_names '779db44', 'Andrew White'
end
test "pjhyett\100gmail.com" do
assert_contributor_names '8eaf479', 'PJ Hyett'
end
test 'pleax' do
assert_contributor_names '2dc1402', 'Dmitry Lipovoi'
end
test 'pope' do
assert_contributor_names '4d5b913', 'Tim Pope'
end
test 'postmodern' do
assert_contributor_names 'f9e84a9', 'Hal Brodigan'
end
test 'PotatoSalad' do
assert_contributor_names '083b0b7', 'Andrew Bennett'
end
test 'PoTe' do
assert_contributor_names 'dadfa1e', 'Pablo Astigarraga'
end
test 'pragdave' do
assert_contributor_names '76e4c1a', 'Dave Thomas'
end
test 'prakash' do
assert_contributor_names '8207267', 'Prakash Laxkar'
end
test 'prakashmurthy' do
assert_contributor_names '6b31fd8', 'Prakash Murthy'
end
test 'pratik' do
assert_contributor_names 'dc1166d', 'Pratik Naik'
end
test 'Pratik' do
assert_contributor_names '281edce', 'Pratik Naik'
end
test 'printercu' do
assert_contributor_names '0b7a37a', 'Max Melentiev'
end
test "progrium\100gmail.com" do
assert_contributor_names 'aa7859c', 'Jeff Lindsay'
end
test 'protocool' do
assert_contributor_names '3054971', 'Trevor Squire'
end
test 'psq' do
assert_contributor_names 'c443a61', 'Pascal Belloncle'
end
test "pterjan\100mandriva.com" do
assert_contributor_names '1e216b1', 'Pascal Terjan'
end
test "pudeyo\100rpi.com" do
assert_contributor_names '98898ed', 'Oleg Pudeyev'
end
test "purestorm\100ggnore.net" do
assert_contributor_names 'da675e5', 'Manuel Holtgrewe'
end
test 'purp' do
assert_contributor_names '31e2a2d', 'Jim Meyer'
end
test 'pyromaniac' do
assert_contributor_names '5a4cdf9', 'Arkadiy Zabazhanov'
end
test 'queso' do
assert_contributor_names '5285270', 'Josh Owens'
end
test 'raasdnil' do
assert_contributor_names '3a90e72', 'Mikel Lindsaar'
end
test 'rabiedenharn' do
assert_contributor_names 'ce0653b', 'Rob Biedenharn'
end
test 'Radar' do
assert_contributor_names 'ca94136', 'Ryan Bigg'
end
test 'Rafael França' do
assert_contributor_names 'ee82ce7', 'Rafael Mendonça França'
end
test 'raggi' do
assert_contributor_names '08e9d0d', 'James Tucker'
end
test 'RAHUL CHAUDHARI' do
assert_contributor_names '077dffa', 'Rahul P. Chaudhari'
end
test 'Rahul Chaudhari' do
assert_contributor_names 'dc3f33e', 'Rahul P. Chaudhari'
end
test "raidel\100onemail.at" do
assert_contributor_names '20d27f6', 'Michael Raidel'
end
test "rails\10033lc0.net" do
assert_contributor_names 'b2681cc', 'Eelco Lempsink'
end
test "rails\100bencurtis.com" do
assert_contributor_names '4c966a0', 'Benjamin Curtis'
end
test "rails\100electricvisions.com" do
assert_contributor_names 'ffbaf1c', 'John Oxton'
end
test "rails\100jeffcole.net" do
assert_contributor_names '4793a2f', 'Jeff Cole'
end
test "rails\100roetzel.de" do
assert_contributor_names 'ce0653b', 'David Roetzel'
end
test "rails\100tpope.info" do
assert_contributor_names '74d1512', 'Tim Pope'
end
test 'railsbob' do
assert_contributor_names '80d8608', 'Anup Narkhede'
end
test 'RainerBlessing' do
assert_contributor_names '8b7219b', 'Rainer Blessing'
end
test "raphinou\100yahoo.com" do
assert_contributor_names '7661c2b', 'Raphael Bauduin'
end
test 'Raphomet' do
assert_contributor_names 'b4e5da6', 'Raphael Lee'
end
test 'rasputnik' do
assert_contributor_names 'f22b000', 'Dick Davies'
end
test 'ravindra kumar kumawat' do
assert_contributor_names '8bd064e', 'Ravindra Kumar Kumawat'
end
test 'rayners' do
assert_contributor_names '37a4615', 'David Raynes'
end
test 'rcoder' do
assert_contributor_names '0adcd81', 'Lennon Day-Reynolds'
end
test "rdifrango\100captechventures.com" do
assert_contributor_names '42576ff', 'Ron DiFrango'
end
test 'redbeard' do
assert_contributor_names '0230fc9', 'Tal Rotbart'
end
test 'redinger' do
assert_contributor_names '1d32cec', 'Christopher Redinger'
end
test 'regonn' do
assert_contributor_names '902fef3', 'Kenta Tanoue'
end
test 'ReinH' do
assert_contributor_names 'b30de60', 'Rein Henrichs'
end
test 'remvee' do
assert_contributor_names '66e97c3', "Remco van 't Veer"
end
test 'remy' do
assert_contributor_names 'b2dd357', 'Rémy Coutable'
end
test "rephorm\100rephorm.com" do
assert_contributor_names 'b84a7be', 'Brian Mattern'
end
test 'retr0h' do
assert_contributor_names 'cbdb4aa', 'John Dewey'
end
test 'reu' do
assert_contributor_names '77c099c', 'Rodrigo Navarro'
end
test 'revans' do
assert_contributor_names 'be0c453', 'Robert Evans'
end
test 'Rich Olson' do
assert_contributor_names '9025e5d', 'Rick Olson'
end
test 'richcollins' do
assert_contributor_names '6f559f8', 'Rich Collins'
end
test "richcollins\100gmail.com" do
assert_contributor_names '1d4d037', 'Rich Collins'
end
test 'Rick' do
assert_contributor_names 'e41f489', 'Rick Olson'
end
test 'rick' do
assert_contributor_names '083b0b7', 'Rick Olson'
end
test 'Rick Olsen' do
assert_contributor_names '5b9b904', 'Rick Olson'
end
test "rick\100rickbradley.com" do
assert_contributor_names 'c4233a0', 'Rick Bradley'
end
test 'Riley' do
assert_contributor_names '380800e', 'Riley Lynch'
end
test 'rizwanreza' do
assert_contributor_names 'd1202cf', 'Rizwan Reza'
end
test 'rmacklin' do
assert_contributor_names 'aa1ba9c', 'Richard Macklin'
end
test 'rmm5t' do
assert_contributor_names 'd761ac4', 'Ryan McGeary'
end
test 'Rob' do
assert_contributor_names 'a25296a', 'Rob Anderton'
end
test "rob.rasmussen\100gmail.com" do
assert_contributor_names 'c5d5c4f', 'Rob Rasmussen'
end
test "robbat2\100gentoo.org" do
assert_contributor_names '2a12b56', 'Robin H. Johnson'
end
test "robby\100planetargon.com" do
assert_contributor_names '2f40936', 'Robby Russell'
end
test 'robbyrussell' do
assert_contributor_names 'e38ad5d', 'Robby Russell'
end
test 'Robert Pankowecki (Gavdi)' do
assert_contributor_names 'e95b5b3', 'Robert Pankowecki'
end
test 'RobertZK' do
assert_contributor_names 'a723519', 'Robert Krzyzanowski'
end
test 'robertomiranda' do
assert_contributor_names 'a316c09', 'Roberto Miranda'
end
test 'robinjfisher' do
assert_contributor_names '8091ed5', 'Robin Fisher'
end
test 'rockrep' do
assert_contributor_names 'f7dbf38', 'Michael Kintzer'
end
test 'roderickvd' do
assert_contributor_names 'b14378c', 'Roderick van Domburg'
end
test 'rohit' do
assert_contributor_names '39cb27b', 'Rohit Arondekar'
end
test 'Roman Le Negrate' do
assert_contributor_names 'a0c677c', 'Roman Le Négrate'
end
test 'Roman2K' do
assert_contributor_names '6ee09b6', 'Roman Le Négrate'
end
test 'RomD' do
assert_contributor_names 'd6ae930', 'Roman Dittert'
end
test "ror\100philippeapril.com" do
assert_contributor_names '62d749a', 'Philippe April'
end
test 'Ross Kaffenburger' do
assert_contributor_names '523f3ba', 'Ross Kaffenberger'
end
test 'rpheath' do
assert_contributor_names '1ab2ff5', 'Ryan Heath'
end
test 'rpq' do
assert_contributor_names '510502e', 'Ramon Quezada'
end
test "rramdas\100gmail.com" do
assert_contributor_names '40c86a7', 'Rodney Ramdas'
end
test 'rsanheim' do
assert_contributor_names 'af43e87', 'Rob Sanheim'
end
test "rscottmace\100gmail.com" do
assert_contributor_names '8e8bf37', 'Scott Mace'
end
test 'rsl' do
assert_contributor_names 'ba342c2', 'Russell Norris'
end
test 'RSL' do
assert_contributor_names '6ccbef5', 'Russell Norris'
end
test 'rspeicher' do
assert_contributor_names '0da754d', 'Robert Speicher'
end
test "rtomayko\100gmail.com" do
assert_contributor_names '9db8f3e', 'Ryan Tomayko'
end
test "ruben.nine\100gmail.com" do
assert_contributor_names '9935a35', 'Ruben Nine'
end
test "ruby\100bobsilva.com" do
assert_contributor_names '231a464', 'Bob Silva'
end
test "rubyonrails\100beautifulpixel.com" do
assert_contributor_names '3fb29b1', 'Alex Wayne'
end
test 'RubyRedRick' do
assert_contributor_names 'bef071d', 'Rick DeNatale'
end
test 'rubyruy' do
assert_contributor_names 'da5d8fa', 'Ruy Asan'
end
test 'rusikf' do
assert_contributor_names '47896b3', 'Ruslan Korolev'
end
test 'rwdaigle' do
assert_contributor_names '9b8399f', 'Ryan Daigle'
end
test 'Ryan Davis' do
assert_contributor_names '2890b96', 'Ryan Davis (zenspider)'
end
test 'Ryunosuke Sato' do
assert_contributor_names 'd022fce', 'Ryunosuke Sato'
end
test "ryan.raaum\100gmail.com" do
assert_contributor_names '0f0d926', 'Ryan Raaum'
end
test 'ryanb' do
assert_contributor_names '9450262', 'Ryan Bates'
end
test "ryand-ruby\100zenspider.com" do
assert_contributor_names 'a2f0ae7', 'Ryan Davis (zenspider)'
end
test 'ryepup' do
assert_contributor_names '2fac5d1', 'Ryan Davis (ryepup)'
end
test 'Sachin87' do
assert_contributor_names 'c176397', 'Sachin Singh'
end
test 'saimonmoore' do
assert_contributor_names 'c1bc61c', 'Simon Moore'
end
test 'saksmlz' do
assert_contributor_names '68307a1', 'Aliaxandr Rahalevich'
end
test "sam.kirchmeier\100gmail.com" do
assert_contributor_names '35b4bdc', 'Sam Kirchmeier'
end
test 'Samuel Elliott' do
assert_contributor_names '31ea83e', 'Sam Elliott'
end
test 'Samus_' do
assert_contributor_names 'b4b6ffe', 'Michael Cetrulo'
end
test 'sandofsky' do
assert_contributor_names '2ba8864', 'Ben Sandofsky'
end
test "sandra.metz\100duke.edu" do
assert_contributor_names '242cd06', 'Sandi Metz'
end
test 'sanemat' do
assert_contributor_names '3b98fb7', 'Murahashi Sanemat Kenichi'
end
test 'sblackstone' do
assert_contributor_names '49f2e6f', 'Stephen Blackstone'
end
test 'Schneems' do
assert_contributor_names 'a8870d1', 'Richard Schneeman'
end
test "\100schneems" do
assert_contributor_names 'ef91cdd', 'Richard Schneeman'
end
test 'schneems' do
assert_contributor_names '921dcc2', 'Richard Schneeman'
end
test "schoenm\100earthlink.net" do
assert_contributor_names '0293c34', 'Michael Schoen'
end
test "sco\100scottraymond.net" do
assert_contributor_names '0925c6b', 'Scott Raymond'
end
test "scott\100sigkill.org" do
assert_contributor_names 'ad24c6d', 'Scott Laird'
end
test 'scott_willson' do
assert_contributor_names 'd64832c', 'Scott Willson'
end
test 'ScottSchram' do
assert_contributor_names 'a0c925c', 'Scott Schram'
end
test 'Sen Zhang' do
assert_contributor_names 'd57356b', 'Sen Zhang'
end
test 'scottwillson' do
assert_contributor_names 'c75cfa0', 'Scott Willson'
end
test "sd\100notso.net" do
assert_contributor_names '35240ba', 'Sebastian Delmont'
end
test 'sdsykes' do
assert_contributor_names '0da2357', 'Stephen Sykes'
end
test 'sealocal' do
assert_contributor_names '27a4e93', 'Mike Taylor'
end
test "sean\100ardismg.com" do
assert_contributor_names '2876efb', 'Sean T Allen'
end
test 'seangeo' do
assert_contributor_names 'c81fff2', 'Sean Geoghegan'
end
test 'seanhussey' do
assert_contributor_names '18a3333', 'Sean Hussey'
end
test 'seapy' do
assert_contributor_names '6c252b5', 'Jeong Changhoon'
end
test 'sebastian.kanthak' do
assert_contributor_names '78a732b', 'Sebastian Kanthak'
end
test "sebastian.kanthak\100muehlheim.de" do
assert_contributor_names 'b1a4f91', 'Sebastian Kanthak'
end
test "self\100mattmower.com" do
assert_contributor_names '9bafd35', 'Matt Mower'
end
test "sergeykojin\100gmail.com" do
assert_contributor_names 'ba5591f', 'Sergey Kojin'
end
test 'Sergio Gil Pérez de la Manga' do
assert_contributor_names 'de178df', 'Sergio Gil'
end
test 'sethladd' do
assert_contributor_names '32b31a2', 'Seth Ladd'
end
test 'sgerrand' do
assert_contributor_names 'f40a3ea', 'Sasha Gerrand'
end
test "\100sgrif" do
assert_contributor_names '2bbcca0', 'Sean Griffin'
end
test 'sgrif' do
assert_contributor_names '92a6033', 'Sean Griffin'
end
test 'sh6khan' do
assert_contributor_names '319baed', 'Sadman Khan'
end
test "shimbo\100is.naist.jp" do
assert_contributor_names '6aaa08d', 'Masashi Shimbo'
end
test 'shingo-nakanishi' do
assert_contributor_names 'd089fe2', 'shingo-nakanishi'
end
test 'shock_one' do
assert_contributor_names 'ec1b715', 'Volodymyr Shatsky'
end
test 'shtirlic' do
assert_contributor_names 'c2b79c0', 'Serg Podtynnyi'
end
test 'shugo' do
assert_contributor_names '4699c33', 'Shugo Maeda'
end
test "shugo\100ruby-lang.org" do
assert_contributor_names '21cf3c6', 'Shugo Maeda'
end
test 'ShunsukeAida' do
assert_contributor_names '9982138', 'Shunsuke Aida'
end
test 'shunsukeaida' do
assert_contributor_names '884d123', 'Shunsuke Aida'
end
test 'Si' do
assert_contributor_names '9b24e11', 'Si Jobling'
end
test "simon\100redhillconsulting.com.au" do
assert_contributor_names '2043513', 'Simon Harris'
end
test "simon.stapleton\100gmail.com" do
assert_contributor_names '16e41c6', 'Simon Stapleton'
end
test 'simonjefford' do
assert_contributor_names 'd44ce1c', 'Simon Jefford'
end
test 'simply-phi' do
assert_contributor_names 'ac07da8', 'Ibrahim Abdullah'
end
test 'siong1987' do
assert_contributor_names '5df9d00', 'Teng Siong Ong'
end
test 'sjain1107' do
assert_contributor_names 'c351a82', 'Sakshi Jain'
end
test 'sjgman9' do
assert_contributor_names 'fade31a', 'Sam Granieri'
end
test 'skae' do
assert_contributor_names '5640773', 'Stefan Kaes'
end
test 'skaen' do
assert_contributor_names 'c8b4cf6', 'Stefan Kaes'
end
test 'Skaes' do
assert_contributor_names '689cca1', 'Stefan Kaes'
end
test 'skaes' do
assert_contributor_names 'e3670a0', 'Stefan Kaes'
end
test "skaes\100web.de" do
assert_contributor_names '5d61d23', 'Stefan Kaes'
end
test 'skaes.web.de' do
assert_contributor_names '31ae812', 'Stefan Kaes'
end
test 'skanthak' do
assert_contributor_names '693e0df', 'Sebastian Kanthak'
end
test 'slainer68' do
assert_contributor_names '6241d4e', 'Nicolas Blanco'
end
test 'smeade' do
assert_contributor_names '68849e3', 'Scott Meade'
end
test 'snusnu' do
assert_contributor_names 'ce50c96', 'Martin Gamsjaeger'
end
test "solo\100gatelys.com" do
assert_contributor_names '796a43d', 'Solomon White'
end
test "somekool\100gmail.com" do
assert_contributor_names '4fc6c87', 'Mathieu Jobin'
end
test 'sonnym' do
assert_contributor_names '0950d40', 'Sonny Michaud'
end
test 'Spakman' do
assert_contributor_names 'eede82c', 'Mark Somerville'
end
test 'spicycode' do
assert_contributor_names '8670631', 'Chad Humphries'
end
test 'spyhole' do
assert_contributor_names 'cd31eb1', 'Johnson Wang'
end
test 'Squeegy' do
assert_contributor_names '2b6954a', 'Alex Wayne'
end
test 'sshaw' do
assert_contributor_names 'f9369c0', 'Skye Shaw'
end
test "ssinghi\100kreeti.com" do
assert_contributor_names 'f3aa7c1', 'Surendra Singhi'
end
test 'ssoroka' do
assert_contributor_names '519c5c7', 'Steven Soroka'
end
test 'st0012' do
assert_contributor_names 'e8688dd', 'Stan Lo'
end
test 'Stanislav' do
assert_contributor_names '40f1642', 'Stanislav Sobolev'
end
test "starr\100starrnhorne.com" do
assert_contributor_names 'ebc0a38', 'Starr Horne'
end
test 'Stefan' do
assert_contributor_names 'db7fadd', 'Stefan Kaes'
end
test 'stellsmi' do
assert_contributor_names 'af98d88', 'Jeremy Lightsmith'
end
test 'Stephan Kaes' do
assert_contributor_names '7fe3fd2', 'Stefan Kaes'
end
test "stephen\100touset.org" do
assert_contributor_names '2876efb', 'Stephen Touset'
end
test "stephen_purcell\100yahoo.com" do
assert_contributor_names 'b0a86f8', 'Steve Purcell'
end
test "stephenh\100chase3000.com" do
assert_contributor_names '1a6954a', 'Stephen Haberman'
end
test 'steve' do
assert_contributor_names '60a6284', 'Steven Bristol'
end
test 'Steve St. Martin' do
assert_contributor_names '679a0bf', 'Stephen St. Martin'
end
test 'stevenbristol' do
assert_contributor_names '5af4463', 'Steven Bristol'
end
test 'sthollmann' do
assert_contributor_names '164af1f', 'Stefan Hollmann'
end
test "stian\100grytoyr.net" do
assert_contributor_names '7a6a923', 'Stian Grytøyr'
end
test "stlruby\100googlegroups.com" do
assert_contributor_names 'ee04aea', 'St. Louis Ruby Users Group'
end
test 'stopdropandrew' do
assert_contributor_names '083b0b7', 'Andrew Grim'
end
test 'subelsky' do
assert_contributor_names 'f596dda', 'Mike Subelsky'
end
test 'suchasurge' do
assert_contributor_names 'b99fb66', 'Frank Müller'
end
test 'Sudara' do
assert_contributor_names 'e848ab5', 'Sudara Williams'
end
test 'sue445' do
assert_contributor_names 'a913af9', 'Go Sueyoshi'
end
test 'SUGINO Yasuhiro' do
assert_contributor_names '4a36eb6', 'Sugino Yasuhiro'
end
test 'suginoy' do
assert_contributor_names '4f6f433', 'Sugino Yasuhiro'
end
test 'Sunny' do
assert_contributor_names 'ce40df2', 'Sunny Ripert'
end
test 'stve' do
assert_contributor_names 'e80d9f4', 'Steve Agalloco'
end
test 'sur' do
assert_contributor_names '8b2a601', 'Sur Max'
end
test 'sveiss' do
assert_contributor_names 'b3b0a0c', 'Stephen Veiss'
end
test "sveit\100tradeharbor.com" do
assert_contributor_names '3398f74', 'Stephen Veit'
end
test "sven\100c3d2.de" do
assert_contributor_names '4b3e964', 'Sven Klemm'
end
test 'Sven Kraeuter | 5v3n' do
assert_contributor_names 'a1c7b0e', 'Sven Kräuter'
end
test 'swapdisc' do
assert_contributor_names '5105b89', 'Joe Lewis'
end
test "t.lucas\100toolmantim.com" do
assert_contributor_names '6a83ebf', 'Tim Lucas'
end
test 'Tadas Tamosauskas' do
assert_contributor_names 'f292f09', 'Tadas Tamošauskas'
end
test 'tagoh' do
assert_contributor_names 'c554a9c', 'Akira Tagoh'
end
test "takiuchi\100drecom.co.jp" do
assert_contributor_names '06411f4', 'Genki Takiuchi'
end
test 'takkanm' do
assert_contributor_names 'fa1ea34' ,'Mitsutaka Mimura'
end
test 'tank-bohr' do
assert_contributor_names 'f3101fd', 'Alexey Nikitin'
end
test 'tanmay3011' do
assert_contributor_names '2e51d6f', 'Tanmay Sinha'
end
test 'tarmo' do
assert_contributor_names 'fd3f048', 'Tarmo Tänav'
end
test 'tarmo_t' do
assert_contributor_names '6b1901d', 'Tarmo Tänav'
end
test 'taryn' do
assert_contributor_names 'ce61a6b', 'Taryn East'
end
test 'tashian' do
assert_contributor_names 'e95b965', 'Carl Tashian'
end
test 'technomancy' do
assert_contributor_names '673daea', 'Phil Hagelberg'
end
test "technomancy\100gmail.com" do
assert_contributor_names 'f1a350a', 'Phil Hagelberg'
end
test "technoweenie\100gmail.com" do
assert_contributor_names 'df7a4d4', 'Rick Olson'
end
test 'Tekin' do
assert_contributor_names 'd7c2e52', 'Tekin Suleyman'
end
test 'Teo Hui Ming' do
assert_contributor_names 'b307210', 'Huiming Teo'
end
test 'thechrisoshow' do
assert_contributor_names '5a5b0b8', "Chris O'Sullivan"
end
test 'theflow' do
assert_contributor_names '63e9687', 'Florian Munz'
end
test 'thenickcox' do
assert_contributor_names '431b236', 'Nick Cox'
end
test 'Theory' do
assert_contributor_names '38deb0e', 'David E. Wheeler'
end
test 'thewoolleyman' do
assert_contributor_names '2bfd677', 'Chad Woolley'
end
test 'thiagoaugusto' do
assert_contributor_names '361f1bc', 'Thiago Augusto'
end
test "thijs\100fngtps.com" do
assert_contributor_names '7864d0e', 'Thijs van der Vossen'
end
test "thijs\100vandervossen.net" do
assert_contributor_names '74896c0', 'Thijs van der Vossen'
end
test 'thijsv' do
assert_contributor_names '15b1b88', 'Thijs van der Vossen'
end
test 'thoefer' do
assert_contributor_names '1863f12', 'Tom Hoefer'
end
test "thomas\100columbus.rr.com" do
assert_contributor_names '3553b59', 'Tom Lieber'
end
test "thomas\100fesch.at" do
assert_contributor_names '7d01005', 'Thomas Fuchs'
end
test "tietew\100tietew.net" do
assert_contributor_names '3080b27', 'Tietew'
end
test 'timc' do
assert_contributor_names 'd4b6671', 'Tim Carpenter'
end
test "timct\100mac.com" do
assert_contributor_names '0d52abf', 'Tim Trautmann'
end
test 'Time Pope' do
assert_contributor_names 'e2ad603', 'Tim Pope'
end
test 'Timm' do
assert_contributor_names 'ff1b7e7', 'Kasper Timm Hansen'
end
test 'Tima Maslyuchenko' do
assert_contributor_names '633ea6a', 'Tim Masliuchenko'
end
test 'tinogomes' do
assert_contributor_names '5121593', 'Celestino Gomes'
end
test 'tleneveu' do
assert_contributor_names '3d22686', 'Thibaut Leneveu'
end
test 'tmacedo' do
assert_contributor_names '1370d15', 'Tiago Macedo'
end
test "tmornini\100infomania.com" do
assert_contributor_names '311342d', 'Tom Mornini'
end
test 'tnp' do
assert_contributor_names 'bdba748', 'Troels Petersen'
end
test 'Tobias Luekte' do
assert_contributor_names 'f1880ca', 'Tobias Lütke'
end
test 'Tobias Luetke' do
assert_contributor_names '553c23d', 'Tobias Lütke'
end
test 'TobiasLuetke' do
assert_contributor_names 'd4e02f7', 'Tobias Lütke'
end
test 'toby cabot' do
assert_contributor_names 'bdfddb0', 'Toby Cabot'
end
test 'todd' do
assert_contributor_names 'a7a1c4e', 'Todd Gehman'
end
test "todd\100robotcoop.com" do
assert_contributor_names '45ee71a', 'Todd Gehman'
end
test "tom\100craz8.com" do
assert_contributor_names '838ec41', 'Tom Fakes'
end
test "tom.enebo\100gmail.com" do
assert_contributor_names 'fc04243', 'Thomas Enebo'
end
test "tom\100moertel.com" do
assert_contributor_names '43c470f', 'Tom Moertel'
end
test "tom\100popdog.net" do
assert_contributor_names '5779deb', 'Tom Ward'
end
test 'Tom ward' do
assert_contributor_names '0555fc9', 'Tom Ward'
end
test 'tomafro' do
assert_contributor_names 'ccb87e2', 'Tom Ward'
end
test 'tomhuda' do
assert_contributor_names '00be5bd', 'Yehuda Katz', 'Tom Dale'
end
test 'TomK32' do
assert_contributor_names '235cd21', 'Thomas R. Koll'
end
test 'Toshimaru' do
assert_contributor_names 'd9ff835', 'Toshimaru'
end
test 'tomtoday' do
assert_contributor_names 'b1aa674', 'Tom Brice'
end
test "tomtoday\100gmail.com" do
assert_contributor_names '5f4f4b0', 'Tom Brice'
end
test 'toolmantim' do
assert_contributor_names '4a8d3d5', 'Tim Lucas'
end
test 'topfunky' do
assert_contributor_names '2a08c45', 'Geoffrey Grosenbach'
end
test 'tpope' do
assert_contributor_names 'e522f75', 'Tim Pope'
end
test 'trans' do
assert_contributor_names 'c9d4335', 'Thomas Sawyer'
end
test 'trek' do
assert_contributor_names '083b0b7', 'Trek Glowacki'
end
test 'tsukasaoishi' do
assert_contributor_names '4e83815', 'Tsukasa OISHI'
end
test "turnip\100turnipspatch.com" do
assert_contributor_names '8e3bf70', 'Jonathan Leighton'
end
test 'Tuxie' do
assert_contributor_names '3c877ec', 'Per Wigren'
end
test "tuxie\100dekadance.se" do
assert_contributor_names '813a8b9', 'Per Wigren'
end
test 'twinturbo' do
assert_contributor_names 'dee375a', 'Adam Hawkins'
end
test "twoggle\100gmail.com" do
assert_contributor_names '8b8a30d', 'Tim Fletcher'
end
test "tyler\100kianta.com" do
assert_contributor_names 'c0f84b1', 'Tyler Kiley'
end
test "tyler.kovacs\100gmail.com" do
assert_contributor_names '5b3bc31', 'Tyler Kovacs'
end
test 'tzaharia' do
assert_contributor_names '8ff92e2', 'Tieg Zaharia'
end
test 'Ugis Ozols' do
assert_contributor_names '3ca1bc3', 'Uģis Ozols'
end
test 'Ulysses' do
assert_contributor_names 'c6ea1d3', 'Nicholas Seckar'
end
test 'vanderhoop' do
assert_contributor_names 'b345fc2', 'Travis Vander Hoop'
end
test 'vertigoclinic' do
assert_contributor_names '9aa8174', 'Greg Miller'
end
test "victor-ronr-trac\100carotena.net" do
assert_contributor_names '473e5bd', 'Victor Jalencas'
end
test 'vijay' do
assert_contributor_names '8000719', 'Vijay Dev'
end
test "vinbarnes\100gmail.com" do
assert_contributor_names '18e2188', 'Kevin Barnes'
end
test 'virusman' do
assert_contributor_names '921305b', 'Victor Babenko'
end
test 'vitaly' do
assert_contributor_names '762fc54', 'Vitaly Kushner'
end
test 'vladr' do
assert_contributor_names '8091ed5', 'Vlad Romascanu'
end
test 'WAKASUGI 5T111111' do
assert_contributor_names '0176aef', 'Hirofumi Wakasugi'
end
test 'wallerjake' do
assert_contributor_names '9bc4eb7e', 'Jake Waller'
end
test 'wangchun' do
assert_contributor_names 'c6a3a39', 'Wang Chun'
end
test 'wangjohn' do
assert_contributor_names '3182295', 'John J. Wang'
end
test 'watson' do
assert_contributor_names 'defb4d0', 'Thomas Watson Steen'
end
test 'watsonian' do
assert_contributor_names 'edd59b7', 'Joel Watson'
end
test "wbruce\100gmail.com" do
assert_contributor_names 'df70e28', 'Bruce Williams'
end
test "wejn\100box.cz" do
assert_contributor_names '88bb279', 'Michal Šafránek'
end
test 'wesley.moxam' do
assert_contributor_names 'e781fad', 'Wesley Moxam'
end
test 'weyus' do
assert_contributor_names '3122d32', 'Wes Gamble'
end
test 'wharris' do
assert_contributor_names 'b18585f', 'Will Harris'
end
test 'wildchild' do
assert_contributor_names '65539c9', 'Alexander Uvarov'
end
test 'Will' do
assert_contributor_names 'b1bbf90', 'Will Bryant'
end
test 'will.bryant' do
assert_contributor_names 'c1abe44', 'Will Bryant'
end
test 'willnet' do
assert_contributor_names '11d41c6', 'Shinichi Maeshima'
end
test 'wilson' do
assert_contributor_names '11d2a30', 'Wilson Bilkovich'
end
test "wilsonb\100gmail.com" do
assert_contributor_names '584bec9', 'Wilson Bilkovich'
end
test 'windock' do
assert_contributor_names '6feffe5', 'Andrew Shcheglov'
end
test 'wingfire' do
assert_contributor_names 'a9f2c12', 'Christof Spies'
end
test 'Winston' do
assert_contributor_names 'b52c66f', 'Winston Teo Yong Wei'
end
test 'Wintermute' do
assert_contributor_names '7ec91d4', 'Dave Murphy'
end
test 'Wizard' do
assert_contributor_names '07f92e8', 'Melih Arda Yalçiner'
end
test 'wmoxam' do
assert_contributor_names '91c14e9', 'Wesley Moxam'
end
test "wolfgang\100stufenlos.net" do
assert_contributor_names 'affe7c0', 'Wolfgang Klinger'
end
test "work\100ashleymoran.me.uk" do
assert_contributor_names '2a12b56', 'Ashley Moran'
end
test 'wycats' do
assert_contributor_names 'fd15359', 'Yehuda Katz'
end
test 'xal' do
assert_contributor_names '0ceab81', 'Tobias Lütke'
end
test 'xavier' do
assert_contributor_names '083b0b7', 'Xavier Defrang'
end
test 'xaviershay' do
assert_contributor_names '569a78c', 'Xavier Shay'
end
test 'Xenda' do
assert_contributor_names 'fabd2ce', 'Alessandra Pereyra'
end
test "xyctka\100gmail.com" do
assert_contributor_names '5890ced', 'Vitalii Khustochka'
end
test 'Yamada Masaki' do
assert_contributor_names 'f498000', 'Masaki Yamada'
end
test 'YanhaoYang' do
assert_contributor_names '2f35f61', 'Yanhao Yang'
end
test 'Yasuhiko Katoh (y@su)' do
assert_contributor_names '539d929', 'Yasuhiko Katoh'
end
test 'yhirano55' do
assert_contributor_names '5af7d47', 'Yoshiyuki Hirano'
end
test 'ymendel' do
assert_contributor_names '173e2d2', 'Yossef Mendelssohn'
end
test 'yob' do
assert_contributor_names '656a20a', 'James Healy'
end
test 'yon' do
assert_contributor_names '2f25413', 'Yonatan Feldman'
end
test "yon\100milliped.com" do
assert_contributor_names '908e9a1', 'Yonatan Feldman'
end
test "yrashk\100fp.org.ua" do
assert_contributor_names '2ad8dc6', 'Yurii Rashkovskii'
end
test "yrashk\100gmail.com" do
assert_contributor_names 'f3e5e07', 'Yurii Rashkovskii'
end
test 'ysbaddaden' do
assert_contributor_names 'b0f2b94', 'Julien Portalier'
end
test 'yuki3738' do
assert_contributor_names '17bb531', 'Yuki Minamiya'
end
test 'yuuji.yaginuma' do
assert_contributor_names '4fded7c', 'Yuuji Yaginuma'
end
test 'Yuji Yaginuma' do
assert_contributor_names 'fbd33b9', 'Yuuji Yaginuma'
end
test 'yury' do
assert_contributor_names '3df90de', 'Yury Korolev'
end
test 'yyyc514' do
assert_contributor_names 'dad3e83', 'Josh Goebel'
end
test "z\100wzph.com" do
assert_contributor_names 'd9839c1', 'Zachary Holt'
end
test 'zackchandler' do
assert_contributor_names 'd6f6a41', 'Zack Chandler'
end
test "zaitzow\100gmail.com" do
assert_contributor_names '08fd13d', 'August Zaitzow Flatby'
end
test 'Zamith' do
assert_contributor_names '6f418a0', 'Luís Ferreira'
end
test 'zapnap' do
assert_contributor_names '096f3f6', 'Nick Plante'
end
test 'zdennis' do
assert_contributor_names '57cde63', 'Zach Dennis'
end
test 'zenspider' do
assert_contributor_names 'a86e594', 'Ryan Davis (zenspider)'
end
test 'zhengjia' do
assert_contributor_names '55759a6', 'Zheng Jia'
end
test 'zires' do
assert_contributor_names '35f38b7', 'Thierry Zires'
end
test "zraii\100comcast.net" do
assert_contributor_names '9d8e348', 'Martin Emde'
end
test 'zsombor' do
assert_contributor_names '2227a17', 'Dee Zsombor'
end
test 'प्रथमेश' do
assert_contributor_names '41739a2', 'Prathamesh Sonpatki'
end
test 'प्रथमेश Sonpatki' do
assert_contributor_names 'a7a2bc0f66', 'Prathamesh Sonpatki'
end
end
end
| 23.659665 | 78 | 0.672927 |
6255bfeac275ab9d95cf7f042625ab1327090454 | 382 | # == Schema Information
#
# Table name: content_learning_quizzes
#
# id :integer not null, primary key
# player_id :integer not null
# questions :json not null
# answers :json
# created_at :datetime not null
# updated_at :datetime not null
#
FactoryGirl.define do
  # Minimal factory: defines no attribute defaults, so records built from it
  # rely entirely on caller-supplied attributes.
  # NOTE(review): per the schema comment above, :player_id and :questions are
  # NOT NULL — callers must pass them explicitly until defaults are added here.
  factory :content_learning_quiz do
  end
end
| 20.105263 | 53 | 0.615183 |
618e788f937e2cdf0fcefb0abb98d8c6b8bab6d5 | 2,911 | # This file is copied to spec/ when you run 'rails generate rspec:install'
require 'spec_helper'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../config/environment', __dir__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec', 'support', '**', '*.rb')].each { |f| require f }
# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove these lines.
begin
  # Keep the test DB schema in sync with db/schema.rb before the suite runs.
  ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => e
  # `abort` prints the message to STDERR and exits with status 1, matching the
  # current rspec-rails generated template (the old `puts` sent it to STDOUT).
  abort e.to_s.strip
end
RSpec.configure do |config|
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true
  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  # RSpec.describe UsersController, :type => :controller do
  #   # ...
  # end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!
  # Filter lines from Rails gems in backtraces.
  config.filter_rails_from_backtrace!
  # arbitrary gems may also be filtered via:
  # config.filter_gems_from_backtrace("gem name")
  # config.include Devise::Test::ControllerHelpers, type: :controller
  # Devise sign_in/sign_out helpers for request specs (the controller-spec
  # variant above is intentionally left disabled).
  config.include Devise::Test::IntegrationHelpers, type: :request
end
| 42.188406 | 86 | 0.749571 |
6aecc062e2232ff48abc4688c4a64226bcb944cf | 1,987 | class UsersController < ApplicationController
# before filters
before_action :logged_in_user, only: [:index, :edit, :update, :destroy] # runs the method logged_in_user before index, destroy, edit & update actions
before_action :correct_user, only: [:edit, :update]
before_action :admin_user, only: :destroy
# show all users (only logged in members)
def index
@users = User.paginate(page: params[:page]) # paginate is gem
end
def show
@user = User.find(params[:id])
end
def new
@user = User.new
end
def create # Runs with every click of "Create account" button
@user = User.new(user_params)
if @user.save # save to db
log_in @user # log in freshly signed up user
flash[:success] = "Welcome to the Sample App!"
redirect_to @user
else
render 'new'
end
end
def edit
end
def update
if @user.update_attributes(user_params)
flash[:success] = "Profile updated"
redirect_to @user
else
render 'edit'
end
end
def destroy
User.find(params[:id]).destroy
flash[:success] = "User deleted"
redirect_to users_url
end
private
def user_params # require a user hash & let only name,email,pass,passconf attributes be passed to returned hash
params.require(:user).permit(:name, :email, :password,
:password_confirmation)
end
# Before filters
# Confirms a logged-in user.
def logged_in_user
unless logged_in?
store_location
flash[:danger] = "Please log in."
redirect_to login_url
end
end
# Confirms the correct user.
def correct_user
@user = User.find(params[:id])
redirect_to(root_url) unless current_user?(@user) # current_user is method in sessions_helper.rb
end
# Confirms an admin user.
def admin_user
redirect_to(root_url) unless current_user.admin?
end
end
| 24.231707 | 152 | 0.638651 |
1ddd3156c796859cd019bb9effed329e2f9a481a | 2,073 | module ComputedCustomField
module CustomFieldsHelperPatch
def self.included(base) #:nodoc:
base.extend(ClassMethods)
base.send(:include, InstanceMethods)
base.class_eval do
unloadable
alias_method_chain :custom_field_tag, :computed_custom_field
end
end
module ClassMethods
end
module InstanceMethods
def custom_field_tag_with_computed_custom_field(prefix, custom_value)
rendered_field = custom_value.custom_field.format.edit_tag self,
custom_field_tag_id(prefix, custom_value.custom_field),
custom_field_tag_name(prefix, custom_value.custom_field),
custom_value,
:class => "#{custom_value.custom_field.field_format}_cf",
:disabled => custom_value.custom_field.is_computed?
if custom_value.custom_field.is_computed?
str_rendered_field = "#{rendered_field}"
str_rendered_field.gsub! '<input type="radio" ', '<input type="radio" disabled '
rendered_field = ActiveSupport::SafeBuffer.new(str_rendered_field)
end
rendered_field
end
end
def render_computed_custom_fields_select(custom_field)
fields = CustomField.where(type: custom_field.type)
.where('custom_fields.id != ?', custom_field.id || 0)
options = fields.map do |field|
is_computed = field.is_computed? ? ", #{l(:field_is_computed)}" : ''
format = I18n.t(field.format.label)
title = "#{field.name} (#{format}#{is_computed})"
html_attributes = {
value: field.id,
title: title
}
content_tag_string(:option, title, html_attributes)
end.join("\n").html_safe
select_tag '', options, size: 5,
multiple: true, id: 'available_cfs'
end
end
end
# Apply the patch exactly once (guards against double-inclusion when the
# plugin is reloaded in development).
unless CustomFieldsHelper.included_modules
                         .include?(ComputedCustomField::CustomFieldsHelperPatch)
  CustomFieldsHelper.send :include, ComputedCustomField::CustomFieldsHelperPatch
end
| 34.55 | 90 | 0.655572 |
3824c827b384576b16e55f59bc0e2237ca16c5cf | 930 | require 'spec_helper'
# Minimal stand-in model: just enough ActiveModel plumbing (validations
# plus a members accessor) to exercise MemberCountValidator in isolation.
class MemberValidatable
  include ActiveModel::Validations
  validates_with MemberCountValidator
  attr_accessor :members
end
# Specs for MemberCountValidator: membership is capped at one member only
# when the license limits workspace membership. Uses rr-style stub/mock.
describe MemberCountValidator do
  let(:workspace) { MemberValidatable.new }
  context 'when license limits workspace membership' do
    before do
      stub(License.instance).limit_workspace_membership? { true }
    end
    it 'validates with 1 member' do
      mock(workspace.members).count { 1 }
      workspace.should be_valid
    end
    it 'does not validate with more than 1 member' do
      mock(workspace.members).count { 2 }
      workspace.should_not be_valid
    end
  end
  context 'when license does not limit workspace membership' do
    before do
      stub(License.instance).limit_workspace_membership? { false }
    end
    it 'validates without checking count' do
      # `.never` asserts the validator short-circuits before counting.
      mock(workspace.members).count.never
      workspace.should be_valid
    end
  end
end
| 23.846154 | 66 | 0.723656 |
bb4e544544774db15fe39af90b2c26f18460923f | 945 | require 'spec_helper'
require 'blog'
# Specs for Blog#add_post (grows the posts collection, returns the Post,
# links it back to the blog) and Blog#slug (currently empty).
RSpec.describe Blog do
  subject { described_class.new(title: "Brett Codes", subdomain: "brettcodes") }
  describe "#add_post" do
    it "adds a post to the posts array" do
      expect do
        subject.add_post(title: "RSpec Test Doubles Explained", content: "Lots of great content!")
      end.to change { subject.posts.length }.by(1)
    end
    it "returns a post" do
      post = subject.add_post(title: "RSpec Test Doubles Explained", content: "Lots of great content!")
      expect(post).to be_a(Post)
      expect(post.title).to eql("RSpec Test Doubles Explained")
    end
    it "associates the post with the blog" do
      post = subject.add_post(title: "RSpec Test Doubles Explained", content: "Lots of great content!")
      expect(post.blog).to eql(subject)
    end
  end
  describe "#slug" do
    it "returns an empty string" do
      expect(subject.slug).to eql("")
    end
  end
end
| 27.794118 | 103 | 0.667725 |
1cea2f11d82ea3f72a89deb2fe63383c3a68608e | 3,058 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Author:: [email protected] (Danial Klimkin)
#
# Copyright:: Copyright 2012, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests the AdWords API class features.
require 'test/unit'
require 'ads_common/config'
require 'ads_common/api_config'
require 'adwords_api'
# Minimal logger test double: records the most recent message passed to
# #warn so tests can assert on it via #last_warning.
class LoggerStub
  attr_reader :last_warning

  # Capture +msg+ instead of writing it anywhere.
  def warn(msg)
    @last_warning = msg
  end
end
# Test::Unit coverage for AdwordsApi::Api construction, config access,
# the block-scoped use_mcc/validate_only/partial_failure toggles, and
# environment/authentication error paths.
class TestAdwordsApi < Test::Unit::TestCase
  API_VERSION = :v201406
  def setup()
    @logger = LoggerStub.new
  end
  # Construction with an empty config must not raise.
  def test_initialize()
    assert_nothing_raised do
      adwords_api = AdwordsApi::Api.new({})
    end
  end
  def test_api_config()
    assert_nothing_raised do
      adwords_api = AdwordsApi::Api.new({})
      api_config = adwords_api.api_config()
      assert_not_nil(api_config)
      assert_kind_of(AdsCommon::ApiConfig, api_config)
    end
  end
  # The three flags below are true only inside their block form and revert
  # afterwards.
  def test_use_mcc()
    adwords_api = AdwordsApi::Api.new({})
    adwords_api.use_mcc = false
    assert(!adwords_api.use_mcc)
    adwords_api.use_mcc do
      assert(adwords_api.use_mcc)
    end
  end
  def test_validate_only()
    adwords_api = AdwordsApi::Api.new({})
    adwords_api.validate_only = false
    assert(!adwords_api.validate_only)
    adwords_api.validate_only do
      assert(adwords_api.validate_only)
    end
  end
  def test_partial_failure()
    adwords_api = AdwordsApi::Api.new({})
    adwords_api.partial_failure = false
    assert(!adwords_api.partial_failure)
    adwords_api.partial_failure do
      assert(adwords_api.partial_failure)
    end
  end
  # The sandbox environment is not available for this service/version.
  def test_no_sandbox_env()
    adwords_api = AdwordsApi::Api.new({
      :service => {:environment => 'SANDBOX'}
    })
    assert_raise(AdsCommon::Errors::Error) do
      service = adwords_api.service(:ManagedCustomerService, API_VERSION)
    end
  end
  def test_prod_env()
    adwords_api = AdwordsApi::Api.new({
      :library => {:logger => @logger},
      :service => {:environment => 'PRODUCTION'}
    })
    service = adwords_api.service(:ManagedCustomerService, API_VERSION)
  end
  # ClientLogin auth is rejected before any service call succeeds.
  def test_clientlogin_error()
    adwords_api = AdwordsApi::Api.new({
      :library => {:logger => @logger},
      :authentication => {:method => 'ClientLogin'},
      :service => {:environment => 'PRODUCTION'}
    })
    assert_raise AdsCommon::Errors::AuthError do
      service = adwords_api.service(:CampaignService, API_VERSION)
    end
  end
end
| 26.591304 | 79 | 0.68378 |
# Gem version constant for the Seria library.
module Seria
  # Frozen so shared references cannot mutate the version string
  # (standard practice for gem VERSION constants).
  VERSION = "0.0.1".freeze
end
| 9.25 | 19 | 0.648649 |
7a7e28f745167743e0a1377c975e3e3ae738e35f | 501 | module OpenXml
module Docx
module Properties
class EastAsianLayout < ComplexProperty
with_namespace :w do
attribute :combine, expects: :on_or_off
attribute :combine_brackets, one_of: %i(angle curly none round square)
attribute :id, expects: :integer
attribute :vertical, expects: :on_or_off, displays_as: :vert
attribute :vertical_compress, expects: :on_or_off, displays_as: :vertCompress
end
end
end
end
end
| 27.833333 | 87 | 0.664671 |
e89352fc151e9ddf1b7309d8b9dc1c3b3014dcfe | 3,720 | require "helper"
# Specs for SimpleCov::SourceFile::Line: accessor aliases, the skipped!
# flag, and the covered/missed/never status derived from the coverage
# count (positive / zero / nil). Guarded because SimpleCov is unusable on
# some platforms.
if SimpleCov.usable?
  describe SimpleCov::SourceFile::Line do
    context "a source line" do
      subject do
        SimpleCov::SourceFile::Line.new("# the ruby source", 5, 3)
      end
      it 'returns "# the ruby source" as src' do
        expect(subject.src).to eq("# the ruby source")
      end
      it "returns the same for source as for src" do
        expect(subject.src).to eq(subject.source)
      end
      it "has line number 5" do
        expect(subject.line_number).to eq(5)
      end
      it "has equal line_number, line and number" do
        expect(subject.line).to eq(subject.line_number)
        expect(subject.number).to eq(subject.line_number)
      end
      # skipped! overrides every other status.
      context "flagged as skipped!" do
        before do
          subject.skipped!
        end
        it "is not covered" do
          expect(subject).not_to be_covered
        end
        it "is skipped" do
          expect(subject).to be_skipped
        end
        it "is not missed" do
          expect(subject).not_to be_missed
        end
        it "is not never" do
          expect(subject).not_to be_never
        end
        it "status is skipped" do
          expect(subject.status).to eq("skipped")
        end
      end
    end
    # Positive coverage count => covered.
    context "A source line with coverage" do
      subject do
        SimpleCov::SourceFile::Line.new("# the ruby source", 5, 3)
      end
      it "has coverage of 3" do
        expect(subject.coverage).to eq(3)
      end
      it "is covered" do
        expect(subject).to be_covered
      end
      it "is not skipped" do
        expect(subject).not_to be_skipped
      end
      it "is not missed" do
        expect(subject).not_to be_missed
      end
      it "is not never" do
        expect(subject).not_to be_never
      end
      it "status is covered" do
        expect(subject.status).to eq("covered")
      end
    end
    # Zero coverage count => missed.
    context "A source line without coverage" do
      subject do
        SimpleCov::SourceFile::Line.new("# the ruby source", 5, 0)
      end
      it "has coverage of 0" do
        expect(subject.coverage).to be_zero
      end
      it "is not covered" do
        expect(subject).not_to be_covered
      end
      it "is not skipped" do
        expect(subject).not_to be_skipped
      end
      it "is missed" do
        expect(subject).to be_missed
      end
      it "is not never" do
        expect(subject).not_to be_never
      end
      it "status is missed" do
        expect(subject.status).to eq("missed")
      end
    end
    # Nil coverage (non-executable line) => never.
    context "A source line with no code" do
      subject do
        SimpleCov::SourceFile::Line.new("# the ruby source", 5, nil)
      end
      it "has nil coverage" do
        expect(subject.coverage).to be_nil
      end
      it "is not covered" do
        expect(subject).not_to be_covered
      end
      it "is not skipped" do
        expect(subject).not_to be_skipped
      end
      it "is not missed" do
        expect(subject).not_to be_missed
      end
      it "is never" do
        expect(subject).to be_never
      end
      it "status is never" do
        expect(subject.status).to eq("never")
      end
    end
    # Constructor argument validation.
    it "raises ArgumentError when initialized with invalid src" do
      expect { SimpleCov::SourceFile::Line.new(:symbol, 5, 3) }.to raise_error(ArgumentError)
    end
    it "raises ArgumentError when initialized with invalid line_number" do
      expect { SimpleCov::SourceFile::Line.new("some source", "five", 3) }.to raise_error(ArgumentError)
    end
    it "raises ArgumentError when initialized with invalid coverage" do
      expect { SimpleCov::SourceFile::Line.new("some source", 5, "three") }.to raise_error(ArgumentError)
    end
  end
end
| 23.846154 | 105 | 0.598656 |
21111135faaf2751170fb4789b7e61d2aa5c2e40 | 1,084 | # frozen_string_literal: true
require "drb"
class Pry
  class Shell
    # Server-side registry of connected pry-shell clients, shared over DRb.
    # DRbUndumped keeps the registry proxied by reference instead of being
    # marshalled to remote processes.
    class Registry
      include DRb::DRbUndumped

      # clients: id => Client map; current: the client currently attached
      # to the UI (nil when detached); mutex: guards connect/draw ordering.
      attr_reader :clients, :current, :mutex

      def initialize
        @clients = {}
        @mutex = Mutex.new
      end

      # Record a newly connected client and, when auto_connect is enabled,
      # immediately attach it to the UI. Returns the Client.
      def register(id:, name:, host:, pid:, location:)
        Client.new(id, name, host, pid, location).tap do |client|
          Logger.debug("New client connected - #{client}")

          @clients[id] = client

          connect_to(client) if Shell.configuration.auto_connect
        end
      end

      # Attach +client+ to the UI unless another client is already current.
      def connect_to(client)
        # This thread is necessary because `UI::Session.draw!`
        # puts the main thread into sleep!
        mutex.synchronize do
          return if current

          # NOTE(review): @current is assigned inside the spawned thread,
          # only after draw! — callers should not expect #current to be set
          # as soon as connect_to returns.
          Thread.start do
            UI::Session.draw!
            @current = client
          end
        end
      end

      # Detach the current client and restart the UI. The argument is
      # unused (kept for interface symmetry with #remove).
      def disconnect(_client)
        @current = nil
        UI.restart!
      end

      # Forget +client+ entirely (detach and drop it from the map).
      def remove(client)
        @current = nil
        @clients.delete(client.id)
        UI.restart!
      end
    end
  end
end
| 19.357143 | 65 | 0.559041 |
acaf9dc5d8bfb983f3c95cbeb32a4d9f47379be0 | 4,556 | describe RequirementsService::Commands::DefaultThirdPartyRequirements do
subject { described_class.new(context_module: context_module) }
let(:context_module) do
double(
'context module',
completion_requirements: [],
update_column: nil,
touch: nil,
content_tags: content_tags,
)
end
let(:content_tags) do
[
double(:content_tag, content_type: "Assignment", id: 1),
double(:content_tag, content_type: "DiscussionTopic", id: 2),
double(:content_tag, content_type: "WikiPage", id: 3),
double(:content_tag, content_type: "ContextExternalTool", id: 4),
double(:content_tag, content_type: "Attachment", id: 5),
double(:content_tag, content_type: "Quizzes::Quiz", id: 6),
]
end
describe "#set_requirements" do
before do
subject.send(:set_requirements)
end
it "Sets an assignment to must_submit" do
assignment = subject.send(:completion_requirements).find {|req| req[:id] == 1 }
expect(assignment[:type]).to eq("must_submit")
end
it "Sets an Discussion Topic to must_contribute" do
discussion_topic = subject.send(:completion_requirements).find {|req| req[:id] == 2 }
expect(discussion_topic[:type]).to eq("must_contribute")
end
it "Sets an WikiPage to must_view" do
wiki_page = subject.send(:completion_requirements).find {|req| req[:id] == 3 }
expect(wiki_page[:type]).to eq("must_view")
end
it "Sets an Attachment to must_view" do
attachment = subject.send(:completion_requirements).find {|req| req[:id] == 5 }
expect(attachment[:type]).to eq("must_view")
end
it "Sets an Quizzes::Quiz to must_submit" do
quizzes = subject.send(:completion_requirements).find {|req| req[:id] == 6 }
expect(quizzes[:type]).to eq("must_submit")
end
context "Requirement exists" do
let(:context_module) do
double(
'context module',
completion_requirements: [{id: 3, type: "must_submit"}],
update_column: nil,
touch: nil,
content_tags: content_tags,
)
end
it "Sets an WikiPage to must_view" do
wiki_page = subject.send(:completion_requirements).find {|req| req[:id] == 3 }
expect(wiki_page[:type]).to eq("must_submit")
end
end
context "Has Subheader" do
let!(:content_tags) do
[
double(:content_tag, content_type: "ContextModuleSubHeader", id: 1),
double(:content_tag, content_type: "Assignment", id: 2),
double(:content_tag, content_type: "DiscussionTopic", id: 3),
double(:content_tag, content_type: "WikiPage", id: 4),
double(:content_tag, content_type: "ContextExternalTool", id: 5),
double(:content_tag, content_type: "Attachment", id: 6),
double(:content_tag, content_type: "Quizzes::Quiz", id: 7),
]
end
it "Sets an WikiPage to must_view" do
subject.send(:set_requirements)
subheader = subject.send(:completion_requirements).find {|req| req[:id] == 1 }
expect(subheader).to eq(nil)
end
end
end
describe "#add_prerequisites" do
let(:first_context_module) do
double(
'context module',
id: 1,
completion_requirements: [],
prerequisites: [],
position: 1,
name: "FIRST ONE",
context_id: 1,
update_column: nil,
touch: nil,
content_tags: content_tags,
)
end
let(:second_context_module) do
double(
'context module',
id: 2,
completion_requirements: [],
prerequisites: [],
position: 2,
name: "SECOND ONE",
context_id: 1,
update_column: nil,
touch: nil,
content_tags: content_tags,
)
end
context "First module" do
subject { described_class.new(context_module: first_context_module) }
it "Does not set prerequisites to the first one" do
expect(first_context_module).not_to receive(:update_column)
subject.send(:add_prerequisites)
end
end
context "Second module" do
subject { described_class.new(context_module: second_context_module) }
before do
allow(subject).to receive(:find_last_context_module).and_return(first_context_module)
end
it "Does not set prerequisites to the first one" do
expect(second_context_module).to receive(:update_column)
subject.send(:add_prerequisites)
end
end
end
end | 31.42069 | 93 | 0.630817 |
218abfb2d12a7082decb8eeb5a4ebd773a68e0c3 | 402 | module ApplicationHelper
def markdown(content)
@markdown ||= Redcarpet::Markdown.new(Redcarpet::Render::HTML, autolink: true, space_after_headers: true, fenced_code_blocks: true, filter_html: true, no_styles: true)
@markdown.render(content)
end
def vimeo_summary_cleanup(summary)
index = summary.index('Cast: ')
return summary if index.nil?
return summary[0, index]
end
end | 33.5 | 171 | 0.738806 |
1c18af91ff8eb187920218be4f213a62a50c6de7 | 7,833 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.centralnic.com/kr.com/status_registered.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/record/parser/whois.centralnic.com.rb'
# Autogenerated parser spec: parses the recorded kr.com/status_registered
# fixture and pins every parsed property (dates, registrar, the three
# contact records, nameservers). Regenerate via `rake spec:generate`
# rather than editing expectations by hand.
describe Whois::Record::Parser::WhoisCentralnicCom, "status_registered.expected" do
  subject do
    file = fixture("responses", "whois.centralnic.com/kr.com/status_registered.txt")
    part = Whois::Record::Part.new(body: File.read(file))
    described_class.new(part)
  end
  describe "#disclaimer" do
    it do
      expect(subject.disclaimer).to eq("This whois service is provided by CentralNic Ltd and only contains information pertaining to Internet domain names we have registered for our customers. By using this service you are agreeing (1) not to use any information presented here for any purpose other than determining ownership of domain names, (2) not to store or reproduce this data in any way, (3) not to use any high-volume, automated, electronic processes to obtain data from this service. Abuse of this service is monitored and actions in contravention of these terms will result in being permanently blacklisted. All data is (c) CentralNic Ltd https://www.centralnic.com/")
    end
  end
  describe "#domain" do
    it do
      expect(subject.domain).to eq("academyart.kr.com")
    end
  end
  describe "#domain_id" do
    it do
      expect(subject.domain_id).to eq("CNIC-DO569707")
    end
  end
  describe "#status" do
    it do
      expect(subject.status).to eq(["clientTransferProhibited", "serverTransferProhibited"])
    end
  end
  describe "#available?" do
    it do
      expect(subject.available?).to eq(false)
    end
  end
  describe "#registered?" do
    it do
      expect(subject.registered?).to eq(true)
    end
  end
  describe "#created_on" do
    it do
      expect(subject.created_on).to be_a(Time)
      expect(subject.created_on).to eq(Time.parse("2008-06-11 21:25:43 UTC"))
    end
  end
  describe "#updated_on" do
    it do
      expect(subject.updated_on).to be_a(Time)
      expect(subject.updated_on).to eq(Time.parse("2012-01-16 16:25:41 UTC"))
    end
  end
  describe "#expires_on" do
    it do
      expect(subject.expires_on).to be_a(Time)
      expect(subject.expires_on).to eq(Time.parse("2014-06-11 23:59:59 UTC"))
    end
  end
  describe "#registrar" do
    it do
      expect(subject.registrar).to be_a(Whois::Record::Registrar)
      expect(subject.registrar.id).to eq("H292913")
      expect(subject.registrar.name).to eq(nil)
      expect(subject.registrar.organization).to eq("Network Solutions LLC")
      expect(subject.registrar.url).to eq("http://www.networksolutions.com/")
    end
  end
  # The three contact records below are identical in this fixture.
  describe "#registrant_contacts" do
    it do
      expect(subject.registrant_contacts).to be_a(Array)
      expect(subject.registrant_contacts.size).to eq(1)
      expect(subject.registrant_contacts[0]).to be_a(Whois::Record::Contact)
      expect(subject.registrant_contacts[0].type).to eq(Whois::Record::Contact::TYPE_REGISTRANT)
      expect(subject.registrant_contacts[0].id).to eq("41619876")
      expect(subject.registrant_contacts[0].name).to eq("Academy of Art College")
      expect(subject.registrant_contacts[0].organization).to eq("Academy of Art College")
      expect(subject.registrant_contacts[0].address).to eq("79 NEW MONTGOMERY ST")
      expect(subject.registrant_contacts[0].city).to eq("SAN FRANCISCO")
      expect(subject.registrant_contacts[0].zip).to eq("94105")
      expect(subject.registrant_contacts[0].state).to eq("CA")
      expect(subject.registrant_contacts[0].country).to eq(nil)
      expect(subject.registrant_contacts[0].country_code).to eq("US")
      expect(subject.registrant_contacts[0].phone).to eq("+1.415618350")
      expect(subject.registrant_contacts[0].fax).to eq(nil)
      expect(subject.registrant_contacts[0].email).to eq("[email protected]")
    end
  end
  describe "#admin_contacts" do
    it do
      expect(subject.admin_contacts).to be_a(Array)
      expect(subject.admin_contacts.size).to eq(1)
      expect(subject.admin_contacts[0]).to be_a(Whois::Record::Contact)
      expect(subject.admin_contacts[0].type).to eq(Whois::Record::Contact::TYPE_ADMINISTRATIVE)
      expect(subject.admin_contacts[0].id).to eq("41619876")
      expect(subject.admin_contacts[0].name).to eq("Academy of Art College")
      expect(subject.admin_contacts[0].organization).to eq("Academy of Art College")
      expect(subject.admin_contacts[0].address).to eq("79 NEW MONTGOMERY ST")
      expect(subject.admin_contacts[0].city).to eq("SAN FRANCISCO")
      expect(subject.admin_contacts[0].zip).to eq("94105")
      expect(subject.admin_contacts[0].state).to eq("CA")
      expect(subject.admin_contacts[0].country).to eq(nil)
      expect(subject.admin_contacts[0].country_code).to eq("US")
      expect(subject.admin_contacts[0].phone).to eq("+1.415618350")
      expect(subject.admin_contacts[0].fax).to eq(nil)
      expect(subject.admin_contacts[0].email).to eq("[email protected]")
    end
  end
  describe "#technical_contacts" do
    it do
      expect(subject.technical_contacts).to be_a(Array)
      expect(subject.technical_contacts.size).to eq(1)
      expect(subject.technical_contacts[0]).to be_a(Whois::Record::Contact)
      expect(subject.technical_contacts[0].type).to eq(Whois::Record::Contact::TYPE_TECHNICAL)
      expect(subject.technical_contacts[0].id).to eq("41619876")
      expect(subject.technical_contacts[0].name).to eq("Academy of Art College")
      expect(subject.technical_contacts[0].organization).to eq("Academy of Art College")
      expect(subject.technical_contacts[0].address).to eq("79 NEW MONTGOMERY ST")
      expect(subject.technical_contacts[0].city).to eq("SAN FRANCISCO")
      expect(subject.technical_contacts[0].zip).to eq("94105")
      expect(subject.technical_contacts[0].state).to eq("CA")
      expect(subject.technical_contacts[0].country).to eq(nil)
      expect(subject.technical_contacts[0].country_code).to eq("US")
      expect(subject.technical_contacts[0].phone).to eq("+1.415618350")
      expect(subject.technical_contacts[0].fax).to eq(nil)
      expect(subject.technical_contacts[0].email).to eq("[email protected]")
    end
  end
  describe "#nameservers" do
    it do
      expect(subject.nameservers).to be_a(Array)
      expect(subject.nameservers.size).to eq(5)
      expect(subject.nameservers[0]).to be_a(Whois::Record::Nameserver)
      expect(subject.nameservers[0].name).to eq("ns1.academyart.edu")
      expect(subject.nameservers[0].ipv4).to eq(nil)
      expect(subject.nameservers[0].ipv6).to eq(nil)
      expect(subject.nameservers[1]).to be_a(Whois::Record::Nameserver)
      expect(subject.nameservers[1].name).to eq("dbru.br.ns.els-gms.att.net")
      expect(subject.nameservers[1].ipv4).to eq(nil)
      expect(subject.nameservers[1].ipv6).to eq(nil)
      expect(subject.nameservers[2]).to be_a(Whois::Record::Nameserver)
      expect(subject.nameservers[2].name).to eq("dmtu.mt.ns.els-gms.att.net")
      expect(subject.nameservers[2].ipv4).to eq(nil)
      expect(subject.nameservers[2].ipv6).to eq(nil)
      expect(subject.nameservers[3]).to be_a(Whois::Record::Nameserver)
      expect(subject.nameservers[3].name).to eq("cbru.br.ns.els-gms.att.net")
      expect(subject.nameservers[3].ipv4).to eq(nil)
      expect(subject.nameservers[3].ipv6).to eq(nil)
      expect(subject.nameservers[4]).to be_a(Whois::Record::Nameserver)
      expect(subject.nameservers[4].name).to eq("cmtu.mt.ns.els-gms.att.net")
      expect(subject.nameservers[4].ipv4).to eq(nil)
      expect(subject.nameservers[4].ipv6).to eq(nil)
    end
  end
end
| 46.625 | 679 | 0.710839 |
21c169eda8392f1af564e78e4427d96cbf59ebdb | 2,564 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::KeyVault::V2015_06_01
  module Models
    #
    # The key import parameters
    #
    # Autogenerated by AutoRest; the mapper hash drives MsRestAzure's
    # JSON serialization/deserialization of this model.
    #
    class KeyImportParameters

      include MsRestAzure

      # @return [Boolean] Whether to import as a hardware key (HSM) or software
      # key
      attr_accessor :hsm

      # @return [JsonWebKey] The Json web key
      attr_accessor :key

      # @return [KeyAttributes] The key management attributes
      attr_accessor :key_attributes

      # @return [Hash{String => String}] Application-specific metadata in the
      # form of key-value pairs
      attr_accessor :tags


      #
      # Mapper for KeyImportParameters class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'KeyImportParameters',
          type: {
            name: 'Composite',
            class_name: 'KeyImportParameters',
            model_properties: {
              hsm: {
                client_side_validation: true,
                required: false,
                serialized_name: 'Hsm',
                type: {
                  name: 'Boolean'
                }
              },
              # 'key' is the only required wire field.
              key: {
                client_side_validation: true,
                required: true,
                serialized_name: 'key',
                type: {
                  name: 'Composite',
                  class_name: 'JsonWebKey'
                }
              },
              key_attributes: {
                client_side_validation: true,
                required: false,
                serialized_name: 'attributes',
                type: {
                  name: 'Composite',
                  class_name: 'KeyAttributes'
                }
              },
              tags: {
                client_side_validation: true,
                required: false,
                serialized_name: 'tags',
                type: {
                  name: 'Dictionary',
                  value: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'StringElementType',
                      type: {
                        name: 'String'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 27.869565 | 79 | 0.475429 |
6a07309322f67cbdde608b783d5280b460da73dd | 1,802 | # -*- encoding: utf-8 -*-
# stub: rb-fsevent 0.10.3 ruby lib
# Installed-gem specification stub for rb-fsevent 0.10.3 (generated by
# RubyGems on install; the respond_to? guards and the legacy
# specification_version branches keep it loadable under old RubyGems).
Gem::Specification.new do |s|
  s.name = "rb-fsevent".freeze
  s.version = "0.10.3"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.metadata = { "source_code_uri" => "https://github.com/thibaudgg/rb-fsevent" } if s.respond_to? :metadata=
  s.require_paths = ["lib".freeze]
  s.authors = ["Thibaud Guillaume-Gentil".freeze, "Travis Tilley".freeze]
  s.date = "2018-03-03"
  s.description = "FSEvents API with Signals catching (without RubyCocoa)".freeze
  s.email = ["[email protected]".freeze, "[email protected]".freeze]
  s.homepage = "http://rubygems.org/gems/rb-fsevent".freeze
  s.licenses = ["MIT".freeze]
  s.rubygems_version = "2.7.6".freeze
  s.summary = "Very simple & usable FSEvents API".freeze
  s.installed_by_version = "2.7.6" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<bundler>.freeze, ["~> 1.0"])
      s.add_development_dependency(%q<rspec>.freeze, ["~> 3.6"])
      s.add_development_dependency(%q<guard-rspec>.freeze, ["~> 4.2"])
      s.add_development_dependency(%q<rake>.freeze, ["~> 12.0"])
    else
      s.add_dependency(%q<bundler>.freeze, ["~> 1.0"])
      s.add_dependency(%q<rspec>.freeze, ["~> 3.6"])
      s.add_dependency(%q<guard-rspec>.freeze, ["~> 4.2"])
      s.add_dependency(%q<rake>.freeze, ["~> 12.0"])
    end
  else
    s.add_dependency(%q<bundler>.freeze, ["~> 1.0"])
    s.add_dependency(%q<rspec>.freeze, ["~> 3.6"])
    s.add_dependency(%q<guard-rspec>.freeze, ["~> 4.2"])
    s.add_dependency(%q<rake>.freeze, ["~> 12.0"])
  end
end
| 41.906977 | 112 | 0.656493 |
28644312a75fd094642cf756e0384ec231a5c3fe | 58,105 | require 'ostruct'
require 'xml/xml_utils'
require 'cgi' # Used for URL encoding/decoding
require 'metadata/linux/LinuxUsers'
require 'metadata/linux/LinuxUtils'
require 'metadata/ScanProfile/HostScanProfiles'
class Host < ApplicationRecord
include SupportsFeatureMixin
include NewWithTypeStiMixin
include TenantIdentityMixin
include DeprecationMixin
include CustomActionsMixin
include EmsRefreshMixin
VENDOR_TYPES = {
# DB Displayed
"microsoft" => "Microsoft",
"redhat" => "RedHat",
"ovirt" => "Ovirt",
"kubevirt" => "KubeVirt",
"vmware" => "VMware",
"openstack_infra" => "OpenStack Infrastructure",
"ibm_power_hmc" => "IBM Power HMC",
"unknown" => "Unknown",
nil => "Unknown",
}.freeze
validates_presence_of :name
validates_inclusion_of :user_assigned_os, :in => ["linux_generic", "windows_generic", nil]
validates_inclusion_of :vmm_vendor, :in => VENDOR_TYPES.keys
belongs_to :ext_management_system, :foreign_key => "ems_id"
belongs_to :ems_cluster
has_one :operating_system, :dependent => :destroy
has_one :hardware, :dependent => :destroy
has_many :vms_and_templates, :dependent => :nullify
has_many :vms, :inverse_of => :host
has_many :miq_templates, :inverse_of => :host
has_many :host_storages, :dependent => :destroy
has_many :storages, :through => :host_storages
has_many :writable_accessible_host_storages, -> { writable_accessible }, :class_name => "HostStorage"
has_many :writable_accessible_storages, :through => :writable_accessible_host_storages, :source => :storage
has_many :host_virtual_switches, :class_name => "Switch", :dependent => :destroy, :inverse_of => :host
has_many :host_switches, :dependent => :destroy
has_many :switches, :through => :host_switches
has_many :lans, :through => :switches
has_many :host_virtual_lans, :through => :host_virtual_switches, :source => :lans
has_many :subnets, :through => :lans
has_many :networks, :through => :hardware
has_many :patches, :dependent => :destroy
has_many :system_services, :dependent => :destroy
has_many :host_services, :class_name => "SystemService", :foreign_key => "host_id", :inverse_of => :host
has_many :metrics, :as => :resource # Destroy will be handled by purger
has_many :metric_rollups, :as => :resource # Destroy will be handled by purger
has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger
has_many :ems_events,
->(host) { where("host_id = ? OR dest_host_id = ?", host.id, host.id).order(:timestamp) },
:class_name => "EmsEvent"
has_many :ems_events_src, :class_name => "EmsEvent"
has_many :ems_events_dest, :class_name => "EmsEvent", :foreign_key => :dest_host_id
has_many :policy_events, -> { order("timestamp") }
has_many :guest_applications, :dependent => :destroy
has_many :miq_events, :as => :target, :dependent => :destroy
has_many :filesystems, :as => :resource, :dependent => :destroy
has_many :directories, -> { where(:rsc_type => 'dir') }, :as => :resource, :class_name => "Filesystem"
has_many :files, -> { where(:rsc_type => 'file') }, :as => :resource, :class_name => "Filesystem"
# Accounts - Users and Groups
has_many :accounts, :dependent => :destroy
has_many :users, -> { where(:accttype => 'user') }, :class_name => "Account", :foreign_key => "host_id"
has_many :groups, -> { where(:accttype => 'group') }, :class_name => "Account", :foreign_key => "host_id"
has_many :advanced_settings, :as => :resource, :dependent => :destroy
has_many :miq_alert_statuses, :dependent => :destroy, :as => :resource
has_many :host_service_groups, :dependent => :destroy
has_many :cloud_services, :dependent => :nullify
has_many :host_cloud_services, :class_name => "CloudService", :foreign_key => "host_id",
:inverse_of => :host
has_many :host_aggregate_hosts, :dependent => :destroy
has_many :host_aggregates, :through => :host_aggregate_hosts
has_many :host_hardwares, :class_name => 'Hardware', :dependent => :nullify
has_many :vm_hardwares, :class_name => 'Hardware', :through => :vms_and_templates, :source => :hardware
# Physical server reference
belongs_to :physical_server, :inverse_of => :host
serialize :settings, Hash
deprecate_attribute :address, :hostname
alias_attribute :state, :power_state
alias_attribute :to_s, :name
include ProviderObjectMixin
include EventMixin
include CustomAttributeMixin
has_many :ems_custom_attributes, -> { where(:source => 'VC') }, :as => :resource, :dependent => :destroy, :class_name => "CustomAttribute"
has_many :filesystems_custom_attributes, :through => :filesystems, :source => 'custom_attributes'
acts_as_miq_taggable
virtual_column :os_image_name, :type => :string, :uses => [:operating_system, :hardware]
virtual_column :platform, :type => :string, :uses => [:operating_system, :hardware]
virtual_delegate :v_owning_cluster, :to => "ems_cluster.name", :allow_nil => true, :default => "", :type => :string
virtual_column :v_owning_datacenter, :type => :string, :uses => :all_relationships
virtual_column :v_owning_folder, :type => :string, :uses => :all_relationships
virtual_delegate :cpu_total_cores, :cpu_cores_per_socket, :to => :hardware, :allow_nil => true, :default => 0, :type => :integer
virtual_delegate :num_cpu, :to => "hardware.cpu_sockets", :allow_nil => true, :default => 0, :type => :integer
virtual_delegate :total_vcpus, :to => "hardware.cpu_total_cores", :allow_nil => true, :default => 0, :type => :integer
virtual_delegate :ram_size, :to => "hardware.memory_mb", :allow_nil => true, :default => 0, :type => :integer
virtual_column :enabled_inbound_ports, :type => :numeric_set # The following are not set to use anything
virtual_column :enabled_outbound_ports, :type => :numeric_set # because get_ports ends up re-querying the
virtual_column :enabled_udp_inbound_ports, :type => :numeric_set # database anyway.
virtual_column :enabled_udp_outbound_ports, :type => :numeric_set
virtual_column :enabled_tcp_inbound_ports, :type => :numeric_set
virtual_column :enabled_tcp_outbound_ports, :type => :numeric_set
virtual_column :all_enabled_ports, :type => :numeric_set
virtual_column :service_names, :type => :string_set, :uses => :system_services
virtual_column :enabled_run_level_0_services, :type => :string_set, :uses => :host_services
virtual_column :enabled_run_level_1_services, :type => :string_set, :uses => :host_services
virtual_column :enabled_run_level_2_services, :type => :string_set, :uses => :host_services
virtual_column :enabled_run_level_3_services, :type => :string_set, :uses => :host_services
virtual_column :enabled_run_level_4_services, :type => :string_set, :uses => :host_services
virtual_column :enabled_run_level_5_services, :type => :string_set, :uses => :host_services
virtual_column :enabled_run_level_6_services, :type => :string_set, :uses => :host_services
virtual_delegate :annotation, :to => :hardware, :prefix => "v", :allow_nil => true, :type => :string
virtual_column :vmm_vendor_display, :type => :string
virtual_column :ipmi_enabled, :type => :boolean
virtual_attribute :archived, :boolean, :arel => ->(t) { t.grouping(t[:ems_id].eq(nil)) }
virtual_column :normalized_state, :type => :string
virtual_has_many :resource_pools, :uses => :all_relationships
virtual_has_many :miq_scsi_luns, :uses => {:hardware => {:storage_adapters => {:miq_scsi_targets => :miq_scsi_luns}}}
virtual_has_many :processes, :class_name => "OsProcess", :uses => {:operating_system => :processes}
virtual_has_many :event_logs, :uses => {:operating_system => :event_logs}
virtual_has_many :firewall_rules, :uses => {:operating_system => :firewall_rules}
virtual_total :v_total_storages, :host_storages
virtual_total :v_total_vms, :vms
virtual_total :v_total_miq_templates, :miq_templates
scope :active, -> { where.not(:ems_id => nil) }
scope :archived, -> { where(:ems_id => nil) }
alias_method :datastores, :storages # Used by web-services to return datastores as the property name
alias_method :parent_cluster, :ems_cluster
alias_method :owning_cluster, :ems_cluster
include RelationshipMixin
self.default_relationship_type = "ems_metadata"
include DriftStateMixin
virtual_delegate :last_scan_on, :to => "last_drift_state_timestamp_rec.timestamp", :allow_nil => true, :type => :datetime
delegate :queue_name_for_ems_operations, :to => :ext_management_system, :allow_nil => true
include UuidMixin
include MiqPolicyMixin
include AlertMixin
include Metric::CiMixin
include FilterableMixin
include AuthenticationMixin
include AsyncDeleteMixin
include ComplianceMixin
include AvailabilityMixin
include AggregationMixin
before_create :make_smart
after_save :process_events
supports :destroy
supports_not :quick_stats
supports :reset do
unsupported_reason_add(:reset, _("The Host is not configured for IPMI")) if ipmi_address.blank?
unsupported_reason_add(:reset, _("The Host has no IPMI credentials")) if authentication_type(:ipmi).nil?
if authentication_userid(:ipmi).blank? || authentication_password(:ipmi).blank?
unsupported_reason_add(:reset, _("The Host has invalid IPMI credentials"))
end
end
supports_not :refresh_advanced_settings
supports_not :refresh_firewall_rules
supports_not :refresh_logs
supports_not :refresh_network_interfaces
supports_not :set_node_maintenance
supports_not :smartstate_analysis
supports_not :unset_node_maintenance
# Hosts not assigned to any cluster.
def self.non_clustered
  where(:ems_cluster_id => nil)
end

# Hosts that belong to a cluster.
def self.clustered
  where.not(:ems_cluster_id => nil)
end

# Hosts flagged as failover members.
def self.failover
  where(:failover => true)
end

# Credential role checked when validating this host for scanning.
def authentication_check_role
  'smartstate'
end

# Zone that should process work for this host: the provider's zone when
# connected, otherwise the current server's zone.
def my_zone
  ems = ext_management_system
  ems ? ems.my_zone : MiqServer.my_zone
end

# before_create hook: hosts default to smart-state capable.
def make_smart
  self.smart = true
end
# after_save hook: raise cluster add/remove events when ems_cluster_id changed.
def process_events
  return unless saved_change_to_ems_cluster_id?

  raise_cluster_event(ems_cluster_id_before_last_save, "host_remove_from_cluster") if ems_cluster_id_before_last_save
  raise_cluster_event(ems_cluster, "host_add_to_cluster") if ems_cluster_id
end # after_save

# Raise an EVM event for a cluster membership change; failures are logged,
# never re-raised (this runs inside an after_save callback).
def raise_cluster_event(ems_cluster, event)
  # accept ids or objects
  ems_cluster = EmsCluster.find(ems_cluster) unless ems_cluster.kind_of?(EmsCluster)
  inputs = {:ems_cluster => ems_cluster, :host => self}
  begin
    MiqEvent.raise_evm_event(self, event, inputs)
    _log.info("Raised EVM Event: [#{event}, host: #{name}(#{id}), cluster: #{ems_cluster.name}(#{ems_cluster.id})]")
  rescue => err
    _log.warn("Error raising EVM Event: [#{event}, host: #{name}(#{id}), cluster: #{ems_cluster.name}(#{ems_cluster.id})], '#{err.message}'")
  end
end
private :raise_cluster_event
# Power-operation validations. Each returns an availability hash of the form
# {:available => Boolean, :message => String or nil}.
def validate_reboot
  validate_esx_host_connected_to_vc_with_power_state('on')
end

def validate_shutdown
  validate_esx_host_connected_to_vc_with_power_state('on')
end

def validate_standby
  validate_esx_host_connected_to_vc_with_power_state('on')
end

def validate_enter_maint_mode
  validate_esx_host_connected_to_vc_with_power_state('on')
end

# Exiting maintenance mode requires the host to currently be in it.
def validate_exit_maint_mode
  validate_esx_host_connected_to_vc_with_power_state('maintenance')
end

def validate_enable_vmotion
  validate_esx_host_connected_to_vc_with_power_state('on')
end

def validate_disable_vmotion
  validate_esx_host_connected_to_vc_with_power_state('on')
end

def validate_vmotion_enabled?
  validate_esx_host_connected_to_vc_with_power_state('on')
end

# IPMI start/stop require the opposite current power state.
def validate_start
  validate_ipmi('off')
end

def validate_stop
  validate_ipmi('on')
end
# Check that IPMI is fully configured (address + usable credentials) and,
# when pstate is given, that the host is in that power state.
# Returns {:available => Boolean, :message => String or nil}.
def validate_ipmi(pstate = nil)
  failure = if ipmi_address.blank?
              "The Host is not configured for IPMI"
            elsif authentication_type(:ipmi).nil?
              "The Host has no IPMI credentials"
            elsif authentication_userid(:ipmi).blank? || authentication_password(:ipmi).blank?
              "The Host has invalid IPMI credentials"
            end
  return {:available => false, :message => failure} if failure

  validate_power_state(pstate) || {:available => true, :message => nil}
end
# Combined availability check: the host must be a provider-connected ESX host
# and (optionally) in the given power state. The first failure hash wins;
# both sub-checks return nil on success.
def validate_esx_host_connected_to_vc_with_power_state(pstate)
  failure = validate_esx_host_connected_to_vc || validate_power_state(pstate)
  failure || {:available => true, :message => nil}
end
# Validate the host's current power_state against pstate, which may be nil
# (any state acceptable), a String (exact match) or an Array of acceptable
# states. Returns a failure hash, or nil when the state is acceptable.
def validate_power_state(pstate)
  return nil if pstate.nil?

  if pstate.instance_of?(String)
    return {:available => false, :message => "The Host is not powered '#{pstate}'"} unless power_state == pstate
  elsif pstate.instance_of?(Array)
    return {:available => false, :message => "The Host is not powered #{pstate.inspect}"} unless pstate.include?(power_state)
  end
  nil
end
# Basic requirements for provider-mediated operations on this host.
# Returns a failure hash, or nil when the host qualifies.
def validate_esx_host_connected_to_vc
  if !has_active_ems?
    {:available => false, :message => "The Host is not connected to an active Provider"}
  elsif !is_vmware_esx?
    {:available => false, :message => "The Host is not VMware ESX"}
  end
end
# Scan/compliance queueing is always allowed.
def validate_scan_and_check_compliance_queue
  {:available => true, :message => nil}
end

def validate_check_compliance_queue
  {:available => true, :message => nil}
end

# Generic "not supported" availability response for this host type.
def validate_unsupported(message_prefix)
  {:available => false, :message => "#{message_prefix} is not available for #{self.class.model_suffix} Host."}
end

# True when the host is linked to a provider record.
def has_active_ems?
  !!ext_management_system
end
# Execute an IPMI verb (e.g. :power_on, :power_state) against this host's BMC.
def run_ipmi_command(verb)
  require 'miq-ipmi'
  _log.info("Invoking [#{verb}] for Host: [#{name}], IPMI Address: [#{ipmi_address}], IPMI Username: [#{authentication_userid(:ipmi)}]")
  ipmi = MiqIPMI.new(ipmi_address, *auth_user_pwd(:ipmi))
  ipmi.send(verb)
end

# event: the event sent to automate for policy resolution
# cb_method: the MiqQueue callback method along with the parameters that is called
#            when automate process is done and the request is not prevented to proceed by policy
def check_policy_prevent(event, *cb_method)
  MiqEvent.raise_evm_event(self, event, {:host => self}, {:miq_callback => prevent_callback_settings(*cb_method)})
end

# Direct IPMI power operations; normally invoked via check_policy_prevent callbacks.
def ipmi_power_on
  run_ipmi_command(:power_on)
end

def ipmi_power_off
  run_ipmi_command(:power_off)
end

def ipmi_power_reset
  run_ipmi_command(:power_reset)
end
# Power-cycle the host via IPMI, going through policy resolution first
# (the actual ipmi_power_reset happens in the MiqQueue callback).
def reset
  if supports?(:reset)
    check_policy_prevent("request_host_reset", "ipmi_power_reset")
  else
    # BUG FIX: this warning previously said "Cannot stop" — copy/paste from
    # the stop path; this is the reset path.
    _log.warn("Cannot reset because <#{unsupported_reason(:reset)}>")
  end
end
# Power on the host: prefer a vCenter power-up when in standby, otherwise
# fall back to IPMI (only possible from the 'off' state).
def start
  if validate_start[:available] && power_state == 'standby' && respond_to?(:vim_power_up_from_standby)
    check_policy_prevent("request_host_start", "vim_power_up_from_standby")
  else
    msg = validate_ipmi
    if msg[:available]
      pstate = run_ipmi_command(:power_state)
      if pstate == 'off'
        check_policy_prevent("request_host_start", "ipmi_power_on")
      else
        _log.warn("Non-Startable IPMI power state = <#{pstate.inspect}>")
      end
    else
      _log.warn("Cannot start because <#{msg[:message]}>")
    end
  end
end

# Power off the host via IPMI after policy resolution.
def stop
  msg = validate_stop
  if msg[:available]
    check_policy_prevent("request_host_stop", "ipmi_power_off")
  else
    _log.warn("Cannot stop because <#{msg[:message]}>")
  end
end

# Put the host into standby through the provider (vCenter support required).
def standby
  msg = validate_standby
  if msg[:available]
    if power_state == 'on' && respond_to?(:vim_power_down_to_standby)
      check_policy_prevent("request_host_standby", "vim_power_down_to_standby")
    else
      _log.warn("Cannot go into standby mode from power state = <#{power_state.inspect}>")
    end
  else
    _log.warn("Cannot go into standby mode because <#{msg[:message]}>")
  end
end

# Enter vCenter maintenance mode (host must be powered on).
def enter_maint_mode
  msg = validate_enter_maint_mode
  if msg[:available]
    if power_state == 'on' && respond_to?(:vim_enter_maintenance_mode)
      check_policy_prevent("request_host_enter_maintenance_mode", "vim_enter_maintenance_mode")
    else
      _log.warn("Cannot enter maintenance mode from power state = <#{power_state.inspect}>")
    end
  else
    _log.warn("Cannot enter maintenance mode because <#{msg[:message]}>")
  end
end

# Leave vCenter maintenance mode.
def exit_maint_mode
  msg = validate_exit_maint_mode
  if msg[:available] && respond_to?(:vim_exit_maintenance_mode)
    check_policy_prevent("request_host_exit_maintenance_mode", "vim_exit_maintenance_mode")
  else
    _log.warn("Cannot exit maintenance mode because <#{msg[:message]}>")
  end
end

# Graceful shutdown through the provider.
def shutdown
  msg = validate_shutdown
  if msg[:available] && respond_to?(:vim_shutdown)
    check_policy_prevent("request_host_shutdown", "vim_shutdown")
  else
    _log.warn("Cannot shutdown because <#{msg[:message]}>")
  end
end

# Reboot through the provider.
def reboot
  msg = validate_reboot
  if msg[:available] && respond_to?(:vim_reboot)
    check_policy_prevent("request_host_reboot", "vim_reboot")
  else
    _log.warn("Cannot reboot because <#{msg[:message]}>")
  end
end

# Enable/disable vMotion through the provider.
def enable_vmotion
  msg = validate_enable_vmotion
  if msg[:available] && respond_to?(:vim_enable_vmotion)
    check_policy_prevent("request_host_enable_vmotion", "vim_enable_vmotion")
  else
    _log.warn("Cannot enable vmotion because <#{msg[:message]}>")
  end
end

def disable_vmotion
  msg = validate_disable_vmotion
  if msg[:available] && respond_to?(:vim_disable_vmotion)
    check_policy_prevent("request_host_disable_vmotion", "vim_disable_vmotion")
  else
    _log.warn("Cannot disable vmotion because <#{msg[:message]}>")
  end
end

# Query the vMotion state directly (synchronous; no policy event raised).
def vmotion_enabled?
  msg = validate_vmotion_enabled?
  if msg[:available] && respond_to?(:vim_vmotion_enabled?)
    vim_vmotion_enabled?
  else
    _log.warn("Cannot check if vmotion is enabled because <#{msg[:message]}>")
  end
end
# Attach a collected data payload to this host. Currently only :ems_events
# hashes are handled; anything else is silently ignored.
# NOTE(review): the previous "Scan for VMs in a path defined in a repository"
# comment appeared stale for this method.
def add_elements(data)
  if data.kind_of?(Hash) && data[:type] == :ems_events
    _log.info("Adding HASH elements for Host id:[#{id}]-[#{name}] from [#{data[:type]}]")
    add_ems_events(data)
  end
rescue => err
  _log.log_backtrace(err)
end
# Hardware-derived network identity; each reader returns [] when no hardware
# inventory has been collected yet.
def ipaddresses
  return [] if hardware.nil?

  hardware.ipaddresses
end

def hostnames
  return [] if hardware.nil?

  hardware.hostnames
end

def mac_addresses
  return [] if hardware.nil?

  hardware.mac_addresses
end

# True when both OS and hardware inventory are present.
def has_config_data?
  ![operating_system, hardware].any?(&:nil?)
end
# OS identity helpers delegating to the OperatingSystem service class.
def os_image_name
  OperatingSystem.image_name(self)
end

def platform
  OperatingSystem.platform(self)
end

# Attributes read from the operating_system record; "" when no OS is recorded.
def product_name
  os = operating_system
  os ? os.product_name : ""
end

def service_pack
  os = operating_system
  os ? os.service_pack : ""
end
# Best-effort CPU architecture: ESX 4+ is always x86_64, older ESX is x86;
# otherwise inferred from the hardware cpu_type string ("unknown" when
# undeterminable).
def arch
  if vmm_product.to_s.include?('ESX')
    return 'x86_64' if vmm_version.to_i >= 4
    return 'x86'
  end

  return "unknown" unless hardware && !hardware.cpu_type.nil?

  cpu = hardware.cpu_type.to_s.downcase
  return cpu if cpu.include?('x86')
  return "x86" if cpu.starts_with?("intel") # Intel CPUs without "x86" in the type string
  "unknown"
end
# [os-family, architecture] pair for provisioning lookups, or nil when the
# OS family cannot be determined. An unknown arch falls back to "x86".
def platform_arch
  os_family = os_image_name.split("_").first
  cpu_arch = arch == "unknown" ? "x86" : arch
  pair = [os_family, cpu_arch]
  pair unless pair.include?("unknown")
end
# Refresh availability: hosts can only be refreshed through a provider link.
# Returns {:show =>, :enabled =>, :message =>}.
def refreshable_status
  return {:show => true, :enabled => true, :message => ""} if ext_management_system

  {:show => false, :enabled => false, :message => "Host not configured for refresh"}
end
# Scan availability: refreshable hosts inherit the refresh status; otherwise
# scanning is possible only with an IPMI address plus credentials, and the
# message explains what is missing.
def scannable_status
  s = refreshable_status
  return s if s[:show] || s[:enabled]

  s[:show] = true
  if has_credentials?(:ipmi) && ipmi_address.present?
    s.merge!(:enabled => true, :message => "")
  elsif ipmi_address.blank?
    s.merge!(:enabled => false, :message => "Provide an IPMI Address")
  elsif missing_credentials?(:ipmi)
    s.merge!(:enabled => false, :message => "Provide credentials for IPMI")
  end
  s
end
# Convenience readers over refreshable_status / scannable_status.
def is_refreshable?
  refreshable_status[:show]
end

def is_refreshable_now?
  refreshable_status[:enabled]
end

def is_refreshable_now_error_message
  refreshable_status[:message]
end

def is_scannable?
  scannable_status[:show]
end

def is_scannable_now?
  scannable_status[:enabled]
end

def is_scannable_now_error_message
  scannable_status[:message]
end
# Vendor/product predicates.
def is_vmware?
  vmm_vendor == 'vmware'
end

def is_vmware_esx?
  is_vmware? && vmm_product.to_s.strip.downcase.starts_with?('esx')
end

# ESXi: the "esx..." product name ends in "i".
def is_vmware_esxi?
  product = vmm_product.to_s.strip.downcase
  is_vmware? && product.starts_with?('esx') && product.ends_with?('i')
end

# Human-readable vendor name from the VENDOR_TYPES lookup.
def vmm_vendor_display
  VENDOR_TYPES[vmm_vendor]
end
#
# Relationship methods
#

# Remove the host from inventory: detach from its provider and drop
# folder/cluster parent relationships.
def disconnect_inv
  disconnect_ems
  remove_all_parents(:of_type => ['EmsFolder', 'EmsCluster'])
end

# Attach the host to provider `e` (no-op when already attached to it).
def connect_ems(e)
  return if ext_management_system == e

  _log.debug("Connecting Host [#{name}] id [#{id}] to EMS [#{e.name}] id [#{e.id}]")
  self.ext_management_system = e
  save
end

# Detach the host from its provider (or only from `e` when given); also
# clears the cluster link and marks the host state unknown.
def disconnect_ems(e = nil)
  if e.nil? || ext_management_system == e
    log_text = " from EMS [#{ext_management_system.name}] id [#{ext_management_system.id}]" unless ext_management_system.nil?
    _log.info("Disconnecting Host [#{name}] id [#{id}]#{log_text}")
    self.ext_management_system = nil
    self.ems_cluster = nil
    self.state = "unknown"
    save
  end
end
# Link a storage to this host (idempotent).
def connect_storage(s)
  unless storages.include?(s)
    _log.debug("Connecting Host [#{name}] id [#{id}] to Storage [#{s.name}] id [#{s.id}]")
    storages << s
    save
  end
end

# Unlink a storage from this host.
def disconnect_storage(s)
  _log.info("Disconnecting Host [#{name}] id [#{id}] from Storage [#{s.name}] id [#{s.id}]")
  storages.delete(s)
  save
end
# Vm relationship methods

# VMs directly under this host (one level below the default resource pool),
# sorted case-insensitively by name.
def direct_vms
  # Look for only the Vms at the second depth (default RP + 1)
  grandchildren(:of_type => 'Vm').sort_by { |r| r.name.downcase }
end

# Resource Pool relationship methods

# The host's implicit/default resource pool (first direct RP child).
def default_resource_pool
  Relationship.resource(child_rels(:of_type => 'ResourcePool').first)
end

# RPs one level below the default RP.
def resource_pools
  Relationship.resources(grandchild_rels(:of_type => 'ResourcePool'))
end

# Same as resource_pools, but including the default RP itself.
def resource_pools_with_default
  Relationship.resources(child_and_grandchild_rels(:of_type => 'ResourcePool'))
end

# All RPs under this Host and all child RPs
def all_resource_pools
  # descendants typically returns the default_rp first but sporadically it
  # will not due to a bug in the ancestry gem, this means we cannot simply
  # drop the first value and need to check is_default
  descendants(:of_type => 'ResourcePool').select { |r| !r.is_default }.sort_by { |r| r.name.downcase }
end

def all_resource_pools_with_default
  descendants(:of_type => 'ResourcePool').sort_by { |r| r.name.downcase }
end
# Parent relationship methods

# Immediate parent, but only when it is a folder.
def parent_folder
  p = parent
  p if p.kind_of?(EmsFolder)
end

# Nearest ancestor folder that is not a datacenter and not one of the hidden
# "host"/"vm" organizational folders.
def owning_folder
  detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w(host vm).include?(a.name) }
end

# Nearest ancestor that is a datacenter.
def parent_datacenter
  detect_ancestor(:of_type => "EmsFolder") { |a| a.kind_of?(Datacenter) }
end
alias_method :owning_datacenter, :parent_datacenter
# Queue entry point: decode/parse a scan payload and attach it to the host.
# dataArray is [data, data_type]; data may be base64+zlib encoded, and is
# parsed as YAML or XML depending on data_type. Returns false on any error.
# NOTE(review): YAML.load is applied to the decoded payload — confirm the
# payload source is trusted (YAML.safe_load would be safer for external data).
def self.save_metadata(id, dataArray)
  _log.info("for host [#{id}]")
  host = Host.find_by(:id => id)
  data, data_type = dataArray
  data.replace(MIQEncode.decode(data)) if data_type.include?('b64,zlib')
  doc = data_type.include?('yaml') ? YAML.load(data) : MiqXml.load(data)
  host.add_elements(doc)
  host.save!
  _log.info("for host [#{id}] host saved")
rescue => err
  _log.log_backtrace(err)
  return false
end
# Apply credential updates to many hosts. Returns true when every update
# succeeded (or host_ids is blank), otherwise an array of error strings.
# Missing hosts are skipped with a warning and do not count as errors.
def self.batch_update_authentication(host_ids, creds = {})
  errors = []
  return true if host_ids.blank?

  host_ids.each do |id|
    begin
      host = Host.find(id)
      host.update_authentication(creds)
    rescue ActiveRecord::RecordNotFound => err
      _log.warn("#{err.class.name}-#{err}")
      next
    rescue => err
      errors << err.to_s
      _log.error("#{err.class.name}-#{err}")
      next
    end
  end
  errors.empty? ? true : errors
end
# Queue an asynchronous credential verification in the appropriate zone and
# return the tracking MiqTask (verify_credentials? runs on the worker).
def verify_credentials_task(userid, auth_type = nil, options = {})
  task_opts = {
    :action => "Verify Host Credentials",
    :userid => userid
  }

  queue_opts = {
    :args => [auth_type, options],
    :class_name => self.class.name,
    :instance_id => id,
    :method_name => "verify_credentials?",
    :queue_name => queue_name_for_ems_operations,
    :role => "ems_operations",
    :zone => my_zone
  }

  MiqTask.generic_action_with_callback(task_opts, queue_opts)
end
def verify_credentials?(*args)
  # Prevent the connection details, including the password, from being leaked into the logs
  # and MiqQueue by only returning true/false
  !!verify_credentials(*args)
end

# Verify credentials for the given auth_type ('remote', 'ws', 'ipmi', or
# default ws). Raises MiqException::MiqHostError on failure; returns true.
# Non-IPMI logons are only supported on linux platforms.
def verify_credentials(auth_type = nil, options = {})
  raise MiqException::MiqHostError, _("No credentials defined") if missing_credentials?(auth_type)
  if auth_type.to_s != 'ipmi' && os_image_name !~ /linux_*/
    raise MiqException::MiqHostError, _("Logon to platform [%{os_name}] not supported") % {:os_name => os_image_name}
  end

  case auth_type.to_s
  when 'remote' then verify_credentials_with_ssh(auth_type, options)
  when 'ws' then verify_credentials_with_ws(auth_type)
  when 'ipmi' then verify_credentials_with_ipmi(auth_type)
  else
    verify_credentials_with_ws(auth_type)
  end

  true
end
# Subclasses that support web-service logins override this.
def verify_credentials_with_ws(_auth_type = nil, _options = {})
  raise MiqException::MiqHostError, _("Web Services authentication is not supported for hosts of this type.")
end

# Verify SSH credentials by running a trivial command ("uname -a") on the host.
# NOTE(review): the final rescue deliberately catches Exception so that any
# connection failure is wrapped in MiqHostError; HostKeyMismatch is re-raised
# for UI handling.
def verify_credentials_with_ssh(auth_type = nil, options = {})
  raise MiqException::MiqHostError, _("No credentials defined") if missing_credentials?(auth_type)
  unless os_image_name =~ /linux_*/
    raise MiqException::MiqHostError, _("Logon to platform [%{os_name}] not supported") % {:os_name => os_image_name}
  end

  begin
    # connect_ssh logs address and user name(s) being used to make connection
    _log.info("Verifying Host SSH credentials for [#{name}]")
    connect_ssh(options) { |ssu| ssu.exec("uname -a") }
  rescue Net::SSH::AuthenticationFailed => err
    raise err, _("Login failed due to a bad username or password.")
  rescue Net::SSH::HostKeyMismatch
    raise # Re-raise the error so the UI can prompt the user to allow the keys to be reset.
  rescue Exception => err
    _log.warn(err.inspect)
    raise MiqException::MiqHostError, _("Unexpected response returned from system, see log for details")
  else
    true
  end
end
# Verify IPMI credentials by connecting to the BMC.
# NOTE(review): the first raise produces a bare RuntimeError while the other
# checks raise MiqException subclasses — confirm callers expect that mix.
def verify_credentials_with_ipmi(auth_type = nil)
  raise _("No credentials defined for IPMI") if missing_credentials?(auth_type)

  require 'miq-ipmi'
  address = ipmi_address
  raise MiqException::MiqHostError, _("IPMI address is not configured for this Host") if address.blank?

  if MiqIPMI.is_available?(address)
    ipmi = MiqIPMI.new(address, *auth_user_pwd(auth_type))
    unless ipmi.connected?
      raise MiqException::MiqInvalidCredentialsError, _("Login failed due to a bad username or password.")
    end
  else
    raise MiqException::MiqHostError, _("IPMI is not available on this Host")
  end
end
# Queue host discovery for every address between `starting` and `ending`
# (same /24: the network part is taken from `starting`). Each candidate
# address becomes a "discoverHost" MiqQueue message; existing hosts are
# skipped. Raises on malformed or out-of-order addresses.
def self.discoverByIpRange(starting, ending, options = {:ping => true})
  options[:timeout] ||= 10
  # SECURITY/ROBUSTNESS FIX: anchor with \A/\z instead of ^/$ so multi-line
  # input cannot sneak past validation.
  pattern = /\A[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+\z/
  raise _("Starting address is malformed") if (starting =~ pattern).nil?
  raise _("Ending address is malformed") if (ending =~ pattern).nil?

  # PERF FIX: split each address once instead of re-splitting both strings on
  # every loop iteration (was redundant quadratic string work).
  start_octets = starting.split(".")
  end_octets = ending.split(".")
  start_octets.each_index do |i|
    if start_octets[i].to_i > 255 || end_octets[i].to_i > 255
      raise _("IP address octets must be 0 to 255")
    end
    if start_octets[i].to_i > end_octets[i].to_i
      raise _("Ending address must be greater than starting address")
    end
  end

  network_id = start_octets[0..2].join(".")
  host_start = start_octets.last.to_i
  host_end = end_octets.last.to_i

  host_start.upto(host_end) do |h|
    ipaddr = network_id + "." + h.to_s

    unless Host.find_by(:ipaddress => ipaddr).nil? # skip discover for existing hosts
      _log.info("ipaddress '#{ipaddr}' exists, skipping discovery")
      next
    end

    discover_options = {:ipaddr => ipaddr,
                        :ping => options[:ping],
                        :timeout => options[:timeout],
                        :discover_types => options[:discover_types],
                        :credentials => options[:credentials]
    }

    # Add Windows domain credentials for HyperV WMI checks
    default_zone = Zone.find_by(:name => 'default')
    if !default_zone.nil? && default_zone.has_authentication_type?(:windows_domain)
      discover_options[:windows_domain] = [default_zone.authentication_userid(:windows_domain), default_zone.authentication_password_encrypted(:windows_domain)]
    end

    MiqQueue.put(:class_name => "Host", :method_name => "discoverHost", :data => Marshal.dump(discover_options), :server_guid => MiqServer.my_guid)
  end
end
# Wipe a discovered host back to a minimal IPMI-only record, preserving id,
# guid, addresses, name/timestamps and re-creating the IPMI credential.
# Raises when the host has no IPMI address or credential to preserve.
def reset_discoverable_fields
  raise _("Host Not Resettable - No IPMI Address") if ipmi_address.blank?
  cred = authentication_type(:ipmi)
  raise _("Host Not Resettable - No IPMI Credentials") if cred.nil?

  run_callbacks(:destroy) { false } # Run only the before_destroy callbacks to destroy all associations
  reload

  attributes.each do |key, _value|
    next if %w(id guid ipmi_address mac_address name created_on updated_on vmm_vendor).include?(key)
    send("#{key}=", nil)
  end

  make_smart # before_create callback
  self.settings = nil
  self.name = "IPMI <#{ipmi_address}>"
  self.vmm_vendor = 'unknown'
  save!

  authentications.create(cred.attributes) unless cred.nil?
  self
end
# Map discovery results to an [os_name, os_type] pair. os_type is only set
# for Windows; VMware hosts report the ESX server name regardless of ost.os.
def detect_discovered_os(ost)
  if is_vmware?
    ["VMware ESX Server", nil]
  elsif ost.os.include?(:linux)
    ["linux", nil]
  elsif ost.os.include?(:mswin)
    ["windows", "windows"]
  else
    [nil, nil]
  end
end
# Populate identity attributes based on the discovered hypervisor type and
# return the finder method symbol to use when checking for a pre-existing
# record (:find_by_ipaddress, or :find_by_ipmi_address for IPMI-only hosts).
def detect_discovered_hypervisor(ost, ipaddr)
  find_method = :find_by_ipaddress
  if ost.hypervisor.include?(:hyperv)
    self.name = "Microsoft Hyper-V (#{ipaddr})"
    self.type = "ManageIQ::Providers::Microsoft::InfraManager::Host"
    self.ipaddress = ipaddr
    self.vmm_vendor = "microsoft"
    self.vmm_product = "Hyper-V"
  elsif ost.hypervisor.include?(:esx)
    self.name = "VMware ESX Server (#{ipaddr})"
    self.ipaddress = ipaddr
    self.vmm_vendor = "vmware"
    self.vmm_product = "Esx"
    self.type = "ManageIQ::Providers::Vmware::InfraManager::HostEsx"
  elsif ost.hypervisor.include?(:ipmi)
    # IPMI-only hosts are keyed by the BMC address rather than an IP/hostname.
    find_method = :find_by_ipmi_address
    self.name = "IPMI (#{ipaddr})"
    self.type = "Host"
    self.vmm_vendor = "unknown"
    self.vmm_product = nil
    self.ipmi_address = ipaddr
    self.ipaddress = nil
    self.hostname = nil
  else
    self.vmm_vendor = ost.hypervisor.join(", ")
    self.type = "Host"
  end
  find_method
end
# Inspect an OpenStruct discovery payload with any :credentials entry
# stripped, so secrets never reach the logs.
def self.ost_inspect(ost)
  OpenStruct.new(ost.marshal_dump.reject { |key, _| key == :credentials }).inspect
end
# Queue worker entry point: probe one address (Marshal-dumped options built by
# discoverByIpRange) and create a Host record — or delegate to EMS discovery
# for provider-level hypervisors. All failures are logged and audited.
def self.discoverHost(options)
  require 'manageiq/network_discovery/discovery'
  ost = OpenStruct.new(Marshal.load(options))
  _log.info("Discovering Host: #{ost_inspect(ost)}")
  begin
    ManageIQ::NetworkDiscovery::Discovery.scan_host(ost)

    if ost.hypervisor.empty?
      _log.info("NOT Discovered: #{ost_inspect(ost)}")
    else
      _log.info("Discovered: #{ost_inspect(ost)}")

      # Provider-level hypervisors become an EMS record instead of a Host.
      if %i(virtualcenter scvmm rhevm openstack_infra).any? { |ems_type| ost.hypervisor.include?(ems_type) }
        ExtManagementSystem.create_discovered_ems(ost)
        return # only create ems instance, no host.
      end

      host = new(
        :name => "#{ost.ipaddr} - discovered #{Time.now.utc.strftime("%Y-%m-%d %H:%M %Z")}",
        :ipaddress => ost.ipaddr,
        :hostname => Socket.getaddrinfo(ost.ipaddr, nil)[0][2]
      )

      find_method = host.detect_discovered_hypervisor(ost, ost.ipaddr)
      os_name, os_type = host.detect_discovered_os(ost)

      if Host.send(find_method, ost.ipaddr).nil?
        # It may have been added by someone else while we were discovering
        host.save!

        if ost.hypervisor.include?(:ipmi)
          # IPMI - Check if credentials were passed and try to scan host
          cred = (ost.credentials || {})[:ipmi]
          unless cred.nil? || cred[:userid].blank?
            ipmi = MiqIPMI.new(host.ipmi_address, cred[:userid], cred[:password])
            if ipmi.connected?
              _log.warn("IPMI connected to Host:<#{host.ipmi_address}> with User:<#{cred[:userid]}>")
              host.update_authentication(:ipmi => cred)
              host.scan
            else
              _log.warn("IPMI did not connect to Host:<#{host.ipmi_address}> with User:<#{cred[:userid]}>")
            end
          end
        else
          # Try to convert IP address to hostname and update host data
          netHostName = Host.get_hostname(ost.ipaddr)
          host.name = netHostName if netHostName

          EmsRefresh.save_operating_system_inventory(host, :product_name => os_name, :product_type => os_type) unless os_name.nil?
          EmsRefresh.save_hardware_inventory(host, {:cpu_type => "intel"})

          host.save!
        end

        _log.info("#{host.name} created")
        AuditEvent.success(:event => "host_created", :target_id => host.id, :target_class => "Host", :message => "#{host.name} created")
      end
    end
  rescue => err
    _log.log_backtrace(err)
    AuditEvent.failure(:event => "host_created", :target_class => "Host", :message => "creating host, #{err}")
  end
end
# Reverse-resolve an IP address to a hostname; returns nil on any failure.
# NOTE(review): Socket.gethostbyname is deprecated in modern Rubies —
# Addrinfo/Resolv would be the contemporary replacement.
def self.get_hostname(ipAddress)
  _log.info("Resolving hostname: [#{ipAddress}]")
  begin
    ret = Socket.gethostbyname(ipAddress)
    name = ret.first
  rescue => err
    _log.error("ERROR: #{err}")
    return nil
  end
  _log.info("Resolved hostname: [#{name}] to [#{ipAddress}]")
  name
end
# Credentials for an SSH session: [login_user, login_password, su_user,
# su_password, extra_options]. With a :remote credential the :root one is
# used for su; otherwise :root logs in directly and no su is performed.
def ssh_users_and_passwords
  if has_authentication_type?(:remote)
    login_user, login_password = auth_user_pwd(:remote)
    su_user, su_password = auth_user_pwd(:root)
  else
    login_user, login_password = auth_user_pwd(:root)
    su_user = su_password = nil
  end
  [login_user, login_password, su_user, su_password, {}]
end
# Open an SSH (optionally su) session to the host and yield it to the block.
# Private key material is masked before the options are logged.
# NOTE(review): rescues Exception so every failure mode is logged before
# being re-raised to the caller.
def connect_ssh(options = {})
  require 'manageiq-ssh-util'
  rl_user, rl_password, su_user, su_password, additional_options = ssh_users_and_passwords
  options.merge!(additional_options)

  prompt_delay = ::Settings.ssh.try(:authentication_prompt_delay)
  options[:authentication_prompt_delay] = prompt_delay unless prompt_delay.nil?

  users = su_user.nil? ? rl_user : "#{rl_user}/#{su_user}"
  # Obfuscate private keys in the log with ****, so it's visible that field was used, but no user secret is exposed
  logged_options = options.dup
  logged_options[:key_data] = "[FILTERED]" if logged_options[:key_data]

  _log.info("Initiating SSH connection to Host:[#{name}] using [#{hostname}] for user:[#{users}]. Options:[#{logged_options.inspect}]")
  begin
    ManageIQ::SSH::Util.shell_with_su(hostname, rl_user, rl_password, su_user, su_password, options) do |ssu, _shell|
      _log.info("SSH connection established to [#{hostname}]")
      yield(ssu)
    end
    _log.info("SSH connection completed to [#{hostname}]")
  rescue Exception => err
    _log.error("SSH connection failed for [#{hostname}] with [#{err.class}: #{err}]")
    raise err
  end
end
# Refresh installed-patch inventory by running "esxupdate query" over SSH.
# Skipped when the recorded patch level already matches the build number.
# Rows with future install dates or unparseable data are skipped.
def refresh_patches(ssu)
  return unless vmm_buildnumber && vmm_buildnumber != patches.highest_patch_level

  patches = []
  begin
    sb = ssu.shell_exec("esxupdate query")
    t = Time.now
    sb.each_line do |line|
      next if line =~ /-{5,}/ # skip any header/footer rows
      data = line.split(" ")
      # Find the lines we should skip
      begin
        next if data[1, 2].nil?
        dhash = {:name => data[0], :vendor => "VMware", :installed_on => Time.parse(data[1, 2].join(" ")).utc}
        next if dhash[:installed_on] - t >= 0
        dhash[:description] = data[3..-1].join(" ") unless data[3..-1].nil?
        patches << dhash
      rescue ArgumentError => err
        # Unparseable date in this row; log and move on.
        _log.log_backtrace(err)
        next
      rescue => err
        _log.log_backtrace(err)
      end
    end
  rescue
    # best-effort: command/SSH failures fall through and save whatever parsed
  end

  Patch.refresh_patches(self, patches)
end
# List host services via systemd when present, falling back to chkconfig.
def collect_services(ssu)
  services = ssu.shell_exec("systemctl -a --type service --no-legend")
  if services
    # If there is a systemd use only that, chconfig is calling systemd on the background, but has misleading results
    MiqLinux::Utils.parse_systemctl_list(services)
  else
    services = ssu.shell_exec("chkconfig --list")
    MiqLinux::Utils.parse_chkconfig_list(services)
  end
end
# Build a <services> XML document from collect_services and persist it via
# SystemService. Any error aborts silently (best-effort scan step).
def refresh_services(ssu)
  xml = MiqXml.createDoc(:miq).root.add_element(:services)
  services = collect_services(ssu)
  services.each do |service|
    s = xml.add_element(:service,
                        'name' => service[:name],
                        'systemd_load' => service[:systemd_load],
                        'systemd_sub' => service[:systemd_sub],
                        'description' => service[:description],
                        'running' => service[:running],
                        'systemd_active' => service[:systemd_active],
                        'typename' => service[:typename])
    service[:enable_run_level].each { |l| s.add_element(:enable_run_level, 'value' => l) } unless service[:enable_run_level].nil?
    service[:disable_run_level].each { |l| s.add_element(:disable_run_level, 'value' => l) } unless service[:disable_run_level].nil?
  end
  SystemService.add_elements(self, xml.root)
rescue
  # best-effort: swallow errors so a failed step does not abort the scan
end
# Query installed RPMs over SSH (pipe-delimited query format) and persist
# them as GuestApplications. Errors abort silently (best-effort scan step).
def refresh_linux_packages(ssu)
  pkg_xml = MiqXml.createDoc(:miq).root.add_element(:software).add_element(:applications)
  rpm_list = ssu.shell_exec("rpm -qa --queryformat '%{NAME}|%{VERSION}|%{ARCH}|%{GROUP}|%{RELEASE}|%{SUMMARY}\n'").force_encoding("utf-8")
  rpm_list.each_line do |line|
    l = line.split('|')
    pkg_xml.add_element(:application, 'name' => l[0], 'version' => l[1], 'arch' => l[2], 'typename' => l[3], 'release' => l[4], 'description' => l[5])
  end
  GuestApplication.add_elements(self, pkg_xml.root)
rescue
  # best-effort: swallow errors so a failed step does not abort the scan
end
# Collect local users/groups into XML and persist them as Accounts.
def refresh_user_groups(ssu)
  xml = MiqXml.createDoc(:miq)
  node = xml.root.add_element(:accounts)
  MiqLinux::Users.new(ssu).to_xml(node)
  Account.add_elements(self, xml.root)
rescue
  # _log.log_backtrace($!)
end

# Determine the effective PermitRootLogin setting from sshd_config; nil when
# the file could not be read, 'yes' (the sshd default) when no explicit,
# uncommented setting is found.
def refresh_ssh_config(ssu)
  self.ssh_permit_root_login = nil
  permit_list = ssu.shell_exec("grep PermitRootLogin /etc/ssh/sshd_config")
  # Setting default value to yes, which is default according to man sshd_config, if ssh returned something
  self.ssh_permit_root_login = 'yes' if permit_list
  permit_list.each_line do |line|
    la = line.split(' ')
    if la.length == 2
      next if la.first[0, 1] == '#' # skip commented-out directives
      self.ssh_permit_root_login = la.last.to_s.downcase
      break
    end
  end
rescue
  # _log.log_backtrace($!)
end
# Collect filesystem data per the "host default" scan profile and persist it.
def refresh_fs_files(ssu)
  sp = HostScanProfiles.new(ScanItem.get_profile("host default"))
  files = sp.parse_data_files(ssu)
  EmsRefresh.save_filesystems_inventory(self, files) if files
rescue
  # _log.log_backtrace($!)
end
# Refresh power state, MAC address and basic hardware info (manufacturer,
# model) from the IPMI BMC. No-op unless IPMI is configured and reachable.
def refresh_ipmi
  if ipmi_config_valid?
    require 'miq-ipmi'
    address = ipmi_address
    if MiqIPMI.is_available?(address)
      ipmi = MiqIPMI.new(address, *auth_user_pwd(:ipmi))
      if ipmi.connected?
        self.power_state = ipmi.power_state
        mac = ipmi.mac_address
        self.mac_address = mac unless mac.blank?

        hw_info = {:manufacturer => ipmi.manufacturer, :model => ipmi.model}
        if hardware.nil?
          EmsRefresh.save_hardware_inventory(self, hw_info)
        else
          hardware.update(hw_info)
        end
      else
        _log.warn("IPMI Login failed due to a bad username or password.")
      end
    else
      _log.info("IPMI is not available on this Host")
    end
  end
end
def ipmi_config_valid?(include_mac_addr = false)
return false unless (ipmi_address.present? && has_credentials?(:ipmi))
include_mac_addr == true ? mac_address.present? : true
end
alias_method :ipmi_enabled, :ipmi_config_valid?
# Set a vCenter custom attribute on this host (VMware hosts only; requires a
# provider connection).
def set_custom_field(attribute, value)
  return unless is_vmware?
  raise _("Host has no EMS, unable to set custom attribute") unless ext_management_system

  ext_management_system.set_custom_field(self, :attribute => attribute, :value => value)
end

# Provider quick statistics, memoized per instance in @qs. Returns {} when
# quick stats are unsupported or the provider call fails.
def quickStats
  return @qs if @qs
  return {} unless supports?(:quick_stats)

  begin
    raise _("Host has no EMS, unable to get host statistics") unless ext_management_system

    @qs = ext_management_system.host_quick_stats(self)
  rescue => err
    _log.warn("Error '#{err.message}' encountered attempting to get host quick statistics")
    return {}
  end
  @qs
end
# Usage figures from provider quick stats; .to_i yields 0 when the stat is
# absent (e.g. quickStats returned {}).
def current_memory_usage
  quickStats["overallMemoryUsage"].to_i
end

def current_cpu_usage
  quickStats["overallCpuUsage"].to_i
end

# Free memory: installed RAM minus current usage.
def current_memory_headroom
  ram_size - current_memory_usage
end
# Firewall rules live on the operating_system record; none without an OS.
def firewall_rules
  operating_system.nil? ? [] : operating_system.firewall_rules
end
# Raise a policy event for a VM that lives on this host.
def enforce_policy(vm, event)
  inputs = {:vm => vm, :host => self}
  MiqEvent.raise_evm_event(vm, event, inputs)
end

# First classification entry for the given category on this host.
def first_cat_entry(name)
  Classification.first_cat_entry(name, self)
end

# Request a SmartState scan; policy resolution runs before scan_queue.
def scan(userid = "system", options = {})
  _log.info("Requesting scan of #{log_target}")
  check_policy_prevent(:request_host_scan, :scan_queue, userid, options)
end
# Create the tracking MiqTask and queue scan_from_queue in the appropriate
# zone/queue; bails out early when the provider is paused (validate_task).
def scan_queue(userid = 'system', _options = {})
  _log.info("Queuing scan of #{log_target}")

  task = MiqTask.create(:name => "SmartState Analysis for '#{name}' ", :userid => userid)
  return unless validate_task(task)

  timeout = ::Settings.host_scan.queue_timeout.to_i_with_method
  cb = {:class_name => task.class.name, :instance_id => task.id, :method_name => :queue_callback_on_exceptions, :args => ['Finished']}
  MiqQueue.put(
    :class_name => self.class.name,
    :instance_id => id,
    :args => [task.id],
    :method_name => "scan_from_queue",
    :miq_callback => cb,
    :msg_timeout => timeout,
    :role => "ems_operations",
    :queue_name => queue_name_for_ems_operations,
    :zone => my_zone
  )
end
# Worker side of a SmartState scan: refresh firewall rules / advanced
# settings / IPMI data, then (non-ESXi only) SSH-based inventory, then logs,
# and finally save drift state and raise the completion event. The MiqTask,
# when present, is updated at every step; missing hostname or credentials
# end the scan early with a "Warn" status.
def scan_from_queue(taskid = nil)
  unless taskid.nil?
    task = MiqTask.find_by(:id => taskid)
    task.state_active if task
  end

  _log.info("Scanning #{log_target}...")
  task.update_status("Active", "Ok", "Scanning") if task

  _dummy, t = Benchmark.realtime_block(:total_time) do
    if supports?(:refresh_firewall_rules)
      # Firewall Rules and Advanced Settings go through EMS so we don't need Host credentials
      _log.info("Refreshing Firewall Rules for #{log_target}")
      task.update_status("Active", "Ok", "Refreshing Firewall Rules") if task
      Benchmark.realtime_block(:refresh_firewall_rules) { refresh_firewall_rules }
    end

    if supports?(:refresh_advanced_settings)
      _log.info("Refreshing Advanced Settings for #{log_target}")
      task.update_status("Active", "Ok", "Refreshing Advanced Settings") if task
      Benchmark.realtime_block(:refresh_advanced_settings) { refresh_advanced_settings }
    end

    if ext_management_system.nil?
      # Standalone hosts fall back to IPMI for basic inventory.
      _log.info("Refreshing IPMI information for #{log_target}")
      task.update_status("Active", "Ok", "Refreshing IPMI Information") if task
      Benchmark.realtime_block(:refresh_ipmi) { refresh_ipmi }
    end

    save

    # Skip SSH for ESXi hosts
    unless is_vmware_esxi?
      if hostname.blank?
        _log.warn("No hostname defined for #{log_target}")
        task.update_status("Finished", "Warn", "Scanning incomplete due to missing hostname") if task
        return
      end

      update_ssh_auth_status! if respond_to?(:update_ssh_auth_status!)

      if missing_credentials?
        _log.warn("No credentials defined for #{log_target}")
        task.update_status("Finished", "Warn", "Scanning incomplete due to Credential Issue") if task
        return
      end

      begin
        # One SSH session is reused for every SSH-based refresh step below.
        connect_ssh do |ssu|
          _log.info("Refreshing Patches for #{log_target}")
          task.update_status("Active", "Ok", "Refreshing Patches") if task
          Benchmark.realtime_block(:refresh_patches) { refresh_patches(ssu) }

          _log.info("Refreshing Services for #{log_target}")
          task.update_status("Active", "Ok", "Refreshing Services") if task
          Benchmark.realtime_block(:refresh_services) { refresh_services(ssu) }

          _log.info("Refreshing Linux Packages for #{log_target}")
          task.update_status("Active", "Ok", "Refreshing Linux Packages") if task
          Benchmark.realtime_block(:refresh_linux_packages) { refresh_linux_packages(ssu) }

          _log.info("Refreshing User Groups for #{log_target}")
          task.update_status("Active", "Ok", "Refreshing User Groups") if task
          Benchmark.realtime_block(:refresh_user_groups) { refresh_user_groups(ssu) }

          _log.info("Refreshing SSH Config for #{log_target}")
          task.update_status("Active", "Ok", "Refreshing SSH Config") if task
          Benchmark.realtime_block(:refresh_ssh_config) { refresh_ssh_config(ssu) }

          _log.info("Refreshing FS Files for #{log_target}")
          task.update_status("Active", "Ok", "Refreshing FS Files") if task
          Benchmark.realtime_block(:refresh_fs_files) { refresh_fs_files(ssu) }

          if supports?(:refresh_network_interfaces)
            _log.info("Refreshing network interfaces for #{log_target}")
            task.update_status("Active", "Ok", "Refreshing network interfaces") if task
            Benchmark.realtime_block(:refresh_network_interfaces) { refresh_network_interfaces(ssu) }
          end

          # refresh_openstack_services should run after refresh_services and refresh_fs_files
          if respond_to?(:refresh_openstack_services)
            _log.info("Refreshing OpenStack Services for #{log_target}")
            task.update_status("Active", "Ok", "Refreshing OpenStack Services") if task
            Benchmark.realtime_block(:refresh_openstack_services) { refresh_openstack_services(ssu) }
          end

          save
        end
      rescue Net::SSH::HostKeyMismatch
        # Keep from dumping stack trace for this error which is sufficiently logged in the connect_ssh method
      rescue => err
        _log.log_backtrace(err)
      end
    end

    if supports?(:refresh_logs)
      _log.info("Refreshing Log information for #{log_target}")
      task.update_status("Active", "Ok", "Refreshing Log Information") if task
      Benchmark.realtime_block(:refresh_logs) { refresh_logs }
    end

    _log.info("Saving state for #{log_target}")
    task.update_status("Active", "Ok", "Saving Drift State") if task
    Benchmark.realtime_block(:save_driftstate) { save_drift_state }

    begin
      MiqEvent.raise_evm_job_event(self, :type => "scan", :suffix => "complete")
    rescue => err
      _log.warn("Error raising complete scan event for #{log_target}: #{err.message}")
    end
  end

  task.update_status("Finished", "Ok", "Scanning Complete") if task
  _log.info("Scanning #{log_target}...Complete - Timings: #{t.inspect}")
end
# Returns false — after marking +task+ finished-with-error — when the
# host's provider lives in the maintenance zone (i.e. is paused);
# otherwise returns true so the caller may proceed.
def validate_task(task)
  return true unless ext_management_system&.zone == Zone.maintenance_zone

  task.update_status(MiqTask::STATE_FINISHED, MiqTask::STATUS_ERROR, "#{ext_management_system.name} is paused")
  false
end
# Runs +script+ on the host over an SSH session and returns its output.
#
# The explicit +return+ inside the block exits this method with the
# result of shell_exec as soon as the command finishes — the connection
# block never executes anything past the first exec.
def ssh_run_script(script)
  connect_ssh { |ssu| return ssu.shell_exec(script) }
end
# Stamps every event in event_hash[:events] with this host's identity
# (ems_id / host_name / host_id, mutating the hashes in place) and
# forwards each one to EmsEvent.add. A failure on one event is logged
# and does not stop the remaining events.
def add_ems_events(event_hash)
  event_hash[:events].each do |event|
    event.merge!(:ems_id => ems_id, :host_name => name, :host_id => id)
    begin
      EmsEvent.add(ems_id, event)
    rescue => err
      _log.log_backtrace(err)
    end
  end
end
# Virtual columns for folder and datacenter
# Virtual column: name of the owning folder, or "" when the host has none.
def v_owning_folder
  folder = owning_folder
  if folder
    folder.name
  else
    ""
  end
end
# Virtual column: name of the owning datacenter, or "" when unset.
def v_owning_datacenter
  datacenter = owning_datacenter
  if datacenter
    datacenter.name
  else
    ""
  end
end
# All SCSI LUNs reachable through this host's storage adapters,
# flattened into a single Array. Returns [] when the host has no
# hardware inventory.
def miq_scsi_luns
  return [] if hardware.nil?

  hardware.storage_adapters.flat_map do |adapter|
    adapter.miq_scsi_targets.flat_map(&:miq_scsi_luns)
  end
end
# Convenience wrappers around #get_ports: enabled firewall ports
# filtered by traffic direction ("in"/"out") and, optionally, by
# transport protocol ("tcp"/"udp").

def enabled_inbound_ports
  get_ports("in")
end

def enabled_outbound_ports
  get_ports("out")
end

def enabled_tcp_inbound_ports
  get_ports("in", "tcp")
end

def enabled_tcp_outbound_ports
  get_ports("out", "tcp")
end

def enabled_udp_inbound_ports
  get_ports("in", "udp")
end

def enabled_udp_outbound_ports
  get_ports("out", "udp")
end

# Every enabled firewall port — both directions, all protocols.
def all_enabled_ports
  get_ports
end
# Sorted, de-duplicated list of enabled firewall port numbers, optionally
# narrowed by +direction+ and/or +host_protocol+. Port ranges on the
# rules are expanded into individual port numbers. Returns [] when the
# host has no operating-system inventory.
def get_ports(direction = nil, host_protocol = nil)
  os = operating_system
  return [] if os.nil?

  filter = {:enabled => true}
  filter[:direction] = direction if direction
  filter[:host_protocol] = host_protocol if host_protocol

  os.firewall_rules.where(filter).flat_map { |rule| rule.port_range.to_a }.uniq.sort
end
# Sorted, de-duplicated names of the host's system services.
def service_names
  names = system_services.map(&:name)
  names.uniq.sort
end
# Names of services enabled at run level 0 (halt).
def enabled_run_level_0_services
  get_service_names(0)
end
# Names of services enabled at run level 1 (single-user mode).
#
# Fixed: previously called get_service_names(2) — an apparent copy/paste
# of the run-level-2 method — so this always returned run-level-2
# services instead of run-level-1 services.
def enabled_run_level_1_services
  get_service_names(1)
end
# Names of services enabled at the respective SysV run levels 2-6.

def enabled_run_level_2_services
  get_service_names(2)
end

def enabled_run_level_3_services
  get_service_names(3)
end

def enabled_run_level_4_services
  get_service_names(4)
end

def enabled_run_level_5_services
  get_service_names(5)
end

def enabled_run_level_6_services
  get_service_names(6)
end
# Sorted, de-duplicated names of this host's services.
#
# With no arguments returns every host service; with one argument
# returns only services whose enable_run_levels string contains that
# run level.
#
# Fixed: calling with more than one argument used to leave +services+
# nil and die with a confusing NoMethodError on the next line; it now
# raises ArgumentError up front.
def get_service_names(*args)
  raise ArgumentError, "wrong number of arguments (given #{args.length}, expected 0..1)" if args.length > 1

  services = args.empty? ? host_services : host_services.where("enable_run_levels LIKE ?", "%#{args.first}%")
  services.order(:name).uniq.pluck(:name)
end
# SQL condition fragment (Array form: [template, *binds]) selecting
# events that involve this host for the given event association.
# Returns nil for an unrecognized association.
def event_where_clause(assoc = :ems_events)
  association = assoc.to_sym
  if association == :policy_events
    ["host_id = ?", id]
  elsif [:ems_events, :event_streams].include?(association)
    ["host_id = ? OR dest_host_id = ?", id, id]
  end
end
# True when the host has at least one parent linked through the
# "vm_scan_affinity" relationship.
def has_vm_scan_affinity?
  with_relationship_type("vm_scan_affinity") { parent_count > 0 }
end

# Replaces the host's scan-affinity parents with +list+ (a single
# parent or an array of them). Always returns true.
def vm_scan_affinity=(list)
  list = [list].flatten
  with_relationship_type("vm_scan_affinity") do
    remove_all_parents
    list.each { |parent| set_parent(parent) }
  end
  true
end
alias_method :set_vm_scan_affinity, :vm_scan_affinity=

# Parents linked through the "vm_scan_affinity" relationship.
def vm_scan_affinity
  with_relationship_type("vm_scan_affinity") { parents }
end
alias_method :get_vm_scan_affinity, :vm_scan_affinity
# Guest-OS processes; [] when no OS inventory exists.
def processes
  operating_system.try(:processes) || []
end

# Guest-OS event logs; [] when none are recorded.
def event_logs
  operating_system.try(:event_logs) || []
end

# Reads +field+ from the host's default resource pool, or nil when the
# host has no default pool.
def get_reserve(field)
  default_resource_pool.try(:send, field)
end

def cpu_reserve
  get_reserve(:cpu_reserve)
end

def memory_reserve
  get_reserve(:memory_reserve)
end

# Sum of CPU reservations across all of the host's VMs; a VM with a nil
# reservation contributes 0.
def total_vm_cpu_reserve
  vms.inject(0) { |t, vm| t + (vm.cpu_reserve || 0) }
end

# Sum of memory reservations across all of the host's VMs.
def total_vm_memory_reserve
  vms.inject(0) { |t, vm| t + (vm.memory_reserve || 0) }
end
# Ratio of allocated VM vCPUs per physical core (integer division, as
# both operands are integer counts). Returns 0 when the host reports no
# cores, avoiding a ZeroDivisionError.
def vcpus_per_core
  core_count = total_vcpus
  return 0 if core_count == 0

  allocated_vcpus = vms.inject(0) { |sum, vm| sum + (vm.num_cpu || 0) }
  allocated_vcpus / core_count
end
# Domain portion of the host's primary hostname (everything after the
# first dot-separated label), or nil when no hostname is known.
#
# A comma-separated hostname list is reduced to its first entry before
# splitting; a bare hostname with no dots yields "".
def domain
  labels = hostname.to_s.split(',').first.to_s.split('.')
  return nil if labels.blank?

  labels[1..-1].join('.')
end
#
# Metric methods
#

# Child associations whose metrics roll up into this host's rollups.
PERF_ROLLUP_CHILDREN = [:vms]

# Parents that aggregate this host's metrics for the given interval:
# only the cluster for realtime, otherwise the cluster or (failing
# that) the provider itself.
#
# NOTE(review): the 'realtime' branch returns nil — not [] — when the
# host has no cluster, while the other branch always returns an Array;
# confirm callers tolerate both before changing either.
def perf_rollup_parents(interval_name = nil)
  if interval_name == 'realtime'
    [ems_cluster].compact if ems_cluster
  else
    [ems_cluster || ext_management_system].compact
  end
end
# Fetches +metric+ samples for this host over +range+ and optionally
# folds them with +function+.
#
# capture_interval:: 'realtime' reads HostMetric; anything else reads
#                    HostPerformance.
# metric::           performance column name (real or virtual).
# range::            [start, end] pair, a Time/String start (until now),
#                    or an Integer number of seconds to look back.
# function::         nil returns the raw Array of values; :min/:max/:avg
#                    fold it into a single number.
#
# Raises a RuntimeError for an unrecognized +range+ and a translated
# error for an unrecognized +function+.
def get_performance_metric(capture_interval, metric, range, function = nil)
  # => capture_interval = 'realtime' | 'hourly' | 'daily'
  # => metric = perf column name (real or virtual)
  # => function = :avg | :min | :max
  # => range = [start_time, end_time] | start_time | number in seconds to go back
  time_range = if range.kind_of?(Array)
                 range
               elsif range.kind_of?(Time)
                 [range.utc, Time.now.utc]
               elsif range.kind_of?(String)
                 [range.to_time(:utc), Time.now.utc]
               elsif range.kind_of?(Integer)
                 [range.seconds.ago.utc, Time.now.utc]
               else
                 raise "Range #{range} is invalid"
               end
  klass = case capture_interval.to_s
          when 'realtime' then HostMetric
          else HostPerformance
          end
  perfs = klass.where(
    [
      "resource_id = ? AND capture_interval_name = ? AND timestamp >= ? AND timestamp <= ?",
      id,
      capture_interval.to_s,
      time_range[0],
      time_range[1]
    ]
  ).order("timestamp")
  # Realtime v_pct_cpu_* metrics are virtual: derived from the child
  # VMs' samples and matched back to host samples by timestamp
  # (missing timestamps count as 0).
  if capture_interval.to_sym == :realtime && metric.to_s.starts_with?("v_pct_cpu_")
    vm_vals_by_ts = get_pct_cpu_metric_from_child_vm_performances(metric, capture_interval, time_range)
    values = perfs.collect { |p| vm_vals_by_ts[p.timestamp] || 0 }
  else
    values = perfs.collect(&metric.to_sym)
  end
  # => returns value | [array of values] (if function.nil?)
  return values if function.nil?
  case function.to_sym
  when :min, :max then return values.send(function)
  when :avg
    return 0 if values.length == 0
    return (values.compact.sum / values.length)
  else
    raise _("Function %{function} is invalid, should be one of :min, :max, :avg or nil") % {:function => function}
  end
end
# Averages +metric+ across the child VMs' performance samples, grouped
# by sample timestamp.
#
# Returns a Hash of timestamp => average (Float). nil samples are
# dropped from the sum via compact but still count toward the divisor,
# matching the original averaging behavior.
def get_pct_cpu_metric_from_child_vm_performances(metric, capture_interval, time_range)
  klass = capture_interval.to_s == 'realtime' ? VmMetric : VmPerformance

  samples = klass.where(
    "parent_host_id = ? AND capture_interval_name = ? AND timestamp >= ? AND timestamp <= ?",
    id,
    capture_interval.to_s,
    time_range[0],
    time_range[1])

  grouped = samples.each_with_object({}) do |perf, acc|
    (acc[perf.timestamp] ||= []) << perf.send(metric)
  end

  grouped.each_with_object({}) do |(ts, vals), out|
    # A timestamp only appears when at least one sample carried it, so
    # vals is never empty in practice; the guard is defensive.
    out[ts] = vals.empty? ? 0 : (vals.compact.sum / vals.length.to_f)
  end
end
# Host Discovery Types and Platforms

# @deprecated The discovery feature behind this list was removed; the
#   method now always returns []. Kept (and flagged via
#   Vmdb::Deprecation below) only until the UI stops calling it.
def self.host_create_os_types
  # TODO: This feature has been removed, once the UI no longer calls this
  # method we can delete it
  []
end
Vmdb::Deprecation.deprecate_methods(self, :host_create_os_types)
# Storages the host can write to — host_storages rows not marked
# read_only (a NULL read_only counts as writable, hence [false, nil]).
#
# When the associations are already fully loaded in memory the answer
# is computed without touching the database; otherwise a single SQL
# query is issued.
def writable_storages
  if host_storages.loaded? && host_storages.all? { |hs| hs.association(:storage).loaded? }
    host_storages.reject(&:read_only).map(&:storage)
  else
    storages.where(:host_storages => {:read_only => [false, nil]})
  end
end

# Storages mounted read-only on this host; mirrors #writable_storages,
# including the loaded-association fast path.
def read_only_storages
  if host_storages.loaded? && host_storages.all? { |hs| hs.association(:storage).loaded? }
    host_storages.select(&:read_only).map(&:storage)
  else
    storages.where(:host_storages => {:read_only => true})
  end
end
# True when the host is no longer attached to any provider.
#
# Prefers a precomputed "archived" attribute when the query selected
# one; otherwise falls back to checking for a missing ems_id.
def archived
  has_attribute?("archived") ? self["archived"] : ems_id.nil?
end
alias archived? archived
# Lifecycle-aware state string: 'archived' takes precedence over the
# raw power_state, and a missing/blank power_state reports "unknown".
def normalized_state
  if archived?
    'archived'
  elsif power_state.present?
    power_state
  else
    "unknown"
  end
end
# Human-readable model name, pluralized through gettext's n_ helper
# when +number+ is not 1.
def self.display_name(number = 1)
  n_('Host', 'Hosts', number)
end
end
| 34.019321 | 165 | 0.658566 |
1120a41121ded478387b189ef4dc89b45d13889f | 230 | class DeletedRecords::Filter::SinceDate < ApplicationFilter
# Applies the :since_date filter to the deleted-records scope.
#
# Returns the scope untouched when no :since_date was supplied;
# otherwise delegates to the scope's +between_dates+.
def execute_query(deleted_records, filters)
  since = filters[:since_date]
  return deleted_records if since.blank?

  deleted_records.between_dates(since)
end
end
| 32.857143 | 59 | 0.804348 |
015c585899a50a3aff1a4af74e59b70b4a3e15f7 | 369 | cask 'ocenaudio' do
version '3.6.0.1'
sha256 'b44f2f962b9ae204e6e12249d92784c639fc3949000bf834c7a6b80a21752eb8'

url 'https://www.ocenaudio.com/downloads/index.php/ocenaudio_sierra.dmg'
# NOTE(review): this appcast relies on the third-party check_url
# service, a pattern Homebrew has since deprecated — confirm against
# current cask authoring guidelines before updating this cask.
appcast "https://www.corecode.io/cgi-bin/check_urls/check_url_filename.cgi?url=#{url}"
name 'ocenaudio'
homepage 'https://www.ocenaudio.com/en'

app 'ocenaudio.app'
end
| 30.75 | 88 | 0.769648 |
38386015d5fed6b231c12ab9ac57c1023a9a2b50 | 889 | class AdvisorActAsController < ActAsController
include CampusSolutions::ProfileFeatureFlagged
# Re-authentication is enforced when starting view-as, but not when the
# advisor is merely stopping an existing view-as session.
skip_before_filter :check_reauthentication, :only => [:stop_advisor_act_as]

# Tracks the original (pre view-as) advisor's UID under its own
# dedicated session key.
def initialize
  super act_as_session_key: SessionKey.original_advisor_user_id
end
# Authorizes the current user to view-as +uid_param+: only users whose
# Campus Solutions attributes carry the advisor role may proceed.
#
# Raises Pundit::NotAuthorizedError when the profile feature is
# disabled (Campus Solutions unreachable) or the user is not an
# advisor; returns nil on success.
def act_as_authorization(uid_param)
  unless is_cs_profile_feature_enabled
    raise Pundit::NotAuthorizedError.new 'We cannot confirm your role as an Advisor because Campus Solutions is unavailable. Please contact us if the problem persists.'
  end

  user_id = current_user.real_user_id
  user_attributes = HubEdos::UserAttributes.new(user_id: user_id).get
  authorized = user_attributes && user_attributes[:roles] && user_attributes[:roles][:advisor]
  return if authorized

  raise Pundit::NotAuthorizedError.new("User #{user_id} is not an Advisor and thus cannot view-as #{uid_param}")
end
end
| 42.333333 | 170 | 0.779528 |
4a21d24cdb8b276cb19a25f7193e2792c48b5cd1 | 1,405 | class Usuario::RegistrationsController < Devise::RegistrationsController
# before_filter :configure_sign_up_params, only: [:create]
# before_filter :configure_account_update_params, only: [:update]
# NOTE(review): the commented hooks above use the pre-Rails-5 name
# before_filter; use before_action if they are ever re-enabled.

respond_to :json
# GET /resource/sign_up
# def new
# super
# end
# POST /resource
# def create
# super
# end
# GET /resource/edit
# def edit
# super
# end
# PUT /resource
# def update
# super
# end
# DELETE /resource
# def destroy
# super
# end
# GET /resource/cancel
# Forces the session data which is usually expired after sign
# in to be expired now. This is useful if the user wants to
# cancel oauth signing in/up in the middle of the process,
# removing all OAuth session data.
# def cancel
# super
# end
# protected
# If you have extra params to permit, append them to the sanitizer.
# def configure_sign_up_params
# devise_parameter_sanitizer.for(:sign_up) << :attribute
# end
# If you have extra params to permit, append them to the sanitizer.
# def configure_account_update_params
# devise_parameter_sanitizer.for(:account_update) << :attribute
# end
# The path used after sign up.
# def after_sign_up_path_for(resource)
# super(resource)
# end
# The path used after sign up for inactive accounts.
# def after_inactive_sign_up_path_for(resource)
# super(resource)
# end
end
| 22.66129 | 72 | 0.697509 |
33b48c9dcd9f339c79f0d29066b40d41e1e94740 | 3,587 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_10_06_163443) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "comments", force: :cascade do |t|
t.integer "user_id"
t.integer "post_id"
t.text "content"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["post_id"], name: "index_comments_on_post_id"
t.index ["user_id"], name: "index_comments_on_user_id"
end
create_table "friendships", force: :cascade do |t|
t.boolean "status", default: false, null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.integer "inviter_id"
t.integer "invitee_id"
t.index ["invitee_id"], name: "index_friendships_on_invitee_id"
t.index ["inviter_id"], name: "index_friendships_on_inviter_id"
end
create_table "jwt_denylist", force: :cascade do |t|
t.string "jti", null: false
t.datetime "exp", null: false
t.index ["jti"], name: "index_jwt_denylist_on_jti"
end
create_table "likes", force: :cascade do |t|
  t.integer "post_id"
  t.integer "user_id"
  # NOTE(review): unlike the other tables, likes has no timestamps and
  # no unique index on [post_id, user_id], so duplicate likes are
  # representable at the DB level — confirm the model enforces
  # uniqueness. (schema.rb is generated; fix via a migration.)
  t.index ["post_id"], name: "index_likes_on_post_id"
  t.index ["user_id"], name: "index_likes_on_user_id"
end
create_table "posts", force: :cascade do |t|
t.integer "user_id"
t.text "content"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["user_id"], name: "index_posts_on_user_id"
end
create_table "sessions", force: :cascade do |t|
t.string "session_id", null: false
t.text "data"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["session_id"], name: "index_sessions_on_session_id", unique: true
t.index ["updated_at"], name: "index_sessions_on_updated_at"
end
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name"
t.string "gravatar_url"
t.string "provider", limit: 50, default: "", null: false
t.string "uid", limit: 500, default: "", null: false
t.string "confirmation_token"
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
t.string "jti", null: false
t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["jti"], name: "index_users_on_jti", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
end
| 39.417582 | 95 | 0.712016 |
01e717b897a875b8ba08fd94f7ef9c21cb18d39d | 168 | # What is the value of a, b, and c in the following program?
string = "Welcome to America!"

a = string[6]   # single-character string at 0-based index 6
b = string[11]  # first letter of "America"
c = string[19]  # one past the final index (18), so out of range

# a = "e"
# b = "A"
# c = nil (String#[] returns nil for an index past the end)
| 15.272727 | 60 | 0.589286 |
619bc56824453137bc4baeccba1827d21c317704 | 775 | #
# Cookbook Name:: crowbar
# Role:: crowbar-upgrade
#
# Copyright 2013-2014, SUSE LINUX Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Chef role applied to nodes that are being prepared for a Crowbar
# upgrade; it pulls in the single crowbar-upgrade recipe.
name "crowbar-upgrade"
description "Prepare nodes for the crowbar upgrade"
run_list(
  "recipe[crowbar::crowbar-upgrade]"
)
| 29.807692 | 74 | 0.757419 |
39a8f4a6952fdad9219b63be9a9099f5ccd02870 | 41,205 | require 'spec_helper'
require 'request_spec_shared_examples'
RSpec.describe 'Packages' do
# Shared fixtures: an authenticated user acting in +space+, and a
# docker-type app model owned by that space.
let(:email) { '[email protected]' }
let(:user) { VCAP::CloudController::User.make }
let(:user_name) { 'clarence' }
let(:user_header) { headers_for(user, email: email, user_name: user_name) }
let(:space) { VCAP::CloudController::Space.make }
let(:space_guid) { space.guid }
let(:app_model) { VCAP::CloudController::AppModel.make(:docker, space_guid: space_guid) }
describe 'POST /v3/packages' do
let(:guid) { app_model.guid }
before do
space.organization.add_user(user)
space.add_developer(user)
end
let(:type) { 'docker' }
let(:data) { { image: 'registry/image:latest', username: 'my-docker-username', password: 'my-password' } }
let(:expected_data) { { image: 'registry/image:latest', username: 'my-docker-username', password: '***' } }
let(:relationships) { { app: { data: { guid: app_model.guid } } } }
let(:metadata) {
{
labels: {
release: 'stable',
'seriouseats.com/potato' => 'mashed',
},
annotations: {
potato: 'idaho',
},
}
}
describe 'creation' do
it 'creates a package' do
expect {
post '/v3/packages', { type: type, data: data, relationships: relationships, metadata: metadata }.to_json, user_header
}.to change { VCAP::CloudController::PackageModel.count }.by(1)
package = VCAP::CloudController::PackageModel.last
expected_response = {
'guid' => package.guid,
'type' => type,
'data' => {
'image' => 'registry/image:latest',
'username' => 'my-docker-username',
'password' => '***'
},
'state' => 'READY',
'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
'metadata' => { 'labels' => { 'release' => 'stable', 'seriouseats.com/potato' => 'mashed' }, 'annotations' => { 'potato' => 'idaho' } },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/packages/#{package.guid}" },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{guid}" },
}
}
expected_event_metadata = {
package_guid: package.guid,
request: {
type: type,
data: expected_data,
relationships: relationships,
metadata: metadata,
}
}.to_json
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(201)
expect(parsed_response).to be_a_response_like(expected_response)
event = VCAP::CloudController::Event.last
expect(event.values).to include({
type: 'audit.app.package.create',
actor: user.guid,
actor_type: 'user',
actor_name: email,
actor_username: user_name,
actee: package.app.guid,
actee_type: 'app',
actee_name: package.app.name,
metadata: expected_event_metadata,
space_guid: space.guid,
organization_guid: space.organization.guid
})
end
end
describe 'copying' do
let(:target_app_model) { VCAP::CloudController::AppModel.make(space_guid: space_guid) }
let!(:original_package) { VCAP::CloudController::PackageModel.make(type: 'docker', app_guid: app_model.guid, docker_image: 'http://awesome-sauce.com') }
let!(:guid) { target_app_model.guid }
let(:source_package_guid) { original_package.guid }
it 'copies a package' do
expect {
post "/v3/packages?source_guid=#{source_package_guid}",
{
relationships: {
app: { data: { guid: guid } },
}
}.to_json,
user_header
}.to change { VCAP::CloudController::PackageModel.count }.by(1)
package = VCAP::CloudController::PackageModel.last
expected_response = {
'guid' => package.guid,
'type' => 'docker',
'data' => {
'image' => 'http://awesome-sauce.com',
'username' => nil,
'password' => nil,
},
'state' => 'READY',
'relationships' => { 'app' => { 'data' => { 'guid' => target_app_model.guid } } },
'metadata' => { 'labels' => {}, 'annotations' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/packages/#{package.guid}" },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{guid}" },
}
}
expect(last_response.status).to eq(201)
parsed_response = MultiJson.load(last_response.body)
expect(parsed_response).to be_a_response_like(expected_response)
expected_event_metadata = {
package_guid: package.guid,
request: {
source_package_guid: source_package_guid
}
}.to_json
event = VCAP::CloudController::Event.last
expect(event.values).to include({
type: 'audit.app.package.create',
actor: user.guid,
actor_type: 'user',
actor_name: email,
actor_username: user_name,
actee: package.app.guid,
actee_type: 'app',
actee_name: package.app.name,
metadata: expected_event_metadata,
space_guid: space.guid,
organization_guid: space.organization.guid
})
end
end
end
describe 'GET /v3/apps/:guid/packages' do
let(:space) { VCAP::CloudController::Space.make }
let!(:package) { VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, created_at: Time.at(1)) }
let(:app_model) { VCAP::CloudController::AppModel.make(space_guid: space.guid) }
let(:guid) { app_model.guid }
let(:page) { 1 }
let(:per_page) { 2 }
let(:order_by) { '-created_at' }
before do
space.organization.add_user(user)
space.add_developer(user)
end
it 'lists paginated result of all packages for an app' do
package2 = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, created_at: Time.at(2))
expected_response = {
'pagination' => {
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/apps/#{guid}/packages?order_by=-created_at&page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/apps/#{guid}/packages?order_by=-created_at&page=1&per_page=2" },
'next' => nil,
'previous' => nil,
},
'resources' => [
{
'guid' => package2.guid,
'type' => 'bits',
'data' => {
'checksum' => { 'type' => 'sha256', 'value' => nil },
'error' => nil
},
'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
'state' => VCAP::CloudController::PackageModel::CREATED_STATE,
'metadata' => { 'labels' => {}, 'annotations' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/packages/#{package2.guid}" },
'upload' => { 'href' => "#{link_prefix}/v3/packages/#{package2.guid}/upload", 'method' => 'POST' },
'download' => { 'href' => "#{link_prefix}/v3/packages/#{package2.guid}/download" },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{guid}" },
}
},
{
'guid' => package.guid,
'type' => 'bits',
'data' => {
'checksum' => { 'type' => 'sha256', 'value' => nil },
'error' => nil
},
'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
'state' => VCAP::CloudController::PackageModel::CREATED_STATE,
'metadata' => { 'labels' => {}, 'annotations' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/packages/#{package.guid}" },
'upload' => { 'href' => "#{link_prefix}/v3/packages/#{package.guid}/upload", 'method' => 'POST' },
'download' => { 'href' => "#{link_prefix}/v3/packages/#{package.guid}/download" },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{guid}" },
}
},
]
}
get "/v3/apps/#{guid}/packages?page=#{page}&per_page=#{per_page}&order_by=#{order_by}", {}, user_header
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
context 'faceted search' do
it 'filters by types' do
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: VCAP::CloudController::PackageModel::BITS_TYPE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: VCAP::CloudController::PackageModel::BITS_TYPE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: VCAP::CloudController::PackageModel::DOCKER_TYPE)
VCAP::CloudController::PackageModel.make(type: VCAP::CloudController::PackageModel::BITS_TYPE)
get '/v3/packages?types=bits', {}, user_header
expected_pagination = {
'total_results' => 3,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=50&types=bits" },
'last' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=50&types=bits" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].count).to eq(3)
expect(parsed_response['resources'].map { |r| r['type'] }.uniq).to eq(['bits'])
expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by states' do
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, state: VCAP::CloudController::PackageModel::PENDING_STATE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, state: VCAP::CloudController::PackageModel::PENDING_STATE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, state: VCAP::CloudController::PackageModel::READY_STATE)
VCAP::CloudController::PackageModel.make(state: VCAP::CloudController::PackageModel::PENDING_STATE)
get "/v3/apps/#{app_model.guid}/packages?states=PROCESSING_UPLOAD", {}, user_header
expected_pagination = {
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/packages?page=1&per_page=50&states=PROCESSING_UPLOAD" },
'last' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/packages?page=1&per_page=50&states=PROCESSING_UPLOAD" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].count).to eq(2)
expect(parsed_response['resources'].map { |r| r['state'] }.uniq).to eq(['PROCESSING_UPLOAD'])
expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by package guids' do
package1 = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
package2 = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
VCAP::CloudController::PackageModel.make
get "/v3/apps/#{app_model.guid}/packages?guids=#{package1.guid},#{package2.guid}", {}, user_header
expected_pagination = {
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/packages?guids=#{package1.guid}%2C#{package2.guid}&page=1&per_page=50" },
'last' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/packages?guids=#{package1.guid}%2C#{package2.guid}&page=1&per_page=50" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].map { |r| r['guid'] }).to match_array([package1.guid, package2.guid])
expect(parsed_response['pagination']).to eq(expected_pagination)
end
end
end
describe 'GET /v3/packages' do
let(:bits_type) { 'bits' }
let(:docker_type) { 'docker' }
let(:page) { 1 }
let(:per_page) { 2 }
before do
space.organization.add_user(user)
space.add_developer(user)
end
it_behaves_like 'request_spec_shared_examples.rb list query endpoint' do
let(:message) { VCAP::CloudController::PackagesListMessage }
let(:request) { '/v3/packages' }
let(:excluded_params) {
[
:app_guid
]
}
let(:params) do
{
guids: ['foo', 'bar'],
space_guids: ['foo', 'bar'],
organization_guids: ['foo', 'bar'],
app_guids: ['foo', 'bar'],
states: ['foo', 'bar'],
types: ['foo', 'bar'],
page: '2',
per_page: '10',
order_by: 'updated_at',
label_selector: 'foo,bar',
}
end
end
it 'gets all the packages' do
bits_package = VCAP::CloudController::PackageModel.make(type: bits_type, app_guid: app_model.guid)
docker_package = VCAP::CloudController::PackageModel.make(
type: docker_type,
app_guid: app_model.guid,
state: VCAP::CloudController::PackageModel::READY_STATE,
docker_image: 'http://location-of-image.com')
VCAP::CloudController::PackageModel.make(type: docker_type, app_guid: app_model.guid, docker_image: 'http://location-of-image-2.com')
VCAP::CloudController::PackageModel.make(app_guid: VCAP::CloudController::AppModel.make.guid)
expected_response =
{
'pagination' => {
'total_results' => 3,
'total_pages' => 2,
'first' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/packages?page=2&per_page=2" },
'next' => { 'href' => "#{link_prefix}/v3/packages?page=2&per_page=2" },
'previous' => nil,
},
'resources' => [
{
'guid' => bits_package.guid,
'type' => 'bits',
'data' => {
'checksum' => { 'type' => 'sha256', 'value' => nil },
'error' => nil
},
'state' => VCAP::CloudController::PackageModel::CREATED_STATE,
'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
'metadata' => { 'labels' => {}, 'annotations' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/packages/#{bits_package.guid}" },
'upload' => { 'href' => "#{link_prefix}/v3/packages/#{bits_package.guid}/upload", 'method' => 'POST' },
'download' => { 'href' => "#{link_prefix}/v3/packages/#{bits_package.guid}/download" },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{bits_package.app_guid}" },
}
},
{
'guid' => docker_package.guid,
'type' => 'docker',
'data' => {
'image' => 'http://location-of-image.com',
'username' => nil,
'password' => nil,
},
'state' => VCAP::CloudController::PackageModel::READY_STATE,
'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
'metadata' => { 'labels' => {}, 'annotations' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/packages/#{docker_package.guid}" },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{docker_package.app_guid}" },
}
}
]
}
get '/v3/packages', { per_page: per_page }, user_header
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
context 'faceted search' do
it 'filters by types' do
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: VCAP::CloudController::PackageModel::BITS_TYPE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: VCAP::CloudController::PackageModel::BITS_TYPE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: VCAP::CloudController::PackageModel::DOCKER_TYPE)
another_app_in_same_space = VCAP::CloudController::AppModel.make(space_guid: space_guid)
VCAP::CloudController::PackageModel.make(app_guid: another_app_in_same_space.guid, type: VCAP::CloudController::PackageModel::BITS_TYPE)
get '/v3/packages?types=bits', {}, user_header
expected_pagination = {
'total_results' => 3,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=50&types=bits" },
'last' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=50&types=bits" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].count).to eq(3)
expect(parsed_response['resources'].map { |r| r['type'] }.uniq).to eq(['bits'])
expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by states' do
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, state: VCAP::CloudController::PackageModel::PENDING_STATE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, state: VCAP::CloudController::PackageModel::PENDING_STATE)
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, state: VCAP::CloudController::PackageModel::READY_STATE)
another_app_in_same_space = VCAP::CloudController::AppModel.make(space_guid: space_guid)
VCAP::CloudController::PackageModel.make(app_guid: another_app_in_same_space.guid, state: VCAP::CloudController::PackageModel::PENDING_STATE)
get '/v3/packages?states=PROCESSING_UPLOAD', {}, user_header
expected_pagination = {
'total_results' => 3,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=50&states=PROCESSING_UPLOAD" },
'last' => { 'href' => "#{link_prefix}/v3/packages?page=1&per_page=50&states=PROCESSING_UPLOAD" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].count).to eq(3)
expect(parsed_response['resources'].map { |r| r['state'] }.uniq).to eq(['PROCESSING_UPLOAD'])
expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by app guids' do
app_model2 = VCAP::CloudController::AppModel.make(space_guid: space_guid)
package1 = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
package2 = VCAP::CloudController::PackageModel.make(app_guid: app_model2.guid)
VCAP::CloudController::PackageModel.make
get "/v3/packages?app_guids=#{app_model.guid},#{app_model2.guid}", {}, user_header
expected_pagination = {
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/packages?app_guids=#{app_model.guid}%2C#{app_model2.guid}&page=1&per_page=50" },
'last' => { 'href' => "#{link_prefix}/v3/packages?app_guids=#{app_model.guid}%2C#{app_model2.guid}&page=1&per_page=50" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].map { |r| r['guid'] }).to match_array([package1.guid, package2.guid])
expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by package guids' do
  # Both requested packages live in a second app; a third, unrequested
  # package exists to prove the guids filter narrows the result.
  other_app = VCAP::CloudController::AppModel.make(space_guid: space_guid)
  first_package = VCAP::CloudController::PackageModel.make(app_guid: other_app.guid)
  second_package = VCAP::CloudController::PackageModel.make(app_guid: other_app.guid)
  VCAP::CloudController::PackageModel.make

  get "/v3/packages?guids=#{first_package.guid},#{second_package.guid}", {}, user_header

  page_href = "#{link_prefix}/v3/packages?guids=#{first_package.guid}%2C#{second_package.guid}&page=1&per_page=50"
  expected_pagination = {
    'total_results' => 2,
    'total_pages' => 1,
    'first' => { 'href' => page_href },
    'last' => { 'href' => page_href },
    'next' => nil,
    'previous' => nil
  }

  parsed_response = MultiJson.load(last_response.body)

  expect(last_response.status).to eq(200)
  expect(parsed_response['resources'].map { |r| r['guid'] }).to match_array([first_package.guid, second_package.guid])
  expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by space guids' do
  # Expect the packages from the default space and a second space; a third
  # space's package must be filtered out.
  package_in_default_space = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)

  second_space = VCAP::CloudController::Space.make(organization: space.organization)
  second_space.add_developer(user)
  app_in_second_space = VCAP::CloudController::AppModel.make(space_guid: second_space.guid)
  package_in_second_space = VCAP::CloudController::PackageModel.make(app_guid: app_in_second_space.guid)

  third_space = VCAP::CloudController::Space.make(organization: space.organization)
  third_space.add_developer(user)
  app_in_third_space = VCAP::CloudController::AppModel.make(space_guid: third_space.guid)
  VCAP::CloudController::PackageModel.make(app_guid: app_in_third_space.guid)

  get "/v3/packages?space_guids=#{second_space.guid},#{space_guid}", {}, user_header

  page_href = "#{link_prefix}/v3/packages?page=1&per_page=50&space_guids=#{second_space.guid}%2C#{space_guid}"
  expected_pagination = {
    'total_results' => 2,
    'total_pages' => 1,
    'first' => { 'href' => page_href },
    'last' => { 'href' => page_href },
    'next' => nil,
    'previous' => nil
  }

  parsed_response = MultiJson.load(last_response.body)

  expect(last_response.status).to eq(200)
  expect(parsed_response['resources'].map { |r| r['guid'] }).to match_array([package_in_second_space.guid, package_in_default_space.guid])
  expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by org guids' do
  # Packages in two requested orgs should match; a third org's package must not.
  first_org_guid = space.organization.guid
  package_in_first_org = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)

  second_space = VCAP::CloudController::Space.make
  second_org_guid = second_space.organization.guid
  app_in_second_org = VCAP::CloudController::AppModel.make(space_guid: second_space.guid)
  second_space.organization.add_user(user)
  second_space.add_developer(user)
  package_in_second_org = VCAP::CloudController::PackageModel.make(app_guid: app_in_second_org.guid)

  third_space = VCAP::CloudController::Space.make
  third_space.organization.add_user(user)
  third_space.add_developer(user)
  app_in_third_org = VCAP::CloudController::AppModel.make(space_guid: third_space.guid)
  VCAP::CloudController::PackageModel.make(app_guid: app_in_third_org.guid)

  get "/v3/packages?organization_guids=#{first_org_guid},#{second_org_guid}", {}, user_header

  page_href = "#{link_prefix}/v3/packages?organization_guids=#{first_org_guid}%2C#{second_org_guid}&page=1&per_page=50"
  expected_pagination = {
    'total_results' => 2,
    'total_pages' => 1,
    'first' => { 'href' => page_href },
    'last' => { 'href' => page_href },
    'next' => nil,
    'previous' => nil
  }

  parsed_response = MultiJson.load(last_response.body)

  expect(last_response.status).to eq(200)
  expect(parsed_response['resources'].map { |r| r['guid'] }).to match_array([package_in_first_org.guid, package_in_second_org.guid])
  expect(parsed_response['pagination']).to eq(expected_pagination)
end
it 'filters by label selectors' do
  # Three packages exist; only the labelled one should match the selector.
  labelled_package = VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
  VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
  VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
  VCAP::CloudController::PackageLabelModel.make(key_name: 'fruit', value: 'strawberry', package: labelled_package)

  get '/v3/packages?label_selector=fruit=strawberry', {}, user_header

  page_href = "#{link_prefix}/v3/packages?label_selector=fruit%3Dstrawberry&page=1&per_page=50"
  expected_pagination = {
    'total_results' => 1,
    'total_pages' => 1,
    'first' => { 'href' => page_href },
    'last' => { 'href' => page_href },
    'next' => nil,
    'previous' => nil
  }

  parsed_response = MultiJson.load(last_response.body)

  expect(last_response.status).to eq(200)
  expect(parsed_response['resources'].count).to eq(1)
  expect(parsed_response['resources'][0]['guid']).to eq(labelled_package.guid)
  expect(parsed_response['pagination']).to eq(expected_pagination)
end
end
end
describe 'GET /v3/packages/:guid' do
  let(:space) { VCAP::CloudController::Space.make }
  let(:app_model) { VCAP::CloudController::AppModel.make(space_guid: space.guid) }
  let(:package_model) do
    VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
  end

  let(:guid) { package_model.guid }
  let(:space_guid) { space.guid }

  before do
    space.organization.add_user user
    space.add_developer user
  end

  it 'gets a package' do
    # Full representation of a freshly-created (CREATED state) package.
    expected_response = {
      'type' => package_model.type,
      'guid' => guid,
      'data' => {
        'checksum' => { 'type' => 'sha256', 'value' => nil },
        'error' => nil
      },
      'state' => VCAP::CloudController::PackageModel::CREATED_STATE,
      'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
      'metadata' => { 'labels' => {}, 'annotations' => {} },
      'created_at' => iso8601,
      'updated_at' => iso8601,
      'links' => {
        'self' => { 'href' => "#{link_prefix}/v3/packages/#{guid}" },
        'upload' => { 'href' => "#{link_prefix}/v3/packages/#{guid}/upload", 'method' => 'POST' },
        'download' => { 'href' => "#{link_prefix}/v3/packages/#{guid}/download" },
        'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
      }
    }

    # Fix: path previously lacked its leading slash ("v3/packages/..."),
    # unlike every other request in this file.
    get "/v3/packages/#{guid}", {}, user_header

    parsed_response = MultiJson.load(last_response.body)

    expect(last_response.status).to eq(200)
    expect(parsed_response).to be_a_response_like(expected_response)
  end
end
describe 'POST /v3/packages/:guid/upload' do
  let(:type) { 'bits' }
  let!(:package_model) do
    VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: type)
  end
  let(:space) { VCAP::CloudController::Space.make }
  let(:app_model) { VCAP::CloudController::AppModel.make(guid: 'woof', space_guid: space.guid, name: 'meow') }
  let(:guid) { package_model.guid }
  let(:tmpdir) { Dir.mktmpdir }

  before do
    space.organization.add_user(user)
    space.add_developer(user)
    TestConfig.override(directories: { tmpdir: tmpdir })
  end

  # Default upload payload; contexts below override it with resource matching
  # lists in the v2 and v3 wire formats.
  let(:packages_params) do
    {
      bits_name: 'application.zip',
      bits_path: "#{tmpdir}/application.zip",
    }
  end

  # Asserts that an upload enqueues exactly one job, returns the PENDING
  # package representation, and records an audit.app.package.upload event.
  shared_examples :upload_bits_successfully do
    it 'uploads the bits for the package' do
      expect(Delayed::Job.count).to eq 0

      post "/v3/packages/#{guid}/upload", packages_params.to_json, user_header
      expect(Delayed::Job.count).to eq 1

      expected_response = {
        'type' => package_model.type,
        'guid' => guid,
        'data' => {
          'checksum' => { 'type' => 'sha256', 'value' => nil },
          'error' => nil
        },
        'state' => VCAP::CloudController::PackageModel::PENDING_STATE,
        'relationships' => { 'app' => { 'data' => { 'guid' => app_model.guid } } },
        'metadata' => { 'labels' => {}, 'annotations' => {} },
        'created_at' => iso8601,
        'updated_at' => iso8601,
        'links' => {
          'self' => { 'href' => "#{link_prefix}/v3/packages/#{guid}" },
          'upload' => { 'href' => "#{link_prefix}/v3/packages/#{guid}/upload", 'method' => 'POST' },
          'download' => { 'href' => "#{link_prefix}/v3/packages/#{guid}/download" },
          'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
        }
      }
      parsed_response = MultiJson.load(last_response.body)
      expect(last_response.status).to eq(200)
      expect(parsed_response).to be_a_response_like(expected_response)

      expected_metadata = { package_guid: package_model.guid }.to_json
      event = VCAP::CloudController::Event.last
      expect(event.values).to include({
        type: 'audit.app.package.upload',
        actor: user.guid,
        actor_type: 'user',
        actor_name: email,
        actor_username: user_name,
        actee: 'woof',
        actee_type: 'app',
        actee_name: 'meow',
        metadata: expected_metadata,
        space_guid: space.guid,
        organization_guid: space.organization.guid
      })
    end
  end

  # Fix: the default-parameters happy path previously duplicated the shared
  # example's body verbatim; include the shared example instead.
  include_examples :upload_bits_successfully

  context 'with v2 resources' do
    let(:packages_params) do
      {
        bits_name: 'application.zip',
        bits_path: "#{tmpdir}/application.zip",
        resources: '[{"fn":"path/to/content.txt","size":123,"sha1":"b907173290db6a155949ab4dc9b2d019dea0c901"},
{"fn":"path/to/code.jar","size":123,"sha1":"ff84f89760317996b9dd180ab996b079f418396f"},
{"fn":"path/to/code.jar","size":123,"sha1":"ff84f89760317996b9dd180ab996b079f418396f","mode":"644"}]'
      }
    end

    include_examples :upload_bits_successfully
  end

  context 'with v3 resources' do
    let(:packages_params) do
      {
        bits_name: 'application.zip',
        bits_path: "#{tmpdir}/application.zip",
        resources: '[{"path":"path/to/content.txt","size_in_bytes":123,"checksum": { "value" : "b907173290db6a155949ab4dc9b2d019dea0c901" }},
{"path":"path/to/code.jar","size_in_bytes":123,"checksum": { "value" : "ff84f89760317996b9dd180ab996b079f418396f" }},
{"path":"path/to/code.jar","size_in_bytes":123,"checksum": { "value" : "ff84f89760317996b9dd180ab996b079f418396f" },"mode":"644"}]'
      }
    end

    include_examples :upload_bits_successfully
  end

  context 'telemetry' do
    it 'should log the required fields when the package uploads' do
      # Freeze time so the telemetry-time field can be asserted exactly.
      Timecop.freeze do
        expected_json = {
          'telemetry-source' => 'cloud_controller_ng',
          'telemetry-time' => Time.now.to_datetime.rfc3339,
          'upload-package' => {
            'api-version' => 'v3',
            'app-id' => Digest::SHA256.hexdigest(app_model.guid),
            'user-id' => Digest::SHA256.hexdigest(user.guid),
          }
        }
        expect_any_instance_of(ActiveSupport::Logger).to receive(:info).with(JSON.generate(expected_json))

        post "/v3/packages/#{guid}/upload", packages_params.to_json, user_header
        expect(last_response.status).to eq(200)
      end
    end
  end
end
describe 'GET /v3/packages/:guid/download' do
let(:type) { 'bits' }
let!(:package_model) do
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid, type: type)
end
let(:app_model) do
VCAP::CloudController::AppModel.make(guid: 'woof-guid', space_guid: space.guid, name: 'meow')
end
let(:space) { VCAP::CloudController::Space.make }
# The blobstore URL the download endpoint is expected to redirect to.
let(:bits_download_url) { CloudController::DependencyLocator.instance.blobstore_url_generator.package_download_url(package_model) }
let(:guid) { package_model.guid }
# A real (tiny) zip on disk so the upload in the before block succeeds.
let(:temp_file) do
file = File.join(Dir.mktmpdir, 'application.zip')
TestZip.create(file, 1, 1024)
file
end
let(:upload_body) do
{
bits_name: 'application.zip',
bits_path: temp_file,
}
end
# Upload the bits and drain the Delayed::Job queue so the package is fully
# processed before the download request runs.
before do
TestConfig.override(directories: { tmpdir: File.dirname(temp_file) })
space.organization.add_user(user)
space.add_developer(user)
post "/v3/packages/#{guid}/upload", upload_body.to_json, user_header
Delayed::Worker.new.work_off
end
it 'downloads the bit(s) for a package' do
# NOTE(review): Timecop.freeze presumably stabilizes any signed/expiring
# URL in the Location header -- confirm against the blobstore generator.
Timecop.freeze do
get "/v3/packages/#{guid}/download", {}, user_header
# Download is a redirect to the blobstore, not a direct byte stream.
expect(last_response.status).to eq(302)
expect(last_response.headers['Location']).to eq(bits_download_url)
# An audit event is recorded for the download.
expected_metadata = { package_guid: package_model.guid }.to_json
event = VCAP::CloudController::Event.last
expect(event.values).to include({
type: 'audit.app.package.download',
actor: user.guid,
actor_type: 'user',
actor_name: email,
actor_username: user_name,
actee: 'woof-guid',
actee_type: 'app',
actee_name: 'meow',
metadata: expected_metadata,
space_guid: space.guid,
organization_guid: space.organization.guid
})
end
end
end
describe 'PATCH /v3/packages/:guid' do
  let(:app_name) { 'sir meow' }
  let(:app_guid) { 'meow-the-guid' }
  let(:space) { VCAP::CloudController::Space.make }
  let(:app_model) { VCAP::CloudController::AppModel.make(space_guid: space.guid, name: app_name, guid: app_guid) }
  let!(:package_model) { VCAP::CloudController::PackageModel.make(app_guid: app_model.guid) }
  let(:guid) { package_model.guid }

  # Request payload: one plain label, one prefixed label, one annotation.
  let(:metadata) do
    {
      labels: {
        release: 'stable',
        'seriouseats.com/potato' => 'mashed'
      },
      annotations: { 'checksum' => 'SHA' }
    }
  end

  before do
    space.organization.add_user user
    space.add_developer user
  end

  it 'updates package metadata' do
    patch "/v3/packages/#{guid}", { metadata: metadata }.to_json, user_header

    # Symbol keys are serialized to JSON, so the API echoes them back as strings.
    expected_metadata = {
      'labels' => {
        'release' => 'stable',
        'seriouseats.com/potato' => 'mashed'
      },
      'annotations' => { 'checksum' => 'SHA' }
    }

    parsed_response = MultiJson.load(last_response.body)
    expect(last_response.status).to eq(200)
    expect(parsed_response['metadata']).to eq(expected_metadata)
  end
end
describe 'DELETE /v3/packages/:guid' do
let(:app_name) { 'sir meow' }
let(:app_guid) { 'meow-the-guid' }
let(:space) { VCAP::CloudController::Space.make }
let(:app_model) { VCAP::CloudController::AppModel.make(space_guid: space.guid, name: app_name, guid: app_guid) }
let!(:package_model) do
VCAP::CloudController::PackageModel.make(app_guid: app_model.guid)
end
let(:guid) { package_model.guid }
before do
space.organization.add_user user
space.add_developer user
end
it 'deletes a package asynchronously' do
delete "/v3/packages/#{guid}", {}, user_header
# Async deletion: 202 with a pollable job URL in the Location header.
expect(last_response.status).to eq(202)
expect(last_response.body).to eq('')
expect(last_response.header['Location']).to match(%r(jobs/[a-fA-F0-9-]+))
# NOTE(review): two job successes are expected -- presumably the delete job
# plus a follow-on (e.g. blobstore cleanup); confirm against the job code.
execute_all_jobs(expected_successes: 2, expected_failures: 0)
# Once the jobs have run, the package is gone.
get "/v3/packages/#{guid}", {}, user_header
expect(last_response.status).to eq(404)
# Deletion is audited.
expected_metadata = { package_guid: guid }.to_json
event = VCAP::CloudController::Event.last
expect(event.values).to include({
type: 'audit.app.package.delete',
actor: user.guid,
actor_type: 'user',
actor_name: email,
actor_username: user_name,
actee: app_guid,
actee_type: 'app',
actee_name: app_name,
metadata: expected_metadata,
space_guid: space.guid,
organization_guid: space.organization.guid
})
end
context 'deleting metadata' do
# Shared examples verifying labels/annotations are cleaned up with the resource.
it_behaves_like 'resource with metadata' do
let(:resource) { package_model }
let(:api_call) do
-> { delete "/v3/packages/#{resource.guid}", nil, user_header }
end
end
end
end
describe 'PATCH /internal/v4/packages/:guid' do
  let!(:package_model) { VCAP::CloudController::PackageModel.make(state: VCAP::CloudController::PackageModel::PENDING_STATE) }
  let(:guid) { package_model.guid }

  # Internal payload: mark the package READY and attach both checksums.
  let(:body) do
    {
      'state' => 'READY',
      'checksums' => [
        { 'type' => 'sha1', 'value' => 'potato' },
        { 'type' => 'sha256', 'value' => 'potatoest' }
      ]
    }.to_json
  end

  it 'updates a package' do
    # No user header: this is the unauthenticated internal endpoint.
    patch "/internal/v4/packages/#{guid}", body

    expect(last_response.status).to eq(204)
    expect(last_response.body).to eq('')
  end
end
end
| 40.837463 | 158 | 0.576556 |
184a7c571e71983e8a0383f5579cf781f824a9c1 | 1,425 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-amplifybackend/types'
require_relative 'aws-sdk-amplifybackend/client_api'
require_relative 'aws-sdk-amplifybackend/client'
require_relative 'aws-sdk-amplifybackend/errors'
require_relative 'aws-sdk-amplifybackend/resource'
require_relative 'aws-sdk-amplifybackend/customizations'
# This module provides support for AmplifyBackend. This module is available in the
# `aws-sdk-amplifybackend` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# amplify_backend = Aws::AmplifyBackend::Client.new
# resp = amplify_backend.clone_backend(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AmplifyBackend are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::AmplifyBackend::Errors::ServiceError
# # rescues all AmplifyBackend API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::AmplifyBackend
# Version of the aws-sdk-amplifybackend gem; maintained by the SDK code
# generator (see the generated-code warning at the top of this file).
GEM_VERSION = '1.8.0'
end
| 26.388889 | 82 | 0.756491 |
4ad6f38e2b3b0bc7e7377a20c0e87eedd74b9d19 | 238 | execute 'apt-get make' do
# Installs make at compile time: action :nothing plus run_action(:run) forces
# execution during Chef's compile phase, before normal convergence.
# NOTE(review): runs without -y, so apt may prompt and fail when
# non-interactive; ignore_failure then masks the failure -- confirm intent.
command 'apt-get install make'
ignore_failure true
action :nothing
end.run_action(:run)
# Refreshes the apt package index, also at compile time.
# NOTE(review): this runs *after* the install above -- confirm the ordering
# is intentional (an update usually precedes installs).
execute 'apt-get-update' do
command 'apt-get update'
ignore_failure true
action :nothing
end.run_action(:run)
| 18.307692 | 32 | 0.743697 |
e889a87172155f7bfa1dba4090c2e6b164fee71d | 355 | # frozen_string_literal: true
# Generated via
# `rails generate hyrax:work_resource CollectionResource`
require 'rails_helper'
require 'valkyrie/specs/shared_specs'
RSpec.describe CollectionResourceForm do
# The Valkyrie shared spec expects a `change_set` wrapping a resource.
let(:change_set) { described_class.new(resource) }
let(:resource) { CollectionResource.new }
it_behaves_like 'a Valkyrie::ChangeSet'
end
| 25.357143 | 58 | 0.791549 |
bb944812f217dac8004ab206e52192f67769f8ac | 9,090 | require "association_cache"
require "fileutils"
class Course < ApplicationRecord
# Autolab course model. Owns assessments, enrollments (course_user_data),
# penalties, and an on-disk directory/config under courses/<name>.
trim_field :name, :semester, :display_name
validates_uniqueness_of :name
validates_presence_of :display_name, :start_date, :end_date
validates_presence_of :late_slack, :grace_days, :late_penalty, :version_penalty
validates_numericality_of :grace_days, greater_than_or_equal_to: 0
# NOTE(review): -1 appears to act as a "no threshold" sentinel -- confirm.
validates_numericality_of :version_threshold, only_integer: true, greater_than_or_equal_to: -1
validate :order_of_dates
# Name doubles as directory name and URL segment (see to_param), so it is
# restricted to word characters and dashes at creation time.
validates_format_of :name, with: /\A(\w|-)+\z/, on: :create
# validates course website format if there exists one
validate :valid_website?
has_many :course_user_data, dependent: :destroy
has_many :assessments, dependent: :destroy
has_many :scheduler, dependent: :destroy
has_many :announcements, dependent: :destroy
has_many :attachments, dependent: :destroy
belongs_to :late_penalty, class_name: "Penalty"
belongs_to :version_penalty, class_name: "Penalty"
has_many :assessment_user_data, through: :assessments
has_many :submissions, through: :assessments
has_many :watchlist_instances, dependent: :destroy
has_many :risk_conditions, dependent: :destroy
accepts_nested_attributes_for :late_penalty, :version_penalty
# Grace-day/late-slack changes invalidate cached grace-day usage ("cgdubs")
# before save, and refresh the related watchlist instances after save.
before_save :cgdub_dependencies_updated, if: :grace_days_or_late_slack_changed?
after_save :update_course_gdu_watchlist_instances, if: :saved_change_to_grace_days_or_late_slack?
after_save :update_course_grade_watchlist_instances, if: :saved_change_to_grade_related_fields?
before_create :cgdub_dependencies_updated
after_create :init_course_folder
# Create a course with name, semester, and instructor email
# all other fields are filled in automatically
#
# Creates the course, its instructor user (if needed), the instructor's
# enrollment (CUD), and loads the course config; rolls back all prior steps
# and raises a RuntimeError on any failure.
def self.quick_create(unique_name, semester, instructor_email)
newCourse = Course.new(name: unique_name, semester: semester)
newCourse.display_name = newCourse.name
# fill temporary values in other fields
newCourse.late_slack = 0
newCourse.grace_days = 0
newCourse.start_date = Time.now
newCourse.end_date = Time.now
newCourse.late_penalty = Penalty.new
newCourse.late_penalty.kind = "points"
newCourse.late_penalty.value = "0"
newCourse.version_penalty = Penalty.new
newCourse.version_penalty.kind = "points"
newCourse.version_penalty.value = "0"
if not newCourse.save
raise "Failed to create course #{newCourse.name}: #{newCourse.errors.full_messages.join(", ")}"
end
# Check instructor
instructor = User.where(email: instructor_email).first
# create a new user as instructor if didn't exist
if instructor.nil?
begin
instructor = User.instructor_create(instructor_email,
newCourse.name)
rescue Exception => e
# roll back course creation
newCourse.destroy
raise "Failed to create instructor for course: #{e}"
end
end
# Create CUD
newCUD = newCourse.course_user_data.new
newCUD.user = instructor
newCUD.instructor = true
if not newCUD.save
# roll back course creation
newCourse.destroy
raise "Failed to create CUD for instructor of new course #{newCourse.name}"
end
# Load course config
if not newCourse.reload_course_config
# roll back course and CUD creation
newCUD.destroy
newCourse.destroy
raise "Failed to load course config for new course #{newCourse.name}"
end
return newCourse
end
# generate course folder
# (after_create hook) Seeds courses/<name>/ with a log file and a copy of the
# default course.rb, and ensures the shared config/gradebook dirs exist.
def init_course_folder
course_dir = Rails.root.join("courses", name)
FileUtils.mkdir_p course_dir
FileUtils.touch File.join(course_dir, "autolab.log")
course_rb = File.join(course_dir, "course.rb")
default_course_rb = Rails.root.join("lib", "__defaultCourse.rb")
FileUtils.cp default_course_rb, course_rb
FileUtils.mkdir_p Rails.root.join("assessmentConfig")
FileUtils.mkdir_p Rails.root.join("courseConfig")
FileUtils.mkdir_p Rails.root.join("gradebooks")
end
# Validation: start_date must not be after end_date.
def order_of_dates
errors.add(:start_date, "must come before end date") if start_date > end_date
end
# Validation: website is optional, but when present must start with https://.
def valid_website?
if website.nil? || website.eql?("")
return true
else
if website[0..7].eql?("https://")
return true
else
errors.add("website", "needs to start with https://")
return false
end
end
end
# :upcoming, :current, or :completed relative to +now+.
def temporal_status(now = DateTime.now)
if now < start_date
:upcoming
elsif now > end_date
:completed
else
:current
end
end
# Assessments whose [start_at, end_at] window contains +now+.
def current_assessments(now = DateTime.now)
assessments.where("start_at < :now AND end_at > :now", now: now)
end
# Display name, with "(semester)" appended when a semester is set.
def full_name
if semester.to_s.size > 0
display_name + " (" + semester + ")"
else
display_name
end
end
# Rewrites courses/<name>/course.rb into courseConfig/<sanitized>.rb wrapped
# in a per-course module including CourseBase, loads it, and returns the
# module. NOTE(review): uses load + eval on instructor-authored code; relies
# on the name sanitization above.
def reload_config_file
course = name.gsub(/[^A-Za-z0-9]/, "")
src = Rails.root.join("courses", name, "course.rb")
dest = Rails.root.join("courseConfig/", "#{course}.rb")
s = File.open(src, "r")
lines = s.readlines
s.close
d = File.open(dest, "w")
d.write("require 'CourseBase.rb'\n\n")
d.write("module Course" + course.camelize + "\n")
d.write("\tinclude CourseBase\n\n")
for line in lines do
if line.length > 0
d.write("\t" + line)
else
d.write(line)
end
end
d.write("end")
d.close
load(dest)
eval("Course#{course.camelize}")
end
# reload_course_config
# Reload the course config file and extend the loaded methods
# to AdminsController
# Returns true on success, false on any error (stored in @error).
def reload_course_config
mod = nil
begin
mod = reload_config_file
rescue Exception => @error
return false
end
AdminsController.extend(mod)
true
end
# Course name with everything but alphanumerics stripped (used for config
# file and module names).
def sanitized_name
name.gsub(/[^A-Za-z0-9]/, "")
end
# Forces recomputation of cached grace-day usage and refreshes watchlists.
def invalidate_cgdubs
cgdub_dependencies_updated
save!
update_course_gdu_watchlist_instances
end
# Update the grace day usage condition watchlist instances for each course user datum
# This is called when:
# - Grace days or late slack have been changed and the record is saved
# - invalidate_cgdubs is somehow incurred
def update_course_gdu_watchlist_instances
WatchlistInstance.update_course_gdu_watchlist_instances(self)
end
# Update the grade related condition watchlist instances for each course user datum
# This is called when:
# - Fields related to grades are changed in the course setting
# - Assessment setting is changed and assessment has passed end_at
def update_course_grade_watchlist_instances
WatchlistInstance.update_course_grade_watchlist_instances(self)
end
def update_course_no_submissions_watchlist_instances(course_assistant=nil)
WatchlistInstance.update_course_no_submissions_watchlist_instances(self, course_assistant)
end
# NOTE: Needs to be updated as new items are cached
def invalidate_caches
# cgdubs
invalidate_cgdubs
# raw_scores
# NOTE: keep in sync with assessment#invalidate_raw_scores
assessments.update_all(updated_at: Time.now)
end
# Memoized course-config module (loaded lazily via config!).
def config
@config ||= config!
end
# return all CUDs that are not course_assistants, instructors, or dropped
# TODO: should probably exclude adminstrators, but the fact that admins are in
# the User model instead of CourseUserDatum that difficult
def students
course_user_data.where(course_assistant: false, instructor: false, dropped: [false, nil])
end
# return all CUDs that are instructors
def instructors
course_user_data.where(instructor: true)
end
# Sorted list of distinct assessment category names.
def assessment_categories
assessments.pluck("DISTINCT category_name").sort
end
# Assessments in a category; students only see released ones.
def assessments_with_category(cat_name, isStudent = false)
if isStudent
assessments.where(category_name: cat_name).ordered.released
else
assessments.where(category_name: cat_name).ordered
end
end
# Courses are addressed by name in URLs.
def to_param
name
end
# Ordered assessments due strictly before +date+.
def asmts_before_date(date)
asmts = self.assessments.ordered
asmts_before_date = asmts.where("due_at < ?", date)
return asmts_before_date
end
private
def saved_change_to_grade_related_fields?
return (saved_change_to_late_slack? or saved_change_to_grace_days? or
saved_change_to_version_threshold? or saved_change_to_late_penalty_id? or
saved_change_to_version_penalty_id?)
end
def grace_days_or_late_slack_changed?
return (grace_days_changed? or late_slack_changed?)
end
def saved_change_to_grace_days_or_late_slack?
return (saved_change_to_grace_days? or saved_change_to_late_slack?)
end
# Timestamp used to invalidate cached grace-day usage (not saved here;
# callers save).
def cgdub_dependencies_updated
self.cgdub_dependencies_updated_at = Time.now
end
# Loads the per-course config module inside the instructor-code sandbox.
def config!
source = "#{name}_course_config".to_sym
Utilities.execute_instructor_code(source) do
require config_file_path
Class.new.extend eval(config_module_name)
end
end
def config_file_path
Rails.root.join("courseConfig", "#{sanitized_name}.rb")
end
def config_module_name
"Course#{sanitized_name.camelize}"
end
include CourseAssociationCache
end
| 29.417476 | 101 | 0.725193 |
390be15e74ab00b61d6723d410a9da6b8c236e73 | 374 | # frozen_string_literal: true
require_relative '../../test_helper'
class TestDeprecateHeyArnold < Test::Unit::TestCase
def setup
@tester = Faker::HeyArnold
end
def test_character
assert @tester.character.match(/\w+/)
end
def test_location
assert @tester.location.match(/\w+/)
end
def test_quote
assert @tester.quote.match(/\w+/)
end
end
| 17 | 51 | 0.695187 |
d56928ba4d948a7b0bce1dd2d447cc32a2f34555 | 3,641 | #file: points_shower_gfx.rb
##
# Holds information about points shower
class PointsShowerGfx
# Widget-like helper that renders a two-team score ("segni") board with an
# FXRuby device context: team names on top, a vertical divider, one horizontal
# "ray" per possible point, and a filled circle at a ray's end for each point
# a team has scored.
attr_accessor :pos_x, :pos_y, :visible, :z_order, :image
def initialize(x=0, y=0, img=nil, zord=0, visb=true)
@pos_x = x # x position
@pos_y = y # y position
@image = img # image
@visible = visb # visible flag
@z_order = zord
@text_col = nil
@font = nil
@team_1_name = ""
@team_2_name = ""
@tot_segni = 0
@team_1_segni = 0
@team_2_segni = 0
end
##
# Set names of two team
# font/color are used later by draw_points; call this before drawing,
# otherwise @font/@text_col stay nil and draw_points would fail.
def set_name_teams(t1_name, t2_name, font, color)
@text_col = color
@font = font
@team_1_name = t1_name
@team_2_name = t2_name
end
##
# Set current game pints info
# tot_segni: number of rays drawn; team_*_segni: points currently scored.
def set_segni_info(tot_segni, segn_team1, segn_team2)
@tot_segni = tot_segni
@team_1_segni = segn_team1
@team_2_segni = segn_team2
end
##
# Draw points
# dc is an FXRuby drawing context (FXDC-like: font=, foreground=, drawText,
# drawLine, fillArc). No-op while @visible is false.
def draw_points(dc)
# draw background
#control_width = @image.width
#dc.drawImage(@image, @pos_x, @pos_y)
# draw names
return unless @visible
dc.font = @font
dc.foreground = @text_col
width_text_1 = @font.getTextWidth(@team_1_name)
height_text_1 = @font.getTextHeight(@team_1_name)
width_text_2 = @font.getTextWidth(@team_2_name)
height_text_2 = @font.getTextHeight(@team_2_name)
# Overall board width derives from the two name widths plus padding.
control_width = width_text_1 + width_text_2 + 40
control_height = 150
# total segni
# horizontal under names
y1 = @pos_y + height_text_1 + 10
y0 = y1
x0 = @pos_x + 20
x1 = x0 + control_width
#dc.drawLine(x0,y0,x1,y1)
# middle vertical
xv0 = x0 + (x1 - x0)/2
xv1 = xv0
yv0 = y1
yv1 = @pos_y + control_height - 2
# Special-case: with only two total points the divider is shortened.
if @tot_segni == 2
yv1 = yv0 + 45
end
dc.drawLine(xv0,yv0,xv1,yv1)
#team 1 text
#xpos_text = @pos_x + ( (control_width / 2 - 20) - width_text) / 2
# Team 1's name is right-aligned against the divider, team 2's left-aligned.
xpos_text = xv0 - 10 - width_text_1
ypos_text = @pos_y + height_text_1
dc.drawText(xpos_text, ypos_text, @team_1_name)
#team 2 text
#xpos_text = control_width / 2 + @pos_x + ( (control_width / 2 ) - width_text) / 2
xpos_text = xv0 + 10
ypos_text = @pos_y + height_text_1
dc.drawText(xpos_text, ypos_text, @team_2_name)
# empty points raggi
#y_space_av = yv1 - yv0
#off_y = y_space_av / @tot_segni
# Fixed 18px vertical spacing between rays (dynamic spacing left commented).
off_y = 18
points_coord = [] # store coordinate for circle
(0...@tot_segni).each do |ix|
xs0 = x0 + 15
xs1 = x1 - 15
ys0 = off_y * ix + yv0 + 8
ys1 = ys0
points_coord << {:team1 => [xs0, ys0], :team2 => [xs1, ys1]}
dc.drawLine(xs0,ys0,xs1,ys1)
end
# draw segni as circle at the end of raggi
# Ray i gets a circle when the team has scored at least i+1 points; circles
# are centered on the ray endpoints.
count_coord = 1
w_circle = 13
points_coord.each do |coord_pt|
if @team_1_segni >= count_coord
# enable segno
pt = coord_pt[:team1]
fill_circle(dc, pt[0] - w_circle/2, pt[1] - w_circle/2, w_circle, w_circle)
end
if @team_2_segni >= count_coord
# enable segno
pt = coord_pt[:team2]
fill_circle(dc, pt[0] - w_circle/2, pt[1] - w_circle/2, w_circle, w_circle)
end
count_coord += 1
end
end#end draw_points
##
# Need to implement a function that draw a filled circle. fillEllipse don't exist
# Draws four overlapping quarter+ arcs to cover the full disc; FOX arc
# angles are expressed in 1/64-degree units, hence the 64* factors.
# NOTE(review): h is accepted but unused (arcs use w for both axes), so the
# result is always a circle -- confirm callers never need an ellipse.
def fill_circle(dc,x,y,w,h)
dc.fillArc(x, y, w, w, 0, 64*90)
dc.fillArc(x, y, w, w, 64*90, 64*180)
dc.fillArc(x, y, w, w, 64*180, 64*270)
dc.fillArc(x, y, w, w, 64*270, 64*360)
end
end
| 28.007692 | 87 | 0.583082 |
bf693c9e39fdee9e87d8fff97339a01db0ad3a63 | 131 | require 'test_helper'
class CompanyStateFieldTest < ActiveSupport::TestCase
# Placeholder test case as generated by Rails; no assertions defined yet.
# test "the truth" do
# assert true
# end
end
| 16.375 | 53 | 0.725191 |
1c54be7021db790895c9ae2f99b13aeace4c558e | 1,233 | require 'fog/volume/telefonica/requests/create_snapshot'
module Fog
  module Volume
    class TeleFonica
      class V2
        class Real
          include Fog::Volume::TeleFonica::Real

          # Creates a snapshot of +volume_id+. A truthy +force+ requests the
          # snapshot even for an attached volume; nil is normalized to false.
          def create_snapshot(volume_id, name, description, force = false)
            snapshot = {
              'volume_id' => volume_id,
              'name' => name,
              'description' => description,
              'force' => force.nil? ? false : force
            }
            _create_snapshot('snapshot' => snapshot)
          end
        end

        class Mock
          # Mimics the API: returns a 202 with a canned "creating" snapshot.
          def create_snapshot(volume_id, name, description, _force = false)
            snapshot = {
              "status" => "creating",
              "name" => name,
              "created_at" => Time.now,
              "description" => description,
              "volume_id" => volume_id,
              "id" => "5",
              "size" => 1
            }
            Excon::Response.new.tap do |response|
              response.status = 202
              response.body = { "snapshot" => snapshot }
            end
          end
        end
      end
    end
  end
end
| 26.804348 | 75 | 0.428224 |
6271550d9d98ded78c3df2a04f17f8720851ad31 | 38 | # Minitest
require 'minitest/autorun'
| 12.666667 | 26 | 0.789474 |
9100420b43755cff1532e036989ea2b82065f7fb | 2,002 | module Alf
module Sql
class Processor
class SemiJoin < Processor
include JoinSupport
def initialize(right, negate = false, builder)
super(builder)
@right = right
@negate = negate
end
attr_reader :right, :negate
def call(sexpr)
if sexpr.set_operator?
call(builder.from_self(sexpr))
elsif right.set_operator?
SemiJoin.new(builder.from_self(right), negate, builder).call(sexpr)
else
super(sexpr)
end
end
private
def apply_join_strategy(left, right)
predicate = build_semijoin_predicate(left, right)
expand_where_clause(left, negate ? !predicate : predicate)
end
def build_semijoin_predicate(left, right)
if right.is_table_dee?
right.where_clause.predicate
else
commons = left.to_attr_list & right.to_attr_list
subquery = Clip.new(commons, :star, builder).call(right)
if commons.size == 0
builder.exists(subquery)
elsif commons.size == 1
identifier = left.desaliaser[commons.to_a.first]
Predicate::Factory.in(identifier, subquery)
else
join_pre = join_predicate(left, subquery, commons)
subquery = expand_where_clause(subquery, join_pre)
builder.exists(subquery)
end
end
end
def expand_where_clause(sexpr, predicate)
Grammar.sexpr \
[ :select_exp,
sexpr.set_quantifier,
sexpr.select_list,
sexpr.from_clause,
[ :where_clause, (sexpr.predicate || tautology) & predicate ],
sexpr.order_by_clause,
sexpr.limit_clause,
sexpr.offset_clause ].compact
end
end # class SemiJoin
end # class Processor
end # module Sql
end # module Alf
| 30.333333 | 79 | 0.567932 |
bb20e92b36647a00409e98f39247875c11eea9c8 | 2,026 | class Povray < Formula
desc "Persistence Of Vision RAYtracer (POVRAY)"
homepage "https://www.povray.org/"
url "https://github.com/POV-Ray/povray/archive/v3.7.0.8.tar.gz"
sha256 "53d11ebd2972fc452af168a00eb83aefb61387662c10784e81b63e44aa575de4"
revision 1
bottle do
rebuild 1
sha256 "a882f103b0ad016cbafa13cc1fd028046b337590feff3e6188bb574f1e328488" => :catalina
sha256 "eae4cf975215cdfdeadb665c53061c6ed2b4f9fa95121e7145222409b0e44c56" => :mojave
sha256 "4472bb00380eb26d3045dd5e67effa4f75934936263129009f9a80bbf5290633" => :high_sierra
sha256 "f21cb29c30c8367aa14f6a4485bf03377f23e30b2e7178be466d12bb84be26a9" => :sierra
sha256 "f2f0bf20fbe2d5b1ce91ecdf4eca52e4a544323910febae396d8b9fb1c0044ec" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "boost"
depends_on "jpeg"
depends_on "libpng"
depends_on "libtiff"
depends_on "openexr"
def install
ENV.cxx11
args = %W[
COMPILED_BY=homebrew
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--mandir=#{man}
--with-boost=#{Formula["boost"].opt_prefix}
--with-openexr=#{Formula["openexr"].opt_prefix}
--without-libsdl
--without-x
]
# Adjust some scripts to search for `etc` in HOMEBREW_PREFIX.
%w[allanim allscene portfolio].each do |script|
inreplace "unix/scripts/#{script}.sh",
/^DEFAULT_DIR=.*$/, "DEFAULT_DIR=#{HOMEBREW_PREFIX}"
end
cd "unix" do
system "./prebuild.sh"
end
system "./configure", *args
system "make", "install"
end
test do
# Condensed version of `share/povray-3.7/scripts/allscene.sh` that only
# renders variants of the famous Utah teapot as a quick smoke test.
scenes = Dir["#{share}/povray-3.7/scenes/advanced/teapot/*.pov"]
assert !scenes.empty?, "Failed to find test scenes."
scenes.each do |scene|
system "#{share}/povray-3.7/scripts/render_scene.sh", ".", scene
end
end
end
| 31.169231 | 93 | 0.699408 |
d5c2950ecdacaf562c4b2db53bf0fe359ff8d016 | 977 | # frozen_string_literal: true
require 'sprockets'
##
# Wrapper class for our applications's Sprockets environment.
##
# Wrapper class around the application's Sprockets environment.
class ApplicationAssets < Sprockets::Environment
  # Request path prepended to every asset -- this is where the Rack app is
  # _mounted_ OR a directory within /public containing compiled assets.
  ASSET_ROOT = '/assets/'

  ##
  # Sets up the Sprockets search paths for the application.
  def initialize
    super(File.join(__dir__, '..'))

    %w[config images javascripts stylesheets]
      .map { |dir| "assets/#{dir}" }
      .each { |asset_dir| append_path asset_dir }
    append_path 'node_modules'
  end

  ##
  # Returns an HTTP path that resolves to the requested asset. With
  # digest: true the result embeds the asset's hexdigest so that it
  # resolves to a compiled, digested asset in production.
  def asset_path(file, digest: false)
    asset = find_asset(file)
    lookup = digest ? :digest_path : :logical_path
    ASSET_ROOT + asset.send(lookup)
  end
end
| 30.53125 | 79 | 0.727738 |
01a159275eba0150c9562eb714b266c15ad0cb2d | 314 | require 'rails_helper'
RSpec.describe Favorite, type: :model do
  # NOTE(review): `topic` and `favorite` are defined but never referenced by
  # any example; as lazy `let`s they never execute. Confirm whether they can
  # be removed or were meant to back additional examples.
  let(:topic) { create(:topic) }
  let(:user) { create(:user) }
  let(:post) { create(:post) }
  let(:favorite) { Favorite.create!(post: post, user: user) }

  # Association checks (shoulda-matchers).
  it { is_expected.to belong_to(:post) }
  it { is_expected.to belong_to(:user) }
end
| 26.166667 | 62 | 0.649682 |
5dbf3ffc3b5fc200cc0dac3be25247cccf434750 | 4,124 | require 'capistrano/node-deploy'
require 'capistrano/uptodate'
set :application, 'gatewayd'
set :scm, 'git'
set :repository, 'https://github.com/ripple/gatewayd.git'
set :branch, ENV['branch'] ? ENV['branch'] : 'develop'
set :deploy_to, "/var/apps/#{application}"
set :user, 'ubuntu' # deploy user
set :node_user, "#{application}" # run user
set :node_binary, '/usr/bin/node'
set :node_env, ENV['NODE_ENV'] ? ENV['NODE_ENV'] : 'staging'
set :app_command, 'bin/gateway'
#set :app_environment, '' # environment variables
# used in staging:configure_postgres task
set :postgres_username, 'gatewayd_user'
set :postgres_password, 'password'
set :postgres_dbname, 'gatewayd_db'
set :upstart_file_contents, <<EOD
#!upstart
description "#{application} node app"
author "capistrano"
start on runlevel [2345]
stop on shutdown
respawn
respawn limit 99 5
kill timeout #{kill_timeout}
script
cd #{current_path} && exec sudo -u #{node_user} NODE_ENV=#{node_env} #{app_environment} #{node_binary} #{current_path}/#{app_command} start 2>> #{stderr_log_path} 1>> #{stdout_log_path}
end script
pre-stop script
cd #{current_path} && exec sudo -u #{node_user} NODE_ENV=#{node_env} #{app_environment} #{node_binary} #{current_path}/#{app_command} stop 2>> #{stderr_log_path} 1>> #{stdout_log_path}
end script
post-stop script
# not nice workaround for './bin/gateway stop' not working
sudo -u #{node_user} pkill pm2
end script
EOD
# Deployment hooks (runs inside capistrano/node-deploy's deploy flow).
namespace :deploy do
  desc "Copy settings file to release dir"
  task :copy_config_to_release_path do
    # Copy only when a shared config exists; a first deploy may not have one.
    run "if [ -f #{shared_path}/config/config.json ]; then cp -a #{shared_path}/config/config.json #{release_path}/config/; fi"
  end
end

# One-time host provisioning tasks.
namespace :setup do
  desc 'Create service user'
  task :create_user do
    # System account (-r) with its own group (-U) and no login shell.
    run "sudo -u root useradd -U -m -r -s /dev/null #{application}"
  end

  desc 'Install dependencies (apt repositories and packages)'
  task :install_dependencies do
    run 'sudo apt-get -y update'
    run 'sudo apt-get -y install git python-software-properties python g++ make libpq-dev'
    run 'sudo add-apt-repository -y ppa:chris-lea/node.js'
    run 'sudo apt-get -y update'
    run 'sudo apt-get -y install nodejs postgresql postgresql-client'
    # nodejs package installs binary as /usr/bin/nodejs (?!) so we symlink it..
    run 'sudo ln -s /usr/bin/nodejs /usr/bin/node || true' # but silently fail if it's already there
  end

  desc 'Upload config file from env/deploy/config.json'
  task :upload_config do
    # Seed a local config from the example when absent (-n = no clobber).
    run_locally 'cp -n env/deploy/config.json.example env/deploy/config.json || true'
    run "mkdir -p #{shared_path}/config"
    upload 'env/deploy/config.json', "#{shared_path}/config/config.json"
  end

  desc 'Create deploy path'
  task :create_deploy_path do
    run "sudo mkdir -p #{deploy_to}"
    run "sudo chown #{user}:#{user} #{deploy_to}"
  end
end

# Staging-only orchestration.
namespace :staging do
  desc 'Setup a staging server from scratch'
  task :cold_setup do
    setup.create_user
    setup.create_deploy_path
    setup.install_dependencies
    setup.upload_config
    configure_postgres
    node.install_global_packages
  end

  desc 'Configure postgresql in a manner suitable for staging/testing'
  task :configure_postgres do
    # NOTE(review): credentials from the :postgres_* settings above are
    # interpolated straight into shell commands — acceptable for staging only.
    run "sudo -u postgres psql -U postgres -c \"create user #{postgres_username} with password \'#{postgres_password}\'\""
    run "sudo -u postgres psql -U postgres -c \"create database #{postgres_dbname} with owner #{postgres_username} encoding=\'utf8\'\""
  end
end

# Node.js application tasks.
namespace :node do
  desc 'Run migrations'
  task :migrate do
    # grunt migrations plugin seems to be ignoring the path to migrations dir, so symlink it to default location
    run "ln -s #{release_path}/lib/data/migrations #{release_path}/migrations"
    run "cd #{release_path} && grunt migrate:up --env=#{node_env}"
  end

  desc 'Install globally required NPM packages'
  task :install_global_packages do
    run 'sudo npm install --global pg pm2 grunt grunt-cli forever db-migrate'
    run "sudo chown -R #{user}:#{user} ~#{user}/tmp ~#{user}/.npm" # fix permissions
  end
end

# Ensure the shared config lands in each new release after npm install.
after 'node:install_packages', 'deploy:copy_config_to_release_path'
| 34.366667 | 189 | 0.719932 |
d5635132e30e40955500c612a25ad521f7f09212 | 1,463 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module NationalParkTrackerBackend
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.0

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.

    # Only loads a smaller set of middleware suitable for API only apps.
    # Middleware like session, flash, cookies can be added back manually.
    # Skip views, helpers and assets when generating a new resource.
    config.api_only = true

    # Re-add cookie/session middleware (stripped by api_only mode) so the
    # API can issue and read session cookies.
    config.middleware.use ActionDispatch::Cookies
    config.middleware.use ActionDispatch::Session::CookieStore, key: '_cookie_name'
  end
end
| 35.682927 | 83 | 0.779904 |
62cc2e61aa12f892466c18af1b29af0bee7aabf1 | 6,888 | describe '/v1/certificates' do
let(:request_headers) do
{
'HTTP_AUTHORIZATION' => "Bearer #{valid_token.token_plain}"
}
end
let(:grid) do
Grid.create!(name: 'terminal-a')
end
let(:david) do
user = User.create!(email: '[email protected]', external_id: '123456')
grid.users << user
user
end
let(:valid_token) do
AccessToken.create!(user: david, scopes: ['user'])
end
let(:ca_pem) { '-----BEGIN CERTIFICATE-----
MIIBYzCCAQ2gAwIBAgIJAIpNg6jylBQkMA0GCSqGSIb3DQEBCwUAMA0xCzAJBgNV
BAMMAkNBMB4XDTE3MTAzMTE3MDEyN1oXDTE4MTAzMTE3MDEyN1owDTELMAkGA1UE
AwwCQ0EwXDANBgkqhkiG9w0BAQEFAANLADBIAkEAz/Ee36KUY7l0tRFREO/XOSoO
Xqyv48Jcvz0TnV7d+n3yapzCZfvDtX0qMpdZqd4Gr7v2Zgr64PJJNELfSE/vMQID
AQABo1AwTjAdBgNVHQ4EFgQUcLvPScr8TZMmeiGGtFQecMBrt+IwHwYDVR0jBBgw
FoAUcLvPScr8TZMmeiGGtFQecMBrt+IwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0B
AQsFAANBAGjroEv8WBLeIbGbSDM6RMVHQjt8V5Pwd/RPI7pusWGsaJbOVXCwQSsd
wpUzwKt2lbtAZFmLIIJ53Pv0PZsgC6Q=
-----END CERTIFICATE-----
' }
let(:cert_pem) {
'-----BEGIN CERTIFICATE-----
MIIBBTCBsAIBAjANBgkqhkiG9w0BAQsFADANMQswCQYDVQQDDAJDQTAeFw0xNzEw
MzExNzA2MzJaFw0xNzExMzAxNzA2MzJaMA8xDTALBgNVBAMMBHRlc3QwXDANBgkq
hkiG9w0BAQEFAANLADBIAkEA+jOxPABMu7Kp4lfrlGXWm+kQWIj4FSi5pczK/ReE
w1Gxhq49CEt3bIC3Li8slJU4H1WZbhbz8VWyEUsmaYEfiQIDAQABMA0GCSqGSIb3
DQEBCwUAA0EAIHbczx/kmb/ji/5kDtAUldbicApY9vl75JbPxnAfU5yqyZjhsFiF
uH6nBTUEAXS4Ic89vJ+J9e14hXh7YLzq1w==
-----END CERTIFICATE-----
' }
let(:key_pem) { '-----BEGIN PRIVATE KEY-----
MIIBVgIBADANBgkqhkiG9w0BAQEFAASCAUAwggE8AgEAAkEA+jOxPABMu7Kp4lfr
lGXWm+kQWIj4FSi5pczK/ReEw1Gxhq49CEt3bIC3Li8slJU4H1WZbhbz8VWyEUsm
aYEfiQIDAQABAkBb0uTU1HdU23klrIa067sbdSmelIYXnd6kTsigoiUDWRo9mccV
kPx4bL+L9bL2BX64+Sqjch2+EUYYqQSQLMzRAiEA/fpz9nR5feWi75URhS1oHi/0
vpYxvQlTyt6LNBG6LxsCIQD8MYs+tUhwCfuKHPSfqE9oizOwAcfTUp/PVgLGhWcC
KwIhAN3AQGGuHqmqx5GRwSNbmu3Ih1Okhbb8ntmhZz9GPx6DAiEAjPfApt+8Suw5
j30Z+/if0ock8Dg+k1A3BjVEveUprBsCIQCjel8oZuN/3zatvWMCgCQboYoQjw9M
U3GffGoMbo0kTw==
-----END PRIVATE KEY-----
' }
let!(:certificate) do
Certificate.create!(
grid: grid,
subject: 'kontena.io',
alt_names: [],
valid_until: Time.now + 90.days,
private_key: key_pem,
certificate: cert_pem,
chain: ca_pem,
)
end
describe 'POST /v1/certificates/register' do
it 'makes LE registration' do
expect(GridCertificates::Register).to receive(:run).and_return(double({:success? => true}))
data = {email: '[email protected]'}
post "/v1/certificates/#{grid.name}/register", data.to_json, request_headers
expect(response.status).to eq(201)
end
it 'fails to make LE registration' do
outcome = double(
:success? => false,
:errors => double(:message => 'kaboom')
)
expect(GridCertificates::Register).to receive(:run).and_return(outcome)
data = {email: '[email protected]'}
post "/v1/certificates/#{grid.name}/register", data.to_json, request_headers
expect(response.status).to eq(422)
expect(json_response['error']).to eq('kaboom')
end
end
describe 'POST /v1/grids/<grid>/certificates' do
it 'requests new certificate' do
outcome = double(
:success? => true,
:result => certificate
)
expect(GridCertificates::RequestCertificate).to receive(:run).and_return(outcome)
data = {
domains: ['kontena.io']
}
post "/v1/grids/#{grid.name}/certificates", data.to_json, request_headers
expect(response.status).to eq(201), response.body
expect(json_response['subject']).to eq('kontena.io')
end
it 'fails in requesting new certificate' do
outcome = double(
:success? => false,
:result => certificate,
:errors => double(:message => 'kaboom')
)
expect(GridCertificates::RequestCertificate).to receive(:run).and_return(outcome)
data = {
domains: ['kontena.io']
}
post "/v1/grids/#{grid.name}/certificates", data.to_json, request_headers
expect(response.status).to eq(422), response.body
expect(json_response['error']).to eq('kaboom')
end
end
describe 'GET /v1/grids/<grid>/certificates' do
it 'gets all certs' do
get "/v1/grids/#{grid.name}/certificates", nil, request_headers
expect(response.status).to eq(200)
expect(json_response['certificates'].size).to eq(1)
expect(json_response['certificates'][0]['subject']).to eq('kontena.io')
end
end
describe 'GET /v1/certificates/<grid>/<subject>' do
it 'gets a certificate' do
get "/v1/certificates/#{grid.name}/kontena.io", nil, request_headers
expect(response.status).to eq(200)
expect(json_response['subject']).to eq('kontena.io')
expect(json_response['id']).to eq("#{grid.name}/kontena.io")
end
it '404 for non-existing cert' do
get "/v1/certificates/#{grid.name}/foobar.io", nil, request_headers
expect(response.status).to eq(404)
end
end
describe 'GET /v1/certificates/<grid>/<subject>/export' do
it 'exports a certificate' do
get "/v1/certificates/#{grid.name}/kontena.io/export", nil, request_headers
expect(response.status).to eq(200)
expect(json_response['id']).to eq("#{grid.name}/kontena.io")
expect(json_response['subject']).to eq('kontena.io')
expect(json_response['certificate']).to eq(cert_pem)
expect(json_response['chain']).to eq(ca_pem)
expect(json_response['private_key']).to eq(key_pem)
end
end
describe 'PUT /v1/certificates/<grid>/<subject>' do
it 'imports certificate' do
data = {
certificate: cert_pem,
chain: [ca_pem],
private_key: key_pem,
}
put "/v1/certificates/#{grid.name}/test", data.to_json, request_headers
expect(response.status).to eq(201), response.body
expect(json_response['subject']).to eq('test')
end
end
  describe 'DELETE /v1/certificates/<grid>/<subject>' do
    it 'deletes certificate' do
      expect {
        delete "/v1/certificates/#{grid.name}/kontena.io", nil, request_headers
        expect(response.status).to eq(200)
      }.to change{Certificate.count}.by (-1)
    end

    # A certificate referenced by a service must not be deletable.
    it 'fails deleting certificate as it\'s in use' do
      GridService.create!(grid: grid, name: 'redis', image_name: 'redis', certificates: [GridServiceCertificate.new(subject: 'kontena.io', name: 'SSL_CERT')])
      expect {
        delete "/v1/certificates/#{grid.name}/kontena.io", nil, request_headers
        expect(response.status).to eq(422)
        expect(json_response['error']['certificate']).to match(/Certificate still in use/)
      }.not_to change{Certificate.count}
    end

    it 'return 404 for missing cert' do
      delete "/v1/certificates/#{grid.name}/foobar.io", nil, request_headers
      expect(response.status).to eq(404)
      expect(json_response['error']).to eq('Not found')
    end
  end
| 34.268657 | 158 | 0.695267 |
283108c1404b188ff0936c5fb26e983cbb2e9a79 | 2,073 | # frozen_string_literal: true
module Factories
  # rubocop:disable Metrics/MethodLength All-in-one-place is more relevant than short methods here
  class << self
    # Creates a Project together with its backing Rubygem and GithubRepo.
    def project(name,
                score: 25,
                downloads: 5000,
                first_release: 1.year.ago,
                description: nil)
      gem_record = rubygem(name, downloads: downloads, first_release: first_release)
      repo = GithubRepo.create!(
        path: "#{name}/#{name}",
        stargazers_count: downloads,
        forks_count: downloads,
        watchers_count: downloads
      )
      Project.create!(
        permalink: name,
        score: score,
        rubygem: gem_record,
        github_repo: repo,
        description: description
      )
    end

    # Creates a Rubygem record at version 1.0.
    def rubygem(name, downloads: 5000, first_release: 1.year.ago, latest_release: 3.months.ago)
      Rubygem.create! name: name,
                      current_version: "1.0",
                      downloads: downloads,
                      first_release_on: first_release,
                      latest_release_on: latest_release
    end

    # Creates a download-stat snapshot for the given gem name and date.
    def rubygem_download_stat(name, date:, total_downloads: 5000)
      Rubygem::DownloadStat.create!(
        rubygem_name: name,
        date: date,
        total_downloads: total_downloads
      )
    end

    # Creates a Rubygem::Trend entry; with_stats also seeds the two earlier
    # download stats used by trend calculations.
    def rubygem_trend(name, date:, position:, with_stats: false)
      day = Date.parse(date.to_s)
      if with_stats
        rubygem_download_stat name, date: day - 8.weeks, total_downloads: 500
        rubygem_download_stat name, date: day - 4.weeks, total_downloads: 2000
      end
      Rubygem::Trend.create!(
        rubygem_name: name,
        position: position,
        date: day,
        rubygem_download_stat: rubygem_download_stat(name, date: day, total_downloads: 15_000)
      )
    end
  end
  # rubocop:enable Metrics/MethodLength
end
| 36.368421 | 116 | 0.561505 |
0146b250f97d49d8ac13e3fb1f4e87abb56b25c1 | 44 | module Odin
VERSION = "0.1.0.alpha.1"
end
| 11 | 27 | 0.659091 |
e8585aefef1f49a8a89a70e0d91219a63e41d452 | 4,767 | # -*- encoding: utf-8 -*-
#
shared_examples_for 'cnab400' do
let(:pagamento) do
Brcobranca::Remessa::Pagamento.new(valor: 199.9,
data_vencimento: Date.current,
nosso_numero: 123,
documento_sacado: '12345678901',
nome_sacado: 'PABLO DIEGO JOSÉ FRANCISCO DE PAULA JUAN NEPOMUCENO MARÍA DE LOS REMEDIOS CIPRIANO DE LA SANTÍSSIMA TRINIDAD RUIZ Y PICASSO',
endereco_sacado: 'RUA RIO GRANDE DO SUL São paulo Minas caçapa da silva junior',
bairro_sacado: 'São josé dos quatro apostolos magros',
cep_sacado: '12345678',
cidade_sacado: 'Santa rita de cássia maria da silva',
nome_avalista: 'ISABEL CRISTINA LEOPOLDINA ALGUSTA MIGUELA GABRIELA RAFAELA GONZAGA DE BRAGANÇA E BOURBON',
uf_sacado: 'SP')
end
let(:params) do
if subject.class == Brcobranca::Remessa::Cnab400::Bradesco
{ carteira: '01',
agencia: '12345',
conta_corrente: '1234567',
digito_conta: '1',
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
sequencial_remessa: '1',
codigo_empresa: '123',
pagamentos: [pagamento] }
elsif subject.class == Brcobranca::Remessa::Cnab400::Citibank
{
portfolio: '17777751042700080112',
carteira: '1',
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
documento_cedente: '12345678910',
pagamentos: [pagamento]
}
elsif subject.class == Brcobranca::Remessa::Cnab400::Santander
{
codigo_transmissao: '17777751042700080112',
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
documento_cedente: '12345678910',
agencia: '8888',
conta_corrente: '000002997',
digito_conta: '8',
pagamentos: [pagamento]
}
elsif subject.class == Brcobranca::Remessa::Cnab400::Sicoob
{ carteira: '01',
agencia: '1234',
conta_corrente: '12345678',
digito_conta: '1',
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
documento_cedente: '12345678910',
convenio: '123456789',
pagamentos: [pagamento] }
elsif subject.class == Brcobranca::Remessa::Cnab400::BancoBrasil
{ carteira: '12',
agencia: '1234',
variacao_carteira: '123',
convenio: '1234567',
convenio_lider: '7654321',
conta_corrente: '1234',
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
documento_cedente: '12345678910',
sequencial_remessa: '1',
pagamentos: [pagamento] }
else
{ carteira: '123',
agencia: '4327',
conta_corrente: '12345',
digito_conta: '1',
empresa_mae: 'SOCIEDADE BRASILEIRA DE ZOOLOGIA LTDA',
documento_cedente: '12345678910',
pagamentos: [pagamento] }
end
end
let(:objeto) { subject.class.new(params) }
it 'header deve ter 400 posicoes' do
expect(objeto.monta_header.size).to eq 400
end
it 'detalhe deve falhar se pagamento nao for valido' do
expect { objeto.monta_detalhe(Brcobranca::Remessa::Pagamento.new, 1) }.to raise_error(Brcobranca::RemessaInvalida)
end
it 'detalhe deve ter 400 posicoes' do
expect(objeto.monta_detalhe(pagamento, 1).size).to eq 400
end
context 'trailer' do
it 'trailer deve ter 400 posicoes' do
expect(objeto.monta_trailer(1).size).to eq 400
end
it 'informacoes devem estar posicionadas corretamente no trailer' do
trailer = objeto.monta_trailer 3
expect(trailer[0]).to eq '9' # identificacao registro
expect(trailer[394..399]).to eq '000003' # numero sequencial do registro
end
end
it 'montagem da remessa deve falhar se o objeto nao for valido' do
expect { subject.class.new.gera_arquivo }.to raise_error(Brcobranca::RemessaInvalida)
end
it 'remessa deve conter os registros mais as quebras de linha' do
remessa = objeto.gera_arquivo
expect(remessa.size).to eq 1206
# registros
expect(remessa[0..399]).to eq objeto.monta_header
expect(remessa[402..801]).to eq objeto.monta_detalhe(pagamento, 2).upcase
expect(remessa[804..1203]).to eq objeto.monta_trailer(3)
# quebras de linha
expect(remessa[400..401]).to eq "\r\n"
expect(remessa[802..803]).to eq "\r\n"
end
it 'deve ser possivel adicionar mais de um pagamento' do
objeto.pagamentos << pagamento
remessa = objeto.gera_arquivo
expect(remessa.size).to eq 1608
end
end
| 38.136 | 178 | 0.617369 |
3371ee1672a106b0655be857e6b9247e65daa6d3 | 490 | module Fog
module DNS
class Rackspace
class Real
def list_subdomains(domain_id, options={})
validate_path_fragment :domain_id, domain_id
path = "domains/#{domain_id}/subdomains"
if !options.empty?
path = path + '?' + array_to_query_string(options)
end
request(
:expects => 200,
:method => 'GET',
:path => path
)
end
end
end
end
end
| 20.416667 | 62 | 0.504082 |
d5bbc906b56b7a40ff285886ae2fa95a8954d550 | 3,393 | require 'spec_helper'
describe FacilityAccount do
context "valid account number" do
before(:each) do
@user = FactoryGirl.create(:user)
@facility = FactoryGirl.create(:facility)
assert @facility.valid?
@options = Hash[:is_active => 1, :created_by => @user.id, :facility_id => @facility.id, :revenue_account => 51234]
@starts_at = Time.zone.now-3.days
@expires_at = Time.zone.now+3.days
end
it "should create using factory" do
attrs=FactoryGirl.attributes_for(:facility_account)
define_open_account(attrs[:revenue_account], attrs[:account_number])
@facility_account = @facility.facility_accounts.create(attrs)
assert @facility_account.valid?
end
context "revenue_account" do
it "should not allow account < 5 digits" do
@options[:revenue_account] = "9999"
@account = FacilityAccount.create(@options)
assert @account.invalid?
assert @account.errors[:revenue_account]
end
it "should not allow account > 5 digits" do
@options[:revenue_account] = "111111"
@account = FacilityAccount.create(@options)
assert @account.invalid?
assert @account.errors[:revenue_account]
end
end
it "should allow format fund3-dept7-project8" do
@options[:account_number] = '123-1234567-12345678'
define_open_account(@options[:revenue_account], @options[:account_number])
@account = FacilityAccount.create(@options)
assert @account.valid?
end
it "should allow format fund3-dept7-project8" do
@options[:account_number] = '123-1234567-12345678'
define_open_account(@options[:revenue_account], @options[:account_number])
@account = FacilityAccount.create(@options)
assert @account.valid?
end
it "should allow format fund3-dept7-project8-activity2" do
@options[:account_number] = '123-1234567-12345678-12'
define_open_account(@options[:revenue_account], @options[:account_number])
@account = FacilityAccount.create(@options)
assert @account.valid?
end
it "should allow format fund3-dept7-project8-activity2-program4" do
# create chart string without program value
@options[:account_number] = '123-1234567-12345678-12-1234'
define_open_account(@options[:revenue_account], @options[:account_number])
@account = FacilityAccount.create(@options)
assert @account.valid?
end
# we no longer validate facility accounts against BCS table
#it "should not allow account that has expired" do
# @bcs = BudgetedChartString.create(:fund => '123', :dept => '1234567', :starts_at => @starts_at, :expires_at => @starts_at)
# @options[:account_number] = '123-1234567'
# @account = FacilityAccount.create(@options)
# assert [email protected]?
# assert_equal "Account has expired", @account.errors[:base]
#end
# we no longer validate facility accounts against BCS table
#it "should not allow account that has not started" do
# @bcs = BudgetedChartString.create(:fund => '123', :dept => '1234567', :starts_at => @starts_at+1.year, :expires_at => @expires_at)
# @options[:account_number] = '123-1234567'
# @account = FacilityAccount.create(@options)
# assert [email protected]?
# assert_equal "Account is not active", @account.errors[:base]
#end
end
end
| 39.917647 | 137 | 0.681403 |
3864ec189a4dfd1ee07abb245acae8134aa0e994 | 23,730 | require 'spec_helper'
RSpec.describe Qa::Authorities::LinkedData::FindTerm do
describe '#find' do
let(:lod_oclc) { described_class.new(term_config(:OCLC_FAST)) }
let(:lod_loc) { described_class.new(term_config(:LOC)) }
context 'basic parameter testing' do
context 'with bad id' do
before do
stub_request(:get, 'http://id.worldcat.org/fast/FAKE_ID.rdf.xml')
.to_return(status: 404, body: '', headers: {})
end
it 'raises a TermNotFound exception' do
expect { lod_oclc.find('FAKE_ID') }.to raise_error Qa::TermNotFound, /.*\/FAKE_ID.rdf.xml\ Not Found - Term may not exist at LOD Authority./
end
end
end
context 'performance stats' do
before do
stub_request(:get, 'http://id.worldcat.org/fast/530369.rdf.xml')
.to_return(status: 200, body: webmock_fixture('lod_oclc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
end
context 'when set to true' do
let :results do
lod_oclc.find('530369', request_header: { performance_data: true })
end
it 'includes performance in return hash' do
expect(results.keys).to match_array [:performance, :results]
expect(results[:performance].keys).to match_array [:fetch_time_s, :normalization_time_s, :fetched_bytes, :normalized_bytes,
:fetch_bytes_per_s, :normalization_bytes_per_s, :total_time_s]
expect(results[:performance][:total_time_s]).to eq results[:performance][:fetch_time_s] + results[:performance][:normalization_time_s]
end
end
context 'when set to false' do
let :results do
lod_oclc.find('530369', request_header: { performance_data: false })
end
it 'does NOT include performance in return hash' do
expect(results.keys).not_to include(:performance)
end
end
context 'when using default setting' do
let :results do
lod_oclc.find('530369')
end
it 'does NOT include performance in return hash' do
expect(results.keys).not_to include(:performance)
end
end
end
context 'response header' do
before do
stub_request(:get, 'http://id.worldcat.org/fast/530369.rdf.xml')
.to_return(status: 200, body: webmock_fixture('lod_oclc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
end
context 'when set to true' do
let :results do
lod_oclc.find('530369', request_header: { response_header: true })
end
it 'includes response header in return hash' do
expect(results.keys).to match_array [:response_header, :results]
expect(results[:response_header].keys).to match_array [:predicate_count]
expect(results[:response_header][:predicate_count]).to eq 7
end
end
context 'when set to false' do
let :results do
lod_oclc.find('530369', request_header: { response_header: false })
end
it 'does NOT include response header in return hash' do
expect(results.keys).not_to include(:response_header)
end
end
context 'when using default setting' do
let :results do
lod_oclc.find('530369')
end
it 'does NOT include response header in return hash' do
expect(results.keys).not_to include(:response_header)
end
end
end
context 'in OCLC_FAST authority' do
context 'term found' do
let :results do
stub_request(:get, 'http://id.worldcat.org/fast/530369.rdf.xml')
.to_return(status: 200, body: webmock_fixture('lod_oclc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_oclc.find('530369')
end
it 'has correct primary predicate values' do
expect(results[:uri]).to eq('http://id.worldcat.org/fast/530369')
expect(results[:id]).to eq('530369')
expect(results[:label]).to eq ['Cornell University']
expect(results[:altlabel]).to include('Ithaca (N.Y.). Cornell University', "Kornel\\xCA\\xB9skii universitet", "K\\xCA\\xBBang-nai-erh ta hs\\xC3\\xBCeh")
expect(results[:altlabel].size).to eq 3
expect(results[:sameas]).to include('http://id.loc.gov/authorities/names/n79021621', 'https://viaf.org/viaf/126293486')
end
it 'has correct number of predicates in pred-obj list' do
expect(results['predicates'].count).to eq 7
end
it 'has primary predicates in pred-obj list' do
expect(results['predicates']['http://purl.org/dc/terms/identifier']).to eq ['530369']
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#prefLabel']).to eq ['Cornell University']
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#altLabel'])
.to include('Ithaca (N.Y.). Cornell University', "Kornel\\xCA\\xB9skii universitet",
"K\\xCA\\xBBang-nai-erh ta hs\\xC3\\xBCeh")
expect(results['predicates']['http://schema.org/sameAs']).to include('http://id.loc.gov/authorities/names/n79021621', 'https://viaf.org/viaf/126293486')
end
it 'has unspecified predicate values' do
expect(results['predicates']['http://www.w3.org/1999/02/22-rdf-syntax-ns#type']).to eq ['http://schema.org/Organization']
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#inScheme'])
.to include('http://id.worldcat.org/fast/ontology/1.0/#fast', 'http://id.worldcat.org/fast/ontology/1.0/#facet-Corporate')
expect(results['predicates']['http://schema.org/name'])
.to include('Cornell University', 'Ithaca (N.Y.). Cornell University', "Kornel\\xCA\\xB9skii universitet",
"K\\xCA\\xBBang-nai-erh ta hs\\xC3\\xBCeh")
end
context "ID in graph doesn't match ID in request URI" do
before do
stub_request(:get, 'http://id.worldcat.org/fast/530369.rdf.xml')
.to_return(status: 200, body: webmock_fixture('lod_oclc_term_bad_id.nt'), headers: { 'Content-Type' => 'application/ntriples' })
end
it 'raises DataNormalizationError' do
expect { lod_oclc.find('530369') }.to raise_error Qa::DataNormalizationError, "Unable to extract URI based on ID: 530369"
end
end
end
end
context 'in LOC authority' do
context 'term found' do
context 'when id requires special processing for <blank> in id' do
before do
stub_request(:get, 'http://id.loc.gov/authorities/subjects/sh85118553')
.to_return(status: 200, body: webmock_fixture('lod_loc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
end
let(:results) { lod_loc.find('sh 85118553', request_header: { subauthority: 'subjects' }) }
it 'has correct primary predicate values' do
expect(results[:uri]).to eq 'http://id.loc.gov/authorities/subjects/sh85118553'
expect(results[:uri]).to be_kind_of String
expect(results[:id]).to eq 'sh 85118553'
expect(results[:label]).to eq ['Science']
expect(results[:altlabel]).to include('Natural science', 'Science of science', 'Sciences')
expect(results[:narrower]).to include('http://id.loc.gov/authorities/subjects/sh92004048')
expect(results[:narrower].first).to be_kind_of String
end
it 'has correct number of predicates in pred-obj list' do
expect(results['predicates'].count).to eq 15
end
it 'has primary predicates in pred-obj list' do
expect(results['predicates']['http://id.loc.gov/vocabulary/identifiers/lccn']).to eq ['sh 85118553']
expect(results['predicates']['http://www.loc.gov/mads/rdf/v1#authoritativeLabel']).to eq ['Science']
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#prefLabel']).to eq ['Science']
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#altLabel']).to include('Natural science', 'Science of science', 'Sciences')
end
it 'has loc mads predicate values' do
expect(results['predicates']['http://www.loc.gov/mads/rdf/v1#classification']).to eq ['Q']
expect(results['predicates']['http://www.loc.gov/mads/rdf/v1#isMemberOfMADSCollection'])
.to include('http://id.loc.gov/authorities/subjects/collection_LCSHAuthorizedHeadings',
'http://id.loc.gov/authorities/subjects/collection_LCSH_General',
'http://id.loc.gov/authorities/subjects/collection_SubdivideGeographically')
expect(results['predicates']['http://www.loc.gov/mads/rdf/v1#hasCloseExternalAuthority'])
.to include('http://data.bnf.fr/ark:/12148/cb12321484k', 'http://data.bnf.fr/ark:/12148/cb119673416',
'http://data.bnf.fr/ark:/12148/cb119934236', 'http://data.bnf.fr/ark:/12148/cb12062047t',
'http://data.bnf.fr/ark:/12148/cb119469567', 'http://data.bnf.fr/ark:/12148/cb11933232c',
'http://data.bnf.fr/ark:/12148/cb122890536', 'http://data.bnf.fr/ark:/12148/cb121155321',
'http://data.bnf.fr/ark:/12148/cb15556043g', 'http://data.bnf.fr/ark:/12148/cb123662513',
'http://d-nb.info/gnd/4066562-8', 'http://data.bnf.fr/ark:/12148/cb120745812',
'http://data.bnf.fr/ark:/12148/cb11973101n', 'http://data.bnf.fr/ark:/12148/cb13328497r')
expect(results['predicates']['http://www.loc.gov/mads/rdf/v1#isMemberOfMADSScheme'])
.to eq ['http://id.loc.gov/authorities/subjects']
expect(results['predicates']['http://www.loc.gov/mads/rdf/v1#editorialNote'])
.to eq ['headings beginning with the word [Scientific;] and subdivision [Science] under ethnic groups and individual wars, e.g. [World War, 1939-1945--Science]']
end
it 'has more unspecified predicate values' do
expect(results['predicates']['http://www.w3.org/1999/02/22-rdf-syntax-ns#type']).to include('http://www.loc.gov/mads/rdf/v1#Topic', 'http://www.loc.gov/mads/rdf/v1#Authority', 'http://www.w3.org/2004/02/skos/core#Concept')
expect(results['predicates']['http://www.w3.org/2002/07/owl#sameAs']).to include('info:lc/authorities/sh85118553', 'http://id.loc.gov/authorities/sh85118553#concept')
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#closeMatch'])
.to include('http://data.bnf.fr/ark:/12148/cb12321484k', 'http://data.bnf.fr/ark:/12148/cb119673416',
'http://data.bnf.fr/ark:/12148/cb119934236', 'http://data.bnf.fr/ark:/12148/cb12062047t',
'http://data.bnf.fr/ark:/12148/cb119469567', 'http://data.bnf.fr/ark:/12148/cb11933232c',
'http://data.bnf.fr/ark:/12148/cb122890536', 'http://data.bnf.fr/ark:/12148/cb121155321',
'http://data.bnf.fr/ark:/12148/cb15556043g', 'http://data.bnf.fr/ark:/12148/cb123662513',
'http://d-nb.info/gnd/4066562-8', 'http://data.bnf.fr/ark:/12148/cb120745812',
'http://data.bnf.fr/ark:/12148/cb11973101n', 'http://data.bnf.fr/ark:/12148/cb13328497r')
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#editorial'])
.to eq ['headings beginning with the word [Scientific;] and subdivision [Science] under ethnic groups and individual wars, e.g. [World War, 1939-1945--Science]']
expect(results['predicates']['http://www.w3.org/2004/02/skos/core#inScheme']).to eq ['http://id.loc.gov/authorities/subjects']
end
end
context 'when multiple requests are made' do
before do
stub_request(:get, 'http://id.loc.gov/authorities/subjects/sh85118553')
.to_return(status: 200, body: webmock_fixture('lod_loc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
stub_request(:get, 'http://id.loc.gov/authorities/subjects/sh1234')
.to_return(status: 200, body: webmock_fixture('lod_loc_second_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
end
let(:results) { lod_loc.find('sh 85118553', request_header: { subauthority: 'subjects' }) }
let(:second_results) { lod_loc.find('sh 1234', request_header: { subauthority: 'subjects' }) }
it 'has correct primary predicate values for second request' do
expect(results[:uri]).to eq 'http://id.loc.gov/authorities/subjects/sh85118553'
expect(second_results[:uri]).to eq 'http://id.loc.gov/authorities/subjects/sh1234'
expect(second_results[:uri]).to be_kind_of String
expect(second_results[:id]).to eq 'sh 1234'
expect(second_results[:label]).to eq ['More Science']
expect(second_results[:altlabel]).to include('More Natural science', 'More Science of science', 'More Sciences')
end
end
context 'when id does not have a <blank>' do
before do
stub_request(:get, 'http://id.loc.gov/authorities/subjects/sh85118553')
.to_return(status: 200, body: webmock_fixture('lod_loc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
end
let(:results_without_blank) { lod_loc.find('sh85118553', request_header: { subauthority: 'subjects' }) }
it 'extracts correct uri' do
expect(results_without_blank[:uri]).to eq 'http://id.loc.gov/authorities/subjects/sh85118553'
end
end
context "ID in graph doesn't match ID in request URI" do
before do
stub_request(:get, 'http://id.loc.gov/authorities/subjects/sh85118553')
.to_return(status: 200, body: webmock_fixture('lod_loc_term_bad_id.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
end
it 'raises DataNormalizationError' do
expect { lod_loc.find('sh85118553', request_header: { subauthority: 'subjects' }) }.to raise_error Qa::DataNormalizationError, "Unable to extract URI based on ID: sh85118553"
end
end
context 'when alternate authority name is used to access loc' do
before do
stub_request(:get, 'http://id.loc.gov/authorities/subjects/sh85118553')
.to_return(status: 200, body: webmock_fixture('lod_loc_term_found.rdf.xml'), headers: { 'Content-Type' => 'application/rdf+xml' })
allow(lod_loc.term_config).to receive(:authority_name).and_return('ALT_LOC_AUTHORITY')
end
let(:results) { lod_loc.find('sh 85118553', request_header: { subauthority: 'subjects' }) }
it 'does special processing to remove blank from id' do
expect(results[:uri]).to eq 'http://id.loc.gov/authorities/subjects/sh85118553'
end
end
end
end
# rubocop:disable RSpec/NestedGroups
describe "language processing" do
context "when filtering #find result" do
context "and lang NOT passed in" do
context "and NO language defined in authority config" do
context "and NO language defined in Qa config" do
let(:lod_lang_no_defaults) { described_class.new(term_config(:LOD_LANG_NO_DEFAULTS)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_enfr.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_no_defaults.find('http://aims.fao.org/aos/agrovoc/c_9513')
end
before do
Qa.config.default_language = []
end
after do
Qa.config.default_language = :en
end
it "is not filtered" do
expect(results[:label]).to eq ['buttermilk', 'Babeurre']
expect(results[:altlabel]).to eq ['yummy', 'délicieux']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to include("buttermilk", "Babeurre")
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to include("yummy", "délicieux")
end
end
context "and default_language is defined in Qa config" do
let(:lod_lang_no_defaults) { described_class.new(term_config(:LOD_LANG_NO_DEFAULTS)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_enfr.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_no_defaults.find('http://aims.fao.org/aos/agrovoc/c_9513')
end
it "filters using Qa configured default for summary but not for predicates list" do
expect(results[:label]).to eq ['buttermilk']
expect(results[:altlabel]).to eq ['yummy']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to include("buttermilk", "Babeurre")
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to include("yummy", "délicieux")
end
end
end
context "and language IS defined in authority config" do
let(:lod_lang_defaults) { described_class.new(term_config(:LOD_LANG_DEFAULTS)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_enfr.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_defaults.find('http://aims.fao.org/aos/agrovoc/c_9513')
end
it "filters using authority configured language for summary but not for predicates list" do
expect(results[:label]).to eq ['Babeurre']
expect(results[:altlabel]).to eq ['délicieux']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to include("buttermilk", "Babeurre")
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to include("yummy", "délicieux")
end
end
context "and multiple languages ARE defined in authority config" do
let(:lod_lang_multi_defaults) { described_class.new(term_config(:LOD_LANG_MULTI_DEFAULTS)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_enfrde.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_multi_defaults.find('http://aims.fao.org/aos/agrovoc/c_9513')
end
it "filters using authority configured languages for summary but not for predicates list" do
expect(results[:label]).to eq ['buttermilk', 'Babeurre']
expect(results[:altlabel]).to eq ['yummy', 'délicieux']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to include("buttermilk", "Babeurre", "Buttermilch")
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to include("yummy", "délicieux", "lecker")
end
end
end
context "and lang IS passed in" do
let(:lod_lang_defaults) { described_class.new(term_config(:LOD_LANG_DEFAULTS)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_enfr.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_defaults.find('http://aims.fao.org/aos/agrovoc/c_9513', request_header: { language: 'fr' })
end
it "is filtered to specified language" do
expect(results[:label]).to eq ['Babeurre']
expect(results[:altlabel]).to eq ['délicieux']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to include("buttermilk", "Babeurre")
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to include("yummy", "délicieux")
end
end
context "and result does not have altlabel" do
let(:lod_lang_defaults) { described_class.new(term_config(:LOD_LANG_DEFAULTS)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_enfr_noalt.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_defaults.find('http://aims.fao.org/aos/agrovoc/c_9513', request_header: { language: 'fr' })
end
it "is filtered to specified language" do
expect(results[:label]).to eq ['Babeurre']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to include("buttermilk", "Babeurre")
end
end
context "when replacement on authority term URL" do
context "and using default" do
let(:lod_lang_param) { described_class.new(term_config(:LOD_LANG_PARAM)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513?lang=en")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_en.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_param.find('http://aims.fao.org/aos/agrovoc/c_9513')
end
it "is correctly parsed" do
expect(results[:label]).to eq ['buttermilk']
expect(results[:altlabel]).to eq ['yummy']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to eq ['buttermilk']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to eq ['yummy']
end
end
context "and lang specified" do
let(:lod_lang_param) { described_class.new(term_config(:LOD_LANG_PARAM)) }
let :results do
stub_request(:get, "http://aims.fao.org/aos/agrovoc/c_9513?lang=fr")
.to_return(status: 200, body: webmock_fixture("lod_lang_term_fr.rdf.xml"), headers: { 'Content-Type' => 'application/rdf+xml' })
lod_lang_param.find('http://aims.fao.org/aos/agrovoc/c_9513', request_header: { replacements: { 'lang' => 'fr' } })
end
it "is correctly parsed" do
expect(results[:label]).to eq ['Babeurre']
expect(results[:altlabel]).to eq ['délicieux']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#prefLabel"]).to eq ['Babeurre']
expect(results["predicates"]["http://www.w3.org/2004/02/skos/core#altLabel"]).to eq ['délicieux']
end
end
end
end
end
# rubocop:enable RSpec/NestedGroups
end
def term_config(authority_name)
Qa::Authorities::LinkedData::Config.new(authority_name).term
end
end
| 57.597087 | 234 | 0.619764 |
01f9a0aa2e7920ad2f33586dabdc112a85060c8f | 7,401 | # encoding: utf-8
require "logstash/outputs/base"
require "logstash/outputs/loki/entry"
require "logstash/outputs/loki/batch"
require "logstash/namespace"
require 'net/http'
require 'time'
require 'uri'
require 'json'
# Loki output plugin: buffers Logstash events into size/age-bounded batches
# and ships them to a Loki instance over HTTP(S), retrying transient failures
# with bounded exponential backoff.
class LogStash::Outputs::Loki < LogStash::Outputs::Base
  include Loki
  config_name "loki"

  ## 'A single instance of the Output will be shared among the pipeline worker threads'
  concurrency :single

  ## 'Loki URL'
  config :url, :validate => :string, :required => true

  ## 'BasicAuth credentials'
  config :username, :validate => :string, :required => false
  config :password, :validate => :string, secret: true, :required => false

  ## 'Client certificate'
  config :cert, :validate => :path, :required => false
  config :key, :validate => :path, :required => false

  ## 'TLS'
  config :ca_cert, :validate => :path, :required => false

  ## 'Disable server certificate verification'
  config :insecure_skip_verify, :validate => :boolean, :default => false, :required => false

  ## 'Loki Tenant ID'
  config :tenant_id, :validate => :string, :required => false

  ## 'Maximum batch size to accrue before pushing to loki. Defaults to 102400 bytes'
  config :batch_size, :validate => :number, :default => 102400, :required => false

  ## 'Interval in seconds to wait before pushing a batch of records to loki. Defaults to 1 second'
  config :batch_wait, :validate => :number, :default => 1, :required => false

  ## 'Log line field to pick from logstash. Defaults to "message"'
  config :message_field, :validate => :string, :default => "message", :required => false

  ## 'Backoff configuration. Initial backoff time between retries. Default 1s'
  config :min_delay, :validate => :number, :default => 1, :required => false

  ## 'An array of fields to map to labels, if defined only fields in this list will be mapped.'
  config :include_fields, :validate => :array, :default => [], :required => false

  ## 'Backoff configuration. Maximum backoff time between retries. Default 300s'
  config :max_delay, :validate => :number, :default => 300, :required => false

  ## 'Backoff configuration. Maximum number of retries to do'
  config :retries, :validate => :number, :default => 10, :required => false

  attr_reader :batch

  public
  # Validates the configuration, loads TLS material when configured, and
  # spawns the two background threads that flush batches on size and on age.
  # Raises LogStash::ConfigurationError for an invalid URL or delay range.
  def register
    @uri = URI.parse(@url)
    unless @uri.is_a?(URI::HTTP) || @uri.is_a?(URI::HTTPS)
      raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
    end
    if @min_delay > @max_delay
      raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
    end
    @logger.info("Loki output plugin", :class => self.class.name)

    # initialize Queue and Mutex; @mutex guards @batch and @stop
    @entries = Queue.new
    @mutex = Mutex.new
    @stop = false

    # create nil batch object.
    @batch = nil

    # validate certs
    if ssl_cert?
      load_ssl
      validate_ssl_key
    end

    # start batch_max_wait and batch_max_size threads
    @batch_wait_thread = Thread.new{max_batch_wait()}
    @batch_size_thread = Thread.new{max_batch_size()}
  end

  # Consumer loop: pulls entries off the queue and accumulates them into the
  # current batch, flushing to Loki whenever adding an entry would exceed
  # @batch_size. Exits when @stop is set or the queue is closed.
  def max_batch_size
    loop do
      @mutex.synchronize do
        return if @stop
      end

      # Blocks until an entry is available; returns nil once the queue is closed.
      e = @entries.deq
      return if e.nil?

      @mutex.synchronize do
        if !add_entry_to_batch(e)
          @logger.debug("Max batch_size is reached. Sending batch to loki")
          send(@batch)
          @batch = Batch.new(e)
        end
      end
    end
  end

  # Timer loop: periodically checks the age of the current batch and flushes
  # it once it has been open for at least @batch_wait seconds.
  def max_batch_wait
    # minimum wait frequency is 10 milliseconds
    # FIX: previously computed as `1/100`, which is integer division and
    # evaluates to 0, silently removing the intended lower bound.
    min_wait_checkfrequency = 0.01
    max_wait_checkfrequency = @batch_wait
    if max_wait_checkfrequency < min_wait_checkfrequency
      max_wait_checkfrequency = min_wait_checkfrequency
    end

    loop do
      @mutex.synchronize do
        return if @stop
      end
      sleep(max_wait_checkfrequency)
      if is_batch_expired
        @mutex.synchronize do
          @logger.debug("Max batch_wait time is reached. Sending batch to loki")
          send(@batch)
          @batch = nil
        end
      end
    end
  end

  # True when both a client certificate and key path are configured.
  def ssl_cert?
    !@key.nil? && !@cert.nil?
  end

  # Replaces the configured cert/key paths with parsed OpenSSL objects.
  def load_ssl
    @cert = OpenSSL::X509::Certificate.new(File.read(@cert)) if @cert
    @key = OpenSSL::PKey.read(File.read(@key)) if @key
  end

  # Only RSA and DSA client keys are accepted by this plugin.
  def validate_ssl_key
    if !@key.is_a?(OpenSSL::PKey::RSA) && !@key.is_a?(OpenSSL::PKey::DSA)
      raise LogStash::ConfigurationError, "Unsupported private key type '#{@key.class}''"
    end
  end

  # Builds the options hash passed to Net::HTTP.start for the given URI.
  # NOTE: when a client cert/key is configured, VERIFY_PEER is merged last and
  # therefore overrides the VERIFY_NONE set by insecure_skip_verify.
  def ssl_opts(uri)
    opts = {
      use_ssl: uri.scheme == 'https'
    }

    # disable server certificate verification
    if @insecure_skip_verify
      opts = opts.merge(
        verify_mode: OpenSSL::SSL::VERIFY_NONE
      )
    end

    if !@cert.nil? && !@key.nil?
      opts = opts.merge(
        verify_mode: OpenSSL::SSL::VERIFY_PEER,
        cert: @cert,
        key: @key
      )
    end

    unless @ca_cert.nil?
      opts = opts.merge(
        ca_file: @ca_cert
      )
    end
    opts
  end

  # Add an entry to the current batch returns false if the batch is full
  # and the entry can't be added.
  def add_entry_to_batch(e)
    line = e.entry['line']
    # we don't want to send empty lines.
    return true if line.to_s.strip.empty?

    if @batch.nil?
      @batch = Batch.new(e)
      return true
    end

    if @batch.size_bytes_after(line) > @batch_size
      return false
    end
    @batch.add(e)
    return true
  end

  # True when a batch exists and has been open for at least @batch_wait seconds.
  def is_batch_expired
    return !@batch.nil? && @batch.age() >= @batch_wait
  end

  ## Receives logstash events
  public
  def receive(event)
    @entries << Entry.new(event, @message_field, @include_fields)
  end

  # Signals the worker threads to stop, waits for them, and flushes any
  # partially-filled batch before shutdown.
  def close
    @entries.close
    @mutex.synchronize do
      @stop = true
    end
    @batch_wait_thread.join
    @batch_size_thread.join

    # if by any chance we still have a forming batch, we need to send it.
    send(@batch) if !@batch.nil?
    @batch = nil
  end

  # Serializes a batch and pushes it to Loki.
  # NOTE(review): this shadows Object#send for this class; kept for
  # backward compatibility with existing callers.
  def send(batch)
    payload = batch.to_json
    res = loki_http_request(payload)
    if res.is_a?(Net::HTTPSuccess)
      @logger.debug("Successfully pushed data to loki")
    else
      @logger.debug("failed payload", :payload => payload)
    end
  end

  # Performs the HTTP POST to Loki, retrying up to @retries times with
  # exponential backoff (doubling from @min_delay, capped at @max_delay).
  # Retries on raised errors, HTTP 429 and 5xx responses; returns the last
  # Net::HTTP response (or nil if no response was ever received).
  def loki_http_request(payload)
    req = Net::HTTP::Post.new(
      @uri.request_uri
    )
    req.add_field('Content-Type', 'application/json')
    req.add_field('X-Scope-OrgID', @tenant_id) if @tenant_id
    req['User-Agent']= 'loki-logstash'
    req.basic_auth(@username, @password) if @username
    req.body = payload

    opts = ssl_opts(@uri)

    @logger.debug("sending #{req.body.length} bytes to loki")
    retry_count = 0
    delay = @min_delay
    begin
      res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
        http.request(req)
      }
      return res if !res.nil? && res.code.to_i != 429 && res.code.to_i.div(100) != 5
      raise StandardError.new res
    rescue StandardError => e
      retry_count += 1
      @logger.warn("Failed to send batch, attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
      if retry_count < @retries
        sleep delay
        if delay * 2 <= @max_delay
          delay = delay * 2
        else
          delay = @max_delay
        end
        retry
      else
        @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
        return res
      end
    end
  end
end
| 27.513011 | 154 | 0.642346 |
87502f1a731b4c2095bbac9f4fd5a24956f8b80a | 124 | class RemoveDeckAccessFlag < ActiveRecord::Migration[4.2]
def change
remove_column :users, :has_deck_access
end
end
| 20.666667 | 57 | 0.774194 |
39077aea7ea3a72ad004cf6549b5232314e05a8d | 13,782 | module ActiveRecord
# = Active Record Aggregations
module Aggregations # :nodoc:
extend ActiveSupport::Concern
def clear_aggregation_cache #:nodoc:
@aggregation_cache.clear if persisted?
end
# Active Record implements aggregation through a macro-like class method called +composed_of+
# for representing attributes as value objects. It expresses relationships like "Account [is]
# composed of Money [among other things]" or "Person [is] composed of [an] address". Each call
# to the macro adds a description of how the value objects are created from the attributes of
# the entity object (when the entity is initialized either as a new object or from finding an
# existing object) and how it can be turned back into attributes (when the entity is saved to
# the database).
#
# class Customer < ActiveRecord::Base
# composed_of :balance, class_name: "Money", mapping: %w(balance amount)
# composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
# end
#
# The customer class now has the following methods to manipulate the value objects:
# * <tt>Customer#balance, Customer#balance=(money)</tt>
# * <tt>Customer#address, Customer#address=(address)</tt>
#
# These methods will operate with value objects like the ones described below:
#
# class Money
# include Comparable
# attr_reader :amount, :currency
# EXCHANGE_RATES = { "USD_TO_DKK" => 6 }
#
# def initialize(amount, currency = "USD")
# @amount, @currency = amount, currency
# end
#
# def exchange_to(other_currency)
# exchanged_amount = (amount * EXCHANGE_RATES["#{currency}_TO_#{other_currency}"]).floor
# Money.new(exchanged_amount, other_currency)
# end
#
# def ==(other_money)
# amount == other_money.amount && currency == other_money.currency
# end
#
# def <=>(other_money)
# if currency == other_money.currency
# amount <=> other_money.amount
# else
# amount <=> other_money.exchange_to(currency).amount
# end
# end
# end
#
# class Address
# attr_reader :street, :city
# def initialize(street, city)
# @street, @city = street, city
# end
#
# def close_to?(other_address)
# city == other_address.city
# end
#
# def ==(other_address)
# city == other_address.city && street == other_address.street
# end
# end
#
# Now it's possible to access attributes from the database through the value objects instead. If
# you choose to name the composition the same as the attribute's name, it will be the only way to
# access that attribute. That's the case with our +balance+ attribute. You interact with the value
# objects just like you would with any other attribute:
#
# customer.balance = Money.new(20) # sets the Money value object and the attribute
# customer.balance # => Money value object
# customer.balance.exchange_to("DKK") # => Money.new(120, "DKK")
# customer.balance > Money.new(10) # => true
# customer.balance == Money.new(20) # => true
# customer.balance < Money.new(5) # => false
#
# Value objects can also be composed of multiple attributes, such as the case of Address. The order
# of the mappings will determine the order of the parameters.
#
# customer.address_street = "Hyancintvej"
# customer.address_city = "Copenhagen"
# customer.address # => Address.new("Hyancintvej", "Copenhagen")
#
# customer.address_street = "Vesterbrogade"
# customer.address # => Address.new("Hyancintvej", "Copenhagen")
# customer.clear_aggregation_cache
# customer.address # => Address.new("Vesterbrogade", "Copenhagen")
#
# customer.address = Address.new("May Street", "Chicago")
# customer.address_street # => "May Street"
# customer.address_city # => "Chicago"
#
# == Writing value objects
#
# Value objects are immutable and interchangeable objects that represent a given value, such as
# a Money object representing $5. Two Money objects both representing $5 should be equal (through
# methods such as <tt>==</tt> and <tt><=></tt> from Comparable if ranking makes sense). This is
# unlike entity objects where equality is determined by identity. An entity class such as Customer can
# easily have two different objects that both have an address on Hyancintvej. Entity identity is
# determined by object or relational unique identifiers (such as primary keys). Normal
# ActiveRecord::Base classes are entity objects.
#
# It's also important to treat the value objects as immutable. Don't allow the Money object to have
# its amount changed after creation. Create a new Money object with the new value instead. The
# Money#exchange_to method is an example of this. It returns a new value object instead of changing
# its own values. Active Record won't persist value objects that have been changed through means
# other than the writer method.
#
# The immutable requirement is enforced by Active Record by freezing any object assigned as a value
# object. Attempting to change it afterwards will result in a RuntimeError.
#
# Read more about value objects on http://c2.com/cgi/wiki?ValueObject and on the dangers of not
# keeping value objects immutable on http://c2.com/cgi/wiki?ValueObjectsShouldBeImmutable
#
# == Custom constructors and converters
#
# By default value objects are initialized by calling the <tt>new</tt> constructor of the value
# class passing each of the mapped attributes, in the order specified by the <tt>:mapping</tt>
# option, as arguments. If the value class doesn't support this convention then +composed_of+ allows
# a custom constructor to be specified.
#
# When a new value is assigned to the value object, the default assumption is that the new value
# is an instance of the value class. Specifying a custom converter allows the new value to be automatically
# converted to an instance of value class if necessary.
#
# For example, the NetworkResource model has +network_address+ and +cidr_range+ attributes that should be
# aggregated using the NetAddr::CIDR value class (http://www.ruby-doc.org/gems/docs/n/netaddr-1.5.0/NetAddr/CIDR.html).
# The constructor for the value class is called +create+ and it expects a CIDR address string as a parameter.
# New values can be assigned to the value object using either another NetAddr::CIDR object, a string
# or an array. The <tt>:constructor</tt> and <tt>:converter</tt> options can be used to meet
# these requirements:
#
# class NetworkResource < ActiveRecord::Base
# composed_of :cidr,
# class_name: 'NetAddr::CIDR',
# mapping: [ %w(network_address network), %w(cidr_range bits) ],
# allow_nil: true,
# constructor: Proc.new { |network_address, cidr_range| NetAddr::CIDR.create("#{network_address}/#{cidr_range}") },
# converter: Proc.new { |value| NetAddr::CIDR.create(value.is_a?(Array) ? value.join('/') : value) }
# end
#
# # This calls the :constructor
# network_resource = NetworkResource.new(network_address: '192.168.0.1', cidr_range: 24)
#
# # These assignments will both use the :converter
# network_resource.cidr = [ '192.168.2.1', 8 ]
# network_resource.cidr = '192.168.0.1/24'
#
# # This assignment won't use the :converter as the value is already an instance of the value class
# network_resource.cidr = NetAddr::CIDR.create('192.168.2.1/8')
#
# # Saving and then reloading will use the :constructor on reload
# network_resource.save
# network_resource.reload
#
# == Finding records by a value object
#
# Once a +composed_of+ relationship is specified for a model, records can be loaded from the database
# by specifying an instance of the value object in the conditions hash. The following example
# finds all customers with +balance_amount+ equal to 20 and +balance_currency+ equal to "USD":
#
# Customer.where(balance: Money.new(20, "USD"))
#
module ClassMethods
  # Adds reader and writer methods for manipulating a value object:
  # <tt>composed_of :address</tt> adds <tt>address</tt> and <tt>address=(new_address)</tt> methods.
  #
  # Options are:
  # * <tt>:class_name</tt> - Specifies the class name of the association. Use it only if that name
  #   can't be inferred from the part id. So <tt>composed_of :address</tt> will by default be linked
  #   to the Address class, but if the real class name is CompanyAddress, you'll have to specify it
  #   with this option.
  # * <tt>:mapping</tt> - Specifies the mapping of entity attributes to attributes of the value
  #   object. Each mapping is represented as an array where the first item is the name of the
  #   entity attribute and the second item is the name of the attribute in the value object. The
  #   order in which mappings are defined determines the order in which attributes are sent to the
  #   value class constructor.
  # * <tt>:allow_nil</tt> - Specifies that the value object will not be instantiated when all mapped
  #   attributes are +nil+. Setting the value object to +nil+ has the effect of writing +nil+ to all
  #   mapped attributes.
  #   This defaults to +false+.
  # * <tt>:constructor</tt> - A symbol specifying the name of the constructor method or a Proc that
  #   is called to initialize the value object. The constructor is passed all of the mapped attributes,
  #   in the order that they are defined in the <tt>:mapping option</tt>, as arguments and uses them
  #   to instantiate a <tt>:class_name</tt> object.
  #   The default is <tt>:new</tt>.
  # * <tt>:converter</tt> - A symbol specifying the name of a class method of <tt>:class_name</tt>
  #   or a Proc that is called when a new value is assigned to the value object. The converter is
  #   passed the single value that is used in the assignment and is only called if the new value is
  #   not an instance of <tt>:class_name</tt>. If <tt>:allow_nil</tt> is set to true, the converter
  #   can return nil to skip the assignment.
  #
  # Option examples:
  #   composed_of :temperature, mapping: %w(reading celsius)
  #   composed_of :balance, class_name: "Money", mapping: %w(balance amount),
  #                         converter: Proc.new { |balance| balance.to_money }
  #   composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
  #   composed_of :gps_location
  #   composed_of :gps_location, allow_nil: true
  #   composed_of :ip_address,
  #               class_name: 'IPAddr',
  #               mapping: %w(ip to_i),
  #               constructor: Proc.new { |ip| IPAddr.new(ip, Socket::AF_INET) },
  #               converter: Proc.new { |ip| ip.is_a?(Integer) ? IPAddr.new(ip, Socket::AF_INET) : IPAddr.new(ip.to_s) }
  #
  def composed_of(part_id, options = {})
    options.assert_valid_keys(:class_name, :mapping, :allow_nil, :constructor, :converter)
    name = part_id.id2name
    class_name = options[:class_name] || name.camelize
    # A mapping may be supplied as a single [entity_attr, value_attr] pair;
    # normalize it to an array of pairs.
    mapping = options[:mapping] || [ name, name ]
    mapping = [ mapping ] unless mapping.first.is_a?(Array)
    allow_nil = options[:allow_nil] || false
    constructor = options[:constructor] || :new
    converter = options[:converter]
    reader_method(name, class_name, mapping, allow_nil, constructor)
    writer_method(name, class_name, mapping, allow_nil, converter)
    # Register the aggregation so it can be introspected later via reflection.
    reflection = ActiveRecord::Reflection.create(:composed_of, part_id, nil, options, self)
    Reflection.add_aggregate_reflection self, part_id, reflection
  end
  private
    # Defines the aggregation reader. The value object is built lazily from the
    # mapped entity attributes (via the configured constructor) and memoized in
    # @aggregation_cache. With +allow_nil+, no object is instantiated while all
    # mapped attributes are nil and the reader returns nil instead.
    def reader_method(name, class_name, mapping, allow_nil, constructor)
      define_method(name) do
        # Build only when not already cached and, if allow_nil is set, only when
        # at least one mapped attribute holds a value.
        if @aggregation_cache[name].nil? && (!allow_nil || mapping.any? {|key, _| !_read_attribute(key).nil? })
          attrs = mapping.collect {|key, _| _read_attribute(key)}
          # A Proc constructor is invoked directly; a symbol names a class
          # method on the value class.
          object = constructor.respond_to?(:call) ?
            constructor.call(*attrs) :
            class_name.constantize.send(constructor, *attrs)
          @aggregation_cache[name] = object
        end
        @aggregation_cache[name]
      end
    end
    # Defines the aggregation writer. Input that is not already an instance of
    # the value class is first run through the converter (if any); the resulting
    # value object's attributes are then copied back into the mapped entity
    # attributes and the object itself is cached.
    def writer_method(name, class_name, mapping, allow_nil, converter)
      define_method("#{name}=") do |part|
        klass = class_name.constantize
        # A Hash is treated as positional constructor arguments keyed 1..n;
        # the sanity check requires the hash size to equal its largest key.
        if part.is_a?(Hash)
          raise ArgumentError unless part.size == part.keys.max
          part = klass.new(*part.sort.map(&:last))
        end
        # Apply the converter only to values that are not already value objects.
        unless part.is_a?(klass) || converter.nil? || part.nil?
          part = converter.respond_to?(:call) ? converter.call(part) : klass.send(converter, part)
        end
        if part.nil? && allow_nil
          # Assigning nil with allow_nil clears every mapped attribute.
          mapping.each { |key, _| self[key] = nil }
          @aggregation_cache[name] = nil
        else
          mapping.each { |key, value| self[key] = part.send(value) }
          # Freeze the cached object: value objects are treated as immutable so
          # the cache cannot drift out of sync with the persisted attributes.
          @aggregation_cache[name] = part.freeze
        end
      end
    end
end
end
end
| 51.425373 | 135 | 0.646205 |
bfe04aceeb0bb51821f61127a9c3f563832d03b3 | 5,705 | # TL;DR: YOU SHOULD DELETE THIS FILE
#
# This file was generated by Cucumber-Rails and is only here to get you a head start
# These step definitions are thin wrappers around the Capybara/Webrat API that lets you
# visit pages, interact with widgets and make assertions about page content.
#
# If you use these step definitions as basis for your features you will quickly end up
# with features that are:
#
# * Hard to maintain
# * Verbose to read
#
# A much better approach is to write your own higher level step definitions, following
# the advice in the following blog posts:
#
# * http://benmabey.com/2008/05/19/imperative-vs-declarative-scenarios-in-user-stories.html
# * http://dannorth.net/2011/01/31/whose-domain-is-it-anyway/
# * http://elabs.se/blog/15-you-re-cuking-it-wrong
#
require 'uri'
require 'cgi'
require File.expand_path(File.join(File.dirname(__FILE__), "..", "support", "paths"))
require File.expand_path(File.join(File.dirname(__FILE__), "..", "support", "selectors"))
module WithinHelpers
  # Executes the given block inside the Capybara scope that +locator+
  # resolves to (via #selector_for); with no locator the block simply runs
  # in the current scope.
  def with_scope(locator)
    return yield unless locator
    within(*selector_for(locator)) { yield }
  end
end
World(WithinHelpers)
# Single-line step scoper: re-runs the captured step text inside the scope
# resolved from +parent+, e.g. `When I press "OK" within the dialog`.
When /^(.*) within (.*[^:])$/ do |step, parent|
  # `step step` parses as a call to Cucumber's #step helper with the captured
  # text as argument: an identifier followed by an argument is always a method
  # call in Ruby, even though a local variable named `step` is in scope.
  with_scope(parent) { step step }
end
# Multi-line step scoper: same idea, but for steps carrying a table or
# docstring argument (note the trailing colon in the pattern).
When /^(.*) within (.*[^:]):$/ do |step, parent, table_or_string|
  with_scope(parent) { step "#{step}:", table_or_string }
end
# Navigation steps: page names are translated to URL paths by #path_to
# (see features/support/paths.rb).
Given /^(?:|I )am on (.+)$/ do |target_page|
  visit(path_to(target_page))
end

When /^(?:|I )go to (.+)$/ do |target_page|
  visit(path_to(target_page))
end

# Widget-interaction steps: thin wrappers around the Capybara DSL.
When /^(?:|I )press "([^"]*)"$/ do |button_locator|
  click_button(button_locator)
end

When /^(?:|I )follow "([^"]*)"$/ do |link_locator|
  click_link(link_locator)
end

When /^(?:|I )fill in "([^"]*)" with "([^"]*)"$/ do |field_locator, text|
  fill_in(field_locator, :with => text)
end

When /^(?:|I )fill in "([^"]*)" for "([^"]*)"$/ do |text, field_locator|
  fill_in(field_locator, :with => text)
end
# Use this to fill in an entire form with data from a table. Example:
#
#   When I fill in the following:
#     | Account Number | 5002       |
#     | Expiry date    | 2009-11-01 |
#     | Note           | Nice guy   |
#     | Wants Email?   |            |
#
# TODO: Add support for checkbox, select or option
# based on naming conventions.
#
When /^(?:|I )fill in the following:$/ do |fields|
  fields.rows_hash.each do |name, value|
    # Invoke the single-field step through Cucumber's #step helper (also used
    # by the scopers above); calling nested steps via `When` inside a step
    # definition is deprecated and removed in later Cucumber releases.
    step %{I fill in "#{name}" with "#{value}"}
  end
end
# Selection / toggling / upload steps: thin wrappers around the Capybara DSL.
When /^(?:|I )select "([^"]*)" from "([^"]*)"$/ do |option_text, select_box|
  select(option_text, :from => select_box)
end

When /^(?:|I )check "([^"]*)"$/ do |checkbox|
  check(checkbox)
end

When /^(?:|I )uncheck "([^"]*)"$/ do |checkbox|
  uncheck(checkbox)
end

When /^(?:|I )choose "([^"]*)"$/ do |radio_button|
  choose(radio_button)
end

When /^(?:|I )attach the file "([^"]*)" to "([^"]*)"$/ do |file_path, field_locator|
  attach_file(field_locator, File.expand_path(file_path))
end
# Page-content assertions. Each step supports both assertion styles: RSpec's
# `should` when the page object responds to it, Test::Unit's `assert`
# otherwise.
Then /^(?:|I )should see "([^"]*)"$/ do |text|
  page.respond_to?(:should) ? page.should(have_content(text)) : assert(page.has_content?(text))
end

Then /^(?:|I )should see \/([^\/]*)\/$/ do |regexp|
  pattern = Regexp.new(regexp)
  page.respond_to?(:should) ? page.should(have_xpath('//*', :text => pattern)) : assert(page.has_xpath?('//*', :text => pattern))
end

Then /^(?:|I )should not see "([^"]*)"$/ do |text|
  page.respond_to?(:should) ? page.should(have_no_content(text)) : assert(page.has_no_content?(text))
end

Then /^(?:|I )should not see \/([^\/]*)\/$/ do |regexp|
  pattern = Regexp.new(regexp)
  page.respond_to?(:should) ? page.should(have_no_xpath('//*', :text => pattern)) : assert(page.has_no_xpath?('//*', :text => pattern))
end
# Form-field value assertions; scoped via the optional "within" capture.
Then /^the "([^"]*)" field(?: within (.*))? should contain "([^"]*)"$/ do |field, parent, value|
  with_scope(parent) do
    input = find_field(field)
    # Textareas expose their contents as node text rather than a value attribute.
    actual = input.tag_name == 'textarea' ? input.text : input.value
    if actual.respond_to?(:should)
      actual.should =~ /#{value}/
    else
      assert_match(/#{value}/, actual)
    end
  end
end

Then /^the "([^"]*)" field(?: within (.*))? should not contain "([^"]*)"$/ do |field, parent, value|
  with_scope(parent) do
    input = find_field(field)
    actual = input.tag_name == 'textarea' ? input.text : input.value
    if actual.respond_to?(:should_not)
      actual.should_not =~ /#{value}/
    else
      assert_no_match(/#{value}/, actual)
    end
  end
end
# Checkbox state assertions, reading the node's 'checked' attribute; scoped
# via the optional "within" capture.
Then /^the "([^"]*)" checkbox(?: within (.*))? should be checked$/ do |label, parent|
  with_scope(parent) do
    checked = find_field(label)['checked']
    checked.respond_to?(:should) ? checked.should(be_true) : assert(checked)
  end
end

Then /^the "([^"]*)" checkbox(?: within (.*))? should not be checked$/ do |label, parent|
  with_scope(parent) do
    checked = find_field(label)['checked']
    checked.respond_to?(:should) ? checked.should(be_false) : assert(!checked)
  end
end
# Asserts that the browser is on the page named by +page_name+ (resolved
# through #path_to), comparing URL paths only.
Then /^(?:|I )should be on (.+)$/ do |page_name|
  actual_path = URI.parse(current_url).path
  expected_path = path_to(page_name)
  if actual_path.respond_to?(:should)
    actual_path.should == expected_path
  else
    assert_equal(expected_path, actual_path)
  end
end

# Asserts the current URL's query string matches the given table; expected
# values are comma-separated lists, mirroring CGI.parse's array values.
Then /^(?:|I )should have the following query string:$/ do |expected_pairs|
  query_string = URI.parse(current_url).query
  actual_params = query_string ? CGI.parse(query_string) : {}
  expected_params = {}
  expected_pairs.rows_hash.each_pair { |key, val| expected_params[key] = val.split(',') }
  if actual_params.respond_to?(:should)
    actual_params.should == expected_params
  else
    assert_equal(expected_params, actual_params)
  end
end

# Debugging helper: writes the current page to disk and opens it in a browser.
Then /^show me the page$/ do
  save_and_open_page
end
| 26.910377 | 100 | 0.638563 |
1c58c3e26c8d8f39cc44bc1056bcffaba5d8f2d7 | 1,505 |
module Enocean
  module Esp3
    # Radio telegram — the main ESP3 packet type carrying 4BS and RPS data.
    #
    # Data field layout (byte offsets):
    #   0     RORG (type of radio package)
    #   1-4   radio data
    #   next 4 bytes: sender id
    #   then: status field
    class Radio < BasePacket
      class << self
        # ESP3 packet-type identifier for radio telegrams.
        def packet_type
          0x01
        end

        # Dispatches on the RORG byte and builds the matching concrete
        # telegram (Rps or FourBS); returns nil for unrecognised RORGs.
        def from_data(data, optional_data = [])
          rorg_byte = data[0]
          if rorg_byte == Rps.rorg
            Rps.from_data(data, optional_data)
          elsif rorg_byte == FourBS.rorg
            FourBS.from_data(data, optional_data)
          end
        end
      end

      attr_accessor :sender_id, :radio_data, :rorg, :flags, :status

      # Initializes the packet with the radio packet type and a broadcast
      # sender id (0xff:0xff:0xff:0xff) plus a zeroed status field.
      def initialize(data, optional_data)
        super(Radio.packet_type, data, optional_data)
        self.sender_id = [0xff, 0xff, 0xff, 0xff]
        self.status = 0
      end

      # Serialisation of the data field is telegram-specific; subclasses
      # must override this.
      def build_data
        raise "This needs to be defined by the subclass"
      end

      # Human-readable dump of the telegram for logging/inspection.
      def content
        <<-EOT
**** Received at: #{Time.now} ******
**** Data ****
Choice : 0x#{rorg.to_s(16)}
Data : 0x#{radio_data.flatten.map { |byte| byte.to_s(16) }.join("-")}
Sender ID : 0x#{sender_id.map { |byte| byte.to_s(16) }.join(":")}
Status : 0x#{status.to_s(16)}
**** Optional Data ****
        EOT
      end
    end
  end
end
| 24.672131 | 85 | 0.506312 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.