hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
61058e9362606aca61d6dc8fd4c01e8d9edf889f | 356 | # frozen_string_literal: true
module Canary
  module Types
    module InputObjects
      # GraphQL input object describing how to sort character favorites:
      # a sort field plus an order direction, both required.
      class CharacterFavoriteOrder < Canary::Types::InputObjects::Base
        argument :field, Canary::Types::Enums::CharacterFavoriteOrderField, required: true
        argument :direction, Canary::Types::Enums::OrderDirection, required: true
      end
    end
  end
end
| 27.384615 | 90 | 0.727528 |
1cd33c37a3484ed76f70257caa6d670cd516fa9a | 8,897 | # frozen_string_literal: true
require 'curses'
require 'forwardable'
module Fasten
  module UI
    # Curses-based terminal UI for a Fasten runner. Each call to #update
    # draws one frame: a title bar, a worker/job status line, a progress
    # bar and the task list; keyboard input can resize the job pool,
    # select tasks and pause/resume/quit the runner.
    class Curses
      include ::Curses
      extend Forwardable

      # Expose runner state directly on the UI object.
      def_delegators :runner, :workers, :tasks
      def_delegators :runner, :name, :jobs, :jobs=, :state, :state=

      attr_accessor :n_rows, :n_cols, :selected, :sel_index, :clear_needed, :message, :runner

      # Glyph sets used for animation: a spinner for running tasks, moon
      # phases for elapsed-vs-average time, and block eighths for the
      # progress bar.
      SPINNER_STR = '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏'
      SPINNER_LEN = SPINNER_STR.length
      MOON_STR = '🌑🌒🌓🌔🌕'
      MOON_LEN = MOON_STR.length
      PROGRESSBAR_STR = ' ▏▎▍▌▋▊▉'
      PROGRESSBAR_LEN = PROGRESSBAR_STR.length

      # @param runner [Object] object answering workers/tasks/jobs/state etc.
      def initialize(runner:)
        @runner = runner
      end

      # Renders one frame: lazy curses setup, input handling, then redraw.
      def update
        setup unless @setup_done
        ui_keyboard
        clear if clear_needed
        draw_title
        ui_jobs
        ui_tasks
        refresh
        self.clear_needed = false
      end

      # Top row: banner on the left, runner name + PID centered, clock right.
      def draw_title
        ui_text_aligned(0, :left, 'Fasten your seatbelts!')
        ui_text_aligned(0, :center, "#{name} #{$PID}")
        ui_text_aligned(0, :right, Time.new.to_s)
      end

      # Tears down curses; resetting @setup_done forces re-setup next frame.
      def cleanup
        close_screen
        @setup_done = nil
      end

      # Initialises curses (non-blocking keypad input, no echo, hidden
      # cursor) and caches the screen dimensions.
      def setup
        init_screen
        self.n_rows = lines
        self.n_cols = cols
        stdscr.keypad = true
        stdscr.nodelay = true
        setup_color
        noecho
        cbreak
        nonl
        curs_set 0
        @setup_done = true
      end

      # Color pairs on the terminal's default background (-1):
      # 1=yellow, 2=green, 3=red, 4=white.
      def setup_color
        start_color
        use_default_colors
        init_pair 1, Curses::COLOR_YELLOW, -1
        init_pair 2, Curses::COLOR_GREEN, -1
        init_pair 3, Curses::COLOR_RED, -1
        init_pair 4, Curses::COLOR_WHITE, -1
      end

      # Writes +str+ at +row+ aligned left/center/right, with optional
      # curses attributes. Returns the string length (used by callers to
      # position subsequent output).
      def ui_text_aligned(row, align, str, attrs = nil)
        if align == :center
          setpos row, (n_cols - str.length) / 2
        elsif align == :right
          setpos row, n_cols - str.length
        else
          setpos row, 0
        end
        attrset attrs if attrs
        addstr str
        attroff attrs if attrs
        str.length
      end

      # Requests a full-screen clear on the next #update.
      def force_clear
        self.clear_needed = true
      end

      # Polls the keyboard (non-blocking): arrows adjust jobs / task
      # selection, q/p/r change the runner state. Any key clears the
      # current message and forces a redraw.
      def ui_keyboard
        return unless (key = stdscr.getch)

        self.message = nil
        if key == Curses::Key::LEFT
          if jobs <= 1
            self.message = "Can't remove 1 worker left, press [P] to pause"
          else
            self.jobs -= 1
            self.message = "Decreasing jobs to #{jobs}"
          end
        elsif key == Curses::Key::RIGHT
          self.jobs += 1
          self.message = "Increasing jobs to #{jobs}"
        elsif key == Curses::Key::DOWN
          self.sel_index = sel_index ? [sel_index + 1, tasks.count - 1].min : 0
          self.selected = tasks[sel_index]
        elsif key == Curses::Key::UP
          self.sel_index = sel_index ? [sel_index - 1, 0].max : tasks.count - 1
          self.selected = tasks[sel_index]
        elsif key == 'q'
          self.message = 'Will quit when running tasks end'
          self.state = :QUITTING
        elsif key == 'p'
          self.message = 'Will pause when running tasks end'
          self.state = :PAUSING
        elsif key == 'r'
          self.state = :RUNNING
        end

        force_clear
      end

      # One-line summary of running/idle/waiting counts and the job pool.
      def ui_jobs_summary
        running = tasks.running.count
        waiting = tasks.waiting.count
        working = workers.count
        idle = working - running

        "Procs running: #{running} idle: #{idle} waiting: #{waiting} #{runner.use_threads ? 'threads' : 'processes'}: #{jobs}"
      end

      # Row 1: jobs summary followed by one R/_ cell per worker.
      def ui_jobs
        l = ui_text_aligned(1, :left, ui_jobs_summary) + 1
        workers.each_with_index do |worker, index|
          setpos 1, l + index

          attrs = worker.running? ? A_STANDOUT : color_pair(4) | A_DIM
          attrset attrs
          addstr worker.running? ? 'R' : '_'
          attroff attrs
        end

        ui_state
      end

      # Right side of row 1: runner state (color-coded) and, if present,
      # the transient keyboard message just to its left.
      def ui_state
        if runner.running?
          attrs = color_pair(2)
        elsif runner.pausing?
          attrs = color_pair(1) | A_BLINK | A_STANDOUT
        elsif runner.paused?
          attrs = color_pair(1) | A_STANDOUT
        elsif runner.quitting?
          attrs = color_pair(3) | A_BLINK | A_STANDOUT
        end
        l = ui_text_aligned(1, :right, state.to_s, attrs)

        return unless message

        setpos 1, n_cols - l - message.length - 1
        addstr message
      end

      # Draws a progress bar in columns col_ini..col_fin showing
      # count/total: full cells, one fractional-eighth cell, then dots.
      def ui_progressbar(row, col_ini, col_fin, count, total)
        slice = total.to_f / (col_fin - col_ini + 1)

        col_ini.upto col_fin do |col|
          setpos row, col
          count -= slice
          if count >= 0
            addstr PROGRESSBAR_STR[-1]
          elsif count > -slice
            # Partial cell: pick the matching eighth-block glyph.
            addstr PROGRESSBAR_STR[(count * PROGRESSBAR_LEN / slice) % PROGRESSBAR_LEN]
          else
            addstr '.'
          end
        end
      end

      # Status glyph for a task: spinner frame, ✘ fail, ✔ done, … waiting.
      def ui_task_icon(task)
        case task.state
        when :RUNNING
          SPINNER_STR[task.worker&.spinner]
        when :FAIL
          '✘'
        when :DONE
          '✔'
        when :WAIT
          '…'
        else
          ' '
        end
      end

      # Elapsed time for a started task; when an average is known, also a
      # moon-phase glyph showing elapsed/average progress. Returns nil for
      # tasks that have not started.
      def ui_task_clock(task, cur, avg)
        return unless task.ini

        dif = cur - task.ini
        avg = avg.to_f
        if task.ini && avg.positive?
          percent = dif / avg
          index = (percent * MOON_LEN).to_i
          index = MOON_LEN - 1 if index > MOON_LEN - 1

          format ' %.2f s %s ', dif, MOON_STR[index]
        else
          format ' %.2f s ', dif
        end
      end

      # Curses attributes for a task row by state; the selected task is
      # additionally rendered in reverse video.
      def ui_task_color(task)
        rev = task == selected ? A_REVERSE : 0

        case task.state
        when :RUNNING
          color_pair(1) | A_TOP | rev
        when :DONE
          color_pair(2) | A_TOP | rev
        when :FAIL
          color_pair(3) | A_TOP | rev
        when :WAIT
          A_TOP | rev
        else
          color_pair(4) | A_DIM | rev
        end
      end

      # Writes a task label at (y, x), truncated to the screen width.
      # Returns the column just past the written text.
      def ui_task_string(task, y, x, icon: nil, str: nil)
        setpos y, x

        attrs = ui_task_color(task)
        icon = ui_task_icon(task) if icon

        str ||= icon ? "#{icon} #{task}" : task.to_s
        delta = x + str.length - n_cols
        str = str[0...-delta] if delta.positive?

        attrset attrs if attrs
        addstr str
        attroff attrs if attrs

        x + str.length
      end

      # Row 2: overall progress (done/total, percent, elapsed and
      # estimated stats); rows 3+: one line per task with either its
      # dependency list or its timing info.
      def ui_tasks
        workers.each do |worker|
          worker.spinner = (worker.spinner + 1) % SPINNER_LEN if worker.running?
        end

        cur = Time.new
        count_done = tasks.done.count
        count_total = tasks.count
        tl = count_total.to_s.length
        # NOTE(review): when count_total is zero this is the literal
        # `false`, which %s would render as "false" — confirm intended.
        percentstr = count_total.positive? && " #{(count_done * 100 / count_total).to_i}%"
        elapsed_str = format ' %.2f s', (dif = cur - runner.ini) if runner.ini
        # Memoized "≈ avg ± err" string from the runner's history.
        @stat_str ||= begin
          @runner_last_avg = runner.last_avg
          if runner.last_avg && runner.last_err
            format '≈ %.2f s ± %.2f', runner.last_avg, runner.last_err
          elsif runner.last_avg
            format '≈ %.2f s', runner.last_avg
          end
        end
        end_str = [elapsed_str, @stat_str].compact.join(' ')

        # Progress is elapsed/average when history exists, else done/total.
        if @runner_last_avg
          a = dif
          b = @runner_last_avg
        else
          a = count_done
          b = count_total
        end

        col_ini = ui_text_aligned(2, :left, format("Tasks %#{tl}d/%d%s", count_done, count_total, percentstr)) + 1
        col_fin = n_cols - 1 - end_str.length
        ui_text_aligned(2, :right, end_str)
        ui_progressbar(2, col_ini, col_fin, a, b)

        max = 2
        list = tasks.sort_by.with_index { |x, index| [x.run_score, index] }
        # First pass: task names with icons; remember the widest column.
        list.each_with_index do |task, index|
          next if 3 + index >= n_rows

          x = ui_task_string(task, 3 + index, 2, icon: true)
          max = x if x > max
        end

        # Second pass: per-row detail — dependencies if any, else timings.
        list.each_with_index do |task, index|
          next if 3 + index >= n_rows

          if task.depends && !task.depends.empty?
            x = max
            x = ui_task_string(task, 3 + index, x, str: ':') + 1
            task.depends.each do |dependant_task|
              x = ui_task_string(dependant_task, 3 + index, x) + 1
            end
          else
            x = max + 1
            last_avg = task.last_avg
            last_err = task.last_err
            if task.dif
              str = format ' %.2f s', task.dif
            elsif last_avg && last_err
              str = format '%s ≈ %.2f s ± %.2f %s', ui_task_clock(task, cur, last_avg), last_avg, last_err, task.worker&.name
            elsif last_avg
              str = format '%s ≈ %.2f s %s', ui_task_clock(task, cur, last_avg), last_avg, task.worker&.name
            else
              str = ui_task_clock(task, cur, 0)
            end

            ui_task_string(task, 3 + index, x, str: str) if str
          end
        end
      end
    end
  end
end
| 27.207951 | 126 | 0.536698 |
f779023bd9b7a03b5e85d86cf990e5d39de2d22e | 846 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the Travel::Analytics::AirTraffic::BusiestPeriod namespace
# wrapper: #get must proxy to client.get with the right endpoint and
# pass the given params through (defaulting to an empty hash).
RSpec.describe Amadeus::Namespaces::Travel::Analytics::
               AirTraffic::BusiestPeriod do
  before do
    @client = double('Amadeus::Client')
    @api = Amadeus::Namespaces::Travel::Analytics::AirTraffic::
           BusiestPeriod.new(
             @client
           )
  end

  describe '.get' do
    it 'should call client.get with the right params' do
      params = { cityCode: 'MAD', period: '2017',
                 direction: Amadeus::Direction::ARRIVING }
      expect(@client).to receive(:get).with(
        '/v1/travel/analytics/air-traffic/busiest-period', params
      )
      @api.get(params)
    end

    it 'should default to an empty hash' do
      expect(@client).to receive(:get).with(
        '/v1/travel/analytics/air-traffic/busiest-period', {}
      )
      @api.get
    end
  end
end
| 23.5 | 65 | 0.628842 |
380c470e7eab49b89ed4d1a32a98c49ace963ae2 | 205 | class ShortLinksController < ApplicationController
def post
# TODO: add logging for analytics
redirect_to post_url(params[:id], host: 'www.rubygamedev.com'), status: :moved_permanently
end
end
| 29.285714 | 94 | 0.760976 |
21458308f07ea1762038fef251cc6c87da081e42 | 42 | # typed: true
# Sorbet/parser fixture: a one-argument method and a call site with a
# space before the argument parentheses plus an empty block. The exact
# spacing is the point of the fixture — do not reformat.
def fun(a); end;
fun (1) {}
| 10.5 | 16 | 0.547619 |
7a6f31d5cb7785536b7936693be347304e2d6ee9 | 249 | module BeachApiCore::UserInteractor
class Email
include Interactor
def call
return if context.user.confirmed?
BeachApiCore::UserMailer.register_confirm(context.application, context.user, false).deliver_later
end
end
end
| 22.636364 | 103 | 0.75502 |
79ef10e2c644c89e494c3a5fda4fcbbe6b755c48 | 246 | # typed: strict
# frozen_string_literal: true
# Dry::System container for the getpocket app: auto-registers components
# found under lib/ and exposes Launchy under the 'link_opener' key.
class Application < Dry::System::Container
  configure do |config|
    config.name = :getpocket
    config.auto_register = %w(lib)
  end

  load_paths!('lib')

  register('link_opener', Launchy)
end
| 17.571429 | 42 | 0.707317 |
87e29b06e622c40ad03dbb427f11b7b1e9819b29 | 125 | module Dmr
class ApplicationMailer < ActionMailer::Base
default from: '[email protected]'
layout 'mailer'
end
end
| 17.857143 | 46 | 0.72 |
38dd0e38af353ae8bd2378c3b4fbc93bb8436344 | 7,444 | #
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/log'
require 'chef/exceptions'
require 'chef/mixin/params_validate'
require 'chef/version_constraint/platform'
require 'chef/provider'
class Chef
  # Maps (platform, version, resource) to the provider class that should
  # implement that resource, via a nested registry hash.
  class Platform
    class << self
      attr_writer :platforms

      # Registry: { platform_sym => { version_or_:default => { resource_sym => provider } } }.
      # Lazily initialised with an empty global default map.
      def platforms
        @platforms ||= { default: {} }
      end

      include Chef::Mixin::ParamsValidate

      # Returns the merged provider map for +name+/+version+: global
      # defaults, overlaid by the platform's :default entry, overlaid by
      # every version-constrained entry whose constraint matches +version+.
      # String names are normalised (downcased, spaces -> underscores).
      def find(name, version)
        provider_map = platforms[:default].clone

        name_sym = name
        if name.kind_of?(String)
          name = name.downcase
          name.gsub!(/\s/, "_")
          name_sym = name.to_sym
        end

        if platforms.has_key?(name_sym)
          platform_versions = platforms[name_sym].select {|k, v| k != :default }
          if platforms[name_sym].has_key?(:default)
            provider_map.merge!(platforms[name_sym][:default])
          end
          platform_versions.each do |platform_version, provider|
            begin
              version_constraint = Chef::VersionConstraint::Platform.new(platform_version)
              if version_constraint.include?(version)
                Chef::Log.debug("Platform #{name.to_s} version #{version} found")
                provider_map.merge!(provider)
              end
            rescue Chef::Exceptions::InvalidPlatformVersion
              # Unparseable constraint keys are skipped, not fatal.
              Chef::Log.debug("Chef::Version::Comparable does not know how to parse the platform version: #{version}")
            end
          end
        end
        provider_map
      end

      # Extracts [platform, version] from a node's attributes, falling
      # back from platform/platform_version to os/os_version/os_release.
      # Raises ArgumentError when either cannot be determined.
      def find_platform_and_version(node)
        platform = nil
        version = nil

        if node[:platform]
          platform = node[:platform]
        elsif node.attribute?("os")
          platform = node[:os]
        end

        raise ArgumentError, "Cannot find a platform for #{node}" unless platform

        if node[:platform_version]
          version = node[:platform_version]
        elsif node[:os_version]
          version = node[:os_version]
        elsif node[:os_release]
          version = node[:os_release]
        end

        raise ArgumentError, "Cannot find a version for #{node}" unless version

        return platform, version
      end

      # Instantiates the provider for +resource+ (looked up via its run
      # context's node) and primes it with +action+.
      def provider_for_resource(resource, action=:nothing)
        node = resource.run_context && resource.run_context.node
        raise ArgumentError, "Cannot find the provider for a resource with no run context set" unless node

        provider = find_provider_for_node(node, resource).new(resource, resource.run_context)
        provider.action = action
        provider
      end

      # Removed API; kept only to fail loudly for old callers.
      def provider_for_node(node, resource_type)
        raise NotImplementedError, "#{self.class.name} no longer supports #provider_for_node"
      end

      # Provider class for +resource_type+ on the platform/version
      # derived from +node+.
      def find_provider_for_node(node, resource_type)
        platform, version = find_platform_and_version(node)
        find_provider(platform, version, resource_type)
      end

      # Registers a provider mapping. Accepts :platform (Symbol,
      # optional), :version (String, optional), :resource (Symbol) and
      # :provider; omitted platform/version fall back to the :default
      # buckets. Branches only create hash levels that don't exist yet.
      def set(args)
        validate(
          args,
          {
            :platform => {
              :kind_of => Symbol,
              :required => false,
            },
            :version => {
              :kind_of => String,
              :required => false,
            },
            :resource => {
              :kind_of => Symbol,
            },
            :provider => {
              :kind_of => [ String, Symbol, Class ],
            }
          }
        )

        if args.has_key?(:platform)
          if args.has_key?(:version)
            if platforms.has_key?(args[:platform])
              if platforms[args[:platform]].has_key?(args[:version])
                platforms[args[:platform]][args[:version]][args[:resource].to_sym] = args[:provider]
              else
                platforms[args[:platform]][args[:version]] = {
                  args[:resource].to_sym => args[:provider]
                }
              end
            else
              platforms[args[:platform]] = {
                args[:version] => {
                  args[:resource].to_sym => args[:provider]
                }
              }
            end
          else
            if platforms.has_key?(args[:platform])
              if platforms[args[:platform]].has_key?(:default)
                platforms[args[:platform]][:default][args[:resource].to_sym] = args[:provider]
              elsif args[:platform] == :default
                platforms[:default][args[:resource].to_sym] = args[:provider]
              else
                platforms[args[:platform]] = { :default => { args[:resource].to_sym => args[:provider] } }
              end
            else
              platforms[args[:platform]] = {
                :default => {
                  args[:resource].to_sym => args[:provider]
                }
              }
            end
          end
        else
          if platforms.has_key?(:default)
            platforms[:default][args[:resource].to_sym] = args[:provider]
          else
            platforms[:default] = {
              args[:resource].to_sym => args[:provider]
            }
          end
        end
      end

      # Resolution order: explicit provider on the resource, then the
      # platform registry, then a Chef::Provider constant matching the
      # resource's class name. Raises ProviderNotFound when all fail.
      def find_provider(platform, version, resource_type)
        provider_klass = explicit_provider(platform, version, resource_type) ||
                         platform_provider(platform, version, resource_type) ||
                         resource_matching_provider(platform, version, resource_type)

        raise Chef::Exceptions::ProviderNotFound, "Cannot find a provider for #{resource_type} on #{platform} version #{version}" if provider_klass.nil?

        provider_klass
      end

      private

      # A provider set directly on the resource instance, if any.
      def explicit_provider(platform, version, resource_type)
        resource_type.kind_of?(Chef::Resource) ? resource_type.provider : nil
      end

      # Lookup in the registry built by .set, keyed by resource name.
      def platform_provider(platform, version, resource_type)
        pmap = Chef::Platform.find(platform, version)
        rtkey = resource_type.kind_of?(Chef::Resource) ? resource_type.resource_name.to_sym : resource_type
        pmap.has_key?(rtkey) ? pmap[rtkey] : nil
      end

      include Chef::Mixin::ConvertToClassName

      # Last resort: constant lookup Chef::Provider::<ResourceClassName>,
      # with a deprecation warning. Returns nil when the constant is
      # missing (NameError swallowed) or resource_type isn't a resource.
      def resource_matching_provider(platform, version, resource_type)
        if resource_type.kind_of?(Chef::Resource)
          class_name = resource_type.class.name ? resource_type.class.name.split('::').last :
            convert_to_class_name(resource_type.resource_name.to_s)

          begin
            result = Chef::Provider.const_get(class_name)
            Chef::Log.warn("Class Chef::Provider::#{class_name} does not declare 'resource_name #{convert_to_snake_case(class_name).to_sym.inspect}'.")
            Chef::Log.warn("This will no longer work in Chef 13: you must use 'resource_name' to provide DSL.")
          rescue NameError
          end
        end
        result
      end
    end
  end
end
| 34.462963 | 153 | 0.586781 |
6a6b672e7370f5f2ba6cec91ee5f2ae7aed83dac | 2,134 | require './config/environment'
# Sinatra base controller for the fantasy-league app: session setup,
# the landing page route, and view/auth/team helpers.
class ApplicationController < Sinatra::Base

  configure do
    set :public_folder, 'public'
    set :views, 'app/views'
    enable :sessions
    set :sessions, true
    set :session_secret, ENV["SECRET"] # environment variable
  end

  get "/" do
    @landing_page = "Landing Page"
    erb :the_league
  end

  helpers do
    # Truthy (the user id) when someone is logged in.
    def logged_in?
      session[:user_id]
    end

    # Memoized lookup of the logged-in user; nil when not found.
    def current_user
      @user ||= User.find_by(id: session[:user_id])
    end

    # True when the :id route param does not belong to the session user.
    # NOTE(review): assumes session[:user_id] is stored as an Integer —
    # verify, otherwise this is always true.
    def not_users_stuff?
      session[:user_id] != params[:id].to_i
    end

    # Renders the landing page for anonymous visitors (does not halt the
    # request despite the name).
    def redirect_if_not_logged_in
      if !logged_in?
        erb :the_league
      end
    end

    # Sends logged-in users to their own profile page.
    def redirect_if_logged_in
      if logged_in?
        redirect "/users/#{current_user.id}"
      end
    end

    # True when any submitted param carries the sentinel value "invalid".
    def invalid?
      params.has_value?("invalid")
    end

    # Truthy when a team with the submitted name already exists.
    def same_name?
      Team.find_by_name(params[:name])
    end

    # NOTE(review): the || chain means a team passes when ANY condition
    # holds (e.g. blank name but non-blank location) — looks like it was
    # meant to be &&. Confirm against the call sites before changing.
    def valid_team?
      params[:name].strip != "" || params[:location].strip != "" || !same_name?
    end

    # Player ids in fixed position order: QB, RB, WR, TE, K.
    def player_id_array
      array = [params[:qb], params[:rb], params[:wr], params[:te], params[:k]]
    end

    # Reassigns the given players to +team+: first releases the team's
    # current roster, then claims each id. Bails to /invalid_team when a
    # sentinel "invalid" id is present.
    def assign_players_to_team(player_array, team)
      if player_array.include?("invalid")
        redirect "/invalid_team"
      else
        Player.all.where(team_id: team.id).each{|p| p.update(team_id: nil)}
        player_array.each{|id| Player.find_by_id(id).update(team_id: team.id)}
      end
    end

    # Takes over an existing team for the current user and rosters the
    # submitted players.
    def adopt_team(params)
      team = Team.find_by_id(params[:id])
      team.update(user_id: current_user.id, slogan: params[:slogan])
      assign_players_to_team(player_id_array, team)
    end

    # Builds a brand-new team owned by the current user, rosters the
    # submitted players and returns the team.
    def create_team_from_scratch(params)
      team = Team.new(
        name: params[:name],
        location: params[:location],
        slogan: params[:slogan],
        logo: "/logos/your_logo_here.png",
        user_id: current_user.id
      )
      team.save
      assign_players_to_team(player_id_array, team)
      team
      # redirect "/teams/#{ @team.id }"
      # else
      # redirect '/errors/invalid_team'
      # end
    end
  end
end
| 21.77551 | 79 | 0.604967 |
d53fc263c573121da47f6bff3474d1d3ba27f540 | 1,091 | class Ired < Formula
desc "Minimalistic hexadecimal editor designed to be used in scripts"
homepage "https://github.com/radare/ired"
url "https://github.com/radare/ired/archive/0.6.tar.gz"
sha256 "c15d37b96b1a25c44435d824bd7ef1f9aea9dc191be14c78b689d3156312d58a"
bottle do
cellar :any_skip_relocation
sha256 "e74475e811c38aa46bf3e7e69e0a264a2d30c08cfcbd801433e03c14944b8366" => :catalina
sha256 "7821d818af4c7d28b4cbf26c627685b77f18a1004369d4a57bee2582620008b7" => :mojave
sha256 "f6af714455a74c02769c9726855a92832e43c37c79a0c589a0c7744beac8956c" => :high_sierra
sha256 "5d10dfac87e4a4ca589a9fa76e8f9aff62625ef6358b6ab29360e79fe4a6dc35" => :sierra
sha256 "4fc558225913b629b144661385841e63ebb167beb9900475fadb0c0e886b4997" => :el_capitan
end
def install
system "make"
system "make", "install", "PREFIX=#{prefix}"
end
test do
input = <<~EOS
w"hello wurld"
s+7
r-4
w"orld"
q
EOS
pipe_output("#{bin}/ired test.text", input)
assert_equal "hello world", (testpath/"test.text").read.chomp
end
end
| 33.060606 | 93 | 0.749771 |
0385651eaa637a4875fd2a34634c33aebe3991c5 | 37,658 | # encoding: utf-8
include ApplicationHelper
include ListingsHelper
include TruncateHtmlHelper
# rubocop:disable ClassLength
class PersonMailer < ActionMailer::Base
include MailUtils
# Enable use of method to_date.
require 'active_support/core_ext'
require "truncate_html"
default :from => APP_CONFIG.sharetribe_mail_from_address
layout 'email'
add_template_helper(EmailTemplateHelper)
# This method will be called by the device_event_notifications rake task about
# every 30 minutes as is expected to deliver all new listing events like bookings
# create, delete, ... to the subscribers of the listing
def self.deliver_device_event_notifications(user_id=nil)
newEvents = ListingEvent.where(:send_to_subscribers => false)
listings_and_events = [] # get events per listing
newEvents.each do |event|
already_there = -1
listings_and_events.each_with_index do |listing_and_events, index|
if event.listing_id == listing_and_events[:listing].id
already_there = index
end
end
if already_there > -1
listings_and_events[already_there][:events] << event
else
listings_and_events << {listing: event.listing, events: [event]}
end
event.update_attribute :send_to_subscribers, true
end
# send one email per user and listing
listings_and_events.each do |listing_and_events|
listing_and_events[:listing].subscribers.each do |subscriber_to_notify|
if user_id == nil
PersonMailer.device_event_notifications(subscriber_to_notify, listing_and_events).deliver
elsif user_id == subscriber_to_notify.id # for test
PersonMailer.device_event_notifications(subscriber_to_notify, listing_and_events).deliver
end
end
end
end
def device_event_notifications(subscriber_to_notify, listing_and_events)
@recipient = Person.find(subscriber_to_notify)
@listing = listing_and_events[:listing]
@events = listing_and_events[:events]
@community = Community.first
with_locale(@recipient.locale, @community.locales.map(&:to_sym), @community.id) do
delivery_method = set_url_params_and_delivery_method("device_event")
@title_link_text = t("emails.community_updates.title_link_text", :community_name => @community.full_name(@recipient.locale))
subject = t("emails.device_event_notifications.subject")
# Send email
premailer_mail(:to => @recipient.confirmed_notification_emails_to,
:from => community_specific_sender(@community),
:subject => subject,
:delivery_method => delivery_method) do |format|
format.html { render :layout => 'email_blank_layout' }
end
end
end
# This task is expected to be run on daily scheduling
# It iterates through all overdue and currently active listings to send out
# user notifications. The overdue bookings are also stored into the overdue
# bookings table in the db.
# Optional the user_id can be given for test purposes
def self.deliver_device_return_notifications(user_id=nil)
# Go through all the overdue bookings to store only the latest overdue booking
# of one user for one specific listing into the users_to_notify_of_overdue array.
latestOverdueBookingsWithUsers = getLatestOverdueBookingsWithUsers(user_id)
# Store the latest overdue bookings into db
# wah: todo ...
# Get all currently active bookings
if user_id.nil?
activeBookings = Booking.getActiveBookings
else
activeBookings = Booking.getActiveBookingsOfUser(user_id)
end
# Get users to notify because of an overdue
users_to_notify_of_overdue = getUsersToNotifyOfOverdue(latestOverdueBookingsWithUsers, activeBookings)
# Get users for same day pre-notification
users_to_pre_notify_0day = getUsersToPreNotify(Date.today, activeBookings)
# Get users for 2 days pre-notification
users_to_pre_notify_2days = getUsersToPreNotify(Date.today + 2, activeBookings)
# Send emails to users with overdue bookings
users_to_notify_of_overdue.each do |user_to_notify|
PersonMailer.device_return_notifications(user_to_notify).deliver
end
# Send emails to users where the booking lasts longer than one day and they
# have to give back the device today
users_to_pre_notify_0day.each do |user_to_pre_notify_0day|
PersonMailer.pre_device_return_notification(user_to_pre_notify_0day).deliver
end
# Send emails to users where the booking lasts longer than 5 days and they
# have to give back the device the day after tomorrow
users_to_pre_notify_2days.each do |user_to_pre_notify_2days|
PersonMailer.pre_device_return_notification(user_to_pre_notify_2days).deliver
end
end
# Get all users and the devices to notify of an overdue
def self.getUsersToNotifyOfOverdue(latestOverdueBookingsWithUsers, activeBookings)
# Go through all active bookings
activeBookings.each do |activeBooking|
current_user = activeBooking.tx.starter
# Remove all listings of each user from the latestOverdueBookingsWithUsers
# array, where there is an active booking. This is
# because users who have active Bookings will get an extra notification
# for this listing. They do not have to give back the device yet.
latestOverdueBookingsWithUsers.each do |user_to_notify|
if user_to_notify[:user].id == activeBooking.tx.starter_id
user_to_notify[:listings].each do |listing|
if listing[:listing].id == activeBooking.tx.listing_id
user_to_notify[:listings].delete(listing)
if user_to_notify[:listings] == []
latestOverdueBookingsWithUsers.delete(user_to_notify)
end
end
end
end
end
end
latestOverdueBookingsWithUsers
end
# Get all users and their devices to pre notify for returning the devices
def self.getUsersToPreNotify(return_date, activeBookings)
users_to_pre_notify = []
# Go through all active bookings
activeBookings.each do |activeBooking|
current_user = activeBooking.tx.starter
user_already_there = false
if activeBooking.end_on == return_date
users_to_pre_notify.each do |pre_notify_user|
# If user already exists, just add the listing to the listing-array
if pre_notify_user[:user].id == current_user.id
pre_notify_user[:listings] << {
listing: activeBooking.tx.listing,
transaction_id: activeBooking.transaction_id,
return_on: activeBooking.end_on,
return_token: activeBooking.device_return_token
}
user_already_there = true
break
end
end
# Create new user-booking-pre-notification entry
unless user_already_there
new_listing = {
listing: activeBooking.tx.listing,
transaction_id: activeBooking.transaction_id,
return_on: activeBooking.end_on,
return_token: activeBooking.device_return_token
}
users_to_pre_notify << {
user: current_user,
listings: [new_listing]
}
end
end
end
users_to_pre_notify
end
# Go through all the overdue bookings to store only the latest overdue booking
# of one user for one specific listing into the users_to_notify_of_overdue array.
def self.getLatestOverdueBookingsWithUsers(user_id=nil)
if user_id.nil?
overdueBookings = Booking.getOverdueBookings
else
overdueBookings = Booking.getOverdueBookingsOfUser(user_id)
end
users_to_notify_of_overdue = []
overdueBookings.each do |booking|
current_user = booking.tx.starter
user_already_there = false
# Iterate through all existing users_to_notify_of_overdue
users_to_notify_of_overdue.each do |user_to_notify|
# If the user is already whithin the array
if current_user.id == user_to_notify[:user].id
# Go through all the already attached listings (with an overdue booking)
# If the listing to the current booking already exists, then just adapt
# the listings properties. Otherwise attach the listing to the array
listing_already_there = false
user_to_notify[:listings].each do |listing|
# If the listing of this current booking already exists within the array
if listing[:listing].id == booking.tx.listing_id
# If the current booking ends later, then take over it's attributes
if listing[:return_on] < booking.end_on
listing[:transaction_id] = booking.transaction_id
listing[:return_on] = booking.end_on
listing[:return_token] = booking.device_return_token
end
listing_already_there = true
break
end
end
# If the listing was not within the array yet
unless listing_already_there
user_to_notify[:listings] << {
listing: booking.tx.listing,
transaction_id: booking.transaction_id,
return_on: booking.end_on,
return_token: booking.device_return_token
}
end
user_already_there = true
break
end
end
# If the user was not in the array yet, then create a new user with a new
# listing
unless user_already_there
new_listing = {
listing: booking.tx.listing,
transaction_id: booking.transaction_id,
return_on: booking.end_on,
return_token: booking.device_return_token
}
users_to_notify_of_overdue << {
user: current_user,
listings: [new_listing]
}
end
end
users_to_notify_of_overdue
end
def calibration_request(calibration_request, community)
@calibration_request = calibration_request
receiver = community.admin_emails
mail_locale = "de"
set_up_urls(nil, community)
@url_params[:locale] = mail_locale
with_locale(mail_locale, community.locales.map(&:to_sym), community.id) do
subject = t("emails.calibration_request.new_request")
premailer_mail(:to => receiver,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => @calibration_request.email_address)
end
end
def device_return_notifications(user_to_notify)
@recipient = user_to_notify[:user]
@listings = user_to_notify[:listings]
@community = Community.first
with_locale(@recipient.locale, @community.locales.map(&:to_sym), @community.id) do
# Set url params for all the links in the emails
delivery_method = set_url_params_and_delivery_method("device_return")
@title_link_text = t("emails.community_updates.title_link_text", :community_name => @community.full_name(@recipient.locale))
subject = t("emails.device_return_notifications.subject")
# Send email
premailer_mail(:to => @recipient.confirmed_notification_emails_to,
:from => community_specific_sender(@community),
:subject => subject,
:delivery_method => delivery_method) do |format|
format.html { render :layout => 'email_blank_layout' }
end
end
end
def pre_device_return_notification(user_to_pre_notify)
@recipient = user_to_pre_notify[:user]
@listings = user_to_pre_notify[:listings]
@community = Community.first
with_locale(@recipient.locale, @community.locales.map(&:to_sym), @community.id) do
delivery_method = set_url_params_and_delivery_method("device_return_pre_notification")
@title_link_text = t("emails.community_updates.title_link_text", :community_name => @community.full_name(@recipient.locale))
subject = t("emails.pre_device_return_notification.subject")
# Send email
premailer_mail(:to => @recipient.confirmed_notification_emails_to,
:from => community_specific_sender(@community),
:subject => subject,
:delivery_method => delivery_method) do |format|
format.html { render :layout => 'email_blank_layout' }
end
end
end
def conversation_status_changed(transaction, community)
@email_type = (transaction.status == "accepted" ? "email_when_conversation_accepted" : "email_when_conversation_rejected")
recipient = transaction.other_party(transaction.listing.author)
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@transaction = transaction
if @transaction.payment_gateway == "braintree" || @transaction.payment_process == "postpay"
# Payment url concerns only braintree and postpay, otherwise we show only the message thread
@payment_url = community.payment_gateway.new_payment_url(@recipient, @transaction, @recipient.locale, @url_params)
end
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.conversation_status_changed.your_request_was_#{transaction.status}"))
end
end
def new_message_notification(message, community)
@email_type = "email_about_new_messages"
recipient = message.conversation.other_party(message.sender)
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@message = message
sending_params = {:to => recipient.confirmed_notification_emails_to,
:subject => t("emails.new_message.you_have_a_new_message", :sender_name => message.sender.name(community)),
:from => community_specific_sender_with_alias(community, "Rentog Services")}
premailer_mail(sending_params)
end
end
def new_payment(payment, community)
@email_type = "email_about_new_payments"
@payment = payment
recipient = @payment.recipient
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.new_payment.new_payment"))
end
end
def receipt_to_payer(payment, community)
@email_type = "email_about_new_payments"
@payment = payment
recipient = @payment.payer
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.receipt_to_payer.receipt_of_payment"))
end
end
def transaction_confirmed(conversation, community)
@email_type = "email_about_completed_transactions"
@conversation = conversation
recipient = conversation.seller
set_up_urls(conversation.seller, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.transaction_confirmed.request_marked_as_#{@conversation.status}"))
end
end
def transaction_automatically_confirmed(conversation, community)
@email_type = "email_about_completed_transactions"
@conversation = conversation
recipient = conversation.buyer
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:template_path => 'person_mailer/automatic_confirmation',
:subject => t("emails.transaction_automatically_confirmed.subject"))
end
end
def booking_transaction_automatically_confirmed(transaction, community)
@email_type = "email_about_completed_transactions"
@transaction = transaction
recipient = @transaction.buyer
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
mail(:to => @recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:template_path => 'person_mailer/automatic_confirmation',
:subject => t("emails.booking_transaction_automatically_confirmed.subject"))
end
end
def escrow_canceled_to(conversation, community, to)
@email_type = "email_about_canceled_escrow"
@conversation = conversation
recipient = conversation.seller
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
premailer_mail(:to => to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.escrow_canceled.subject")) do |format|
format.html { render "escrow_canceled" }
end
end
end
def escrow_canceled(conversation, community)
escrow_canceled_to(conversation, community, conversation.seller.confirmed_notification_emails_to)
end
def admin_escrow_canceled(conversation, community)
escrow_canceled_to(conversation, community, community.admin_emails.join(","))
end
def new_testimonial(testimonial, community)
@email_type = "email_about_new_received_testimonials"
recipient = testimonial.receiver
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@testimonial = testimonial
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.new_testimonial.has_given_you_feedback_in_kassi", :name => testimonial.author.name(community)))
end
end
# Remind users of conversations that have not been accepted or rejected
# NOTE: the not_really_a_recipient is at the same spot in params
# to keep the call structure similar for reminder mails
# but the actual recipient is always the listing author.
def accept_reminder(conversation, not_really_a_recipient, community)
@email_type = "email_about_accept_reminders"
recipient = conversation.listing.author
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@conversation = conversation
premailer_mail(:to => @recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.accept_reminder.remember_to_accept_request", :sender_name => conversation.other_party(recipient).name(community)))
end
end
# Remind users to pay
def payment_reminder(conversation, recipient, community)
@email_type = "email_about_payment_reminders"
recipient = conversation.payment.payer
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@conversation = conversation
@pay_url = payment_url(conversation, recipient, @url_params)
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.payment_reminder.remember_to_pay", :listing_title => conversation.listing.title))
end
end
# Remind user to fill in payment details
def payment_settings_reminder(listing, recipient, community)
set_up_urls(recipient, community)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@listing = listing
@recipient = recipient
if community.payments_in_use?
@payment_settings_link = payment_settings_url(MarketplaceService::Community::Query.payment_type(community.id), recipient, @url_params)
end
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.payment_settings_reminder.remember_to_add_payment_details")) do |format|
format.html {render :locals => {:skip_unsubscribe_footer => true} }
end
end
end
# Braintree account was approved (via Webhook)
def braintree_account_approved(recipient, community)
set_up_urls(recipient, community)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@recipient = recipient
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.braintree_account_approved.account_ready")) do |format|
format.html {render :locals => {:skip_unsubscribe_footer => true} }
end
end
end
# Remind users of conversations that have not been accepted or rejected
def confirm_reminder(conversation, recipient, community, days_to_cancel)
@email_type = "email_about_confirm_reminders"
recipient = conversation.buyer
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@conversation = conversation
@days_to_cancel = days_to_cancel
escrow = community.payment_gateway && community.payment_gateway.hold_in_escrow
template = escrow ? "confirm_reminder_escrow" : "confirm_reminder"
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.confirm_reminder.remember_to_confirm_request")) do |format|
format.html { render template }
end
end
end
# Remind users to give feedback
def testimonial_reminder(conversation, recipient, community)
@email_type = "email_about_testimonial_reminders"
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@conversation = conversation
@other_party = @conversation.other_party(recipient)
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.testimonial_reminder.remember_to_give_feedback_to", :name => @other_party.name(community)))
end
end
def new_comment_to_own_listing_notification(comment, community)
@email_type = "email_about_new_comments_to_own_listing"
recipient = comment.listing.author
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@comment = comment
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.new_comment.you_have_a_new_comment", :author => comment.author.name(community)))
end
end
def new_comment_to_followed_listing_notification(comment, recipient, community)
set_up_urls(recipient, community)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@comment = comment
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.new_comment.listing_you_follow_has_a_new_comment", :author => comment.author.name(community)))
end
end
def new_update_to_followed_listing_notification(listing, recipient, community)
set_up_urls(recipient, community)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@listing = listing
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.new_update_to_listing.listing_you_follow_has_been_updated"))
end
end
def new_listing_by_followed_person(listing, recipient, community)
set_up_urls(recipient, community)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@listing = listing
@no_recipient_name = true
@author_name = listing.author.name(community)
@listing_url = listing_url(@url_params.merge({:id => listing.id}))
@translate_scope = [ :emails, :new_listing_by_followed_person ]
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => t("emails.new_listing_by_followed_person.subject",
:author_name => @author_name,
:community => community.full_name_with_separator(recipient.locale)))
end
end
def invitation_to_kassi(invitation)
@invitation = invitation
mail_locale = @invitation.inviter.locale
@invitation_code_required = invitation.community.join_with_invite_only
set_up_urls(nil, invitation.community)
@url_params[:locale] = mail_locale
from = Mail::Address.new community_specific_sender(invitation.community)
from.display_name = "Rentog Team"
with_locale(mail_locale, invitation.community.locales.map(&:to_sym), invitation.community.id) do
subject = t("emails.invitation_to_kassi.you_have_been_invited_to_kassi", :inviter => invitation.inviter.name(invitation.community), :community => invitation.community.full_name_with_separator(invitation.inviter.locale))
premailer_mail(:to => invitation.email,
:from => from.format,
:subject => subject,
:reply_to => invitation.inviter.confirmed_notification_email_to)
end
end
def listing_request_to_seller(lr, community)
@listing_request = lr
if @listing_request.listing_id != 0
receiver = @listing_request.author.confirmed_notification_emails_to
mail_locale = @listing_request.author.locale
else
receiver = community.admin_emails
mail_locale = "de"
end
set_up_urls(nil, community)
@url_params[:locale] = mail_locale
with_locale(mail_locale, community.locales.map(&:to_sym), community.id) do
subject = t("emails.listing_request.new_listing_request")
premailer_mail(:to => receiver,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => @listing_request.email)
end
end
def listing_request_to_customer(lr, community)
@listing_request = lr
mail_locale = @listing_request.locale
set_up_urls(nil, community)
@url_params[:locale] = mail_locale
with_locale(mail_locale, community.locales.map(&:to_sym), community.id) do
subject = t("emails.listing_request.new_listing_request_confirmation")
premailer_mail(:to => @listing_request.email,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => @listing_request.email)
end
end
def price_comparison_request_to_admin(pce, community)
@price_comparison_event = pce
receiver = community.admin_emails
mail_locale = "de"
set_up_urls(nil, community)
@url_params[:locale] = mail_locale
with_locale(mail_locale, community.locales.map(&:to_sym), community.id) do
subject = t("emails.price_comparison_request.new_request")
premailer_mail(:to => receiver,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => @price_comparison_event.email)
end
end
def price_comparison_request_to_user(pce, community)
@price_comparison_event = pce
receiver = @price_comparison_event.email
mail_locale = "en"
set_up_urls(nil, community)
@url_params[:locale] = mail_locale
with_locale(mail_locale, community.locales.map(&:to_sym), community.id) do
subject = t("emails.price_comparison_request.new_request")
premailer_mail(:to => receiver,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => @price_comparison_event.email)
end
end
# A message from the community admin to a single community member
def community_member_email(sender, recipient, email_subject, email_content, community)
@email_type = "email_from_admins"
set_up_urls(recipient, community, @email_type)
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@email_content = email_content
@no_recipient_name = true
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => email_subject,
:reply_to => "\"#{sender.name(community)}\"<#{sender.confirmed_notification_email_to}>")
end
end
# Used to send notification to marketplace admins when somebody
# gives feedback on marketplace throught the contact us button in menu
def new_feedback(feedback, community)
subject = t("feedback.feedback_subject", service_name: community.name(I18n.locale))
premailer_mail(
:to => mail_feedback_to(community, APP_CONFIG.feedback_mailer_recipients),
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => feedback.email) do |format|
format.html {
render locals: {
author_name_and_email: feedback_author_name_and_email(feedback.author, feedback.email, community),
community_name: community.name(I18n.locale),
content: feedback.content
}
}
end
end
def new_test_email(community)
subject = "Scheduler Test 1"
premailer_mail(
:to => "[email protected]",
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject,
:reply_to => community_specific_sender_with_alias(community, "Rentog Services"))
end
def mail_feedback_to(community, platform_admin_email)
if community.feedback_to_admin? && community.admin_emails.any?
community.admin_emails.join(",")
else
platform_admin_email
end
end
# Old layout
def new_member_notification(person, community, email)
@community = community
@no_settings = true
@person = person
@email = email
premailer_mail(:to => @community.admin_emails,
:from => community_specific_sender(@community),
:subject => "New member in #{@community.full_name(@person.locale)}")
end
def new_employee_notification(employee, company, community, email)
@community = community
@no_settings = true
@employee = employee
@email = email
premailer_mail(:to => [company.emails.first.address],
:from => community_specific_sender(@community),
:subject => "New employee in #{@community.full_name(@employee.locale)}")
end
def new_trustedRelationship_notification(truster, other_company, community, email)
@community = community
@no_settings = true
@truster = truster
@email = email
premailer_mail(:to => [other_company.emails.first.address],
:from => community_specific_sender(@community),
:subject => "Company #{@truster} now trusts you")
end
def email_confirmation(email, community)
@current_community = community
@no_settings = true
@resource = email.person
@confirmation_token = email.confirmation_token
@host = community.full_domain
from = Mail::Address.new community_specific_sender_with_alias(community, "Rentog Services")
from.display_name = "Rentog Team"
with_locale(email.person.locale, community.locales.map(&:to_sym) ,community.id) do
email.update_attribute(:confirmation_sent_at, Time.now)
premailer_mail(:to => email.address,
:from => from.format,
:subject => t("devise.mailer.confirmation_instructions.subject"),
:template_path => 'devise/mailer',
:template_name => 'confirmation_instructions')
end
end
def reset_password_instructions(person, email_address, reset_token, community)
set_up_urls(nil, community) # Using nil as recipient, as we don't want auth token here.
@person = person
@no_settings = true
premailer_mail(
to: email_address,
from: community_specific_sender(@community),
subject: t("devise.mailer.reset_password_instructions.subject")) do |format|
format.html { render layout: false, locals: { reset_token: reset_token } }
end
end
def welcome_email(person, community, regular_email=nil, test_email=false)
@recipient = person
recipient = person
with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do
@community = @current_community = community
@regular_email = regular_email
set_url_params_and_delivery_method("welcome_email")
@test_email = test_email
if @recipient.has_admin_rights_in?(community) && !@test_email
subject = t("emails.welcome_email.welcome_email_subject_for_marketplace_creator")
else
subject = t("emails.welcome_email.welcome_email_subject", :community => community.full_name(recipient.locale), :person => person.given_name_or_username)
end
premailer_mail(:to => recipient.confirmed_notification_emails_to,
:from => community_specific_sender_with_alias(community, "Rentog Services"),
:subject => subject) do |format|
format.html { render :layout => 'email_blank_layout' }
end
end
end
# A message from the community admin to a community member
def self.community_member_email_from_admin(sender, recipient, community, email_subject, email_content, email_locale)
if recipient.should_receive?("email_from_admins") && (email_locale.eql?("any") || recipient.locale.eql?(email_locale))
begin
MailCarrier.deliver_now(community_member_email(sender, recipient, email_subject, email_content, community))
rescue => e
# Catch the exception and continue sending the emails
ApplicationHelper.send_error_notification("Error sending email to all the members of community #{community.full_name(email_locale)}: #{e.message}", e.class)
end
end
end
def premailer_mail(opts, &block)
premailer(mail(opts, &block))
end
# This is an ugly method. Ideas how to improve are very welcome.
# Depending on a class name prevents refactoring.
def payment_url(conversation, recipient, url_params)
if conversation.payment.is_a? BraintreePayment
edit_person_message_braintree_payment_url(url_params.merge({:id => conversation.payment.id, :person_id => recipient.id.to_s, :message_id => conversation.id}))
else
new_person_message_payment_url(recipient, url_params.merge({:message_id => conversation.id}))
end
end
private
def feedback_author_name_and_email(author, email, community)
present = ->(x) {x.present?}
case [author, email]
when matches([present, present])
"#{PersonViewUtils.person_display_name(author, community)} (#{email})"
when matches([nil, present])
"#{t("feedback.unlogged_user")} (#{email})"
else
"#{t("feedback.anonymous_user")}"
end
end
def set_url_params_and_delivery_method(url_ref)
# Set url params for all the links in the emails
@url_params = {}
@url_params[:host] = "#{@community.full_domain}"
@url_params[:locale] = @recipient.locale
@url_params[:ref] = url_ref
@url_params.freeze # to avoid accidental modifications later
# mail delivery method
if APP_CONFIG.mail_delivery_method == "postmark"
# Postmark doesn't support bulk emails, so use Sendmail for this
delivery_method = :sendmail
else
delivery_method = APP_CONFIG.mail_delivery_method.to_sym unless Rails.env.test?
end
end
end
| 42.075978 | 225 | 0.700011 |
ab26c802cb473d0f8e87e9ce678fb1b8cdd52802 | 440 | # frozen_string_literal: true
# typed: true
# compiled: true
require_relative './block_return_unsafe__2'
extend T::Sig
# Sorbet-typed method whose block performs a non-local return with a value
# that violates the declared Integer return type: the `return` inside the
# block exits `f` itself through yield_from_2, so the sig check is expected
# to raise a TypeError (handled by the rescue at the bottom of this file).
sig { returns(Integer) }
def f
  puts (yield_from_2 { return T.unsafe("whoops, this is not an integer") })
  42
end
begin
puts f
rescue TypeError
puts "got a TypeError as expected from the block"
end
begin
puts some_lambda.call
rescue TypeError
puts "whoops, got an unexpected TypeError from the lambda"
end
| 16.923077 | 75 | 0.747727 |
e828dca681f6c5ecbfe9487a83fe78e43e4a6fef | 1,950 | require 'linguist/blob_helper'
require 'linguist/language'
require 'rugged'
module Linguist
  # A Linguist blob backed by a Rugged (libgit2) repository that loads its
  # contents lazily: git attributes and blob bytes are only read from the
  # repository on first access.
  class LazyBlob
    # linguist-* override attributes looked up from .gitattributes.
    GIT_ATTR = ['linguist-documentation',
                'linguist-language',
                'linguist-vendored',
                'linguist-generated']
    GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
    GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)
    include BlobHelper
    # Only the first 128 KiB of a blob is ever loaded.
    MAX_SIZE = 128 * 1024
    attr_reader :repository
    attr_reader :oid
    attr_reader :path
    attr_reader :mode
    alias :name :path
    def initialize(repo, oid, path, mode = nil)
      @repository = repo
      @oid = oid
      @path = path
      @mode = mode
    end
    # Git attributes for this path, fetched once and memoized.
    def git_attributes
      @git_attributes ||= repository.fetch_attributes(
        name, GIT_ATTR, GIT_ATTR_FLAGS)
    end
    # The linguist-* attribute, when present, overrides BlobHelper's heuristic.
    def documentation?
      if attr = git_attributes['linguist-documentation']
        boolean_attribute(attr)
      else
        super
      end
    end
    def generated?
      if attr = git_attributes['linguist-generated']
        boolean_attribute(attr)
      else
        super
      end
    end
    def vendored?
      if attr = git_attributes['linguist-vendored']
        # Consistency fix: dropped the stray explicit `return` so this reads
        # like its siblings documentation?/generated? (implicit return).
        boolean_attribute(attr)
      else
        super
      end
    end
    # Language override via linguist-language, falling back to detection.
    # Memoized with defined? so a nil detection result is also cached.
    def language
      return @language if defined?(@language)
      @language = if lang = git_attributes['linguist-language']
        Language.find_by_alias(lang)
      else
        super
      end
    end
    def data
      load_blob!
      @data
    end
    def size
      load_blob!
      @size
    end
    # Releases the loaded bytes; safe to call before the blob was loaded.
    def cleanup!
      @data.clear if @data
    end
    protected
    # Returns true if the attribute is present and not the string "false".
    def boolean_attribute(attribute)
      attribute != "false"
    end
    def load_blob!
      @data, @size = Rugged::Blob.to_buffer(repository, oid, MAX_SIZE) if @data.nil?
    end
  end
end
| 19.897959 | 84 | 0.610769 |
1d1d89c8c071025e758627741025a4484ae1259c | 202 | require "spec_helper"
RSpec.describe Mnrbcop do
it "has a version number" do
expect(Mnrbcop::VERSION).not_to be nil
end
it "does something useful" do
expect(false).to eq(true)
end
end
| 16.833333 | 42 | 0.707921 |
03fc1210dd0ce76c02a20039a4de5409bf2a494a | 924 | module Spree
module Admin
class SaleImagesController < ResourceController
belongs_to 'spree/active_sale_event', find_by: :id
before_action :load_data
create.before :set_viewable
update.before :set_viewable
destroy.before :destroy_before
private
def location_after_save
admin_active_sale_active_sale_event_sale_images_url(@active_sale_event.active_sale, @active_sale_event)
end
def load_data
@active_sale ||= Spree::ActiveSale.find_by_permalink!(params[:active_sale_id])
@active_sale_event ||= Spree::ActiveSaleEvent.find(params[:active_sale_event_id])
end
def set_viewable
@sale_image.viewable_type = 'Spree::ActiveSaleEvent'
@sale_image.viewable_id = @active_sale_event.id
end
def destroy_before
@viewable = @sale_image.viewable
end
end
end
end
| 28 | 113 | 0.683983 |
7acde2279989b26c2ce8d2d97179d6df81e75edd | 1,712 | # -*- encoding: binary -*-
#
# Each Raindrops object is a container that holds several counters.
# It is internally a page-aligned, shared memory area that allows
# atomic increments, decrements, assignments and reads without any
# locking.
#
# rd = Raindrops.new 4
# rd.incr(0, 1) -> 1
# rd.to_ary -> [ 1, 0, 0, 0 ]
#
# Unlike many classes in this package, the core Raindrops class is
# intended to be portable to all reasonably modern *nix systems
# supporting mmap(). Please let us know if you have portability
# issues, patches or pull requests at mailto:[email protected]
class Raindrops
  # Per-listener connection statistics shared by every thread and process
  # on one machine that watches a single listen socket.
  #
  # +active+ counts sockets that have been accept()-ed but not yet closed
  # (TCP: only TCP_ESTABLISHED; Unix domain: CONNECTING and CONNECTED).
  # +queued+ counts sockets still waiting, un-accept()-ed, in the listen
  # queue of the socket.
  #
  # These stats are currently only available under \Linux
  class ListenStats < Struct.new(:active, :queued)
    # Combined count of accepted and still-queued sockets for the listener.
    def total
      queued + active
    end
  end

  # NOTE: these autoloads are registered *after* ListenStats so that the
  # Struct reference above resolves to ::Struct, not Raindrops::Struct.
  autoload :Linux, 'raindrops/linux'
  autoload :Struct, 'raindrops/struct'
  autoload :Middleware, 'raindrops/middleware'
  autoload :Aggregate, 'raindrops/aggregate'
  autoload :LastDataRecv, 'raindrops/last_data_recv'
  autoload :Watcher, 'raindrops/watcher'
end
require 'raindrops_ext'
| 34.938776 | 72 | 0.724299 |
bfa2837d425fba77c784f0304c626b52126fcd64 | 2,884 | # frozen_string_literal: true
require 'aws_backend'
# InSpec plural resource for AWS RAM resource shares.
class AWSRAMResourceShares < AwsResourceBase
  name 'aws_ram_resource_shares'
  desc 'Lists the resources that you added to a resource shares or the resources that are shared with you.'
  example "
    describe aws_ram_resource_shares(resource_owner: 'SELF') do
      it { should exist }
    end
  "

  attr_reader :table

  FilterTable.create
             .register_column(:resource_share_arns, field: :resource_share_arn)
             .register_column(:names, field: :name)
             .register_column(:owning_account_ids, field: :owning_account_id)
             .register_column(:allow_external_principals, field: :allow_external_principals)
             .register_column(:statuses, field: :status)
             .register_column(:status_messages, field: :status_message)
             .register_column(:tags, field: :tags)
             .register_column(:creation_times, field: :creation_time)
             .register_column(:last_updated_times, field: :last_updated_time)
             .register_column(:feature_sets, field: :feature_set)
             .install_filter_methods_on_resource(self, :table)

  # @param opts [Hash] must include :resource_owner ('SELF' or 'OTHER-ACCOUNTS')
  def initialize(opts = {})
    super(opts)
    validate_parameters(required: %i(resource_owner))
    @query_params = {}
    # Cleanup: the original assigned @query_params[:resource_owner] twice
    # (once unconditionally, once inside an opts.key? guard). Since the key
    # is required, a single blank-check plus assignment is equivalent.
    raise ArgumentError, "#{@__resource_name__}: resource_owner must be provided" unless opts[:resource_owner] && !opts[:resource_owner].empty?
    @query_params[:resource_owner] = opts[:resource_owner]
    @table = fetch_data
  end

  # Pages through ram:GetResourceShares and flattens each share into a row
  # hash for FilterTable.
  def fetch_data
    rows = []
    @query_params[:max_results] = 10
    loop do
      catch_aws_errors do
        @api_response = @aws.ram_client.get_resource_shares(@query_params)
      end
      return rows if !@api_response || @api_response.empty?
      @api_response.resource_shares.each do |res|
        rows += [{ resource_share_arn: res.resource_share_arn,
                   name: res.name,
                   owning_account_id: res.owning_account_id,
                   allow_external_principals: res.allow_external_principals,
                   status: res.status,
                   status_message: res.status_message,
                   tags: res.tags,
                   creation_time: res.creation_time,
                   last_updated_time: res.last_updated_time,
                   feature_set: res.feature_set }]
      end
      break unless @api_response.next_token
      @query_params[:next_token] = @api_response.next_token
    end
    rows
  end
end
| 42.411765 | 145 | 0.596394 |
26c1247dc16b9ea8c05fc7063dfaa6aca3fcf289 | 5,217 | class PostgresqlAT95 < Formula
desc "Object-relational database system"
homepage "https://www.postgresql.org/"
url "https://ftp.postgresql.org/pub/source/v9.5.25/postgresql-9.5.25.tar.bz2"
sha256 "7628c55eb23768a2c799c018988d8f2ab48ee3d80f5e11259938f7a935f0d603"
license "PostgreSQL"
bottle do
sha256 big_sur: "3be3ba84a56c9977313e3cc9e5473230fa2805f69d9dd4dceb18c1fc26dd824c"
sha256 catalina: "07d235dbc8b2d5e9fb31194f163aa2903513738264a95cdb6cff31f7eb9e9ca4"
sha256 mojave: "56c45cc5b2bb7a86447ad7a79c94e3e5eed6283c2465ca2c0075632e1ba2097c"
sha256 x86_64_linux: "8e6d26c5991a8e76b3570fe93f694f680f0feda983b1c28230b8424e127a8a0e"
end
keg_only :versioned_formula
# https://www.postgresql.org/support/versioning/
deprecate! date: "2021-02-11", because: :unsupported
depends_on arch: :x86_64
depends_on "[email protected]"
depends_on "readline"
uses_from_macos "libxslt"
uses_from_macos "perl"
on_linux do
depends_on "util-linux"
end
def install
ENV.prepend "LDFLAGS", "-L#{Formula["[email protected]"].opt_lib} -L#{Formula["readline"].opt_lib}"
ENV.prepend "CPPFLAGS", "-I#{Formula["[email protected]"].opt_include} -I#{Formula["readline"].opt_include}"
# avoid adding the SDK library directory to the linker search path
ENV["XML2_CONFIG"] = "xml2-config --exec-prefix=/usr"
args = %W[
--disable-debug
--prefix=#{prefix}
--datadir=#{pkgshare}
--libdir=#{lib}
--sysconfdir=#{prefix}/etc
--docdir=#{doc}
--enable-thread-safety
--with-libxml
--with-libxslt
--with-openssl
--with-perl
--with-uuid=e2fs
]
if OS.mac?
args += %w[
--with-bonjour
--with-gssapi
--with-ldap
--with-pam
--with-tcl
]
end
# PostgreSQL by default uses xcodebuild internally to determine this,
# which does not work on CLT-only installs.
args << "PG_SYSROOT=#{MacOS.sdk_path}" if MacOS.sdk_root_needed?
system "./configure", *args
system "make"
dirs = %W[datadir=#{pkgshare} libdir=#{lib} pkglibdir=#{lib}]
# Temporarily disable building/installing the documentation.
# Postgresql seems to "know" the build system has been altered and
# tries to regenerate the documentation when using `install-world`.
# This results in the build failing:
# `ERROR: `osx' is missing on your system.`
# Attempting to fix that by adding a dependency on `open-sp` doesn't
# work and the build errors out on generating the documentation, so
# for now let's simply omit it so we can package Postgresql for Mojave.
if DevelopmentTools.clang_build_version >= 1000
system "make", "all"
system "make", "-C", "contrib", "install", "all", *dirs
system "make", "install", "all", *dirs
else
system "make", "install-world", *dirs
end
unless OS.mac?
inreplace lib/"pgxs/src/Makefile.global",
"LD = #{HOMEBREW_PREFIX}/Homebrew/Library/Homebrew/shims/linux/super/ld",
"LD = #{HOMEBREW_PREFIX}/bin/ld"
end
end
def post_install
(var/"log").mkpath
postgresql_datadir.mkpath
# Don't initialize database, it clashes when testing other PostgreSQL versions.
return if ENV["HOMEBREW_GITHUB_ACTIONS"]
system "#{bin}/initdb", postgresql_datadir unless pg_version_exists?
end
def postgresql_datadir
var/name
end
def postgresql_log_path
var/"log/#{name}.log"
end
def pg_version_exists?
(postgresql_datadir/"PG_VERSION").exist?
end
def caveats
<<~EOS
If builds of PostgreSQL 9 are failing and you have version 8.x installed,
you may need to remove the previous version first. See:
https://github.com/Homebrew/legacy-homebrew/issues/2510
This formula has created a default database cluster with:
initdb #{postgresql_datadir}
For more details, read:
https://www.postgresql.org/docs/#{version.major}/app-initdb.html
EOS
end
plist_options manual: "pg_ctl -D #{HOMEBREW_PREFIX}/var/[email protected] start"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/postgres</string>
<string>-D</string>
<string>#{postgresql_datadir}</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
<key>StandardErrorPath</key>
<string>#{postgresql_log_path}</string>
</dict>
</plist>
EOS
end
test do
system "#{bin}/initdb", testpath/"test" unless ENV["HOMEBREW_GITHUB_ACTIONS"]
assert_equal pkgshare.to_s, shell_output("#{bin}/pg_config --sharedir").chomp
assert_equal lib.to_s, shell_output("#{bin}/pg_config --libdir").chomp
assert_equal lib.to_s, shell_output("#{bin}/pg_config --pkglibdir").chomp
end
end
| 31.618182 | 108 | 0.66245 |
287a75780759fcc534174550ddba5bf5b46d61de | 78 | class NameGuess < ApplicationRecord
belongs_to :user
belongs_to :quiz
end
| 15.6 | 35 | 0.794872 |
f7eb305e970e56ecacc02b69c356eab696e3044d | 162 | begin
require 'spec'
rescue LoadError
require 'rubygems'
gem 'rspec'
require 'spec'
end
$:.unshift(File.dirname(__FILE__) + '/../lib')
require 'clouder'
| 14.727273 | 46 | 0.685185 |
1dbdc09b45efe36b38def5947b65d4ce391dd6a4 | 609 | # frozen_string_literal: true
# A lane (column) on a kanban board, ordered within its board.
class Lane < ApplicationRecord
  include Archivable
  include LaneStages
  # Manual ordering scoped to the owning board.
  include Sortable.new parent_id: :board_id
  nilify_blanks
  belongs_to :board, touch: true
  has_many :cards, inverse_of: :lane, dependent: :destroy
  has_many :ordered_alive_cards, -> { ordered.alive }, inverse_of: :lane, class_name: 'Card'
  has_one :account, through: :board
  # `todo` scope presumably comes from LaneStages — confirm.
  scope :income, -> { todo }
  validates :title, presence: true, uniqueness: { scope: :board_id }
  # Rewrites each task's position column to match its current order.
  # NOTE(review): no `tasks` association is declared in this file —
  # presumably provided by an included concern; verify. update_column skips
  # validations/callbacks and issues one UPDATE per task.
  def reorder_tasks
    tasks.each_with_index do |task, index|
      task.update_column :position, index
    end
  end
end
| 25.375 | 92 | 0.727422 |
870b80dd9d4fa117f06e6fd27f358c7288997b79 | 214 | # frozen_string_literal: true
module Extensions::HasManyInverseThrough::ActiveRecord::Associations::Builder::HasMany
module PrependMethods
def valid_options
[:inverse_through] + super
end
end
end
| 23.777778 | 86 | 0.771028 |
bfaebc66dea87e95d59508fa5b20ed9f3cee8989 | 15,481 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/cloud/errors"
require "google/cloud/pubsub/service"
require "google/cloud/pubsub/credentials"
require "google/cloud/pubsub/topic"
require "google/cloud/pubsub/batch_publisher"
require "google/cloud/pubsub/snapshot"
module Google
module Cloud
module PubSub
##
# # Project
#
# Represents the project that pubsub messages are pushed to and pulled
# from. {Topic} is a named resource to which messages are sent by
# publishers. {Subscription} is a named resource representing the stream
# of messages from a single, specific topic, to be delivered to the
# subscribing application. {Message} is a combination of data and
# attributes that a publisher sends to a topic and is eventually delivered
# to subscribers.
#
# See {Google::Cloud#pubsub}
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.publish "task completed"
#
class Project
##
# @private The Service object.
attr_accessor :service
##
# @private Creates a new Pub/Sub Project instance.
#
# @param service [Service] the configured wrapper around the Pub/Sub API
#   connection; stored for use by all lookup/list/create methods.
def initialize service
  @service = service
end
# The Pub/Sub project connected to.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new(
# project_id: "my-project",
# credentials: "/path/to/keyfile.json"
# )
#
# pubsub.project_id #=> "my-project"
#
# Delegates to the project ID the underlying service was configured with.
def project_id
  service.project
end
alias project project_id
##
# Retrieves topic by name.
#
# @param [String] topic_name Name of a topic.
# @param [String] project If the topic belongs to a project other than
# the one currently connected to, the alternate project ID can be
# specified here. Optional.
# @param [Boolean] skip_lookup Optionally create a {Topic} object
# without verifying the topic resource exists on the Pub/Sub service.
# Calls made on this object will raise errors if the topic resource
# does not exist. Default is `false`. Optional.
# @param [Hash] async A hash of values to configure the topic's
# {AsyncPublisher} that is created when {Topic#publish_async}
# is called. Optional.
#
# Hash keys and values may include the following:
#
# * `:max_bytes` (Integer) The maximum size of messages to be collected before the batch is published. Default
# is 1,000,000 (1MB).
# * `:max_messages` (Integer) The maximum number of messages to be collected before the batch is published.
# Default is 100.
# * `:interval` (Numeric) The number of seconds to collect messages before the batch is published. Default is
# 0.01.
# * `:threads` (Hash) The number of threads to create to handle concurrent calls by the publisher:
# * `:publish` (Integer) The number of threads used to publish messages. Default is 2.
# * `:callback` (Integer) The number of threads to handle the published messages' callbacks. Default is 4.
#
# @return [Google::Cloud::PubSub::Topic, nil] Returns `nil` if topic
# does not exist.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "existing-topic"
#
# @example By default `nil` will be returned if topic does not exist.
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "non-existing-topic" # nil
#
# @example Create topic in a different project with the `project` flag.
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "another-topic", project: "another-project"
#
# @example Skip the lookup against the service with `skip_lookup`:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "another-topic", skip_lookup: true
#
# @example Configuring AsyncPublisher to increase concurrent callbacks:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic",
# async: { threads: { callback: 16 } }
#
# topic.publish_async "task completed" do |result|
# if result.succeeded?
# log_publish_success result.data
# else
# log_publish_failure result.data, result.error
# end
# end
#
# topic.async_publisher.stop.wait!
#
# Look up a topic by name. With skip_lookup a local reference is built
# without any API call; otherwise a missing topic yields nil.
def topic topic_name, project: nil, skip_lookup: nil, async: nil
  ensure_service!
  if skip_lookup
    opts = { project: project }
    return Topic.from_name topic_name, service, opts
  end
  topic_grpc = service.get_topic topic_name
  Topic.from_grpc topic_grpc, service, async: async
rescue Google::Cloud::NotFoundError
  nil
end
alias get_topic topic
alias find_topic topic
##
# Creates a new topic.
#
# @param [String] topic_name Name of a topic.
# @param [Hash] labels A hash of user-provided labels associated with
# the topic. You can use these to organize and group your topics.
# Label keys and values can be no longer than 63 characters, can only
# contain lowercase letters, numeric characters, underscores and
# dashes. International characters are allowed. Label values are
# optional. Label keys must start with a letter and each label in the
# list must have a different key. See [Creating and Managing
# Labels](https://cloud.google.com/pubsub/docs/labels).
# @param [String] kms_key The Cloud KMS encryption key that will be used
# to protect access to messages published on this topic. Optional.
# For example: `projects/a/locations/b/keyRings/c/cryptoKeys/d`
# @param [Array<String>] persistence_regions The list of GCP region IDs
# where messages that are published to the topic may be persisted in
# storage. Optional.
# @param [Hash] async A hash of values to configure the topic's
# {AsyncPublisher} that is created when {Topic#publish_async}
# is called. Optional.
#
# Hash keys and values may include the following:
#
# * `:max_bytes` (Integer) The maximum size of messages to be collected before the batch is published. Default
# is 1,000,000 (1MB).
# * `:max_messages` (Integer) The maximum number of messages to be collected before the batch is published.
# Default is 100.
# * `:interval` (Numeric) The number of seconds to collect messages before the batch is published. Default is
# 0.01.
# * `:threads` (Hash) The number of threads to create to handle concurrent calls by the publisher:
# * `:publish` (Integer) The number of threads used to publish messages. Default is 2.
# * `:callback` (Integer) The number of threads to handle the published messages' callbacks. Default is 4.
#
# @return [Google::Cloud::PubSub::Topic]
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.create_topic "my-topic"
#
# Create a topic on the service and wrap the returned gRPC object.
def create_topic topic_name, labels: nil, kms_key: nil, persistence_regions: nil, async: nil
  ensure_service!
  topic_grpc =
    service.create_topic topic_name,
                         labels: labels,
                         kms_key_name: kms_key,
                         persistence_regions: persistence_regions
  Topic.from_grpc topic_grpc, service, async: async
end
alias new_topic create_topic
##
# Retrieves a list of topics for the given project.
#
# @param [String] token The `token` value returned by the last call to
# `topics`; indicates that this is a continuation of a call, and that
# the system should return the next page of data.
# @param [Integer] max Maximum number of topics to return.
#
# @return [Array<Google::Cloud::PubSub::Topic>] (See
# {Google::Cloud::PubSub::Topic::List})
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topics = pubsub.topics
# topics.each do |topic|
# puts topic.name
# end
#
# @example Retrieve all topics: (See {Topic::List#all})
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topics = pubsub.topics
# topics.all do |topic|
# puts topic.name
# end
#
# List topics, one page at a time (token/max control paging).
def topics token: nil, max: nil
  ensure_service!
  paging = { token: token, max: max }
  list_grpc = service.list_topics paging
  Topic::List.from_grpc list_grpc, service, max
end
alias find_topics topics
alias list_topics topics
##
# Retrieves subscription by name.
#
# @param [String] subscription_name Name of a subscription.
# @param [String] project If the subscription belongs to a project other
# than the one currently connected to, the alternate project ID can be
# specified here.
# @param [Boolean] skip_lookup Optionally create a {Subscription} object
# without verifying the subscription resource exists on the Pub/Sub
# service. Calls made on this object will raise errors if the service
# resource does not exist. Default is `false`.
#
# @return [Google::Cloud::PubSub::Subscription, nil] Returns `nil` if
# the subscription does not exist
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# sub = pubsub.subscription "my-sub"
# sub.name #=> "projects/my-project/subscriptions/my-sub"
#
# @example Skip the lookup against the service with `skip_lookup`:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# # No API call is made to retrieve the subscription information.
# sub = pubsub.subscription "my-sub", skip_lookup: true
# sub.name #=> "projects/my-project/subscriptions/my-sub"
#
# Look up a subscription by name. With skip_lookup a local reference is
# built without any API call; otherwise a missing subscription yields nil.
def subscription subscription_name, project: nil, skip_lookup: nil
  ensure_service!
  if skip_lookup
    opts = { project: project }
    return Subscription.from_name subscription_name, service, opts
  end
  sub_grpc = service.get_subscription subscription_name
  Subscription.from_grpc sub_grpc, service
rescue Google::Cloud::NotFoundError
  nil
end
alias get_subscription subscription
alias find_subscription subscription
##
# Retrieves a list of subscriptions for the given project.
#
# @param [String] token A previously-returned page token representing
# part of the larger set of results to view.
# @param [Integer] max Maximum number of subscriptions to return.
#
# @return [Array<Google::Cloud::PubSub::Subscription>] (See
# {Google::Cloud::PubSub::Subscription::List})
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# subs = pubsub.subscriptions
# subs.each do |sub|
# puts sub.name
# end
#
# @example Retrieve all subscriptions: (See {Subscription::List#all})
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# subs = pubsub.subscriptions
# subs.all do |sub|
# puts sub.name
# end
#
# List subscriptions, one page at a time (token/max control paging).
def subscriptions token: nil, max: nil
  ensure_service!
  paging = { token: token, max: max }
  list_grpc = service.list_subscriptions paging
  Subscription::List.from_grpc list_grpc, service, max
end
alias find_subscriptions subscriptions
alias list_subscriptions subscriptions
##
# Retrieves a list of snapshots for the given project.
#
# @param [String] token A previously-returned page token representing
# part of the larger set of results to view.
# @param [Integer] max Maximum number of snapshots to return.
#
# @return [Array<Google::Cloud::PubSub::Snapshot>] (See
# {Google::Cloud::PubSub::Snapshot::List})
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# snapshots = pubsub.snapshots
# snapshots.each do |snapshot|
# puts snapshot.name
# end
#
# @example Retrieve all snapshots: (See {Snapshot::List#all})
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# snapshots = pubsub.snapshots
# snapshots.all do |snapshot|
# puts snapshot.name
# end
#
# List snapshots, one page at a time (token/max control paging).
def snapshots token: nil, max: nil
  ensure_service!
  paging = { token: token, max: max }
  list_grpc = service.list_snapshots paging
  Snapshot::List.from_grpc list_grpc, service, max
end
alias find_snapshots snapshots
alias list_snapshots snapshots
protected
##
# @private Fail fast when no active connection to the service is
# available.
def ensure_service!
  return if service

  raise "Must have active connection to service"
end
##
# @private Publish the batch's messages to the given topic via the
# service, then map the service-assigned message IDs back onto the
# batch's message objects.
def publish_batch_messages topic_name, batch
  grpc = service.publish topic_name, batch.messages
  batch.to_gcloud_messages Array(grpc.message_ids)
end
end
end
Pubsub = PubSub unless const_defined? :Pubsub
end
end
| 39.291878 | 120 | 0.591047 |
e9003ccb5875666780eaad8a5bce447cc0fedcd4 | 7,287 | require "digest/md5"
require "fileutils"
require "thread"
require "log4r"
require "vagrant/util/silence_warnings"
module VagrantPlugins
module DockerProvider
class Provider < Vagrant.plugin("2", :provider)
@@host_vm_mutex = Mutex.new
# @param machine [Vagrant::Machine] the machine this provider backs.
#   When the container must run inside a host VM, the machine's
#   communicator is switched to the proxying :docker_hostvm one.
def initialize(machine)
  @logger = Log4r::Logger.new("vagrant::provider::docker")
  @machine = machine

  if host_vm?
    # We need to use a special communicator that proxies our
    # SSH requests over our host VM to the container itself.
    @machine.config.vm.communicator = :docker_hostvm
  end
end
# @see Vagrant::Plugin::V2::Provider#action
#
# Dispatches to Action.action_<name> when defined, nil otherwise.
def action(name)
  candidate = "action_#{name}"
  if Action.respond_to?(candidate)
    Action.send(candidate)
  else
    nil
  end
end
# Returns the driver instance for this provider (built once, memoized).
#
# @return [Driver]
def driver
  return @driver if @driver
  @driver = Driver.new

  # If we are running on a host machine, then we set the executor
  # to execute remotely.
  if host_vm?
    @driver.executor = Executor::Vagrant.new(host_vm)
  end

  @driver
end
# This returns the {Vagrant::Machine} that is our host machine.
# It does not perform any action on the machine or verify it is
# running. Memoized: built once per provider instance.
#
# @return [Vagrant::Machine]
def host_vm
  return @host_vm if @host_vm

  vf_path = @machine.provider_config.vagrant_vagrantfile
  host_machine_name = @machine.provider_config.vagrant_machine || :default
  if !vf_path
    # We don't have a Vagrantfile path set, so we're going to use
    # the default but we need to copy it into the data dir so that
    # we don't write into our installation dir (we can't).
    default_path = File.expand_path("../hostmachine/Vagrantfile", __FILE__)
    vf_path = @machine.env.data_dir.join("docker-host", "Vagrantfile")

    begin
      @machine.env.lock("docker-provider-hostvm") do
        vf_path.dirname.mkpath
        FileUtils.cp(default_path, vf_path)
      end
    rescue Vagrant::Errors::EnvironmentLockedError
      # Lock contention, just retry
      retry
    end

    # Set the machine name since we hardcode that for the default
    host_machine_name = :default
  end

  # Expand it so that the home directories and so on get processed
  # properly.
  vf_path = File.expand_path(vf_path, @machine.env.root_path)

  # Split into directory (cwd for the sub-environment) and filename.
  vf_file = File.basename(vf_path)
  vf_path = File.dirname(vf_path)

  # Create the env to manage this machine
  @host_vm = Vagrant::Util::SilenceWarnings.silence! do
    host_env = Vagrant::Environment.new(
      cwd: vf_path,
      home_path: @machine.env.home_path,
      ui_class: @machine.env.ui_class,
      vagrantfile_name: vf_file,
    )

    # If there is no root path, then the Vagrantfile wasn't found
    # and it is an error...
    raise Errors::VagrantfileNotFound if !host_env.root_path

    # Pick any non-docker default provider for the host machine.
    host_env.machine(
      host_machine_name,
      host_env.default_provider(
        exclude: [:docker],
        force_default: false,
      ))
  end

  @host_vm
end
# This acquires a lock on the host VM, keyed on the host VM's data dir,
# and yields while both the in-process mutex and the environment-level
# lock are held.
def host_vm_lock
  hash = Digest::MD5.hexdigest(host_vm.data_dir.to_s)

  # We do a process-level mutex on the outside, since we can
  # wait for that a short amount of time. Then, we do a process lock
  # on the inside, which will raise an exception if locked.
  host_vm_mutex.synchronize do
    @machine.env.lock(hash) do
      return yield
    end
  end
end
# This is a process-local mutex that can be used by parallel
# providers to lock the host VM access.
#
# @return [Mutex] shared across all Provider instances (class variable).
def host_vm_mutex
  @@host_vm_mutex
end
# Whether Docker must run inside a host VM rather than directly on this
# system: true when forced by provider config or when the local platform
# is not Linux. Setting VAGRANT_DOCKER_REMOTE_HOST_PATCH=1 short-circuits
# this to false.
def host_vm?
  return false if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1"

  @machine.provider_config.force_host_vm ||
    !Vagrant::Util::Platform.linux?
end
# Returns the real forwarded SSH port on the host. If no port forwarding
# for "22/tcp" is found we raise an exception.
#
# @return [String] the first HostPort entry for 22/tcp
#   (as reported by `docker inspect`).
def forwarded_ssh_host_port
  network = driver.inspect_container(@machine.id)['NetworkSettings']
  forwarded_ssh_ports = network['Ports']['22/tcp']

  if forwarded_ssh_ports.nil? || forwarded_ssh_ports.empty?
    raise "ssh port not forwarded from container!"
  end

  # return the first forwarded host port for 22/tcp we find
  forwarded_ssh_ports[0]['HostPort']
end
# Returns the remote Docker host by parsing the `DOCKER_HOST` env var,
# falling back to the classic boot2docker default when it is unset.
#
# @return [String, nil] host portion of the endpoint URI
def remote_docker_host
  # Idiom fix: the original assigned two throwaway locals and returned
  # the second; parse-and-return directly.
  URI.parse(ENV.fetch('DOCKER_HOST', 'tcp://192.168.59.103:2376')).host
end
# Returns the SSH info for accessing the Container.
#
# @return [Hash{Symbol=>Object}, nil] { host:, port: }, or nil when the
#   container is not running or its IP cannot be determined.
def ssh_info
  # If the container isn't running, we can't SSH into it
  return nil if state.id != :running

  network = driver.inspect_container(@machine.id)['NetworkSettings']
  ip = network['IPAddress']

  # If we were not able to identify the container's IP, we return nil
  # here and we let Vagrant core deal with it ;)
  return nil if !ip

  # With the remote-host patch enabled, connect via the Docker host and
  # the forwarded port instead of the container's internal address.
  if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1"
    {
      host: remote_docker_host,
      port: forwarded_ssh_host_port
    }
  else
    {
      host: ip,
      port: @machine.config.ssh.guest_port
    }
  end
end
# Computes the machine state by checking, in order: missing machine id,
# host-VM readiness (when applicable), container existence, then the
# Docker daemon's reported state. Falls back to :unknown.
#
# @return [Vagrant::MachineState]
def state
  state_id = nil
  state_id = :not_created if [email protected]

  begin
    state_id = :host_state_unknown if !state_id && \
      host_vm? && !host_vm.communicate.ready?
  rescue Errors::VagrantfileNotFound
    state_id = :host_state_unknown
  end

  state_id = :not_created if !state_id && \
    ([email protected] || !driver.created?(@machine.id))
  state_id = driver.state(@machine.id) if @machine.id && !state_id
  state_id = :unknown if !state_id

  # This is a special pseudo-state so that we don't set the
  # NOT_CREATED_ID while we're setting up the machine. This avoids
  # clearing the data dir.
  state_id = :preparing if @machine.id == "preparing"

  short = state_id.to_s.gsub("_", " ")
  long = I18n.t("docker_provider.status.#{state_id}")

  # If we're not created, then specify the special ID flag
  if state_id == :not_created
    state_id = Vagrant::MachineState::NOT_CREATED_ID
  end

  Vagrant::MachineState.new(state_id, short, long)
end
# Human-readable provider description, e.g. "Docker (abc123)".
def to_s
  "Docker (#{@machine.id || "new container"})"
end
end
end
end
| 32.53125 | 81 | 0.607246 |
217a87acef9c29a9658f386d3fbd2310bb573b46 | 2,904 | # frozen_string_literal: true
module PhisherPhinder
module MailParser
module ReceivedHeaders
class ByParser
# @param ip_factory [#build] factory that turns host strings into
#   extended-IP objects (stored as @extended_ip_factory).
# @param starttls_parser [#parse] parser for the STARTTLS fragment,
#   merged into every result hash.
def initialize(ip_factory:, starttls_parser:)
  @extended_ip_factory = ip_factory
  @starttls_parser = starttls_parser
end
# Extract the "by ..." clause of a Received header into its parts.
#
# @param component [String, nil] the `by ...` fragment of the header
# @return [Hash] :recipient, :protocol, :id, :recipient_additional,
#   :authenticated_as, merged with the starttls parser's output
def parse(component)
  # A missing clause yields an all-nil result, still merged with the
  # starttls parser's nil output so the key set stays uniform.
  unless component
    return {
      recipient: nil,
      protocol: nil,
      id: nil,
      recipient_additional: nil,
      authenticated_as: nil
    }.merge(@starttls_parser.parse(nil))
  end

  # Ordered most-specific to least-specific; the first matching pattern
  # wins (the inject below keeps the first non-nil match).
  patterns = [
    %r{by\s(?<recipient>\S+)\s
      \((?<additional>[^)]+)\)\s
      with\sMicrosoft\sSMTP\sServer\s(?<starttls>\([^\)]+\))\s
      id\s(?<id>\S+)\s
      via\s(?<protocol>Frontend\sTransport)
    }x,
    %r{by\s(?<recipient>\S+)\s
      \((?<additional>[^)]+)\)\s
      with\sMicrosoft\sSMTP\sServer\s(?<starttls>\([^\)]+\))\s
      id\s(?<id>\S+)
    }x,
    /by\s(?<recipient>\S+)\swith\s(?<protocol>\S+)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\s\((?<additional>[^)]+)\)\swith\s(?<protocol>\S+)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\s(?<additional>.+)\swith\s(?<protocol>\S+)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\s\((?<additional>[^)]+)\)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\s\((?<additional>[^)]+)\)\swith\s(?<protocol>.+)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\s\((?<additional>[^)]+)\)\swith\s(?<protocol>\S+)\sID\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\swith\s(?<protocol>.+)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\swith\s(?<protocol>.+)/,
    /by\s(?<recipient>\S+)\s\((?<additional>[^)]+)\)\s\(authenticated as (?<authenticated_as>[^\)]+)\)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)\sid\s(?<id>\S+)/,
    /by\s(?<recipient>\S+)/
  ]

  matches = patterns.inject(nil) do |memo, pattern|
    memo || component.match(pattern)
  end

  # NOTE(review): if no pattern matched, `matches` is nil and the hash
  # below raises NoMethodError; the bare /by\s(?<recipient>\S+)/ fallback
  # presumably makes that unreachable for well-formed input — confirm.
  {
    recipient: enrich_recipient(matches[:recipient]),
    protocol: matches.names.include?('protocol') ? matches[:protocol]: nil,
    id: matches.names.include?('id') ? matches[:id]: nil,
    recipient_additional: matches.names.include?('additional') ? matches[:additional] : nil,
    authenticated_as: matches.names.include?('authenticated_as') ? matches[:authenticated_as] : nil,
  }.merge(
    if matches.names.include?('starttls')
      @starttls_parser.parse(matches[:starttls])
    else
      @starttls_parser.parse(nil)
    end
  )
end
private
# Swap the raw recipient host for an extended-IP object when the factory
# recognises it; otherwise hand back the original value untouched.
def enrich_recipient(recipient)
  enriched = @extended_ip_factory.build(recipient)
  enriched || recipient
end
end
end
end
end
| 38.210526 | 128 | 0.499656 |
08c3af4983b08146456296a18fe0f4c5c97f0eba | 13,470 | # typed: false
# frozen_string_literal: true
require "delegate"
require "bottle_api"
require "cli/args"
module Homebrew
module CLI
# Helper class for loading formulae/casks from named arguments.
#
# @api private
class NamedArgs < Array
extend T::Sig
# @param args [Array<String>] the raw named CLI arguments.
# @param parent [Args] parsed CLI args (consulted for --formula/--cask
#   restriction and cask options).
# @param override_spec [Symbol, nil] formula spec forced during resolution.
# @param force_bottle [Boolean] forwarded to Formulary.factory.
# @param flags [Array<String>] build flags forwarded to Formulary.
# @param cask_options [Boolean] whether to build a Cask::Config from parent.
def initialize(*args, parent: Args.new, override_spec: nil, force_bottle: false, flags: [], cask_options: false)
  require "cask/cask"
  require "cask/cask_loader"
  require "formulary"
  require "keg"
  require "missing_formula"

  @args = args
  @override_spec = override_spec
  @force_bottle = force_bottle
  @flags = flags
  @cask_options = cask_options
  @parent = parent

  # NamedArgs subclasses Array; seed self with the raw argument strings.
  super(@args)
end
attr_reader :parent
# Memoized cask-only view of the named arguments.
def to_casks
  @to_casks ||= to_formulae_and_casks(only: :cask).freeze
end
# Memoized formula-only view of the named arguments.
def to_formulae
  @to_formulae ||= to_formulae_and_casks(only: :formula).freeze
end
# Convert named arguments to {Formula} or {Cask} objects.
# If both a formula and cask with the same name exist, returns
# the formula and prints a warning unless `only` is specified.
#
# NOTE(review): the memo below is keyed only on `only`, so a second call
# with different `method`/`prefer_loading_from_json` values returns the
# first call's cached result — confirm whether intentional.
sig {
  params(
    only: T.nilable(Symbol),
    ignore_unavailable: T.nilable(T::Boolean),
    method: T.nilable(Symbol),
    uniq: T::Boolean,
    prefer_loading_from_json: T::Boolean,
  ).returns(T::Array[T.any(Formula, Keg, Cask::Cask)])
}
def to_formulae_and_casks(only: parent&.only_formula_or_cask, ignore_unavailable: nil, method: nil, uniq: true,
                          prefer_loading_from_json: false)
  @to_formulae_and_casks ||= {}
  @to_formulae_and_casks[only] ||= downcased_unique_named.flat_map do |name|
    load_formula_or_cask(name, only: only, method: method, prefer_loading_from_json: prefer_loading_from_json)
  rescue FormulaUnreadableError, FormulaClassUnavailableError,
         TapFormulaUnreadableError, TapFormulaClassUnavailableError,
         Cask::CaskUnreadableError
    # Need to rescue before `*UnavailableError` (superclass of this)
    # The formula/cask was found, but there's a problem with its implementation
    raise
  rescue NoSuchKegError, FormulaUnavailableError, Cask::CaskUnavailableError, FormulaOrCaskUnavailableError
    # Unavailable names are either dropped (ignore_unavailable) or fatal.
    ignore_unavailable ? [] : raise
  end.freeze

  if uniq
    @to_formulae_and_casks[only].uniq.freeze
  else
    @to_formulae_and_casks[only]
  end
end
# Partition the resolved objects into [formulae, casks].
# NOTE(review): plain `=` below means the per-key value is recomputed on
# every call, unlike the `||=` memoizers elsewhere in this class —
# confirm whether intentional.
def to_formulae_to_casks(only: parent&.only_formula_or_cask, method: nil)
  @to_formulae_to_casks ||= {}
  @to_formulae_to_casks[[method, only]] = to_formulae_and_casks(only: only, method: method)
                                          .partition { |o| o.is_a?(Formula) }
                                          .map(&:freeze).freeze
end
# Like #to_formulae_and_casks, but unavailable names are returned in the
# array as their FormulaOrCaskUnavailableError objects instead of raising.
# NOTE(review): the cache key ignores `only`, and plain `=` recomputes the
# value on every call — confirm whether intentional.
def to_formulae_and_casks_and_unavailable(only: parent&.only_formula_or_cask, method: nil)
  @to_formulae_casks_unknowns ||= {}
  @to_formulae_casks_unknowns[method] = downcased_unique_named.map do |name|
    load_formula_or_cask(name, only: only, method: method)
  rescue FormulaOrCaskUnavailableError => e
    e
  end.uniq.freeze
end
# Load a single name as a formula (first, unless only == :cask), then as
# a cask (unless only == :formula). "Unreadable" errors (found but broken)
# are deferred so the other type can still be tried; if neither loads,
# the most useful error is raised.
def load_formula_or_cask(name, only: nil, method: nil, prefer_loading_from_json: false)
  unreadable_error = nil

  if only != :cask
    # Optionally pre-fetch bottle metadata from the JSON API.
    if prefer_loading_from_json && ENV["HOMEBREW_JSON_CORE"].present? && BottleAPI.bottle_available?(name)
      BottleAPI.fetch_bottles(name)
    end

    begin
      # `method` selects how the name is resolved (factory, spec
      # resolution, or one of the keg-based strategies).
      formula = case method
      when nil, :factory
        Formulary.factory(name, *spec, force_bottle: @force_bottle, flags: @flags)
      when :resolve
        resolve_formula(name)
      when :latest_kegs
        resolve_latest_keg(name)
      when :default_kegs
        resolve_default_keg(name)
      when :keg
        odeprecated "`load_formula_or_cask` with `method: :keg`",
                    "`load_formula_or_cask` with `method: :default_kegs`"
        resolve_default_keg(name)
      when :kegs
        _, kegs = resolve_kegs(name)
        kegs
      else
        raise
      end

      warn_if_cask_conflicts(name, "formula") unless only == :formula
      return formula
    rescue FormulaUnreadableError, FormulaClassUnavailableError,
           TapFormulaUnreadableError, TapFormulaClassUnavailableError => e
      # Need to rescue before `FormulaUnavailableError` (superclass of this)
      # The formula was found, but there's a problem with its implementation
      unreadable_error ||= e
    rescue NoSuchKegError, FormulaUnavailableError => e
      raise e if only == :formula
    end
  end

  if only != :formula
    begin
      config = Cask::Config.from_args(@parent) if @cask_options
      cask = Cask::CaskLoader.load(name, config: config)

      # The formula existed but was broken; surface that before falling
      # back to the cask.
      if unreadable_error.present?
        onoe <<~EOS
          Failed to load formula: #{name}
          #{unreadable_error}
        EOS
        opoo "Treating #{name} as a cask."
      end

      return cask
    rescue Cask::CaskUnreadableError => e
      # Need to rescue before `CaskUnavailableError` (superclass of this)
      # The cask was found, but there's a problem with its implementation
      unreadable_error ||= e
    rescue Cask::CaskUnavailableError => e
      raise e if only == :cask
    end
  end

  raise unreadable_error if unreadable_error.present?

  # Nothing loaded: produce a tap-qualified error when the name looks
  # like user/repo/name, otherwise a generic one.
  user, repo, short_name = name.downcase.split("/", 3)
  if repo.present? && short_name.present?
    tap = Tap.fetch(user, repo)
    raise TapFormulaOrCaskUnavailableError.new(tap, short_name)
  end

  raise FormulaOrCaskUnavailableError, name
end
private :load_formula_or_cask
# Resolve a name to a Formula, honouring the constructor's override spec.
def resolve_formula(name)
  Formulary.resolve(name, spec: spec, force_bottle: @force_bottle, flags: @flags)
end
private :resolve_formula
# Memoized formula list resolved via Formulary.resolve (:resolve method).
sig { params(uniq: T::Boolean).returns(T::Array[Formula]) }
def to_resolved_formulae(uniq: true)
  @to_resolved_formulae ||= to_formulae_and_casks(only: :formula, method: :resolve, uniq: uniq)
                            .freeze
end
# [resolved formulae, casks] partition using the :resolve strategy.
def to_resolved_formulae_to_casks(only: parent&.only_formula_or_cask)
  to_formulae_to_casks(only: only, method: :resolve)
end
# Paths restricted to formulae only.
def to_formulae_paths
  to_paths(only: :formula)
end
# Keep existing paths and try to convert others to tap, formula or cask paths.
# If a cask and formula with the same name exist, includes both their paths
# unless `only` is specified.
sig { params(only: T.nilable(Symbol), recurse_tap: T::Boolean).returns(T::Array[Pathname]) }
def to_paths(only: parent&.only_formula_or_cask, recurse_tap: false)
  @to_paths ||= {}
  @to_paths[only] ||= downcased_unique_named.flat_map do |name|
    if File.exist?(name)
      # Existing filesystem paths pass through untouched.
      Pathname(name)
    elsif name.count("/") == 1 && !name.start_with?("./", "/")
      # Exactly one slash and not path-like: treat as a "user/repo" tap.
      tap = Tap.fetch(name)

      if recurse_tap
        next tap.formula_files if only == :formula
        next tap.cask_files if only == :cask
      end

      tap.path
    else
      next Formulary.path(name) if only == :formula
      next Cask::CaskLoader.path(name) if only == :cask

      # Unrestricted: include whichever of the formula/cask paths exist,
      # falling back to the bare name as a Pathname.
      formula_path = Formulary.path(name)
      cask_path = Cask::CaskLoader.path(name)

      paths = []

      paths << formula_path if formula_path.exist?
      paths << cask_path if cask_path.exist?

      paths.empty? ? Pathname(name) : paths
    end
  end.uniq.freeze
end
# Kegs resolved via the :default_kegs strategy; on a missing keg, prints
# any suggested uninstall command before re-raising.
# (Mirrors #to_latest_kegs and #to_kegs.)
sig { returns(T::Array[Keg]) }
def to_default_kegs
  @to_default_kegs ||= begin
    to_formulae_and_casks(only: :formula, method: :default_kegs).freeze
  rescue NoSuchKegError => e
    if (reason = MissingFormula.suggest_command(e.name, "uninstall"))
      $stderr.puts reason
    end
    raise e
  end
end
# Kegs resolved via the :latest_kegs strategy; on a missing keg, prints
# any suggested uninstall command before re-raising.
# (Mirrors #to_default_kegs and #to_kegs.)
sig { returns(T::Array[Keg]) }
def to_latest_kegs
  @to_latest_kegs ||= begin
    to_formulae_and_casks(only: :formula, method: :latest_kegs).freeze
  rescue NoSuchKegError => e
    if (reason = MissingFormula.suggest_command(e.name, "uninstall"))
      $stderr.puts reason
    end
    raise e
  end
end
# All kegs for each named formula (:kegs strategy); on a missing keg,
# prints any suggested uninstall command before re-raising.
# (Mirrors #to_default_kegs and #to_latest_kegs.)
sig { returns(T::Array[Keg]) }
def to_kegs
  @to_kegs ||= begin
    to_formulae_and_casks(only: :formula, method: :kegs).freeze
  rescue NoSuchKegError => e
    if (reason = MissingFormula.suggest_command(e.name, "uninstall"))
      $stderr.puts reason
    end
    raise e
  end
end
# Partition the resolved objects into [kegs, casks], using all kegs or
# just the default keg per formula depending on `all_kegs`.
sig {
  params(only: T.nilable(Symbol), ignore_unavailable: T.nilable(T::Boolean), all_kegs: T.nilable(T::Boolean))
    .returns([T::Array[Keg], T::Array[Cask::Cask]])
}
def to_kegs_to_casks(only: parent&.only_formula_or_cask, ignore_unavailable: nil, all_kegs: nil)
  method = all_kegs ? :kegs : :default_kegs
  @to_kegs_to_casks ||= {}
  @to_kegs_to_casks[method] ||=
    to_formulae_and_casks(only: only, ignore_unavailable: ignore_unavailable, method: method)
    .partition { |o| o.is_a?(Keg) }
    .map(&:freeze).freeze
end
# Memoized Tap objects for each named argument (no install check).
sig { returns(T::Array[Tap]) }
def to_taps
  @to_taps ||= downcased_unique_named.map { |name| Tap.fetch name }.uniq.freeze
end
# Like #to_taps but raises TapUnavailableError for any uninstalled tap.
# (`each` returns its receiver, so the result is the to_taps array; the
# trailing `uniq` is redundant since to_taps already deduplicates.)
sig { returns(T::Array[Tap]) }
def to_installed_taps
  @to_installed_taps ||= to_taps.each do |tap|
    raise TapUnavailableError, tap.name unless tap.installed?
  end.uniq.freeze
end
# Named arguments that match the homebrew-cask token regex.
sig { returns(T::Array[String]) }
def homebrew_tap_cask_names
  downcased_unique_named.grep(HOMEBREW_CASK_TAP_CASK_REGEX)
end
private
sig { returns(T::Array[String]) }
def downcased_unique_named
  # Lowercase bare names only; paths, bottle tarballs and URLs keep
  # their original case.
  map do |arg|
    next arg if arg.include?("/") || arg.end_with?(".tar.gz") || File.exist?(arg)

    arg.downcase
  end.uniq
end
# The formula spec forced via the constructor's override_spec, if any.
def spec
  @override_spec
end
private :spec
# Resolve a name to its rack and the kegs installed in it.
#
# @return [[Pathname, Array<Keg>]] raises NoSuchKegError when empty.
def resolve_kegs(name)
  raise UsageError if name.blank?

  require "keg"

  rack = Formulary.to_rack(name.downcase)

  # Each subdirectory of the rack is one installed version (a keg).
  kegs = rack.directory? ? rack.subdirs.map { |d| Keg.new(d) } : []
  raise NoSuchKegError, rack.basename if kegs.none?

  [rack, kegs]
end
# Pick the most recent keg: the highest stable version, or the most
# recently modified keg when only HEAD installs exist.
# NOTE(review): the single-keg early return yields the *array* `kegs`,
# while the other paths return a single Keg — confirm callers tolerate
# the mixed return type.
def resolve_latest_keg(name)
  _, kegs = resolve_kegs(name)

  # Return keg if it is the only installed keg
  return kegs if kegs.length == 1

  stable_kegs = kegs.reject { |k| k.version.head? }

  return kegs.max_by { |keg| Tab.for_keg(keg).source_modified_time } if stable_kegs.blank?

  stable_kegs.max_by(&:version)
end
# Resolve the "default" keg for a name: prefer the opt symlink, then the
# linked-keg symlink, then the only installed keg; otherwise ask the
# formula for its latest installed prefix and error when ambiguous.
def resolve_default_keg(name)
  rack, kegs = resolve_kegs(name)

  linked_keg_ref = HOMEBREW_LINKED_KEGS/rack.basename
  opt_prefix = HOMEBREW_PREFIX/"opt/#{rack.basename}"

  begin
    return Keg.new(opt_prefix.resolved_path) if opt_prefix.symlink? && opt_prefix.directory?
    return Keg.new(linked_keg_ref.resolved_path) if linked_keg_ref.symlink? && linked_keg_ref.directory?
    return kegs.first if kegs.length == 1

    # Paths/URLs load directly; bare names load from the rack.
    f = if name.include?("/") || File.exist?(name)
      Formulary.factory(name)
    else
      Formulary.from_rack(rack)
    end

    unless (prefix = f.latest_installed_prefix).directory?
      raise MultipleVersionsInstalledError, <<~EOS
        #{rack.basename} has multiple installed versions
        Run `brew uninstall --force #{rack.basename}` to remove all versions.
      EOS
    end

    Keg.new(prefix)
  rescue FormulaUnavailableError
    # No formula to disambiguate with: the user must clean up manually.
    raise MultipleVersionsInstalledError, <<~EOS
      Multiple kegs installed to #{rack}
      However we don't know which one you refer to.
      Please delete (with rm -rf!) all but one and then try again.
    EOS
  end
end
def warn_if_cask_conflicts(ref, loaded_type)
message = "Treating #{ref} as a #{loaded_type}."
begin
cask = Cask::CaskLoader.load ref
message += " For the cask, use #{cask.tap.name}/#{cask.token}" if cask.tap.present?
rescue Cask::CaskUnreadableError => e
# Need to rescue before `CaskUnavailableError` (superclass of this)
# The cask was found, but there's a problem with its implementation
onoe <<~EOS
Failed to load cask: #{ref}
#{e}
EOS
rescue Cask::CaskUnavailableError
# No ref conflict with a cask, do nothing
return
end
opoo message.freeze
end
end
end
end
| 34.627249 | 118 | 0.597105 |
bb0d54bae8204886a9dffb5d6ec64739b97093f9 | 294 | cask :v1 => 'fotowall' do
version '0.8.2'
sha256 'f49ad020eb6d36b9ad5492edd24ce608aef4466b727b5d0811ed4218b35d0c8c'
url "http://fotowall.googlecode.com/files/Fotowall-#{version}-OSX.dmg"
homepage 'http://www.enricoros.com/opensource/fotowall/'
license :oss
app 'Fotowall.app'
end
| 26.727273 | 75 | 0.755102 |
d5f10656895524f04d34d8279b81b5e3cc9374d6 | 20,158 | require 'rails_helper'
feature 'doc auth document capture step' do
include IdvStepHelper
include DocAuthHelper
include ActionView::Helpers::DateHelper
let(:max_attempts) { IdentityConfig.store.doc_auth_max_attempts }
let(:user) { user_with_2fa }
let(:liveness_enabled) { false }
let(:fake_analytics) { FakeAnalytics.new }
let(:sp_name) { 'Test SP' }
before do
allow(IdentityConfig.store).to receive(:liveness_checking_enabled).
and_return(liveness_enabled)
allow(Identity::Hostdata::EC2).to receive(:load).
and_return(OpenStruct.new(region: 'us-west-2', account_id: '123456789'))
allow_any_instance_of(ApplicationController).to receive(:analytics).and_return(fake_analytics)
allow_any_instance_of(ServiceProviderSessionDecorator).to receive(:sp_name).and_return(sp_name)
if liveness_enabled
visit_idp_from_oidc_sp_with_ial2_strict
else
visit_idp_from_oidc_sp_with_ial2
end
sign_in_and_2fa_user(user)
complete_doc_auth_steps_before_document_capture_step
end
it 'shows the step indicator' do
expect(page).to have_css(
'.step-indicator__step--current',
text: t('step_indicator.flows.idv.verify_id'),
)
end
context 'when javascript is enabled', js: true do
it 'logs return to sp link click' do
new_window = window_opened_by do
click_on t('idv.troubleshooting.options.get_help_at_sp', sp_name: sp_name)
end
within_window new_window do
expect(fake_analytics).to have_logged_event(
Analytics::RETURN_TO_SP_FAILURE_TO_PROOF,
step: 'document_capture',
location: 'documents_having_trouble',
)
end
end
end
context 'when liveness checking is enabled' do
let(:liveness_enabled) { true }
it 'is on the correct_page and shows the document upload options' do
expect(current_path).to eq(idv_doc_auth_document_capture_step)
expect(page).to have_content(t('doc_auth.headings.document_capture_front'))
expect(page).to have_content(t('doc_auth.headings.document_capture_back'))
end
it 'shows the selfie upload option' do
expect(page).to have_content(t('doc_auth.headings.document_capture_selfie'))
end
it 'displays doc capture tips' do
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_header_text'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text1'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text2'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text3'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text4'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_hint'))
end
it 'displays selfie tips' do
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_selfie_text1'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_selfie_text2'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_selfie_text3'))
end
it 'proceeds to the next page with valid info and logs analytics info' do
expect_any_instance_of(DocAuth::Mock::DocAuthMockClient).
to receive(:post_images).
with(hash_including(image_source: DocAuth::ImageSources::UNKNOWN)).
and_call_original
attach_and_submit_images
expect(page).to have_current_path(next_step)
expect(fake_analytics).to have_logged_event(
Analytics::DOC_AUTH + ' submitted',
step: 'document_capture',
flow_path: 'standard',
doc_auth_result: 'Passed',
billed: true,
)
expect(fake_analytics).to have_logged_event(
'IdV: ' + "#{Analytics::DOC_AUTH} document_capture submitted".downcase,
step: 'document_capture',
flow_path: 'standard',
doc_auth_result: 'Passed',
billed: true,
)
expect_costing_for_document
end
it 'does not proceed to the next page with invalid info' do
mock_general_doc_auth_client_error(:create_document)
attach_and_submit_images
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
end
it 'does not proceed to the next page with a successful doc auth but missing information' do
mock_doc_auth_no_name_pii(:post_images)
attach_and_submit_images
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
expect(fake_analytics).to have_logged_event(
Analytics::DOC_AUTH + ' submitted',
step: 'document_capture',
flow_path: 'standard',
doc_auth_result: 'Passed',
billed: true,
success: false,
)
expect(fake_analytics).to have_logged_event(
'IdV: ' + "#{Analytics::DOC_AUTH} document_capture submitted".downcase,
step: 'document_capture',
flow_path: 'standard',
doc_auth_result: 'Passed',
billed: true,
success: false,
)
end
it 'throttles calls to acuant and allows retry after the attempt window' do
allow(IdentityConfig.store).to receive(:doc_auth_max_attempts).and_return(max_attempts)
freeze_time do
max_attempts.times do
attach_and_submit_images
expect(page).to have_current_path(next_step)
click_on t('doc_auth.buttons.start_over')
complete_doc_auth_steps_before_document_capture_step
end
attach_and_submit_images
timeout = distance_of_time_in_words(
Throttle.attempt_window_in_minutes(:idv_doc_auth).minutes,
)
message = strip_tags(t('errors.doc_auth.throttled_text_html', timeout: timeout))
expect(page).to have_content(message)
end
expect(page).to have_current_path(idv_session_errors_throttled_path)
expect(fake_analytics).to have_logged_event(
Analytics::THROTTLER_RATE_LIMIT_TRIGGERED,
throttle_type: :idv_doc_auth,
)
travel_to(IdentityConfig.store.doc_auth_attempt_window_in_minutes.minutes.from_now + 1) do
sign_in_and_2fa_user(user)
complete_doc_auth_steps_before_document_capture_step
attach_and_submit_images
expect(page).to have_current_path(next_step)
end
end
it 'catches network connection errors on post_front_image' do
DocAuth::Mock::DocAuthMockClient.mock_response!(
method: :post_front_image,
response: DocAuth::Response.new(
success: false,
errors: { network: I18n.t('doc_auth.errors.general.network_error') },
),
)
attach_and_submit_images
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
expect(page).to have_content(I18n.t('doc_auth.errors.general.network_error'))
end
end
context 'when liveness checking is not enabled' do
let(:liveness_enabled) { false }
it 'is on the correct_page and shows the document upload options' do
expect(current_path).to eq(idv_doc_auth_document_capture_step)
expect(page).to have_content(t('doc_auth.headings.document_capture_front'))
expect(page).to have_content(t('doc_auth.headings.document_capture_back'))
end
it 'does not show the selfie upload option' do
expect(page).not_to have_content(t('doc_auth.headings.document_capture_selfie'))
end
it 'displays document capture tips' do
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_header_text'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text1'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text2'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text3'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_id_text4'))
expect(page).to have_content(I18n.t('doc_auth.tips.document_capture_hint'))
end
it 'does not display selfie tips' do
expect(page).not_to have_content(I18n.t('doc_auth.tips.document_capture_selfie_text1'))
expect(page).not_to have_content(I18n.t('doc_auth.tips.document_capture_selfie_text2'))
expect(page).not_to have_content(I18n.t('doc_auth.tips.document_capture_selfie_text3'))
end
it 'proceeds to the next page with valid info' do
attach_and_submit_images
expect(page).to have_current_path(next_step)
expect_costing_for_document
expect(DocAuthLog.find_by(user_id: user.id).state).to eq('MT')
end
it 'does not track state if state tracking is disabled' do
allow(IdentityConfig.store).to receive(:state_tracking_enabled).and_return(false)
attach_and_submit_images
expect(DocAuthLog.find_by(user_id: user.id).state).to be_nil
end
it 'throttles calls to acuant and allows retry after the attempt window' do
allow(IdentityConfig.store).to receive(:doc_auth_max_attempts).and_return(max_attempts)
freeze_time do
max_attempts.times do
attach_and_submit_images
expect(page).to have_current_path(next_step)
click_on t('doc_auth.buttons.start_over')
complete_doc_auth_steps_before_document_capture_step
end
attach_and_submit_images
timeout = distance_of_time_in_words(
Throttle.attempt_window_in_minutes(:idv_doc_auth).minutes,
)
message = strip_tags(t('errors.doc_auth.throttled_text_html', timeout: timeout))
expect(page).to have_content(message)
end
expect(page).to have_current_path(idv_session_errors_throttled_path)
expect(fake_analytics).to have_logged_event(
Analytics::THROTTLER_RATE_LIMIT_TRIGGERED,
throttle_type: :idv_doc_auth,
)
travel_to(IdentityConfig.store.doc_auth_attempt_window_in_minutes.minutes.from_now + 1) do
sign_in_and_2fa_user(user)
complete_doc_auth_steps_before_document_capture_step
attach_and_submit_images
expect(page).to have_current_path(next_step)
end
end
it 'catches network connection errors on post_front_image' do
DocAuth::Mock::DocAuthMockClient.mock_response!(
method: :post_front_image,
response: DocAuth::Response.new(
success: false,
errors: { network: I18n.t('doc_auth.errors.general.network_error') },
),
)
attach_and_submit_images
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
expect(page).to have_content(I18n.t('doc_auth.errors.general.network_error'))
end
end
context 'when there is a stored result' do
it 'does not proceed to the next step if there is no result' do
submit_empty_form
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
end
it 'uses the form params if form params are present' do
document_capture_session = user.document_capture_sessions.last
response = DocAuth::Response.new(success: false)
document_capture_session.store_result_from_response(response)
document_capture_session.save!
attach_and_submit_images
expect(page).to have_current_path(next_step)
end
context 'sync result' do
let(:success) { true }
before do
document_capture_session = user.document_capture_sessions.last
response = DocAuth::Response.new(success: success)
document_capture_session.store_result_from_response(response)
document_capture_session.save!
end
context 'successful result' do
let(:success) { true }
it 'proceeds to the next step' do
submit_empty_form
expect(page).to have_current_path(next_step)
end
end
context 'unsuccessful result' do
let(:success) { false }
it 'does not proceed to the next step' do
submit_empty_form
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
expect(page).to have_content(I18n.t('doc_auth.errors.general.network_error'))
end
end
end
context 'async result' do
let(:success) { true }
before do
document_capture_session = user.document_capture_sessions.last
response = DocAuth::Response.new(success: success)
document_capture_session.create_doc_auth_session
document_capture_session.store_doc_auth_result(
result: response.to_h,
pii: response.pii_from_doc,
)
document_capture_session.save!
end
context 'successful result' do
let(:success) { true }
it 'proceeds to the next step' do
submit_empty_form
expect(page).to have_current_path(next_step)
end
end
context 'unsuccessful result' do
let(:success) { false }
it 'does not proceed to the next step' do
submit_empty_form
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
expect(page).to have_content(I18n.t('doc_auth.errors.general.network_error'))
end
end
end
end
context 'when the only error is an expired drivers license' do
before do
allow(IdentityConfig.store).to receive(:proofing_allow_expired_license).
and_return(proofing_allow_expired_license)
allow(IdentityConfig.store).to receive(:proofing_expired_license_after).
and_return(Date.new(2020, 3, 1))
allow_any_instance_of(ApplicationController).
to receive(:analytics).and_return(fake_analytics)
DocAuth::Mock::DocAuthMockClient.mock_response!(
method: :post_images,
response: DocAuth::Response.new(
pii_from_doc: DocAuth::Mock::ResultResponseBuilder::DEFAULT_PII_FROM_DOC.merge(
state_id_expiration: '04/01/2020',
),
success: false,
errors: {
id: [DocAuth::Errors::DOCUMENT_EXPIRED_CHECK],
},
),
)
end
context 'when expired licenses are not allowed' do
let(:proofing_allow_expired_license) { false }
it 'shows an error and does not go to the next page' do
attach_and_submit_images
expect(page).to have_current_path(idv_doc_auth_document_capture_step)
expect(fake_analytics).to have_logged_event(
Analytics::DOC_AUTH + ' submitted',
document_expired: true,
would_have_passed: true,
)
end
end
context 'when expired licenses are allowed' do
let(:proofing_allow_expired_license) { true }
it 'proceeds to the next page and saves reproof_at to the profile' do
attach_and_submit_images
expect(page).to have_current_path(next_step)
expect(fake_analytics).to have_logged_event(
Analytics::DOC_AUTH + ' submitted',
document_expired: true,
)
# finish the rest of the flow so we can make sure the data is plumbed through
fill_out_ssn_form_ok
click_idv_continue
expect(page).to have_content(t('doc_auth.headings.verify'))
click_idv_continue
fill_out_phone_form_mfa_phone(user)
click_idv_continue
fill_in :user_password, with: Features::SessionHelper::VALID_PASSWORD
click_idv_continue
acknowledge_and_confirm_personal_key(js: false)
profile = user.active_profile
expect(profile.reproof_at).to eq(IdentityConfig.store.proofing_expired_license_reproof_at)
end
end
end
context 'when using async uploads', :js do
before do
allow(DocumentProofingJob).to receive(:perform_later).
and_call_original
end
it 'proceeds to the next page with valid info' do
set_up_document_capture_result(
uuid: DocumentCaptureSession.last.uuid,
idv_result: {
success: true,
errors: {},
messages: ['message'],
pii_from_doc: {
first_name: Faker::Name.first_name,
last_name: Faker::Name.last_name,
dob: Time.zone.today.to_s,
address1: Faker::Address.street_address,
city: Faker::Address.city,
state: Faker::Address.state_abbr,
zipcode: Faker::Address.zip_code,
state_id_type: 'drivers_license',
state_id_number: '111',
state_id_jurisdiction: 'WI',
},
},
)
attach_file 'Front of your ID', 'app/assets/images/logo.png'
attach_file 'Back of your ID', 'app/assets/images/logo.png'
form = page.find('#document-capture-form')
front_url = form['data-front-image-upload-url']
back_url = form['data-back-image-upload-url']
click_on 'Submit'
expect(page).to have_current_path(next_step, wait: 20)
expect(DocumentProofingJob).to have_received(:perform_later) do |encrypted_arguments:, **|
args = JSON.parse(
Encryption::Encryptors::SessionEncryptor.new.decrypt(encrypted_arguments),
symbolize_names: true,
)[:document_arguments]
original = File.read('app/assets/images/logo.png')
encryption_helper = JobHelpers::EncryptionHelper.new
encryption_key = Base64.decode64(args[:encryption_key])
Capybara.current_driver = :rack_test # ChromeDriver doesn't support `page.status_code`
page.driver.get front_url
expect(page).to have_http_status(200)
front_plain = encryption_helper.decrypt(
data: page.body, iv: Base64.decode64(args[:front_image_iv]), key: encryption_key,
)
expect(front_plain.b).to eq(original.b)
page.driver.get back_url
expect(page).to have_http_status(200)
back_plain = encryption_helper.decrypt(
data: page.body, iv: Base64.decode64(args[:back_image_iv]), key: encryption_key,
)
expect(back_plain.b).to eq(original.b)
end
end
context 'when expired licenses are allowed' do
before do
allow(IdentityConfig.store).to receive(:proofing_allow_expired_license).and_return(true)
DocAuth::Mock::DocAuthMockClient.mock_response!(
method: :post_images,
response: DocAuth::Response.new(
success: false,
pii_from_doc: DocAuth::Mock::ResultResponseBuilder::DEFAULT_PII_FROM_DOC.merge(
state_id_expiration: '04/01/2020',
),
errors: {
id: [DocAuth::Errors::DOCUMENT_EXPIRED_CHECK],
},
),
)
end
it 'proceeds to the next page and saves reproof_at to the profile' do
attach_file 'Front of your ID', 'app/assets/images/logo.png'
attach_file 'Back of your ID', 'app/assets/images/logo.png'
form = page.find('#document-capture-form')
front_url = form['data-front-image-upload-url']
back_url = form['data-back-image-upload-url']
click_on 'Submit'
expect(page).to have_current_path(next_step, wait: 20)
# finish the rest of the flow so we can make sure the data is plumbed through
fill_out_ssn_form_ok
click_idv_continue
expect(page).to have_content(t('doc_auth.headings.verify'))
click_idv_continue
fill_out_phone_form_mfa_phone(user)
click_idv_continue
fill_in :user_password, with: Features::SessionHelper::VALID_PASSWORD
click_idv_continue
acknowledge_and_confirm_personal_key(js: true)
profile = user.active_profile
expect(profile.reproof_at).to eq(IdentityConfig.store.proofing_expired_license_reproof_at)
end
end
end
def next_step
idv_doc_auth_ssn_step
end
def submit_empty_form
page.driver.put(
current_path,
doc_auth: { front_image: nil, back_image: nil, selfie_image: nil },
)
visit current_path
end
def expect_costing_for_document
%i[acuant_front_image acuant_back_image acuant_result].each do |cost_type|
expect(costing_for(cost_type)).to be_present
end
end
def costing_for(cost_type)
SpCost.where(ial: 2, issuer: 'urn:gov:gsa:openidconnect:sp:server', cost_type: cost_type.to_s)
end
end
| 34.875433 | 99 | 0.690594 |
2871a7d26a875baa45f66f1ff46a85e7bc4f2cae | 1,698 | module Fog
module HP
class Network
class Real
# Gets an existing subnet by id
#
# ==== Parameters
# * 'id'<~String>: - UUId for the subnet
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * subnet<~Array>:
# * 'id'<~String>: - UUId for the subnet
# * 'name'<~String>: - Name of the subnet
# * 'network_id'<~String>: - UUId of the network
# * 'tenant_id'<~String>: - TenantId that owns the subnet
# * 'dns_nameservers'<~Array>: - Array of DNS Nameservers
# * 'allocation_pools'<~Array>:
# * 'start'<~String>: - Start IP address
# * 'end'<~String>: - End IP address
# * 'host_routes'<~Array>: - Array of host routes
# * 'gateway_ip'<~String>: - Gateway IP address
# * 'ip_version'<~Integer>: - IP version, values 4 or 6
# * 'cidr'<~String>: - Cidr
# * 'enable_dhcp'<~Boolean>: - true or false, defaults to true
def get_subnet(subnet_id)
request(
:expects => 200,
:method => 'GET',
:path => "subnets/#{subnet_id}"
)
end
end
class Mock
def get_subnet(subnet_id)
response = Excon::Response.new
if subnet = list_subnets.body['subnets'].detect {|_| _['id'] == subnet_id}
response.status = 200
response.body = { 'subnet' => subnet }
response
else
raise Fog::HP::Network::NotFound
end
end
end
end
end
end
| 32.037736 | 84 | 0.474087 |
186ef5fb0fa9ce909ce50bd1ea6e147886aec6a0 | 1,272 | module KnifeCookbookDoc
  # Extracts documentation metadata from a Chef definition file: free-text
  # description sections, @param annotations, and parameter defaults parsed
  # from the `define` statement itself.
  #
  # NOTE(review): #extract_description and #top_level_descriptions come from
  # KnifeCookbookDoc::BaseModel — confirm their contracts in base_model.rb.
  class DefinitionsModel
    include KnifeCookbookDoc::BaseModel
    # Definition name; may be overwritten by #load_properties with the name
    # parsed out of the source file.
    attr_reader :name
    # @param name [String] initial definition name
    # @param filename [String] path to the definition source file
    def initialize(name, filename)
      @name = name
      @filename = filename
      load_descriptions
    end
    # Per-parameter metadata: 'descr' from @param tags, 'default' from code.
    def params
      @params ||= {}
    end
    private
    # Walk the extracted description line by line, routing @param and
    # @section tags and accumulating plain lines into the current section.
    def load_descriptions
      description = extract_description
      current_section = 'main'
      description.each_line do |line|
        if /^ *\@param *([^ ]*) (.*)$/ =~ line
          params[$1] = {}
          params[$1]['descr'] = $2.strip
        elsif /^ *\@section (.*)$/ =~ line
          current_section = $1.strip
        else
          lines = (top_level_descriptions[current_section] || [])
          lines << line.gsub("\n", '')
          top_level_descriptions[current_section] = lines
        end
      end
      load_properties
    end
    # Pull the definition's name and parameter defaults out of the `define`
    # statement in the source file.
    def load_properties
      code = IO.read(@filename)
      code.gsub(/^ *define (.*?) (?=do)/m) do
        all = $1.split(' ', 2)
        @name = all.shift.gsub(/:|,/, '')
        next if all.empty?
        # SECURITY NOTE: evaluates text lifted from the cookbook source to
        # recover the defaults hash — only run against trusted cookbooks.
        # The inline `rescue {}` silently drops anything eval can't parse.
        all = eval("{#{all.last}}") rescue {}
        all.each do |k, v|
          p_name = k.to_s
          params[p_name] ||= {}
          params[p_name]['default'] = v.nil? ? 'nil' : v
        end
      end
    end
  end
| 24.461538 | 65 | 0.525943 |
bf4f4c71c0db3c639c832b7807fdb2c45eca277b | 349 | FactoryBot.define do
factory :project, class: 'BeachApiCore::Project' do
name { Faker::Lorem.word }
user
organisation
after(:build) do |project|
if project.project_keepers.empty?
project.project_keepers << build(:project_keeper,
project: project)
end
end
end
end
| 23.266667 | 58 | 0.593123 |
1a2b0ffa282995e6699320c0df3d858cba48a24f | 5,146 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "app_critic_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :cloudinary
end
| 44.362069 | 114 | 0.762534 |
1d4889a8031a088dc9566cea447804b92e8b213b | 823 | module SolidusStockSupplier
class Engine < Rails::Engine
require 'spree/core'
isolate_namespace Spree
engine_name 'solidus_stock_supplier'
# use rspec for tests
config.generators do |g|
g.test_framework :rspec
end
initializer 'solidus_stock_supplier.menu', before: :load_config_initializers do
Spree::Backend::Config.configure do |config|
config.menu_items << config.class::MenuItem.new(
[:suppliers],
'industry',
condition: -> { can?(:admin, :suppliers) }
)
end
end
def self.activate
Dir.glob(File.join(File.dirname(__FILE__), '../../app/**/*_decorator*.rb')) do |c|
Rails.configuration.cache_classes ? require(c) : load(c)
end
end
config.to_prepare(&method(:activate).to_proc)
end
end
| 26.548387 | 88 | 0.641555 |
182eae86a681c06c92e5982a9f4f4e3b27b2bf94 | 6,757 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'date'
module DocuSign_eSign
class TspHealthCheckStatusDescription
#
attr_accessor :description
#
attr_accessor :error
#
attr_accessor :hostname
#
attr_accessor :response_seconds
# Indicates the envelope status. Valid values are: * sent - The envelope is sent to the recipients. * created - The envelope is saved as a draft and can be modified and sent later.
attr_accessor :status
#
attr_accessor :type
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'description' => :'description',
:'error' => :'error',
:'hostname' => :'hostname',
:'response_seconds' => :'responseSeconds',
:'status' => :'status',
:'type' => :'type'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'description' => :'String',
:'error' => :'String',
:'hostname' => :'String',
:'response_seconds' => :'String',
:'status' => :'String',
:'type' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'description')
self.description = attributes[:'description']
end
if attributes.has_key?(:'error')
self.error = attributes[:'error']
end
if attributes.has_key?(:'hostname')
self.hostname = attributes[:'hostname']
end
if attributes.has_key?(:'responseSeconds')
self.response_seconds = attributes[:'responseSeconds']
end
if attributes.has_key?(:'status')
self.status = attributes[:'status']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properies with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
description == o.description &&
error == o.error &&
hostname == o.hostname &&
response_seconds == o.response_seconds &&
status == o.status &&
type == o.type
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[description, error, hostname, response_seconds, status, type].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = DocuSign_eSign.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.271967 | 186 | 0.610774 |
bbac9f86191acb512cfd4799ee7099f068f47cbd | 2,076 | control "ESXI-67-000048" do
title "The ESXi host must protect the confidentiality and integrity of
transmitted information by isolating vMotion traffic."
desc "While encrypted vMotion is available now vMotion traffic should still
be sequestered from other traffic to futher protect it from attack. This
network must be only be accessible to other ESXi hosts preventing outside
access to the network."
impact 0.5
tag severity: "CAT II"
tag gtitle: "SRG-OS-000423-VMM-001700"
tag rid: "ESXI-67-000048"
tag stig_id: "ESXI-67-000048"
tag cci: "CCI-002418"
tag nist: ["SC-8", "Rev_4"]
desc 'check', "The vMotion VMKernel port group should in a dedicated VLAN that
can be on a common standard or distributed virtual switch as long as the
vMotion VLAN is not shared by any other function and it not routed to anything
but ESXi hosts. The check for this will be unique per environment. From the
vSphere Client select the ESXi host and go to Configuration > Networking and
review the VLAN associated with the vMotion VMkernel(s) and verify they are
dedicated for that purpose and are logically separated from other functions.
If long distance or cross vCenter vMotion is used the vMotion network can be
routable but must be accessible to only the intended ESXi hosts.
If the vMotion port group is not on an isolated VLAN and/or is routable to
systems other than ESXi hosts, this is a finding.
For environments that do not use vCenter server to manage ESXi, this is not
applicable."
desc 'fix', "Configuration of the vMotion VMkernel will be unique to each
environment. As an example, to modify the IP address and VLAN information to
the correct network on a distributed switch do the following:
From the vSphere Client go to Networking >> Select a distributed switch >>
Select a port group >> Configure >> Settings >> Edit >> VLAN. Change the \"VLAN
Type\" to \"VLAN\" and change the \"VLAN ID\" to a network allocated and
dedicated to vMotion traffic exclusively."
describe "" do
skip 'Manual verification is required for this control'
end
end
| 45.130435 | 80 | 0.773121 |
d5988c0db72360fa488c994fa7eff64db5c3e477 | 882 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'nexive/version'
Gem::Specification.new do |spec|
spec.name = "nexive"
spec.version = Nexive::VERSION
spec.authors = ["Maurizio De Magnis"]
spec.email = ["[email protected]"]
spec.summary = %q{Interface to the Nexive API}
spec.homepage = "https://github.com/olistik/nexive"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.10"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.3"
end
| 35.28 | 104 | 0.629252 |
e865d2550d9eec85a8e1f0271e2d0aa255f806f6 | 375 | class Page < ActiveRecord::Base
belongs_to :subject
has_and_belongs_to_many :editors, :class_name => "AdminUser"
has_many :sections
scope :visible, lambda { where(:visible => true) }
scope :invisible, lambda { where(:visible => false) }
scope :sorted, lambda { order("pages.position ASC") }
scope :newest_first, lambda { order("pages.created_at DESC") }
end
| 28.846154 | 64 | 0.709333 |
ff1381abb7a530318ef1227e49c51e6ee1344099 | 2,542 | require 'rails_helper'
RSpec.describe DeferOffer do
describe '#save!' do
it 'changes the state of an accepted offer to "offer_deferred"' do
application_choice = create(:application_choice, :with_accepted_offer)
DeferOffer.new(
actor: create(:support_user),
application_choice: application_choice,
).save!
expect(application_choice.reload.status).to eq 'offer_deferred'
end
it 'sets offer_deferred_at' do
application_choice = create(:application_choice, :with_accepted_offer)
DeferOffer.new(
actor: create(:support_user),
application_choice: application_choice,
).save!
expect(application_choice.reload.offer_deferred_at).not_to be_nil
end
it 'changes the state of a recruited application choice to "offer_deferred"' do
application_choice = create(:application_choice, :with_recruited)
DeferOffer.new(
actor: create(:support_user),
application_choice: application_choice,
).save!
expect(application_choice.reload.status).to eq 'offer_deferred'
end
it 'raises an error if the user is not authorised' do
application_choice = create(:application_choice, :with_accepted_offer)
provider_user = create(:provider_user)
provider_user.providers << application_choice.offered_course.provider
service = DeferOffer.new(
actor: provider_user,
application_choice: application_choice,
)
expect { service.save! }.to raise_error(ProviderAuthorisation::NotAuthorisedError)
expect(application_choice.reload.status).to eq 'pending_conditions'
end
it 'sends the candidate an explanatory email' do
application_choice = create(:application_choice, :with_recruited)
deliverer = instance_double(ActionMailer::MessageDelivery, deliver_later: true)
allow(CandidateMailer).to receive(:deferred_offer).and_return(deliverer)
DeferOffer.new(actor: create(:support_user), application_choice: application_choice).save!
expect(CandidateMailer).to have_received(:deferred_offer).once.with(application_choice)
end
it 'notifies on the state change' do
application_choice = create(:application_choice, :with_recruited)
allow(StateChangeNotifier).to receive(:call)
DeferOffer.new(actor: create(:support_user), application_choice: application_choice).save!
expect(StateChangeNotifier).to have_received(:call).with(:defer_offer, application_choice: application_choice)
end
end
end
| 34.821918 | 116 | 0.732494 |
3817ce6037c9540223278861a8667ea5fe712278 | 1,723 | require 'rails_helper'
RSpec.describe '/de/backend/berechtigungen/permissions', type: :feature do
let(:user) { create(:ecm_user_area_user, :authenticable) }
before(:each) { sign_in(user) }
let(:resource_class) { Ecm::Rbac::Permission }
let(:resource) { create(:ecm_rbac_permission) }
let(:resources) { create_list(:ecm_rbac_permission, 3) }
# List
it { resources; expect(subject).to implement_index_action(self) }
# Create
it {
expect(subject).to implement_create_action(self)
.for(resource_class)
.within_form('#new_permission') {
# fill the needed form inputs via capybara here
#
# Example:
#
# select 'de', from: 'slider[locale]'
# fill_in 'slider[name]', with: 'My first slider'
# check 'slider[auto_start]'
# fill_in 'slider[interval]', with: '3'
fill_in 'permission[identifier]', with: 'delete_users'
}
.increasing{ Ecm::Rbac::Permission.count }.by(1)
}
# Read
it { expect(subject).to implement_show_action(self).for(resource) }
# Update
it {
expect(subject).to implement_update_action(self)
.for(resource)
.within_form('.edit_permission') {
# fill the needed form inputs via capybara here
#
# Example:
#
# fill_in 'slider[name]', with: 'New name'
fill_in 'permission[identifier]', with: 'edit_users'
}
.updating
.from(resource.attributes)
.to({ 'identifier' => 'edit_users' }) # Example: .to({ 'name' => 'New name' })
}
# Delete
it {
expect(subject).to implement_delete_action(self)
.for(resource)
.reducing{ resource_class.count }.by(1)
}
end
| 29.20339 | 84 | 0.613465 |
ed4ad73d77fb2204811002291ff07d801f954caa | 1,502 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-acmpca/types'
require_relative 'aws-sdk-acmpca/client_api'
require_relative 'aws-sdk-acmpca/client'
require_relative 'aws-sdk-acmpca/errors'
require_relative 'aws-sdk-acmpca/waiters'
require_relative 'aws-sdk-acmpca/resource'
require_relative 'aws-sdk-acmpca/customizations'
# This module provides support for AWS Certificate Manager Private Certificate Authority. This module is available in the
# `aws-sdk-acmpca` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# acmpca = Aws::ACMPCA::Client.new
# resp = acmpca.create_certificate_authority(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS Certificate Manager Private Certificate Authority are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::ACMPCA::Errors::ServiceError
# # rescues all AWS Certificate Manager Private Certificate Authority API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::ACMPCA
GEM_VERSION = '1.38.0'
end
| 27.309091 | 121 | 0.754328 |
61135342eafb2b1df5d15280572fc56db06e9ecf | 742 | require "spec_helper"
describe Spree::WebMoneyDecorator, type: :model do
context 'when source is webmoney' do
it 'delegates methods' do
source = Spree::WebMoney.new(prepaid_cards: [], short_amount: 1000, payment_uuid: "123")
decorator = Spree::WebMoneyDecorator.new(source)
expect(decorator.prepaid_cards).to eq([])
expect(decorator.payment_uuid).to eq("123")
expect(decorator.short_amount).to eq(1000)
end
end
context 'when source is nill' do
it 'delegates methods to nil' do
decorator = Spree::WebMoneyDecorator.new(nil)
expect(decorator.prepaid_cards).to be_nil
expect(decorator.payment_uuid).to be_nil
expect(decorator.short_amount).to be_nil
end
end
end
| 29.68 | 94 | 0.704852 |
38b64d916fd07dea3e383667a42b1589284d5e2f | 231 | module Swagger
module Blocks
module Nodes
# v2.0: https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
class SecurityRequirementNode < Node
end
end
end
end
| 23.1 | 111 | 0.714286 |
39bc56d6fa3bc2704cdcaf74e92b695461af5b05 | 5,348 | # frozen_string_literal: true
Rails.application.routes.draw do
root to: 'pages#root'
post '/' => 'pages#root'
get 'gql/search'
get '/texts/:id' => 'texts#show'
get 'login' => 'user_sessions#new', as: :login
get 'logout' => 'user_sessions#destroy', as: :logout
get '/descriptions/charts/:id' => 'descriptions#charts'
resources :user_sessions
resources :users, except: %i[index show edit destroy]
resources :password_resets, only: %i[new create edit update]
resource :user, only: %i[edit update destroy] do
post :confirm_delete, on: :member
end
# Old partner paths.
get '/partners/:id', to: redirect("#{Partner::REMOTE_URL}/partners/%{id}")
get '/partners', to: redirect("#{Partner::REMOTE_URL}/partners")
resources :dashboard_items, only: :show
resource :settings, only: %i[edit update]
get '/settings/dashboard', to: 'settings#dashboard'
put '/settings/dashboard', to: 'settings#update_dashboard'
put '/settings/hide_results_tip', to: 'settings#hide_results_tip'
get '/survey', to: 'survey#show'
put '/survey/:question', to: 'survey#answer_question'
namespace :admin do
root to: 'pages#index'
get 'map', to: 'pages#map', as: :map
post 'clear_cache' => 'pages#clear_cache', as: :clear_cache
get 'surveys', to: 'pages#surveys', as: :surveys
resources :general_user_notifications,
:users
resources :texts, except: [:show]
end
resources :scenarios, except: [:new, :edit] do
collection do
post :load
get :compare
post :merge
get :weighted_merge
post 'weighted_merge' => :perform_weighted_merge
end
member do
get :load
get 'energy_mix' => 'energy_mix#show'
# legacy name for the energy mix
get 'factsheet', to: redirect('scenarios/%{id}/energy_mix')
get 'export' => 'export_scenario#index'
post 'export/esdl' => 'export_scenario#esdl'
get 'export/mondaine_drive' => 'export_scenario#mondaine_drive'
end
end
resources :saved_scenarios, except: %i[new destroy] do
resource :feature, only: %i[show create update destroy], controller: 'featured_scenarios'
member do
get :load
# get 'feature' => 'featured_scenarios#edit'
# post 'feature' => 'featured_scenarios#create'
# put 'feature' => 'featured_scenarios#update'
# delete 'feature' => 'featured_scenarios#destroy'
end
get '/report/:report_name' => 'saved_scenario_reports#show'
end
get '/scenarios/:scenario_id/save', to: 'saved_scenarios#new', as: :new_saved_scenario
get '/scenario/new' => 'scenarios#new'
get '/scenario/reset' => 'scenarios#reset'
get '/scenario/grid_investment_needed' => 'scenarios#grid_investment_needed'
get '/scenario/reports/auto' => 'reports#auto'
get '/scenario/reports/:id' => 'reports#show',
constraints: { id: /[0-9a-z-]+/ }, as: :report
# This is the main action
get '/scenario/myc/:id' => 'scenarios#play_multi_year_charts'
get '/scenario(/:tab(/:sidebar(/:slide)))' => 'scenarios#play', as: :play
resources :output_elements, param: :key, only: %i[index show] do
member do
get :zoom
end
collection do
get 'batch/:keys', action: :batch
end
end
get '/input_elements/by_slide' => 'input_elements#by_slide'
match '/ete(/*url)', to: 'api_proxy#default', via: :all
match '/ete_proxy(/*url)', to: 'api_proxy#default', via: :all
get '/units' => 'pages#units'
get '/tutorial/(:tab)(/:sidebar)' => 'pages#tutorial', as: :tutorial
get '/disclaimer' => 'pages#disclaimer'
get '/privacy_statement' => 'pages#privacy_statement'
get '/show_all_countries' => 'pages#show_all_countries'
get '/show_flanders' => 'pages#show_flanders'
get '/sitemap(.:format)' => 'pages#sitemap', defaults: { format: :xml }
get '/known_issues' => 'pages#bugs', as: :bugs
get '/quality_control' => 'pages#quality', as: :quality
get '/whats-new' => 'pages#whats_new', as: :whats_new
put '/set_locale(/:locale)' => 'pages#set_locale', as: :set_locale
get '/unsupported-browser' => 'pages#unsupported_browser', as: :unsupported_browser
get '/update_footer' => 'pages#update_footer'
get '/regions/:dataset_locale' => 'pages#dataset', as: :region
post '/feedback' => 'feedback#send_message', as: :feedback
get '/local-global' => 'compare#index', as: :local_global
get '/local-global/:ids' => 'compare#show', as: :local_global_scenarios
resources :multi_year_charts, only: %i[index create destroy]
get '/import_esdl' => 'import_esdl#index'
post '/import_esdl/create' => 'import_esdl#create', as: :import_esdl_create
get '/esdl_suite/login' => 'esdl_suite#login'
get '/esdl_suite/redirect' => 'esdl_suite#redirect'
get '/esdl_suite/browse' => 'esdl_suite#browse'
namespace :embeds do
resource :pico, only: [:show]
end
# Incoming webhooks
get '/incoming_webhooks/mailchimp/:key' => 'incoming_webhooks#verify'
post '/incoming_webhooks/mailchimp/:key' => 'incoming_webhooks#mailchimp'
%w[404 422 500].each do |code|
get "/#{code}", to: 'errors#show', code: code
end
end
| 34.063694 | 94 | 0.63893 |
26158708bb6804386fc3ddc337e2b5f27618e2a1 | 12,837 | require File.expand_path(File.dirname(__FILE__) + '/../../test_helper')
begin
# Load library
require 'rubygems'
gem 'sequel', ENV['SEQUEL_VERSION'] ? "=#{ENV['SEQUEL_VERSION']}" : '>=2.8.0'
require 'sequel'
require 'logger'
# Establish database connection
DB = Sequel.connect('sqlite:///', :loggers => [Logger.new("#{File.dirname(__FILE__)}/../../sequel.log")])
module SequelTest
class BaseTestCase < Test::Unit::TestCase
def default_test
end
protected
# Creates a new Sequel model (and the associated table)
def new_model(auto_migrate = true, &block)
DB.create_table! :foo do
primary_key :id
column :state, :string
end if auto_migrate
model = Class.new(Sequel::Model(:foo)) do
self.raise_on_save_failure = false
plugin :validation_class_methods if respond_to?(:plugin)
def self.name; 'SequelTest::Foo'; end
end
model.class_eval(&block) if block_given?
model
end
end
class IntegrationTest < BaseTestCase
def test_should_match_if_class_inherits_from_sequel
assert StateMachine::Integrations::Sequel.matches?(new_model)
end
def test_should_not_match_if_class_does_not_inherit_from_sequel
assert !StateMachine::Integrations::Sequel.matches?(Class.new)
end
end
class MachineByDefaultTest < BaseTestCase
def setup
@model = new_model
@machine = StateMachine::Machine.new(@model)
end
def test_should_use_save_as_action
assert_equal :save, @machine.action
end
def test_should_use_transactions
assert_equal true, @machine.use_transactions
end
end
class MachineTest < BaseTestCase
def setup
@model = new_model
@machine = StateMachine::Machine.new(@model)
@machine.state :parked, :idling, :first_gear
end
def test_should_create_singular_with_scope
assert @model.respond_to?(:with_state)
end
def test_should_only_include_records_with_state_in_singular_with_scope
parked = @model.create :state => 'parked'
idling = @model.create :state => 'idling'
assert_equal [parked], @model.with_state(:parked).all
end
def test_should_create_plural_with_scope
assert @model.respond_to?(:with_states)
end
def test_should_only_include_records_with_states_in_plural_with_scope
parked = @model.create :state => 'parked'
idling = @model.create :state => 'idling'
assert_equal [parked, idling], @model.with_states(:parked, :idling).all
end
def test_should_create_singular_without_scope
assert @model.respond_to?(:without_state)
end
def test_should_only_include_records_without_state_in_singular_without_scope
parked = @model.create :state => 'parked'
idling = @model.create :state => 'idling'
assert_equal [parked], @model.without_state(:idling).all
end
def test_should_create_plural_without_scope
assert @model.respond_to?(:without_states)
end
def test_should_only_include_records_without_states_in_plural_without_scope
parked = @model.create :state => 'parked'
idling = @model.create :state => 'idling'
first_gear = @model.create :state => 'first_gear'
assert_equal [parked, idling], @model.without_states(:first_gear).all
end
def test_should_allow_chaining_scopes_and_fitlers
parked = @model.create :state => 'parked'
idling = @model.create :state => 'idling'
assert_equal [idling], @model.without_state(:parked).filter(:state => 'idling').all
end
def test_should_rollback_transaction_if_false
@machine.within_transaction(@model.new) do
@model.create
false
end
assert_equal 0, @model.count
end
def test_should_not_rollback_transaction_if_true
@machine.within_transaction(@model.new) do
@model.create
true
end
assert_equal 1, @model.count
end
def test_should_invalidate_using_errors
record = @model.new
record.state = 'parked'
@machine.invalidate(record, :state, :invalid_transition, [[:event, :park]])
assert_equal ['cannot transition via "park"'], record.errors.on(:state)
end
def test_should_clear_errors_on_reset
record = @model.new
record.state = 'parked'
record.errors.add(:state, 'is invalid')
@machine.reset(record)
assert_nil record.errors.on(:id)
end
def test_should_not_override_the_column_reader
record = @model.new
record[:state] = 'parked'
assert_equal 'parked', record.state
end
def test_should_not_override_the_column_writer
record = @model.new
record.state = 'parked'
assert_equal 'parked', record[:state]
end
end
class MachineUnmigratedTest < BaseTestCase
def setup
@model = new_model(false)
end
def test_should_allow_machine_creation
assert_nothing_raised { StateMachine::Machine.new(@model) }
end
end
class MachineWithInitialStateTest < BaseTestCase
def setup
@model = new_model
@machine = StateMachine::Machine.new(@model, :initial => 'parked')
@record = @model.new
end
def test_should_set_initial_state_on_created_object
assert_equal 'parked', @record.state
end
end
class MachineWithNonColumnStateAttributeUndefinedTest < BaseTestCase
def setup
@model = new_model do
def initialize
# Skip attribute initialization
end
end
@machine = StateMachine::Machine.new(@model, :status, :initial => 'parked')
@record = @model.new
end
def test_should_not_define_a_reader_attribute_for_the_attribute
assert [email protected]_to?(:status)
end
def test_should_not_define_a_writer_attribute_for_the_attribute
assert [email protected]_to?(:status=)
end
def test_should_define_an_attribute_predicate
assert @record.respond_to?(:status?)
end
end
class MachineWithNonColumnStateAttributeDefinedTest < BaseTestCase
def setup
@model = new_model do
attr_accessor :status
end
@machine = StateMachine::Machine.new(@model, :status, :initial => 'parked')
@record = @model.new
end
def test_should_set_initial_state_on_created_object
assert_equal 'parked', @record.status
end
end
class MachineWithComplexPluralizationTest < BaseTestCase
def setup
@model = new_model
@machine = StateMachine::Machine.new(@model, :status)
end
def test_should_create_singular_with_scope
assert @model.respond_to?(:with_status)
end
def test_should_create_plural_with_scope
assert @model.respond_to?(:with_statuses)
end
end
class MachineWithCallbacksTest < BaseTestCase
def setup
@model = new_model
@machine = StateMachine::Machine.new(@model)
@machine.state :parked, :idling
@machine.event :ignite
@record = @model.new(:state => 'parked')
@transition = StateMachine::Transition.new(@record, @machine, :ignite, :parked, :idling)
end
def test_should_run_before_callbacks
called = false
@machine.before_transition(lambda {called = true})
@transition.perform
assert called
end
def test_should_pass_transition_to_before_callbacks_with_one_argument
transition = nil
@machine.before_transition(lambda {|arg| transition = arg})
@transition.perform
assert_equal @transition, transition
end
def test_should_pass_transition_to_before_callbacks_with_multiple_arguments
callback_args = nil
@machine.before_transition(lambda {|*args| callback_args = args})
@transition.perform
assert_equal [@transition], callback_args
end
def test_should_run_before_callbacks_within_the_context_of_the_record
context = nil
@machine.before_transition(lambda {context = self})
@transition.perform
assert_equal @record, context
end
def test_should_run_after_callbacks
called = false
@machine.after_transition(lambda {called = true})
@transition.perform
assert called
end
def test_should_pass_transition_to_after_callbacks_with_multiple_arguments
callback_args = nil
@machine.after_transition(lambda {|*args| callback_args = args})
@transition.perform
assert_equal [@transition], callback_args
end
def test_should_run_after_callbacks_with_the_context_of_the_record
context = nil
@machine.after_transition(lambda {context = self})
@transition.perform
assert_equal @record, context
end
def test_should_allow_symbolic_callbacks
callback_args = nil
klass = class << @record; self; end
klass.send(:define_method, :after_ignite) do |*args|
callback_args = args
end
@machine.before_transition(:after_ignite)
@transition.perform
assert_equal [@transition], callback_args
end
def test_should_allow_string_callbacks
class << @record
attr_reader :callback_result
end
@machine.before_transition('@callback_result = [1, 2, 3]')
@transition.perform
assert_equal [1, 2, 3], @record.callback_result
end
end
class MachineWithStateDrivenValidationsTest < BaseTestCase
def setup
@model = new_model do
attr_accessor :seatbelt
end
@machine = StateMachine::Machine.new(@model)
@machine.state :first_gear do
validates_presence_of :seatbelt
end
@machine.other_states :parked
end
def test_should_be_valid_if_validation_fails_outside_state_scope
record = @model.new(:state => 'parked', :seatbelt => nil)
assert record.valid?
end
def test_should_be_invalid_if_validation_fails_within_state_scope
record = @model.new(:state => 'first_gear', :seatbelt => nil)
assert !record.valid?
end
def test_should_be_valid_if_validation_succeeds_within_state_scope
record = @model.new(:state => 'first_gear', :seatbelt => true)
assert record.valid?
end
end
class MachineWithEventAttributesOnValidationTest < BaseTestCase
def setup
@model = new_model
@machine = StateMachine::Machine.new(@model)
@machine.event :ignite do
transition :parked => :idling
end
@record = @model.new
@record.state = 'parked'
@record.state_event = 'ignite'
end
def test_should_fail_if_event_is_invalid
@record.state_event = 'invalid'
assert [email protected]?
assert_equal ['state_event is invalid'], @record.errors.full_messages
end
def test_should_fail_if_event_has_no_transition
@record.state = 'idling'
assert [email protected]?
assert_equal ['state_event cannot transition when idling'], @record.errors.full_messages
end
def test_should_be_successful_if_event_has_transition
assert @record.valid?
end
def test_should_run_before_callbacks
ran_callback = false
@machine.before_transition { ran_callback = true }
@record.valid?
assert ran_callback
end
def test_should_persist_new_state
@record.valid?
assert_equal 'idling', @record.state
end
def test_should_not_run_after_callbacks
ran_callback = false
@machine.after_transition { ran_callback = true }
@record.valid?
assert !ran_callback
end
end
end
rescue LoadError
$stderr.puts "Skipping Sequel tests. `gem install sequel#{" -v #{ENV['SEQUEL_VERSION']}" if ENV['SEQUEL_VERSION']}` and try again."
end
| 30.419431 | 133 | 0.627327 |
f7c01a7e69d1b8295cf97ff79076b48a5d2e284a | 167 | # Good Guess
# I worked on this challenge [by myself, with: ].
# Your Solution Below
def good_guess?(x)
if x == 42
return true
else return false
end
end
| 11.928571 | 49 | 0.658683 |
5d33f2fe62d1120a0a61bea77935d443b13375f0 | 995 | # frozen_string_literal: true
module Gitlab
module SlashCommands
class IssueClose < IssueCommand
def self.match(text)
/\Aissue\s+close\s+#{Issue.reference_prefix}?(?<iid>\d+)/.match(text)
end
def self.help_message
"issue close <id>"
end
def self.allowed?(project, user)
can?(user, :update_issue, project)
end
def execute(match)
issue = find_by_iid(match[:iid])
return not_found unless issue
return presenter(issue).already_closed if issue.closed?
close_issue(issue: issue)
presenter(issue).present
end
private
def close_issue(issue:)
::Issues::CloseService.new(project: project, current_user: current_user).execute(issue)
end
def presenter(issue)
Gitlab::SlashCommands::Presenters::IssueClose.new(issue)
end
def not_found
Gitlab::SlashCommands::Presenters::Access.new.not_found
end
end
end
end
| 22.111111 | 95 | 0.635176 |
39a8db2897013b4be8e6536394a35d31a9984af5 | 1,096 | platform "ubuntu-16.04-i386" do |plat|
plat.servicedir "/lib/systemd/system"
plat.defaultdir "/etc/default"
plat.servicetype "systemd"
plat.codename "xenial"
plat.add_build_repository "http://pl-build-tools.delivery.puppetlabs.net/debian/pl-build-tools-release-#{plat.get_codename}.deb"
plat.provision_with "export DEBIAN_FRONTEND=noninteractive; apt-get update -qq; apt-get install -qy --no-install-recommends build-essential devscripts make quilt pkg-config debhelper rsync fakeroot"
plat.install_build_dependencies_with "DEBIAN_FRONTEND=noninteractive; apt-get install -qy --no-install-recommends "
packages = [
"libbz2-dev",
"libreadline-dev",
"libselinux1-dev",
"make",
"openjdk-8-jdk",
"pkg-config",
"pl-cmake",
"pl-gcc",
"swig",
"systemtap-sdt-dev",
"zlib1g-dev"
]
plat.provision_with "export DEBIAN_FRONTEND=noninteractive; apt-get update -qq; apt-get install -qy --no-install-recommends #{packages.join(' ')}"
plat.vmpooler_template "ubuntu-1604-i386"
plat.output_dir File.join("deb", plat.get_codename, "PC1")
end
| 36.533333 | 200 | 0.724453 |
26170b39b5c5761f72e20546b37f064eeb27609a | 3,183 | # $Id$
#
# Author:: Francis Cianfrocca (gmail: blackhedd)
# Homepage:: http://rubyeventmachine.com
# Date:: 15 Nov 2006
#
# See EventMachine and EventMachine::Connection for documentation and
# usage examples.
#
#----------------------------------------------------------------------------
#
# Copyright (C) 2006-07 by Francis Cianfrocca. All Rights Reserved.
# Gmail: blackhedd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of either: 1) the GNU General Public License
# as published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version; or 2) Ruby's License.
#
# See the file COPYING for complete licensing information.
#
#---------------------------------------------------------------------------
#
#
module EventMachine
module Protocols
class HeaderAndContentProtocol < LineAndTextProtocol
ContentLengthPattern = /Content-length:\s*(\d+)/i
def initialize *args
super
init_for_request
end
def receive_line line
case @hc_mode
when :discard_blanks
unless line == ""
@hc_mode = :headers
receive_line line
end
when :headers
if line == ""
raise "unrecognized state" unless @hc_headers.length > 0
if respond_to?(:receive_headers)
receive_headers @hc_headers
end
# @hc_content_length will be nil, not 0, if there was no content-length header.
if @hc_content_length.to_i > 0
set_binary_mode @hc_content_length
else
dispatch_request
end
else
@hc_headers << line
if ContentLengthPattern =~ line
# There are some attacks that rely on sending multiple content-length
# headers. This is a crude protection, but needs to become tunable.
raise "extraneous content-length header" if @hc_content_length
@hc_content_length = $1.to_i
end
if @hc_headers.length == 1 and respond_to?(:receive_first_header_line)
receive_first_header_line line
end
end
else
raise "internal error, unsupported mode"
end
end
def receive_binary_data text
@hc_content = text
dispatch_request
end
def dispatch_request
if respond_to?(:receive_request)
receive_request @hc_headers, @hc_content
end
init_for_request
end
private :dispatch_request
def init_for_request
@hc_mode = :discard_blanks
@hc_headers = []
# originally was @hc_headers ||= []; @hc_headers.clear to get a performance
# boost, but it's counterproductive because a subclassed handler will have to
# call dup to use the header array we pass in receive_headers.
@hc_content_length = nil
@hc_content = ""
end
private :init_for_request
# Basically a convenience method. We might create a subclass that does this
# automatically. But it's such a performance killer.
def headers_2_hash hdrs
self.class.headers_2_hash hdrs
end
class << self
def headers_2_hash hdrs
hash = {}
hdrs.each {|h|
if /\A([^\s:]+)\s*:\s*/ =~ h
tail = $'.dup
hash[ $1.downcase.gsub(/-/,"_").intern ] = tail
end
}
hash
end
end
end
end
end
| 25.669355 | 82 | 0.651272 |
edab13eb9ae7e82696d399dd11e2bd79289d4846 | 5,516 | describe MiqSearch do
describe '#descriptions' do
it "hashes" do
srchs = [
FactoryBot.create(:miq_search, :description => 'a'),
FactoryBot.create(:miq_search, :description => 'b'),
FactoryBot.create(:miq_search, :description => 'c')
]
expect(MiqSearch.descriptions).to eq(
srchs[0].id.to_s => srchs[0].description,
srchs[1].id.to_s => srchs[1].description,
srchs[2].id.to_s => srchs[2].description)
end
it "supports scopes" do
srchs = [
FactoryBot.create(:miq_search, :description => 'a', :db => 'Vm'),
FactoryBot.create(:miq_search, :description => 'b', :db => 'Vm'),
FactoryBot.create(:miq_search, :description => 'c', :db => 'Host')
]
expect(MiqSearch.where(:db => 'Vm').descriptions).to eq(
srchs[0].id.to_s => srchs[0].description,
srchs[1].id.to_s => srchs[1].description)
end
end
let(:vm_location_search) do
FactoryBot.create(:miq_search,
:db => "Vm",
:filter => MiqExpression.new("=" => {"field" => "Vm-location", "value" => "good"})
)
end
let(:matched_vms) { FactoryBot.create_list(:vm_vmware, 2, :location => "good") }
let(:other_vms) { FactoryBot.create_list(:vm_vmware, 1, :location => "other") }
let(:all_vms) { matched_vms + other_vms }
let(:partial_matched_vms) { [matched_vms.first] }
let(:partial_vms) { partial_matched_vms + other_vms }
describe "#quick_search?" do
let(:qs) { MiqExpression.new("=" => {"field" => "Vm-name", "value" => :user_input}) }
it "supports no filter" do
expect(FactoryBot.build(:miq_search, :filter => nil)).not_to be_quick_search
end
it "supports a filter" do
expect(vm_location_search).not_to be_quick_search
end
it "supports a quick search" do
expect(FactoryBot.build(:miq_search, :filter => qs)).to be_quick_search
end
end
describe "#results" do
it "respects filter" do
all_vms
expect(vm_location_search.results).to match_array(matched_vms)
end
end
describe "#filtered" do
it "works with models" do
all_vms
expect(vm_location_search.filtered(Vm)).to match_array(matched_vms)
end
it "works with scopes" do
all_vms
expect(vm_location_search.filtered(Vm.all)).to match_array(matched_vms)
end
it "finds elements only in the array" do
all_vms
expect(vm_location_search.filtered(partial_vms)).to match_array(partial_matched_vms)
end
it "brings back empty array for empty arrays" do
all_vms
expect(vm_location_search.filtered([])).to match_array([])
end
end
describe ".filtered" do
it "uses an existing search" do
all_vms
results = MiqSearch.filtered(vm_location_search.id, "Vm", partial_vms)
expect(results).to match_array(partial_matched_vms)
end
it "calls Rbac directly for no search" do
all_vms
results = MiqSearch.filtered(0, "Vm", partial_vms)
expect(results).to match_array(partial_vms)
end
end
describe "#destroy" do
let(:search) { FactoryBot.create(:miq_search) }
it "destroys search if miq_schedule does not use it" do
expect { search.destroy! }.not_to raise_error
end
it "does not destroy search if it referenced in at least one miq_schedule" do
schedules = double
allow(search).to receive(:miq_schedules).and_return(schedules)
allow(schedules).to receive(:empty?).and_return(false)
expect { expect { search.destroy! }.to raise_error(ActiveRecord::RecordNotDestroyed) }.to_not(change { MiqSearch.count })
expect(search.errors[:base][0]).to eq("Search is referenced in a schedule and cannot be deleted")
end
end
# This test is intentionally long winded instead of breaking it up into
# multiple tests per concern because of how long a full seed may take.
describe ".seed" do
let(:tmpdir) { Pathname.new(Dir.mktmpdir) }
let(:fixture_dir) { tmpdir.join("db/fixtures") }
let(:search_yml) { fixture_dir.join("miq_searches.yml") }
before do
FileUtils.mkdir_p(fixture_dir)
FileUtils.cp_r(Rails.root.join('db', 'fixtures', 'miq_searches.yml'), search_yml)
stub_const("MiqSearch::FIXTURE_DIR", fixture_dir)
described_class.seed
end
after do
FileUtils.rm_rf(tmpdir)
end
it "seeds miq_search table from db/fixtures/miq_search.yml and keeps custom searches" do
yml = YAML.load_file(search_yml)
# check if all supplied default searches were loaded
expect(MiqSearch.count).to eq(yml.size)
# check if custom searches were not removed
custom_search = "some search"
FactoryBot.create(:miq_search, :name => custom_search)
described_class.seed
expect(MiqSearch.count).to eq(yml.size + 1)
expect(MiqSearch.where(:name => custom_search)).to exist
# check that default search removed from DB if name-db of that search was not present in miq_search_yml
old_name = yml[0]["attributes"]["name"]
db = yml[0]["attributes"]["db"]
new_name = "default_Absolutely New Name"
yml[0]["attributes"]["name"] = new_name
File.write(search_yml, yml.to_yaml)
described_class.seed
expect(MiqSearch.count).to eq(yml.size + 1)
expect(MiqSearch.where(:name => new_name, :db => db)).to exist
expect(MiqSearch.where(:name => old_name, :db => db)).to be_empty
end
end
end
| 34.26087 | 127 | 0.652647 |
1a08415a99dc73e7189090a003708edde7bd24b3 | 33,900 | require "spec_helper"
describe "Observation Index" do
before( :all ) do
@starting_time_zone = Time.zone
Time.zone = ActiveSupport::TimeZone["Samoa"]
load_test_taxa
end
after( :all ) { Time.zone = @starting_time_zone }
before(:each) { enable_elastic_indexing( Observation ) }
after(:each) { disable_elastic_indexing( Observation ) }
it "as_indexed_json should return a hash" do
o = Observation.make!
json = o.as_indexed_json
expect( json ).to be_a Hash
end
it "sets location based on private coordinates if exist" do
o = Observation.make!(latitude: 3.0, longitude: 4.0)
o.update_attributes(private_latitude: 1.0, private_longitude: 2.0)
json = o.as_indexed_json
expect( json[:location] ).to eq "1.0,2.0"
end
it "sets location based on public coordinates if there are no private" do
o = Observation.make!(latitude: 3.0, longitude: 4.0)
json = o.as_indexed_json
expect( json[:location] ).to eq "3.0,4.0"
end
it "indexes created_at based on observation time zone" do
o = Observation.make!(created_at: "2014-12-31 20:00:00 -0800")
json = o.as_indexed_json
expect( json[:created_at].day ).to eq 31
expect( json[:created_at].month ).to eq 12
expect( json[:created_at].year ).to eq 2014
end
it "indexes created_at_details based on observation time zone" do
o = Observation.make!(created_at: "2014-12-31 20:00:00 -0800")
json = o.as_indexed_json
expect( json[:created_at_details][:day] ).to eq 31
expect( json[:created_at_details][:month] ).to eq 12
expect( json[:created_at_details][:year] ).to eq 2014
end
it "sorts photos by position and ID" do
o = Observation.make!(latitude: 3.0, longitude: 4.0)
p3 = LocalPhoto.make!
p1 = LocalPhoto.make!
p2 = LocalPhoto.make!
p4 = LocalPhoto.make!
p5 = LocalPhoto.make!
make_observation_photo(photo: p1, observation: o, position: 1)
make_observation_photo(photo: p2, observation: o, position: 2)
make_observation_photo(photo: p3, observation: o, position: 3)
# these without a position will be last in order of creation
make_observation_photo(photo: p4, observation: o)
make_observation_photo(photo: p5, observation: o)
o.reload
json = o.as_indexed_json
expect( json[:photos][0][:id] ).to eq p1.id
expect( json[:photos][1][:id] ).to eq p2.id
expect( json[:photos][2][:id] ).to eq p3.id
expect( json[:photos][3][:id] ).to eq p4.id
expect( json[:photos][4][:id] ).to eq p5.id
end
it "uses private_latitude/longitude to create private_geojson" do
o = Observation.make!
o.update_columns(private_latitude: 3.0, private_longitude: 4.0, private_geom: nil)
o.reload
expect( o.private_geom ).to be nil
json = o.as_indexed_json
expect( json[:private_geojson][:coordinates] ).to eq [4.0, 3.0]
end
it "sets taxon globally threatened" do
o = Observation.make!(taxon: Taxon.make!)
expect( o.as_indexed_json[:taxon][:threatened] ).to be false
ConservationStatus.make!(place: nil, taxon: o.taxon,
status: Taxon::IUCN_NEAR_THREATENED)
o.reload
expect( o.as_indexed_json[:taxon][:threatened] ).to be true
end
it "sets taxon threatened in a place" do
present_place = make_place_with_geom(wkt: "MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)))")
absent_place = make_place_with_geom(wkt: "MULTIPOLYGON(((2 2,2 3,3 3,3 2,2 2)))")
o = Observation.make!(taxon: Taxon.make!, latitude: present_place.latitude,
longitude: present_place.longitude)
expect( o.as_indexed_json[:taxon][:threatened] ).to be false
cs = ConservationStatus.make!(place: absent_place, taxon: o.taxon,
status: Taxon::IUCN_NEAR_THREATENED)
o.reload
expect( o.as_indexed_json[:taxon][:threatened] ).to be false
cs.update_attributes(place: present_place)
o.reload
expect( o.as_indexed_json[:taxon][:threatened] ).to be true
end
it "sets taxon introduced" do
place = make_place_with_geom
o = Observation.make!(taxon: Taxon.make!, latitude: place.latitude,
longitude: place.longitude)
expect( o.as_indexed_json[:taxon][:introduced] ).to be false
expect( o.as_indexed_json[:taxon][:native] ).to be false
expect( o.as_indexed_json[:taxon][:endemic] ).to be false
cs = ListedTaxon.make!(place: place, taxon: o.taxon, list: place.check_list,
establishment_means: "introduced")
o.reload
expect( o.as_indexed_json[:taxon][:introduced] ).to be true
expect( o.as_indexed_json[:taxon][:native] ).to be false
expect( o.as_indexed_json[:taxon][:endemic] ).to be false
end
it "sets taxon native" do
place = make_place_with_geom
o = Observation.make!(taxon: Taxon.make!, latitude: place.latitude,
longitude: place.longitude)
expect( o.as_indexed_json[:taxon][:introduced] ).to be false
expect( o.as_indexed_json[:taxon][:native] ).to be false
expect( o.as_indexed_json[:taxon][:endemic] ).to be false
cs = ListedTaxon.make!(place: place, taxon: o.taxon, list: place.check_list,
establishment_means: "native")
o.reload
expect( o.as_indexed_json[:taxon][:introduced] ).to be false
expect( o.as_indexed_json[:taxon][:native] ).to be true
expect( o.as_indexed_json[:taxon][:endemic] ).to be false
end
it "sets taxon endemic" do
place = make_place_with_geom
o = Observation.make!(taxon: Taxon.make!, latitude: place.latitude,
longitude: place.longitude)
expect( o.as_indexed_json[:taxon][:introduced] ).to be false
expect( o.as_indexed_json[:taxon][:native] ).to be false
expect( o.as_indexed_json[:taxon][:endemic] ).to be false
cs = ListedTaxon.make!(place: place, taxon: o.taxon, list: place.check_list,
establishment_means: "endemic")
o.reload
expect( o.as_indexed_json[:taxon][:introduced] ).to be false
expect( o.as_indexed_json[:taxon][:native] ).to be true
expect( o.as_indexed_json[:taxon][:endemic] ).to be true
end
it "indexes identifications" do
o = Observation.make!
Identification.where(observation_id: o.id).destroy_all
5.times{ Identification.make!(observation: o) }
json = o.as_indexed_json
expect( json[:identifications].length ).to eq 5
expect( json[:identifications].first ).to eq o.identifications.first.
as_indexed_json(no_details: true)
end
it "indexes owners_identification_from_vision" do
o = Observation.make!( taxon: Taxon.make!, owners_identification_from_vision: true )
expect( o.owners_identification_from_vision ).to be true
json = o.as_indexed_json
expect( json[:owners_identification_from_vision] ).to be true
end
it "indexes applications based on user agent" do
OauthApplication.make!(name: "iNaturalist Android App")
OauthApplication.make!(name: "iNaturalist iPhone App")
o = Observation.make!( oauth_application_id: 11 )
expect( o.as_indexed_json[:oauth_application_id] ).to eq 11
o.update_attributes( oauth_application_id: nil,
user_agent: "iNaturalist/1.5.1 (Build 195; Android 3.18..." )
expect( o.as_indexed_json[:oauth_application_id] ).to eq OauthApplication.inaturalist_android_app.id
o.update_attributes( user_agent: "iNaturalist/2.7 (iOS iOS 10.3.2 iPhone)" )
expect( o.as_indexed_json[:oauth_application_id] ).to eq OauthApplication.inaturalist_iphone_app.id
end
it "private_place_ids should include places that contain the positional_accuracy" do
place = make_place_with_geom
o = Observation.make!( latitude: place.latitude, longitude: place.longitude, positional_accuracy: 10 )
expect( o.as_indexed_json[:private_place_ids] ).to include place.id
end
it "private_place_ids should not include places that do not contain the positional_accuracy" do
place = make_place_with_geom( wkt: "MULTIPOLYGON(((0 0,0 0.1,0.1 0.1,0.1 0,0 0)))" )
o = Observation.make!( latitude: place.latitude, longitude: place.longitude, positional_accuracy: 99999 )
expect( o.as_indexed_json[:private_place_ids] ).not_to include place.id
end
it "private_place_ids should include places that do not contain the positional_accuracy but are county-level" do
place = make_place_with_geom(
wkt: "MULTIPOLYGON(((0 0,0 0.1,0.1 0.1,0.1 0,0 0)))",
place_type: Place::COUNTY,
admin_level: Place::COUNTY_LEVEL
)
o = Observation.make!(
latitude: place.latitude,
longitude: place.longitude,
positional_accuracy: 99999
)
expect( o.as_indexed_json[:private_place_ids] ).to include place.id
end
it "place_ids should include places that contain the uncertainty cell" do
place = make_place_with_geom
o = Observation.make!( latitude: place.latitude, longitude: place.longitude, geoprivacy: Observation::OBSCURED )
expect( o.as_indexed_json[:place_ids] ).to include place.id
end
it "place_ids should not include places that do not contain the uncertainty cell" do
place = make_place_with_geom
o = Observation.make!( latitude: place.bounding_box[0], longitude: place.bounding_box[1] )
expect( o.as_indexed_json[:place_ids] ).not_to include place.id
end
it "place_ids should include county-level places that do not contain the uncertainty cell" do
place = make_place_with_geom(
place_type: Place::COUNTY,
admin_level: Place::COUNTY_LEVEL
)
o = Observation.make!( latitude: place.bounding_box[0], longitude: place.bounding_box[1] )
expect( o.as_indexed_json[:place_ids] ).to include place.id
end
describe "params_to_elastic_query" do
it "returns nil when ES can't handle the params" do
expect( Observation.params_to_elastic_query(
Observation::NON_ELASTIC_ATTRIBUTES.first => "anything") ).to be nil
end
it "filters by project rules" do
project = Project.make!
rule = ProjectObservationRule.make!(operator: "identified?", ruler: project)
expect( Observation.params_to_elastic_query(apply_project_rules_for: project.id)).
to include( filters: [{ exists: { field: "taxon" } }])
end
it "filters by list taxa" do
list = List.make!
lt1 = ListedTaxon.make!( list: list, taxon: Taxon.make! )
lt2 = ListedTaxon.make!( list: list, taxon: Taxon.make! )
filtered_ancestor_ids = Observation.params_to_elastic_query( list_id: list.id )[:filters][0][:terms]["taxon.ancestor_ids"]
expect( filtered_ancestor_ids ).to include lt1.taxon_id
expect( filtered_ancestor_ids ).to include lt2.taxon_id
end
it "doesn't apply a site filter unless the site wants one" do
s = Site.make!(preferred_site_observations_filter: nil)
expect( Observation.params_to_elastic_query({ }, site: s) ).to include( filters: [ ] )
end
it "queries names" do
expect( Observation.params_to_elastic_query({ q: "s", search_on: "names" }) ).to include(
filters: [ { multi_match:
{ query: "s", operator: "and", fields: [ "taxon.names.name" ] } } ] )
end
it "queries tags" do
expect( Observation.params_to_elastic_query({ q: "s", search_on: "tags" }) ).to include(
filters: [ { multi_match:
{ query: "s", operator: "and", fields: [ :tags ] } } ] )
end
it "queries descriptions" do
expect( Observation.params_to_elastic_query({ q: "s", search_on: "description" }) ).to include(
filters: [ { multi_match:
{ query: "s", operator: "and", fields: [ :description ] } } ] )
end
it "queries places" do
expect( Observation.params_to_elastic_query({ q: "s", search_on: "place" }) ).to include(
filters: [ { multi_match:
{ query: "s", operator: "and", fields: [ :place_guess ] } } ] )
end
it "queries all fields by default" do
expect( Observation.params_to_elastic_query({ q: "s" }) ).to include(
filters: [ { multi_match:
{ query: "s", operator: "and",
fields: [ "taxon.names.name", :tags, :description, :place_guess ] } } ] )
end
it "filters by param values" do
[ { http_param: :rank, es_field: "taxon.rank" },
{ http_param: :sound_license, es_field: "sounds.license_code" },
{ http_param: :observed_on_day, es_field: "observed_on_details.day" },
{ http_param: :observed_on_month, es_field: "observed_on_details.month" },
{ http_param: :observed_on_year, es_field: "observed_on_details.year" },
{ http_param: :place_id, es_field: "place_ids" },
{ http_param: :site_id, es_field: "site_id" }
].each do |filter|
# single values
expect( Observation.params_to_elastic_query({
filter[:http_param] => "thevalue" }) ).to include(
filters: [ { terms: { filter[:es_field] => [ "thevalue" ] } } ] )
# multiple values
expect( Observation.params_to_elastic_query({
filter[:http_param] => [ "value1", "value2" ] }) ).to include(
filters: [ { terms: { filter[:es_field] => [ "value1", "value2" ] } } ] )
end
end
it "filters by boolean params" do
[ { http_param: :introduced, es_field: "taxon.introduced" },
{ http_param: :threatened, es_field: "taxon.threatened" },
{ http_param: :native, es_field: "taxon.native" },
{ http_param: :endemic, es_field: "taxon.endemic" },
{ http_param: :id_please, es_field: "id_please" },
{ http_param: :out_of_range, es_field: "out_of_range" },
{ http_param: :mappable, es_field: "mappable" },
{ http_param: :captive, es_field: "captive" }
].each do |filter|
expect( Observation.params_to_elastic_query({
filter[:http_param] => "true" }) ).to include(
filters: [ { term: { filter[:es_field] => true } } ] )
expect( Observation.params_to_elastic_query({
filter[:http_param] => "false" }) ).to include(
filters: [ { term: { filter[:es_field] => false } } ] )
end
end
it "filters by presence of attributes" do
[ { http_param: :with_photos, es_field: "photos.url" },
{ http_param: :with_sounds, es_field: "sounds" },
{ http_param: :with_geo, es_field: "geojson" },
{ http_param: :identified, es_field: "taxon" },
].each do |filter|
f = { exists: { field: filter[:es_field] } }
expect( Observation.params_to_elastic_query({
filter[:http_param] => "true" }) ).to include(
filters: [ f ] )
expect( Observation.params_to_elastic_query({
filter[:http_param] => "false" }) ).to include(
inverse_filters: [ f ] )
end
end
it "filters by verifiable true" do
expect( Observation.params_to_elastic_query({ verifiable: "true" }) ).to include(
filters: [ { terms: { quality_grade: [ "research", "needs_id" ] } } ] )
end
it "filters by verifiable false" do
expect( Observation.params_to_elastic_query({ verifiable: "false" }) ).to include(
filters: [ { not: { terms: { quality_grade: [ "research", "needs_id" ] } } } ] )
end
it "filters by site_id" do
s = Site.make!(preferred_site_observations_filter: Site::OBSERVATIONS_FILTERS_SITE)
expect( Observation.params_to_elastic_query({ }, site: s) ).to include(
filters: [ { terms: { "site_id" => [ s.id ] } } ] )
end
it "filters by site place" do
s = Site.make!(preferred_site_observations_filter: Site::OBSERVATIONS_FILTERS_PLACE, place: Place.make!)
expect( Observation.params_to_elastic_query({ }, site: s) ).to include(
filters: [ { terms: { "place_ids" => [ s.place.id ] } } ] )
end
it "filters by site bounding box" do
s = Site.make!(preferred_site_observations_filter: Site::OBSERVATIONS_FILTERS_BOUNDING_BOX,
preferred_geo_nelat: 55, preferred_geo_nelng: 66, preferred_geo_swlat: 77, preferred_geo_swlng: 88)
expect( Observation.params_to_elastic_query({ }, site: s) ).to include(
filters: [{ envelope: { geojson: { nelat: "55", nelng: "66", swlat: "77", swlng: "88", user: nil } } }] )
end
it "filters by user and user_id" do
expect( Observation.params_to_elastic_query({ user: 1 }) ).to include(
filters: [ { terms: { "user.id" => [ 1 ] } } ] )
expect( Observation.params_to_elastic_query({ user_id: 1 }) ).to include(
filters: [ { terms: { "user.id" => [ 1 ] } } ] )
end
it "filters by taxon_id" do
expect( Observation.params_to_elastic_query({ observations_taxon: 1 }) ).to include(
filters: [ { term: { "taxon.ancestor_ids" => 1 } } ] )
end
it "filters by taxon_ids" do
expect( Observation.params_to_elastic_query({ observations_taxon_ids: [ 1, 2 ] }) ).to include(
filters: [ { terms: { "taxon.ancestor_ids" => [ 1, 2 ] } } ] )
end
it "filters by license" do
expect( Observation.params_to_elastic_query({ license: "any" }) ).to include(
filters: [ { exists: { field: "license_code" } } ] )
expect( Observation.params_to_elastic_query({ license: "none" }) ).to include(
inverse_filters: [ { exists: { field: "license_code" } } ] )
expect( Observation.params_to_elastic_query({ license: "CC-BY" }) ).to include(
filters: [ { terms: { license_code: [ "cc-by" ] } } ] )
expect( Observation.params_to_elastic_query({ license: [ "CC-BY", "CC-BY-NC" ] }) ).to include(
filters: [ { terms: { license_code: [ "cc-by", "cc-by-nc" ] } } ] )
end
it "filters by photo license" do
expect( Observation.params_to_elastic_query({ photo_license: "any" }) ).to include(
filters: [ { exists: { field: "photos.license_code" } } ] )
expect( Observation.params_to_elastic_query({ photo_license: "none" }) ).to include(
inverse_filters: [ { exists: { field: "photos.license_code" } } ] )
expect( Observation.params_to_elastic_query({ photo_license: "CC-BY" }) ).to include(
filters: [ { terms: { "photos.license_code" => [ "cc-by" ] } } ] )
expect( Observation.params_to_elastic_query({ photo_license: [ "CC-BY", "CC-BY-NC" ] }) ).to include(
filters: [ { terms: { "photos.license_code" => [ "cc-by", "cc-by-nc" ] } } ] )
end
it "filters by created_on year" do
expect( Observation.params_to_elastic_query({ created_on: "2005" }) ).to include(
filters: [ { term: { "created_at_details.year" => 2005 } } ] )
end
it "filters by created_on year and month" do
expect( Observation.params_to_elastic_query({ created_on: "2005-01" }) ).to include(
filters: [ { term: { "created_at_details.month" => 1 } },
{ term: { "created_at_details.year" => 2005 } } ] )
end
it "filters by created_on year and month and day" do
expect( Observation.params_to_elastic_query({ created_on: "2005-01-02" }) ).to include(
filters: [ { term: { "created_at_details.day" => 2 } },
{ term: { "created_at_details.month" => 1 } },
{ term: { "created_at_details.year" => 2005 } } ] )
end
it "filters by project" do
expect( Observation.params_to_elastic_query({ project: 1 }) ).to include(
filters: [ { terms: { project_ids: [ 1 ] } } ] )
end
it "filters by pcid with a specified project" do
expect( Observation.params_to_elastic_query({ project: 1, pcid: "yes" }) ).to include(
filters: [
{ terms: { project_ids: [ 1 ] } },
{ terms: { project_ids_with_curator_id: [ 1 ] } } ] )
expect( Observation.params_to_elastic_query({ project: 1, pcid: "no" }) ).to include(
filters: [
{ terms: { project_ids: [ 1 ] } },
{ terms: { project_ids_without_curator_id: [ 1 ] } } ] )
end
it "filters by pcid" do
expect( Observation.params_to_elastic_query({ pcid: "yes" }) ).to include(
filters: [ { exists: { field: "project_ids_with_curator_id" } } ] )
expect( Observation.params_to_elastic_query({ pcid: "no" }) ).to include(
filters: [ { exists: { field: "project_ids_without_curator_id" } } ] )
end
it "filters by not_in_project" do
p = Project.make!
expect( Observation.params_to_elastic_query({ not_in_project: p.id }) ).to include(
inverse_filters: [ { term: { project_ids: p.id } } ] )
end
it "filters by lrank" do
expect( Observation.params_to_elastic_query({ lrank: "species" }) ).to include(
filters: [ { range: { "taxon.rank_level" => { gte: 10, lte: 100 } } } ])
end
it "filters by hrank" do
expect( Observation.params_to_elastic_query({ hrank: "family" }) ).to include(
filters: [ { range: { "taxon.rank_level" => { gte: 0, lte: 30 } } } ])
end
it "filters by lrank and hrank" do
expect( Observation.params_to_elastic_query({ lrank: "species", hrank: "family" }) ).to include(
filters: [ { range: { "taxon.rank_level" => { gte: 10, lte: 30 } } } ])
end
it "filters by quality_grade" do
expect( Observation.params_to_elastic_query({ quality_grade: "any" }) ).to include(
filters: [ ] )
expect( Observation.params_to_elastic_query({ quality_grade: "research" }) ).to include(
filters: [ { terms: { quality_grade: [ "research" ] } } ] )
expect( Observation.params_to_elastic_query({ quality_grade: "research,casual" }) ).to include(
filters: [ { terms: { quality_grade: [ "research", "casual" ] } } ] )
end
it "filters by identifications" do
expect( Observation.params_to_elastic_query({ identifications: "most_agree" }) ).to include(
filters: [ { term: { identifications_most_agree: true } } ] )
expect( Observation.params_to_elastic_query({ identifications: "some_agree" }) ).to include(
filters: [ { term: { identifications_some_agree: true } } ] )
expect( Observation.params_to_elastic_query({ identifications: "most_disagree" }) ).to include(
filters: [ { term: { identifications_most_disagree: true } } ] )
end
it "filters by bounding box" do
expect( Observation.params_to_elastic_query({ nelat: 1, nelng: 2, swlat: 3, swlng: 4 }) ).to include(
filters: [ { envelope: { geojson: {
nelat: 1, nelng: 2, swlat: 3, swlng: 4, user: nil } } } ])
end
it "filters by lat and lng" do
expect( Observation.params_to_elastic_query({ lat: 10, lng: 15 }) ).to include(
filters: [ { geo_distance: { distance: "10km", location: { lat: 10, lon: 15 } } } ] )
expect( Observation.params_to_elastic_query({ lat: 10, lng: 15, radius: 2 }) ).to include(
filters: [ { geo_distance: { distance: "2km", location: { lat: 10, lon: 15 } } } ] )
end
it "filters by reviewed" do
u = User.make!
# doesn't filter without a user
expect( Observation.params_to_elastic_query({ reviewed: "true" }) ).to include( filters: [ ] )
expect( Observation.params_to_elastic_query({ reviewed: "true" }, current_user: u) ).to include(
filters: [ { term: { reviewed_by: u.id } } ] )
expect( Observation.params_to_elastic_query({ reviewed: "false" }, current_user: u) ).to include(
inverse_filters: [ { term: { reviewed_by: u.id } } ] )
end
it "filters by d1 d2 dates" do
expect( Observation.params_to_elastic_query({ d1: "2015-03-25", d2: "2015-06-20" }) ).to include(
filters: [ { range: { "observed_on_details.date": { gte: "2015-03-25", lte: "2015-06-20" }}}])
end
it "defaults d2 date to now" do
expect( Observation.params_to_elastic_query({ d1: "2015-03-25" }) ).to include(
filters: [ { range: { "observed_on_details.date": { gte: "2015-03-25", lte: Time.now.strftime("%F") }}}])
end
it "defaults d1 date to 1800" do
expect( Observation.params_to_elastic_query({ d2: "2015-06-20" }) ).to include(
filters: [ { range: { "observed_on_details.date": { gte: "1800-01-01", lte: "2015-06-20" }}}])
end
it "filters by d1 d2 datetimes" do
time_filter = { time_observed_at: {
gte: "2015-03-25T01:23:45+00:00",
lte: "2015-04-25T03:33:33+00:00" } }
date_filter = { "observed_on_details.date": {
gte: "2015-03-25",
lte: "2015-04-25" } }
expect( Observation.params_to_elastic_query(
{ d1: "2015-03-25T01:23:45", d2: "2015-04-25T03:33:33" }) ).to include({
filters: [{ bool: { should: [
{ bool: { must: [ { range: time_filter }, { exists: { field: "time_observed_at" } } ] } },
{ bool: {
must: { range: date_filter },
must_not: { exists: { field: "time_observed_at" } } } }
]}}]
})
end
it "defaults d2 date to now" do
time_filter = { time_observed_at: {
gte: "2015-03-25T01:23:45+00:00",
lte: Time.now.strftime("%FT%T%:z") } }
date_filter = { "observed_on_details.date": {
gte: "2015-03-25",
lte: Time.now.strftime("%F") } }
expect( Observation.params_to_elastic_query({ d1: "2015-03-25T01:23:45" }) ).to include({
filters: [{ bool: { should: [
{ bool: { must: [ { range: time_filter }, { exists: { field: "time_observed_at" } } ] } },
{ bool: {
must: { range: date_filter },
must_not: { exists: { field: "time_observed_at" } } } }
]}}]
})
end
it "defaults d1 date to 1800" do
time_filter = { time_observed_at: {
gte: "1800-01-01T00:00:00+00:00",
lte: "2015-04-25T03:33:33+00:00" } }
date_filter = { "observed_on_details.date": {
gte: "1800-01-01",
lte: "2015-04-25" } }
expect( Observation.params_to_elastic_query({ d2: "2015-04-25T03:33:33" }) ).to include({
filters: [{ bool: { should: [
{ bool: { must: [ { range: time_filter }, { exists: { field: "time_observed_at" } } ] } },
{ bool: {
must: { range: date_filter },
must_not: { exists: { field: "time_observed_at" } } } }
]}}]
})
end
it "respects d1 d2 timezones" do
time_filter = { time_observed_at: {
gte: "2015-03-25T01:00:00+09:00",
lte: "2015-04-25T23:00:00-08:00" } }
date_filter = { "observed_on_details.date": {
gte: "2015-03-25",
lte: "2015-04-25" } }
expect( Observation.params_to_elastic_query(
{ d1: "2015-03-25T01:00:00+09:00", d2: "2015-04-25T23:00:00-08:00" }) ).to include({
filters: [{ bool: { should: [
{ bool: { must: [ { range: time_filter }, { exists: { field: "time_observed_at" } } ] } },
{ bool: {
must: { range: date_filter },
must_not: { exists: { field: "time_observed_at" } } } }
]}}]
})
end
it "filters by h1 and h2" do
expect( Observation.params_to_elastic_query({ h1: 8, h2: 10 }) ).to include(
filters: [ { range: { "observed_on_details.hour" => { gte: 8, lte: 10 } } } ] )
expect( Observation.params_to_elastic_query({ h1: 8, h2: 4 }) ).to include(
filters: [ { bool: { should: [
{ range: { "observed_on_details.hour" => { gte: 8 } } },
{ range: { "observed_on_details.hour" => { lte: 4 } } } ] } } ] )
end
it "filters by m1 and m2" do
expect( Observation.params_to_elastic_query({ m1: 8, m2: 10 }) ).to include(
filters: [ { range: { "observed_on_details.month" => { gte: 8, lte: 10 } } } ] )
expect( Observation.params_to_elastic_query({ m1: 8, m2: 4 }) ).to include(
filters: [ { bool: { should: [
{ range: { "observed_on_details.month" => { gte: 8 } } },
{ range: { "observed_on_details.month" => { lte: 4 } } } ] } } ] )
end
it "filters by updated_since" do
timeString = "2015-10-31T00:00:00+00:00"
timeObject = Chronic.parse(timeString)
expect( Observation.params_to_elastic_query({ updated_since: timeString }) ).to include(
filters: [ { range: { updated_at: { gte: timeObject } } } ] )
end
it "filters by updated_since OR aggregation_user_ids" do
timeString = "2015-10-31T00:00:00+00:00"
timeObject = Chronic.parse(timeString)
expect( Observation.params_to_elastic_query({
updated_since: timeString, aggregation_user_ids: [ 1, 2 ] }) ).to include(
filters: [ { bool: { should: [
{ range: { updated_at: { gte: timeObject } } },
{ terms: { "user.id" => [1, 2] } } ] } } ] )
end
it "filters by observation field values" do
of = ObservationField.make!
ofv_params = { whatever: { observation_field: of, value: "testvalue" } }
expect( Observation.params_to_elastic_query({ ofv_params: ofv_params }) ).to include(
filters: [ { nested: { path: "ofvs", query: { bool: { must: [
{ match: { "ofvs.name_ci" => of.name } },
{ match: { "ofvs.value_ci" => "testvalue" }}]}}}}])
end
it "filters by conservation status" do
expect( Observation.params_to_elastic_query({ cs: "testing" }) ).to include(
filters: [ { nested: { path: "taxon.statuses", query: { bool: {
must: [ { terms: { "taxon.statuses.status" => [ "testing" ] } } ],
must_not: [ { exists: { field: "taxon.statuses.place_id" }}]}}}}])
expect( Observation.params_to_elastic_query({ cs: "testing", place_id: 6 })[:filters] ).to include(
{ nested: { path: "taxon.statuses", query: { bool: { must: [
{ terms: {"taxon.statuses.status" => [ "testing" ] } },
{ bool: { should: [
{ terms: { "taxon.statuses.place_id" => [ 6 ] } },
{ bool: { must_not: { exists: { field: "taxon.statuses.place_id" }}}}]}}]}}}})
end
it "filters by IUCN conservation status" do
expect( Observation.params_to_elastic_query({ csi: "LC" }) ).to include(
filters: [ { nested: { path: "taxon.statuses", query: { bool: {
must: [ { terms: { "taxon.statuses.iucn" => [ 10 ] } } ],
must_not: [ { exists: { field: "taxon.statuses.place_id" }}]}}}}])
expect( Observation.params_to_elastic_query({ csi: "LC", place_id: 6 })[:filters] ).to include(
{ nested: { path: "taxon.statuses", query: { bool: { must: [
{ terms: {"taxon.statuses.iucn" => [ 10 ] } },
{ bool: { should: [
{ terms: { "taxon.statuses.place_id" => [ 6 ] } },
{ bool: { must_not: { exists: { field: "taxon.statuses.place_id" }}}}]}}]}}}})
end
it "filters by conservation status authority" do
expect( Observation.params_to_elastic_query({ csa: "IUCN" }) ).to include(
filters: [ { nested: { path: "taxon.statuses", query: { bool: {
must: [ { terms: { "taxon.statuses.authority" => [ "iucn" ] } } ],
must_not: [ { exists: { field: "taxon.statuses.place_id" }}]}}}}])
expect( Observation.params_to_elastic_query({ csa: "IUCN", place_id: 6 })[:filters] ).to include(
{ nested: { path: "taxon.statuses", query: { bool: { must: [
{ terms: {"taxon.statuses.authority" => [ "iucn" ] } },
{ bool: { should: [
{ terms: { "taxon.statuses.place_id" => [ 6 ] } },
{ bool: { must_not: { exists: { field: "taxon.statuses.place_id" }}}}]}}]}}}})
end
it "filters by iconic_taxa" do
animalia = Taxon.where(name: "Animalia").first
expect( Observation.params_to_elastic_query({ iconic_taxa: [ animalia.name ] }) ).to include(
filters: [ { terms: { "taxon.iconic_taxon_id" => [ animalia.id ] } } ])
expect( Observation.params_to_elastic_query({ iconic_taxa: [ animalia.name, "unknown" ] }) ).to include(
filters: [ { bool: { should: [
{ terms: { "taxon.iconic_taxon_id" => [ animalia.id ] } },
{ bool: { must_not: { exists: { field: "taxon.iconic_taxon_id" } } } } ] } } ])
end
it "filters by geoprivacy" do
expect( Observation.params_to_elastic_query({ geoprivacy: "any" }) ).to include(
filters: [ ])
expect( Observation.params_to_elastic_query({ geoprivacy: "open" }) ).to include(
inverse_filters: [ { exists: { field: :geoprivacy } } ])
expect( Observation.params_to_elastic_query({ geoprivacy: "obscured" }) ).to include(
filters: [ { term: { geoprivacy: "obscured" } } ])
expect( Observation.params_to_elastic_query({ geoprivacy: "obscured_private" }) ).to include(
filters: [ { terms: { geoprivacy: [ "obscured", "private" ] } } ])
end
it "filters by popular" do
expect( Observation.params_to_elastic_query({ popular: "true" }) ).to include(
filters: [ { range: { cached_votes_total: { gte: 1 } } } ])
expect( Observation.params_to_elastic_query({ popular: "false" }) ).to include(
filters: [ { term: { cached_votes_total: 0 } } ])
end
it "filters by min_id" do
expect( Observation.params_to_elastic_query({ min_id: 99 }) ).to include(
filters: [ { range: { id: { gte: 99 } } } ])
end
end
describe "prepare_batch_for_index" do
it "should always include country-, state-, and county-level place IDs" do
country = make_place_with_geom(
admin_level: Place::COUNTRY_LEVEL,
wkt: "MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0)))"
)
state = make_place_with_geom(
admin_level: Place::STATE_LEVEL, parent: country,
wkt: "MULTIPOLYGON(((0.1 0.1,0.1 0.9,0.9 0.9,0.9 0.1,0.1 0.1)))"
)
county = make_place_with_geom(
admin_level: Place::STATE_LEVEL, parent: state,
wkt: "MULTIPOLYGON(((0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2)))"
)
o = Observation.make!( latitude: county.latitude, longitude: county.longitude, positional_accuracy: 99999 )
Observation.prepare_batch_for_index( [o] )
[country, state, county].each do |p|
expect( o.indexed_place_ids ).to include p.id
expect( o.indexed_private_place_ids ).to include p.id
end
end
end
end
| 46.502058 | 128 | 0.624425 |
bb3e74ac02ca54f3b65000b8c33ff995ed1ee1d0 | 1,161 | # frozen_string_literal: true
require 'spec_helper'
require 'aca_entities/magi_medicaid/libraries/iap_library'
# Entity specs for PregnancyInformation: builds the entity from
# contract-validated params and checks both the happy path and
# type-coercion failures.
RSpec.describe ::AcaEntities::MagiMedicaid::PregnancyInformation, dbclean: :after_each do
  describe 'with valid arguments' do
    # A fully-populated, contract-valid attribute set.
    let(:input_params) do
      { is_pregnant: true,
        is_enrolled_on_medicaid: false,
        is_post_partum_period: false,
        expected_children_count: 1,
        pregnancy_due_on: Date.today.next_month,
        pregnancy_end_on: nil }
    end
    before do
      # Run the params through the contract first so the entity receives sanitized input.
      pregnancy_params = AcaEntities::MagiMedicaid::Contracts::PregnancyInformationContract.new.call(input_params).to_h
      @result = described_class.new(pregnancy_params)
    end
    it 'should return PregnancyInformation entity object' do
      expect(@result).to be_a(described_class)
    end
    it 'should return all keys of PregnancyInformation' do
      expect(@result.to_h.keys).to eq(input_params.keys)
    end
  end
  describe 'with invalid arguments' do
    it 'should raise error' do
      # Dry::Struct rejects a String where a strict boolean attribute is declared.
      expect { described_class.new(is_pregnant: 'Test') }.to raise_error(Dry::Struct::Error, /has invalid type for :is_pregnant/)
    end
  end
end
| 30.552632 | 129 | 0.726098 |
6aee91fa2d2749508435edecca2d12e2e5c0d696 | 169 | class CreateCategories < ActiveRecord::Migration[5.1]
def change
create_table :categories do |t|
t.string :title
t.timestamps
end
end
end
| 16.9 | 53 | 0.650888 |
ac15c482e35babf23e6aae83f17c68650626bc74 | 698 | class Livepeer::RoundSyncService < Livepeer::BaseSyncService
ID_COLUMN = :number
ID_ATTRIBUTE = :id
private
def data
@_data ||= @data.merge(
stakes: fetch_objects(:stakes),
bonds: fetch_objects(:bonds),
unbonds: fetch_objects(:unbonds),
rebonds: fetch_objects(:rebonds),
reward_cut_changes: fetch_objects(:reward_cut_changes),
missed_reward_calls: fetch_objects(:missed_reward_calls),
deactivations: fetch_objects(:deactivations),
slashings: fetch_objects(:slashings)
)
end
def fetch_objects(resource)
query_class = Livepeer::Factories::GraphQueryFactory.new(resource).call
query_class.new(chain, @data).call
end
end
| 27.92 | 75 | 0.717765 |
1aea20ec035e2573d3f7b9b1bd835e2573890ccf | 595 | cask :v1 => 'tower' do
version :latest
sha256 :no_check
url 'https://www.git-tower.com/download'
appcast 'https://updates.fournova.com/updates/tower2-mac/stable'
name 'Tower'
homepage 'http://www.git-tower.com/'
license :commercial
app 'Tower.app'
binary 'Tower.app/Contents/MacOS/gittower'
zap :delete => [
'~/Library/Application Support/com.fournova.Tower2',
'~/Library/Caches/com.fournova.Tower2',
'~/Library/Preferences/com.fournova.Tower2.plist',
]
caveats do
files_in_usr_local
end
end
| 24.791667 | 70 | 0.628571 |
7976a268244664f3ab8661e3d3c33ebc1e3cdf88 | 212 | # encoding: utf-8
$LOAD_PATH << File.expand_path('../lib/tmrb' , File.dirname(__FILE__))
require 'tables'
include Tables
RSpec.configure do |c|
  # Opt-in: treat bare symbols in example metadata as true-valued tags
  # (setting from RSpec 2.x; the default in later versions).
  c.treat_symbols_as_metadata_keys_with_true_values = true
end
| 19.272727 | 70 | 0.754717 |
f7441f35a6c223248b520a213b5371a31cdf8af5 | 271 | require './config/environment'
# Refuse to boot with pending migrations so schema drift is caught early.
if ActiveRecord::Migrator.needs_migration?
  raise 'Migrations are pending. Run `rake db:migrate` to resolve the issue.'
end
# Rewrites POST + `_method` param into PUT/PATCH/DELETE for HTML forms.
use Rack::MethodOverride
use UsersController
use TweetsController
run ApplicationController
# require_all 'app'
| 20.846154 | 77 | 0.804428 |
7af870b1cfceef10060ef41fe4322ebf7da3cb92 | 79 | require "mock_last_status/version"
require "mock_last_status/mock_last_status"
| 26.333333 | 43 | 0.873418 |
f8fdf8e0cda54ac24fe792a5f057d1fb98e20dc8 | 1,435 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 6.0.1-SNAPSHOT
=end
module Petstore
  # Raised when the API returns an error response. Carries the HTTP status
  # code, response headers and raw response body alongside the message.
  #
  # Usage examples:
  #   ApiError.new
  #   ApiError.new("message")
  #   ApiError.new(:code => 500, :response_headers => {}, :response_body => "")
  #   ApiError.new(:code => 404, :message => "Not Found")
  class ApiError < StandardError
    attr_reader :code, :response_headers, :response_body

    def initialize(arg = nil)
      unless arg.is_a?(Hash)
        super(arg)
        return
      end
      if arg.key?(:message) || arg.key?('message')
        super(arg[:message] || arg['message'])
      else
        super(arg)
      end
      # Expose every hash entry as an instance variable (@code, @message, ...).
      arg.each { |key, value| instance_variable_set("@#{key}", value) }
    end

    # Render the friendly multi-line message when the error is stringified.
    def to_s
      message
    end

    # Builds the error text: the stored message (or a generic fallback)
    # followed by one line per available piece of response detail.
    def message
      lines = [@message.nil? ? "Error message: the server returns an error" : @message]
      lines << "HTTP status code: #{code}" if code
      lines << "Response headers: #{response_headers}" if response_headers
      lines << "Response body: #{response_body}" if response_body
      lines.join("\n")
    end
  end
end
| 24.741379 | 157 | 0.615331 |
1a4a1e3f12c3b054feaa085b57fe59c8d5d34e4e | 317 | # frozen_string_literal: true
FactoryBot.define do
  # Builds a SignedMotion with randomized but plausible attributes and an
  # associated deputy_legislature record.
  factory :signed_motion do
    title { Faker::Lorem.paragraph }
    number { Faker::Number.between(from: 1, to: 300) }
    # Always a past date, so the motion reads as historical.
    date { Faker::Date.between(from: 1.year.ago, to: 1.day.ago) }
    status { Faker::Lorem.word }
    association :deputy_legislature
  end
end
| 26.416667 | 65 | 0.690852 |
01d7e44208dab197d389adc238c9baf2b633efcc | 159 | class CycleThreeAttributeResponse < Api::Response
attr_reader :name
validates_presence_of :name
def initialize(hash)
@name = hash['name']
end
end
| 17.666667 | 49 | 0.742138 |
91e70eb361b3e370e2626d2a6bd6a90719ceabfd | 1,227 | # frozen_string_literal: true
# Copyright (c) 2019-present, BigCommerce Pty. Ltd. All rights reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
module Bigcommerce
  module Prometheus
    # Gem version; the .pre suffix marks a prerelease build.
    VERSION = '0.5.3.pre'
  end
end
| 53.347826 | 120 | 0.780766 |
339498e7236fe6c4b607f925e53ae8698ca1b4e9 | 227 | class CreateHolidays < ActiveRecord::Migration[4.2]
def up
create_table :holidays do |t|
t.datetime :start
t.datetime :end
t.integer :user_id
end
end
def down
drop_table(:holidays)
end
end
| 17.461538 | 51 | 0.651982 |
2617374136b792c7cc43f3c4376e88c4c4f64e1e | 2,143 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::TrafficManager::Mgmt::V2018_03_01
module Models
#
# Class representing the Geographic hierarchy used with the Geographic
# traffic routing method.
#
class TrafficManagerGeographicHierarchy < ProxyResource
include MsRestAzure
# @return [Region] The region at the root of the hierarchy from all the
# regions in the hierarchy can be retrieved.
attr_accessor :geographic_hierarchy
#
# Mapper for TrafficManagerGeographicHierarchy class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'TrafficManagerGeographicHierarchy',
type: {
name: 'Composite',
class_name: 'TrafficManagerGeographicHierarchy',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
serialized_name: 'type',
type: {
name: 'String'
}
},
geographic_hierarchy: {
client_side_validation: true,
required: false,
serialized_name: 'properties.geographicHierarchy',
type: {
name: 'Composite',
class_name: 'Region'
}
}
}
}
}
end
end
end
end
| 28.959459 | 77 | 0.512366 |
39ae029cd561dfe705d100e338e69014a0045b7c | 3,407 | #--
# Copyright (c) 2006-2013 Philip Ross
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#++
require File.join(File.expand_path(File.dirname(__FILE__)), 'test_utils')
include TZInfo
# Unit tests for TZInfo::RubyCountryInfo: accessors, the frozen
# zone/zone-identifier collections, and lazy evaluation of the
# timezone-definition block.
class TCRubyCountryInfo < Test::Unit::TestCase
  def test_code
    ci = RubyCountryInfo.new('ZZ', 'Zzz') {|c| }
    assert_equal('ZZ', ci.code)
  end
  def test_name
    ci = RubyCountryInfo.new('ZZ', 'Zzz') {|c| }
    assert_equal('Zzz', ci.name)
  end
  def test_zone_identifiers_empty
    ci = RubyCountryInfo.new('ZZ', 'Zzz') {|c| }
    assert(ci.zone_identifiers.empty?)
    assert(ci.zone_identifiers.frozen?)
  end
  def test_zone_identifiers_no_block
    ci = RubyCountryInfo.new('ZZ', 'Zzz')
    assert(ci.zone_identifiers.empty?)
    assert(ci.zone_identifiers.frozen?)
  end
  def test_zone_identifiers
    ci = RubyCountryInfo.new('ZZ', 'Zzz') do |c|
      c.timezone('ZZ/TimezoneB', 1, 2, 1, 2, 'Timezone B')
      c.timezone('ZZ/TimezoneA', 1, 4, 1, 4, 'Timezone A')
      c.timezone('ZZ/TimezoneC', -10, 3, -20, 7, 'C')
      c.timezone('ZZ/TimezoneD', -10, 3, -20, 7)
    end
    # Identifiers must preserve definition order, not sort.
    assert_equal(['ZZ/TimezoneB', 'ZZ/TimezoneA', 'ZZ/TimezoneC', 'ZZ/TimezoneD'], ci.zone_identifiers)
    assert(ci.zone_identifiers.frozen?)
  end
  def test_zones_empty
    ci = RubyCountryInfo.new('ZZ', 'Zzz') {|c| }
    assert(ci.zones.empty?)
    assert(ci.zones.frozen?)
  end
  def test_zones_no_block
    ci = RubyCountryInfo.new('ZZ', 'Zzz')
    assert(ci.zones.empty?)
    assert(ci.zones.frozen?)
  end
  def test_zones
    ci = RubyCountryInfo.new('ZZ', 'Zzz') do |c|
      c.timezone('ZZ/TimezoneB', 1, 2, 1, 2, 'Timezone B')
      c.timezone('ZZ/TimezoneA', 1, 4, 1, 4, 'Timezone A')
      c.timezone('ZZ/TimezoneC', -10, 3, -20, 7, 'C')
      c.timezone('ZZ/TimezoneD', -10, 3, -20, 7)
    end
    assert_equal([CountryTimezone.new('ZZ/TimezoneB', 1, 2, 1, 2, 'Timezone B'),
      CountryTimezone.new('ZZ/TimezoneA', 1, 4, 1, 4, 'Timezone A'),
      CountryTimezone.new('ZZ/TimezoneC', -10, 3, -20, 7, 'C'),
      CountryTimezone.new('ZZ/TimezoneD', -10, 3, -20, 7)],
      ci.zones)
    assert(ci.zones.frozen?)
  end
  # The block must not run until zones/zone_identifiers is first accessed.
  def test_deferred_evaluate
    block_called = false
    ci = RubyCountryInfo.new('ZZ', 'Zzz') do |c|
      block_called = true
    end
    assert_equal(false, block_called)
    ci.zones
    assert_equal(true, block_called)
  end
end
| 33.07767 | 103 | 0.673907 |
28faea4de7f2394c69e41b5c7cb4b9488847bd32 | 6,162 | module Rodauth
Feature.define(:rails) do
depends :email_base
# List of overridable methods.
auth_methods(
:rails_render,
:rails_csrf_tag,
:rails_csrf_param,
:rails_csrf_token,
:rails_check_csrf!,
:rails_controller,
)
auth_cached_method :rails_controller_instance
# Renders templates with layout. First tries to render a user-defined
# template, otherwise falls back to Rodauth's template.
def view(page, *)
rails_render(action: page.tr("-", "_"), layout: true) ||
rails_render(html: super.html_safe, layout: true)
end
# Renders templates without layout. First tries to render a user-defined
# template or partial, otherwise falls back to Rodauth's template.
def render(page)
rails_render(partial: page.tr("-", "_"), layout: false) ||
rails_render(action: page.tr("-", "_"), layout: false) ||
super.html_safe
end
# Render Rails CSRF tags in Rodauth templates.
def csrf_tag(*)
rails_csrf_tag
end
# Verify Rails' authenticity token.
def check_csrf
rails_check_csrf!
end
# Have Rodauth call #check_csrf automatically.
def check_csrf?
true
end
# Reset Rails session to protect from session fixation attacks.
def clear_session
rails_controller_instance.reset_session
end
# Default the flash error key to Rails' default :alert.
def flash_error_key
:alert
end
# Evaluates the block in context of a Rodauth controller instance.
def rails_controller_eval(&block)
rails_controller_instance.instance_exec(&block)
end
def button(*)
super.html_safe
end
private
# Runs controller callbacks and rescue handlers around Rodauth actions.
def _around_rodauth(&block)
result = nil
rails_controller_rescue do
rails_controller_callbacks do
result = catch(:halt) { super(&block) }
end
end
if rails_controller_instance.performed?
rails_controller_response
elsif result
result[1].merge!(rails_controller_instance.response.headers)
throw :halt, result
else
result
end
end
# Runs any #(before|around|after)_action controller callbacks.
def rails_controller_callbacks
# don't verify CSRF token as part of callbacks, Rodauth will do that
rails_controller_forgery_protection { false }
rails_controller_instance.run_callbacks(:process_action) do
# turn the setting back to default so that form tags generate CSRF tags
rails_controller_forgery_protection { rails_controller.allow_forgery_protection }
yield
end
end
# Runs any registered #rescue_from controller handlers.
def rails_controller_rescue
yield
rescue Exception => exception
rails_controller_instance.rescue_with_handler(exception) || raise
unless rails_controller_instance.performed?
raise Rodauth::Rails::Error, "rescue_from handler didn't write any response"
end
end
# Returns Roda response from controller response if set.
def rails_controller_response
controller_response = rails_controller_instance.response
response.status = controller_response.status
response.headers.merge! controller_response.headers
response.write controller_response.body
request.halt
end
# Create emails with ActionMailer which uses configured delivery method.
def create_email_to(to, subject, body)
Mailer.create_email(to: to, from: email_from, subject: "#{email_subject_prefix}#{subject}", body: body)
end
# Delivers the given email.
def send_email(email)
email.deliver_now
end
# Calls the Rails renderer, returning nil if a template is missing.
def rails_render(*args)
return if rails_api_controller?
rails_controller_instance.render_to_string(*args)
rescue ActionView::MissingTemplate
nil
end
# Calls the controller to verify the authenticity token.
def rails_check_csrf!
rails_controller_instance.send(:verify_authenticity_token)
end
# Hidden tag with Rails CSRF token inserted into Rodauth templates.
def rails_csrf_tag
%(<input type="hidden" name="#{rails_csrf_param}" value="#{rails_csrf_token}">)
end
# The request parameter under which to send the Rails CSRF token.
def rails_csrf_param
rails_controller.request_forgery_protection_token
end
# The Rails CSRF token value inserted into Rodauth templates.
def rails_csrf_token
rails_controller_instance.send(:form_authenticity_token)
end
# allows/disables forgery protection
def rails_controller_forgery_protection(&value)
return if rails_api_controller?
rails_controller_instance.allow_forgery_protection = value.call
end
# Instances of the configured controller with current request's env hash.
def _rails_controller_instance
controller = rails_controller.new
rails_request = ActionDispatch::Request.new(scope.env)
prepare_rails_controller(controller, rails_request)
controller
end
if ActionPack.version >= Gem::Version.new("5.0")
def prepare_rails_controller(controller, rails_request)
controller.set_request! rails_request
controller.set_response! rails_controller.make_response!(rails_request)
end
else
def prepare_rails_controller(controller, rails_request)
controller.send(:set_response!, rails_request)
controller.instance_variable_set(:@_request, rails_request)
end
end
def rails_api_controller?
defined?(ActionController::API) && rails_controller <= ActionController::API
end
# ActionMailer subclass for correct email delivering.
class Mailer < ActionMailer::Base
def create_email(**options)
mail(**options)
end
end
end
# Assign feature and feature configuration to constants for introspection.
Rails::Feature = FEATURES[:rails]
Rails::FeatureConfiguration = FEATURES[:rails].configuration
end
| 29.768116 | 109 | 0.705777 |
1d2a5aaca835abd0f7e2cd40fa7c26bc55afaf86 | 11,048 | # frozen_string_literal: true
#--
# = Ruby-space definitions that completes C-space funcs for X509 and subclasses
#
# = Info
# 'OpenSSL for Ruby 2' project
# Copyright (C) 2002 Michal Rokos <[email protected]>
# All rights reserved.
#
# = Licence
# This program is licensed under the same licence as Ruby.
# (See the file 'LICENCE'.)
#++
module OpenSSL
module X509
module Marshal
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def _load(string)
new(string)
end
end
def _dump(_level)
to_der
end
end
class ExtensionFactory
def create_extension(*arg)
if arg.size > 1
create_ext(*arg)
else
send("create_ext_from_"+arg[0].class.name.downcase, arg[0])
end
end
def create_ext_from_array(ary)
raise ExtensionError, "unexpected array form" if ary.size > 3
create_ext(ary[0], ary[1], ary[2])
end
def create_ext_from_string(str) # "oid = critical, value"
oid, value = str.split(/=/, 2)
oid.strip!
value.strip!
create_ext(oid, value)
end
def create_ext_from_hash(hash)
create_ext(hash["oid"], hash["value"], hash["critical"])
end
end
class Extension
include Marshal
def ==(other)
return false unless Extension === other
to_der == other.to_der
end
def to_s # "oid = critical, value"
str = self.oid
str << " = "
str << "critical, " if self.critical?
str << self.value.gsub(/\n/, ", ")
end
def to_h # {"oid"=>sn|ln, "value"=>value, "critical"=>true|false}
{"oid"=>self.oid,"value"=>self.value,"critical"=>self.critical?}
end
def to_a
[ self.oid, self.value, self.critical? ]
end
module Helpers
def find_extension(oid)
extensions.find { |e| e.oid == oid }
end
end
module SubjectKeyIdentifier
include Helpers
# Get the subject's key identifier from the subjectKeyIdentifier
# exteension, as described in RFC5280 Section 4.2.1.2.
#
# Returns the binary String key identifier or nil or raises
# ASN1::ASN1Error.
def subject_key_identifier
ext = find_extension("subjectKeyIdentifier")
return nil if ext.nil?
ski_asn1 = ASN1.decode(ext.value_der)
if ext.critical? || ski_asn1.tag_class != :UNIVERSAL || ski_asn1.tag != ASN1::OCTET_STRING
raise ASN1::ASN1Error, "invalid extension"
end
ski_asn1.value
end
end
module AuthorityKeyIdentifier
include Helpers
# Get the issuing certificate's key identifier from the
# authorityKeyIdentifier extension, as described in RFC5280
# Section 4.2.1.1
#
# Returns the binary String keyIdentifier or nil or raises
# ASN1::ASN1Error.
def authority_key_identifier
ext = find_extension("authorityKeyIdentifier")
return nil if ext.nil?
aki_asn1 = ASN1.decode(ext.value_der)
if ext.critical? || aki_asn1.tag_class != :UNIVERSAL || aki_asn1.tag != ASN1::SEQUENCE
raise ASN1::ASN1Error, "invalid extension"
end
key_id = aki_asn1.value.find do |v|
v.tag_class == :CONTEXT_SPECIFIC && v.tag == 0
end
key_id.nil? ? nil : key_id.value
end
end
module CRLDistributionPoints
include Helpers
# Get the distributionPoint fullName URI from the certificate's CRL
# distribution points extension, as described in RFC5280 Section
# 4.2.1.13
#
# Returns an array of strings or nil or raises ASN1::ASN1Error.
def crl_uris
ext = find_extension("crlDistributionPoints")
return nil if ext.nil?
cdp_asn1 = ASN1.decode(ext.value_der)
if cdp_asn1.tag_class != :UNIVERSAL || cdp_asn1.tag != ASN1::SEQUENCE
raise ASN1::ASN1Error, "invalid extension"
end
crl_uris = cdp_asn1.map do |crl_distribution_point|
distribution_point = crl_distribution_point.value.find do |v|
v.tag_class == :CONTEXT_SPECIFIC && v.tag == 0
end
full_name = distribution_point&.value&.find do |v|
v.tag_class == :CONTEXT_SPECIFIC && v.tag == 0
end
full_name&.value&.find do |v|
v.tag_class == :CONTEXT_SPECIFIC && v.tag == 6 # uniformResourceIdentifier
end
end
crl_uris&.map(&:value)
end
end
module AuthorityInfoAccess
include Helpers
# Get the information and services for the issuer from the certificate's
# authority information access extension exteension, as described in RFC5280
# Section 4.2.2.1.
#
# Returns an array of strings or nil or raises ASN1::ASN1Error.
def ca_issuer_uris
aia_asn1 = parse_aia_asn1
return nil if aia_asn1.nil?
ca_issuer = aia_asn1.value.select do |authority_info_access|
authority_info_access.value.first.value == "caIssuers"
end
ca_issuer&.map(&:value)&.map(&:last)&.map(&:value)
end
# Get the URIs for OCSP from the certificate's authority information access
# extension exteension, as described in RFC5280 Section 4.2.2.1.
#
# Returns an array of strings or nil or raises ASN1::ASN1Error.
def ocsp_uris
aia_asn1 = parse_aia_asn1
return nil if aia_asn1.nil?
ocsp = aia_asn1.value.select do |authority_info_access|
authority_info_access.value.first.value == "OCSP"
end
ocsp&.map(&:value)&.map(&:last)&.map(&:value)
end
private
def parse_aia_asn1
ext = find_extension("authorityInfoAccess")
return nil if ext.nil?
aia_asn1 = ASN1.decode(ext.value_der)
if ext.critical? || aia_asn1.tag_class != :UNIVERSAL || aia_asn1.tag != ASN1::SEQUENCE
raise ASN1::ASN1Error, "invalid extension"
end
aia_asn1
end
end
end
class Name
include Marshal
module RFC2253DN
Special = ',=+<>#;'
HexChar = /[0-9a-fA-F]/
HexPair = /#{HexChar}#{HexChar}/
HexString = /#{HexPair}+/
Pair = /\\(?:[#{Special}]|\\|"|#{HexPair})/
StringChar = /[^\\"#{Special}]/
QuoteChar = /[^\\"]/
AttributeType = /[a-zA-Z][0-9a-zA-Z]*|[0-9]+(?:\.[0-9]+)*/
AttributeValue = /
(?!["#])((?:#{StringChar}|#{Pair})*)|
\#(#{HexString})|
"((?:#{QuoteChar}|#{Pair})*)"
/x
TypeAndValue = /\A(#{AttributeType})=#{AttributeValue}/
module_function
def expand_pair(str)
return nil unless str
return str.gsub(Pair){
pair = $&
case pair.size
when 2 then pair[1,1]
when 3 then Integer("0x#{pair[1,2]}").chr
else raise OpenSSL::X509::NameError, "invalid pair: #{str}"
end
}
end
def expand_hexstring(str)
return nil unless str
der = str.gsub(HexPair){$&.to_i(16).chr }
a1 = OpenSSL::ASN1.decode(der)
return a1.value, a1.tag
end
def expand_value(str1, str2, str3)
value = expand_pair(str1)
value, tag = expand_hexstring(str2) unless value
value = expand_pair(str3) unless value
return value, tag
end
def scan(dn)
str = dn
ary = []
while true
if md = TypeAndValue.match(str)
remain = md.post_match
type = md[1]
value, tag = expand_value(md[2], md[3], md[4]) rescue nil
if value
type_and_value = [type, value]
type_and_value.push(tag) if tag
ary.unshift(type_and_value)
if remain.length > 2 && remain[0] == ?,
str = remain[1..-1]
next
elsif remain.length > 2 && remain[0] == ?+
raise OpenSSL::X509::NameError,
"multi-valued RDN is not supported: #{dn}"
elsif remain.empty?
break
end
end
end
msg_dn = dn[0, dn.length - str.length] + " =>" + str
raise OpenSSL::X509::NameError, "malformed RDN: #{msg_dn}"
end
return ary
end
end
class << self
def parse_rfc2253(str, template=OBJECT_TYPE_TEMPLATE)
ary = OpenSSL::X509::Name::RFC2253DN.scan(str)
self.new(ary, template)
end
def parse_openssl(str, template=OBJECT_TYPE_TEMPLATE)
if str.start_with?("/")
# /A=B/C=D format
ary = str[1..-1].split("/").map { |i| i.split("=", 2) }
else
# Comma-separated
ary = str.split(",").map { |i| i.strip.split("=", 2) }
end
self.new(ary, template)
end
alias parse parse_openssl
end
def pretty_print(q)
q.object_group(self) {
q.text ' '
q.text to_s(OpenSSL::X509::Name::RFC2253)
}
end
end
class Attribute
include Marshal
def ==(other)
return false unless Attribute === other
to_der == other.to_der
end
end
class StoreContext
def cleanup
warn "(#{caller.first}) OpenSSL::X509::StoreContext#cleanup is deprecated with no replacement" if $VERBOSE
end
end
class Certificate
include Marshal
include Extension::SubjectKeyIdentifier
include Extension::AuthorityKeyIdentifier
include Extension::CRLDistributionPoints
include Extension::AuthorityInfoAccess
def pretty_print(q)
q.object_group(self) {
q.breakable
q.text 'subject='; q.pp self.subject; q.text ','; q.breakable
q.text 'issuer='; q.pp self.issuer; q.text ','; q.breakable
q.text 'serial='; q.pp self.serial; q.text ','; q.breakable
q.text 'not_before='; q.pp self.not_before; q.text ','; q.breakable
q.text 'not_after='; q.pp self.not_after
}
end
end
class CRL
include Marshal
include Extension::AuthorityKeyIdentifier
def ==(other)
return false unless CRL === other
to_der == other.to_der
end
end
class Revoked
def ==(other)
return false unless Revoked === other
to_der == other.to_der
end
end
class Request
include Marshal
def ==(other)
return false unless Request === other
to_der == other.to_der
end
end
end
end
| 28.770833 | 114 | 0.555757 |
e221ba033b057b6be1326d329514c185b488c660 | 892 | class TestPathAttributes < JabaTest
it 'warns if dir not clean' do
assert_jaba_warn "Directory 'a\\b' not specified cleanly: contains backslashes", __FILE__, 'tagA' do
jaba(barebones: true) do
type :test do
attr :a, type: :dir do
basedir_spec :definition_root
end
end
test :t do
a "a\\b" # tagA
end
end
end
end
# TODO: test all base_dir specs
# TODO: test paths starting with ./
it 'rejects slashes in basename' do
['a\b', 'a/b'].each do |val|
assert_jaba_error "Error at #{src_loc(__FILE__, :tagB)}: 't.a' attribute invalid: '#{val}' must not contain slashes." do
jaba(barebones: true) do
type :test do
attr :a, type: :basename
end
test :t do
a val # tagB
end
end
end
end
end
end
| 23.473684 | 126 | 0.552691 |
f7458bddc84c0b88de509a470bccdad4591ad9c3 | 4,618 | require 'less'
module Less
class JsRoutes
class << self
@@debug = false
def build_params segs, others = ''
s = []
segs.each do |seg|
if seg.is_a?(ActionController::Routing::DynamicSegment)
s << seg.key.to_s.gsub(':', '')
end
end
s << 'verb'
s <<( others) unless others.blank?
s.join(', ')
end
def build_path segs
s = ""
segs.each_index do |i|
seg = segs[i]
break if i == segs.size-1 && seg.is_a?(ActionController::Routing::DividerSegment)
if seg.is_a?(ActionController::Routing::DividerSegment) || seg.is_a?(ActionController::Routing::StaticSegment)
s << seg.instance_variable_get(:@value)
elsif seg.is_a?(ActionController::Routing::DynamicSegment)
s << "' + #{seg.key.to_s.gsub(':', '')} + '"
end
end
s
end
def get_params others = ''
x = ''
x += " + " unless x.blank? || others.blank?
x += "less_get_params(#{others})" unless others.blank?
x
end
def get_js_helpers
<<-JS
function less_json_eval(json){return eval('(' + json + ')')}
function less_get_params(obj){
#{'console.log("less_get_params(" + obj + ")");' if @@debug}
if (jQuery) { return obj }
if (obj == null) {return '';}
var s = [];
for (prop in obj){
s.push(prop + "=" + obj[prop]);
}
return s.join('&') + '';
}
function less_merge_objects(a, b){
#{'console.log("less_merge_objects(" + a + ", " + b + ")");' if @@debug}
if (b == null) {return a;}
z = new Object;
for (prop in a){z[prop] = a[prop]}
for (prop in b){z[prop] = b[prop]}
return z;
}
function less_ajax(url, verb, params, options){
#{'console.log("less_ajax(" + url + ", " + verb + ", " + params +", " + options + ")");' if @@debug}
if (verb == undefined) {verb = 'get';}
var res;
if (jQuery){
v = verb.toLowerCase() == 'get' ? 'GET' : 'POST'
if (verb.toLowerCase() == 'get' || verb.toLowerCase() == 'post'){p = less_get_params(params);}
else{p = less_get_params(less_merge_objects({'_method': verb.toLowerCase()}, params))}
#{'console.log("less_merge_objects:v : " + v);' if @@debug}
#{'console.log("less_merge_objects:p : " + p);' if @@debug}
res = jQuery.ajax(less_merge_objects({async:false, url: url, type: v, data: p}, options)).responseText;
} else {
new Ajax.Request(url, less_merge_objects({asynchronous: false, method: verb, parameters: less_get_params(params), onComplete: function(r){res = r.responseText;}}, options));
}
if (url.indexOf('.json') == url.length-5){ return less_json_eval(res);}
else {return res;}
}
function less_ajaxx(url, verb, params, options){
#{'console.log("less_ajax(" + url + ", " + verb + ", " + params +", " + options + ")");' if @@debug}
if (verb == undefined) {verb = 'get';}
if (jQuery){
v = verb.toLowerCase() == 'get' ? 'GET' : 'POST'
if (verb.toLowerCase() == 'get' || verb.toLowerCase() == 'post'){p = less_get_params(params);}
else{p = less_get_params(less_merge_objects({'_method': verb.toLowerCase()}, params))}
#{'console.log("less_merge_objects:v : " + v);' if @@debug}
#{'console.log("less_merge_objects:p : " + p);' if @@debug}
jQuery.ajax(less_merge_objects({ url: url, type: v, data: p, complete: function(r){eval(r.responseText)}}, options));
} else {
new Ajax.Request(url, less_merge_objects({method: verb, parameters: less_get_params(params), onComplete: function(r){eval(r.responseText);}}, options));
}
}
JS
end
def generate!
s = get_js_helpers
ActionController::Routing::Routes.routes.each do |route|
name = ActionController::Routing::Routes.named_routes.routes.index(route).to_s
next if name.blank?
# s << build_path( route.segments)
# s << "\n"
# s << route.inspect# if route.instance_variable_get(:@conditions)[:method] == :put
s << "/////\n//#{route}\n" if @@debug
s << <<-JS
function #{name}_path(#{build_params route.segments}){ return '#{build_path route.segments}';}
function #{name}_ajax(#{build_params route.segments, 'params'}, options){ return less_ajax('#{build_path route.segments}', verb, params, options);}
function #{name}_ajaxx(#{build_params route.segments, 'params'}, options){ return less_ajaxx('#{build_path route.segments}', verb, params, options);}
JS
end
File.open(RAILS_ROOT + '/public/javascripts/less_routes.js', 'w') do |f|
f.write s
end
end
end
end
end
| 36.078125 | 177 | 0.588133 |
62e0c689e63be6b9ef1c387ee6c83c4ef6796e7b | 5,798 | require './spec/spec_helper'
require './lib/neutrino/gateway/service'
require './lib/neutrino/gateway/requestor'
require 'fakeweb'
describe Neutrino::Gateway::Service do
before(:each) do
Neutrino::Api::Client.config = TestConfig.to_hash
end
describe 'self.data' do
let(:gi_json_data) { { gi_json_data: 'Some Data' } }
let(:options) { { debug: true } }
let(:params) { { patient_document_id: '01123581321', service_class: 'nlp', service_identifier: 'gi' } }
context 'when valid params are given' do
before(:each) do
FakeWeb.register_uri(
:get,
'http://testhost:4242/api/v1/debug/true/patient_document/01123581321/service/nlp/gi/data?debug=true&user%5Bextension%5D=spameggs&user%5Broot%5D=foobar',
body: gi_json_data.to_json)
end
it 'gets data' do
expect((described_class.data(params, options))).to eq({ :data => gi_json_data.to_json, :type => 'text/plain' })
end
end
context 'when no params are given' do
let(:params) { {} }
before(:each) do
FakeWeb.register_uri(
:get,
'http://testhost:4242/api/v1/debug/true/patient_document/01123581321/service/nlp/gi/data?debug=true&user%5Bextension%5D=spameggs&user%5Broot%5D=foobar',
body: {'data' => 'Some Data'}.to_json)
end
it 'raises a BadRequestError' do
expect { described_class.data(params) }.to raise_error(Neutrino::Gateway::Exceptions::BadRequestError)
end
end
context 'when a document is not found' do
before(:each) do
FakeWeb.register_uri(
:get,
'http://testhost:4242/api/v1/debug/true/patient_document/01123581321/service/nlp/gi/data?debug=true&user%5Bextension%5D=spameggs&user%5Broot%5D=foobar',
status: ['404', 'OK'])
end
it 'raises a DerivedWorkDocumentNotFoundError' do
expect do
described_class.data(params, options)
end.to raise_error(Neutrino::Gateway::Exceptions::DerivedWorkDocumentNotFoundError)
end
end
end
describe 'self.metadata' do
let(:gi_json_data) { { gi_json_data: 'Some Data' } }
let(:options) { { debug: true } }
let(:params) { { patient_document_id: '01123581321', service_class: 'nlp', service_identifier: 'gi' } }
context 'when valid params are given' do
before(:each) do
FakeWeb.register_uri(
:get,
'http://testhost:4242/api/v1/debug/true/patient_document/01123581321/service/nlp/gi?debug=true&user%5Bextension%5D=spameggs&user%5Broot%5D=foobar',
body: {'metadata' => 'Some Data'}.to_json)
end
it 'gets data' do
expect((described_class.metadata(params, options))).to eq({ 'metadata' => 'Some Data' })
end
end
context 'when no params are given' do
let(:params) { {} }
before(:each) do
FakeWeb.register_uri(
:get,
'http://testhost:4242/api/v1/debug/true/patient_document/01123581321/service/nlp/gi?debug=true&user%5Bextension%5D=spameggs&user%5Broot%5D=foobar',
body: {'data' => 'Some Data'}.to_json)
end
it 'raises a BadRequestError' do
expect { described_class.metadata(params) }.to raise_error(Neutrino::Gateway::Exceptions::BadRequestError)
end
end
context 'when no document is found' do
before(:each) do
FakeWeb.register_uri(
:get,
'http://testhost:4242/api/v1/debug/true/patient_document/01123581321/service/nlp/gi?debug=true&user%5Bextension%5D=spameggs&user%5Broot%5D=foobar',
status: ['404', 'OK'])
end
it 'raises a DerivedWorkDocumentNotFoundError' do
expect do
described_class.metadata(params, options)
end.to raise_error(Neutrino::Gateway::Exceptions::DerivedWorkDocumentNotFoundError)
end
end
end
describe 'self.base_uri' do
let(:patient_document_id) { '01123581321' }
let(:service_class) { 'nlp' }
let(:service_identifier) { 'gi' }
let(:params) { { patient_document_id: patient_document_id, service_class: service_class, service_identifier: service_identifier } }
let(:options) { {} }
context 'when the options specify debugging a valid param combination is specified' do
let(:options) { { debug: true } }
it 'returns a URI containing the debug component' do
expect(described_class.base_uri(params, options)).to match(%r{/debug/true})
end
end
context 'when the params contain :patient_document_id' do
it 'returns a URI containing the id component' do
expect(described_class.base_uri(params, options)).to match(%r{/patient_document/#{patient_document_id}})
end
end
context 'when the params contain service_class and service_identifier' do
it 'returns a URI service_class and service_identifier' do
expect(described_class.base_uri(params, options)).to match(%r{/service/#{service_class}/#{service_identifier}})
end
end
context 'when service_class and service_identifier are not in params a bad request error is thrown' do
let(:options) { { debug: true } }
let(:params) { { patient_document_id: '01123581321' } }
it 'returns a URI service_class and service_identifier' do
expect { described_class.base_uri(params, options) }.to raise_error(Neutrino::Gateway::Exceptions::BadRequestError)
end
end
context 'patient_document_id or id are not in params a bad request error is thrown' do
let(:options) { { debug: true } }
let(:params) { {} }
it 'returns a URI service_class and service_identifier' do
expect { described_class.base_uri(params, options) }.to raise_error(Neutrino::Gateway::Exceptions::BadRequestError)
end
end
end
end
| 36.465409 | 162 | 0.665402 |
bbe4aa6b3dc14ffa361e49d2f3934f0829195457 | 1,381 | describe TadpolesController, :type => :request do
before(:each) do
@pond = Pond.create(:name => "Walden", :water_type => "alkaline")
@frog = Frog.create(:name => "Kermit", :color => "green", :pond_id => @pond.id)
@tadpole = Tadpole.create(:name => "Linda", :color => "tan", :frog_id => @frog.id)
end
# Metamorphosis promotes a tadpole into a frog: the endpoint is expected to
# copy name/color/pond onto a new Frog, destroy the Tadpole, and redirect to
# the new frog's show page.
describe "post tadpoles/:id/metamorphosize" do
  it "makes a new frog with the tadpole's name, color, and pond" do
    post "/tadpoles/#{@tadpole.id}/metamorphosize"
    # Look the frog up two independent ways and cross-check, proving a single
    # record carries all the copied attributes rather than two partial records.
    found_by_name = Frog.find_by(:name => @tadpole.name)
    found_by_color = Frog.find_by(:color => @tadpole.color)
    expect(found_by_color.name).to eq(@tadpole.name)
    expect(found_by_name.color).to eq(@tadpole.color)
    expect(found_by_name.pond).to eq(@tadpole.pond)
    expect(found_by_name).to eq(found_by_color)
  end
  it "deletes the tadpole from the datebase" do
    post "/tadpoles/#{@tadpole.id}/metamorphosize"
    found_by_name = Tadpole.find_by(:name => @tadpole.name)
    found_by_color = Tadpole.find_by(:color => @tadpole.color)
    expect(found_by_color).to be_nil
    expect(found_by_name).to be_nil
  end
  it "redirects to the newly made frog's show page" do
    # NOTE(review): predicts the new record's id as Frog.last.id + 1 *before*
    # posting — assumes sequential integer primary keys and no other inserts.
    # Confirm this holds for the test DB; otherwise find the frog by attributes.
    new_frog_id = Frog.last.id + 1
    post "/tadpoles/#{@tadpole.id}/metamorphosize"
    expect(response).to redirect_to("/frogs/#{new_frog_id}")
  end
end
end | 41.848485 | 86 | 0.664012 |
7a48fed85869633883366a2266191dbad646263d | 348 | require File.expand_path('../../config/environment', __FILE__)
require 'database_cleaner'
# Keep the test database isolated between examples: run each example inside
# a transaction (fast to roll back), after truncating once at suite start so
# rows left over from previous runs cannot leak into this one.
RSpec.configure do |config|
  config.before(:suite) do
    DatabaseCleaner.strategy = :transaction
    DatabaseCleaner.clean_with(:truncation)
  end
  # Wrap every example so any writes it performs are undone when it finishes.
  config.around(:each) do |example|
    DatabaseCleaner.cleaning do
      example.run
    end
  end
end
| 18.315789 | 62 | 0.724138 |
f83855cb6c429e07df5114150c25fb6236fd2426 | 4,913 | # frozen_string_literal: true
require 'dry/schema/result'
RSpec.describe Dry::Schema::Result, '#error?' do
subject(:result) { schema.(input) }
context 'with a flat structure' do
let(:schema) do
Dry::Schema.Params { required(:name).filled }
end
context 'when there is no error' do
let(:input) do
{ name: 'test' }
end
it 'returns false' do
expect(result.error?(:name)).to be(false)
end
end
context 'when there is an error' do
let(:input) do
{ name: '' }
end
it 'returns true' do
expect(result.error?(:name)).to be(true)
end
end
context 'when spec is invalid' do
let(:input) do
{ name: '' }
end
it 'raises error' do
expect { result.error?(Object.new) }
.to raise_error(ArgumentError, '+spec+ must be either a Symbol, Array, Hash or a Path')
end
end
context 'when spec is a path already' do
let(:input) do
{ name: '' }
end
it 'returns true when there is an error' do
expect(result.error?(Dry::Schema::Path[:name])).to be(true)
end
it 'returns false when there is no error' do
expect(result.error?(Dry::Schema::Path[:foo])).to be(false)
end
end
end
context 'with a nested hash' do
let(:schema) do
Dry::Schema.Params do
required(:user).hash do
required(:address).hash do
required(:street).filled
end
end
optional(:address).filled(:string)
end
end
context 'when there is no error' do
let(:input) do
{ user: { address: { street: 'test' } } }
end
it 'returns false for a hash spec' do
expect(result.error?(user: { address: :street })).to be(false)
end
it 'returns false for dot notation spec' do
expect(result.error?('user.address.street')).to be(false)
end
end
context 'when there is an error under matching key but in another branch' do
let(:input) do
{ user: { address: { street: 'test' } }, address: '' }
end
it 'returns false for a hash spec' do
expect(result.error?(user: { address: :street })).to be(false)
end
it 'returns false for dot notation spec' do
expect(result.error?('user.address.street')).to be(false)
end
end
context 'when there is an error under the last key' do
let(:input) do
{ user: { address: { street: '' } } }
end
it 'returns true for a hash spec' do
expect(result.error?(user: { address: :street })).to be(true)
end
it 'returns true for dot notation spec' do
expect(result.error?('user.address.street')).to be(true)
end
end
context 'when there is an error under one of the intermediate keys' do
let(:input) do
{ user: { address: nil } }
end
it 'returns true for a hash spec with the error' do
expect(result.error?(user: :address)).to be(true)
end
it 'returns true for dot notation spec with the error' do
expect(result.error?('user.address')).to be(true)
end
it 'returns false for a hash spec with no error' do
expect(result.error?(user: { address: :street })).to be(false)
end
it 'returns false for dot notation spec with no error' do
expect(result.error?('user.address.street')).to be(false)
end
end
end
context 'with an array' do
let(:schema) do
Dry::Schema.Params do
required(:tags).array(:str?)
end
end
context 'when there is no error' do
let(:input) do
{ tags: ['foo', 'bar'] }
end
it 'returns false for symbol key spec' do
expect(result.error?(:tags)).to be(false)
end
it 'returns false for a path with index' do
expect(result.error?([:tags, 0])).to be(false)
expect(result.error?([:tags, 1])).to be(false)
end
end
context 'when there is an error under key' do
let(:input) do
{ tags: nil }
end
it 'returns true for symbol key spec' do
expect(result.error?(:tags)).to be(true)
end
it 'returns false for a path with index' do
expect(result.error?([:tags, 0])).to be(false)
expect(result.error?([:tags, 1])).to be(false)
end
end
context 'when there is an error under one of the indices' do
let(:input) do
{ tags: ['foo', 312] }
end
it 'returns true for symbol key spec' do
expect(result.error?(:tags)).to be(true)
end
it 'returns true for a path with index with the error' do
expect(result.error?([:tags, 1])).to be(true)
end
it 'returns false for a path with index with no error' do
expect(result.error?([:tags, 0])).to be(false)
end
end
end
end
| 25.588542 | 97 | 0.575005 |
33295e129c7686cf53fa49655a1ba5d5a039bb8a | 541 | require_relative '../classes/author'
require_relative '../classes/item'
# Minimal unit specs for Author: constructor attribute assignment and the
# add_item association helper.
describe Author do
  context 'When testing the Author class' do
    it 'The initialize method should return create new Author object' do
      author = Author.new('Kerolous', 'Samy')
      expect(author.first_name).to eq 'Kerolous'
    end
    it 'The add_item method should add one item' do
      author = Author.new('Kerolous', 'Samy')
      item = Item.new('2022/2/2', false)
      # add_item should register the item in the author's items collection.
      author.add_item(item)
      expect(author.items.length).to be 1
    end
  end
end
| 28.473684 | 72 | 0.678373 |
874b361bd99b59b08b98474302a57dae162003b3 | 1,376 | =begin
Copyright 2010-2017 Sarosys LLC <http://www.sarosys.com>
This file is part of the Arachni Framework project and is subject to
redistribution and commercial restrictions. Please see the Arachni Framework
web site for more information on licensing and terms of use.
=end
module Arachni
require Options.paths.lib + 'plugin/manager'

module RPC
class Server

# @private
module Plugin

# We need to extend the original Manager and redeclare its inherited methods
# which are required over RPC.
#
# @author Tasos "Zapotek" Laskos <[email protected]>
class Manager < ::Arachni::Plugin::Manager

    # make these inherited methods visible again
    # (private-then-public forces Ruby to re-own the methods on this subclass
    # so the RPC layer can expose them — TODO confirm against RPC dispatcher)
    private :available, :loaded, :results
    public  :available, :loaded, :results

    # Loads the given plugins and stores their options on the framework.
    #
    # Accepts either an Array of plugin names (each gets options from
    # `@framework.options.plugins`, defaulting to `{}`) or a Hash of
    # `name => options`; normalizes to the Hash form before delegating.
    def load( plugins )
        if plugins.is_a?( Array )
            h = {}
            plugins.each { |p| h[p] = @framework.options.plugins[p] || {} }
            plugins = h
        end

        plugins.each do |plugin, opts|
            prepare_options( plugin, self[plugin], opts )
        end

        @framework.options.plugins.merge!( plugins )
        super( plugins.keys )
    end

    # Merges the plug-in results of multiple instances by delegating to
    # {Data::Plugins#merge_results}.
    def merge_results( results )
        Data.plugins.merge_results self, results
    end

end
end
end
end
end
| 24.571429 | 80 | 0.673692 |
034aa3c93de5295972b1e3c412ad1db2a69464d8 | 3,155 | require 'spec_helper'
module Codebreaker
  # Unit specs for Game: secret-code generation, guess comparison (+/- marks)
  # and the one-shot hint feature. The secret code is stubbed via
  # instance_variable_set so expected marks are deterministic.
  describe Game do
    context "#initialize" do
      it "saves 4 numbers secret code" do
        expect(subject.instance_variable_get(:@secret_code).size).to eq 4
      end
      it "saves secret code with numbers from 1 to 6" do
        expect(subject.instance_variable_get(:@secret_code)).to match /^[1-6]{4}$/
      end
      it "should respond_to @attempt" do
        expect(subject).to respond_to(:attempt)
      end
      it "should respond_to @hint" do
        expect(subject).to respond_to(:hint)
      end
      it "attempt set to 5" do
        expect(subject.attempt).to eq(5)
      end
    end
    context "#attempt=" do
      it "attempt set to 10" do
        subject.instance_variable_set(:@attempt, 10)
        expect(subject.instance_variable_get(:@attempt)).to eq(10)
      end
    end
    # Guess marking: '+' per digit in the right place, '-' per digit present
    # elsewhere; malformed guesses get the validation message.
    context "#compare" do
      before do
        subject.instance_variable_set(:@secret_code, '1134')
      end
      it "should return if code not a number" do
        expect(subject.compare 'f123').to eq('You entered is not a number, or a length of less than 4 or greater is present number 6')
      end
      it "should return if code present number > 6" do
        expect(subject.compare '1713').to eq('You entered is not a number, or a length of less than 4 or greater is present number 6')
      end
      it "should return if code length < 4" do
        # FIX: previously passed '1713' (length 4), which duplicated the
        # "number > 6" example above and left the length rule untested.
        # '171' is all valid digits but too short, isolating the length check.
        expect(subject.compare '171').to eq('You entered is not a number, or a length of less than 4 or greater is present number 6')
      end
      it "should return ++++" do
        expect(subject.compare '1134').to eq('++++')
      end
      it "should return +++" do
        expect(subject.compare '1131').to eq('+++')
      end
      it "should return ++--" do
        expect(subject.compare '4131').to eq('++--')
      end
      it "should return +-" do
        expect(subject.compare '5112').to eq('+-')
      end
      it "should return ----" do
        expect(subject.compare '3411').to eq('----')
      end
      it "should return -" do
        expect(subject.compare '6455').to eq('-')
      end
      it "should return +" do
        expect(subject.compare '5554').to eq('+')
      end
      it "should return empty string" do
        expect(subject.compare '5566').to eq('')
      end
    end
    # The hint reveals one random digit of the secret, masking the rest with
    # '*', and can be used only once (flag flips to false).
    context "#get_hint" do
      before do
        subject.instance_variable_set(:@secret_code, '1234')
      end
      it "hint should be true" do
        expect(subject.hint).to eq(true)
      end
      it "should set instance to false" do
        expect { subject.get_hint }.to change{ subject.hint }.to(false)
      end
      it "should return a hint" do
        expect(subject.get_hint).to include('*')
      end
      it "hint has 4 items" do
        expect(subject.get_hint.size).to eq(4)
      end
      it "should return **3*" do
        allow(subject).to receive(:rand).and_return(2)
        expect(subject.get_hint).to eq('**3*')
      end
      it "should return 1***" do
        allow(subject).to receive(:rand).and_return(0)
        expect(subject.get_hint).to eq('1***')
      end
    end
  end
end
| 24.84252 | 134 | 0.590491 |
e914e99cbab7b614809f47d6beb74e26d81a4226 | 235 | class ChangeProjectsIdentifierLimit < ActiveRecord::Migration[4.2]
# Drops the length limit on projects.identifier so identifiers of any
# length are accepted.
def self.up
  change_column :projects, :identifier, :string, limit: nil
end

# Restores the original 20-character limit when rolling back.
def self.down
  change_column :projects, :identifier, :string, limit: 20
end
end
| 23.5 | 66 | 0.744681 |
bbc7321250f1134d25b9f25b2b489694d3a9c49e | 3,624 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# Code is not reloaded between requests.
config.cache_classes = true

# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true

# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local       = false
config.action_controller.perform_caching = true

# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true

# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
# (Set RAILS_SERVE_STATIC_FILES when running behind no reverse proxy, e.g. Heroku.)
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'

# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local

# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true

# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug

# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]

# Use a different cache store in production.
# config.cache_store = :mem_cache_store

# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter     = :resque
# config.active_job.queue_name_prefix = "js_rails_countdown_app_#{Rails.env}"

config.action_mailer.perform_caching = false

# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false

# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true

# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify

# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new

# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

# Log to STDOUT instead of log/production.log when requested (common on
# container/PaaS platforms that capture process output).
if ENV["RAILS_LOG_TO_STDOUT"].present?
  logger           = ActiveSupport::Logger.new(STDOUT)
  logger.formatter = config.log_formatter
  config.logger    = ActiveSupport::TaggedLogging.new(logger)
end

# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
| 42.139535 | 100 | 0.759106 |
61bf81371e3dbc93861894d2ffe21763cc9c0564 | 3,837 | # frozen_string_literal: true
# When a user submits a form, that form must match the expected workflow_state.
# We don't adjust the state to match what the user is doing like we do for viewing forms.
# We expect to receive the name of the form (for example, location_form),
# a set of valid params, a set of invalid params, and an attribute to test persistence
# Default to :reg_identifier for forms which don't submit new data
# Shared behaviour for forms submitted without extra data: the submitted
# form must match the registration's current workflow_state, invalid tokens
# go to the "invalid" page, and expired registrations go to "unrenewable".
RSpec.shared_examples "POST without params form" do |form|
  context "when a valid user is signed in" do
    let(:user) { create(:user) }
    before(:each) do
      sign_in(user)
    end
    context "when the token is invalid" do
      it "redirects to the invalid page" do
        post_form_with_params(form, "foo")
        expect(response).to redirect_to(page_path("invalid"))
      end
    end
    context "when a renewal is in progress" do
      let(:transient_registration) do
        create(:renewing_registration,
               :has_required_data,
               :has_addresses,
               :has_key_people,
               :has_unpaid_balance,
               account_email: user.email)
      end
      context "when the workflow_state matches the requested form" do
        before do
          transient_registration.update_attributes(workflow_state: form)
        end
        context "when the params are valid" do
          # A successful submit advances the workflow to the next state.
          it "changes the workflow_state and returns a 302 response" do
            state_before_request = transient_registration[:workflow_state]
            post_form_with_params(form, transient_registration.token)
            expect(transient_registration.reload[:workflow_state]).to_not eq(state_before_request)
            expect(response).to have_http_status(302)
          end
        end
        context "when the token is invalid" do
          it "redirects to the invalid error page" do
            post_form_with_params(form, "foo")
            expect(response).to redirect_to(page_path("invalid"))
          end
        end
        context "when the registration cannot be renewed" do
          # Push expires_on outside the permitted grace window.
          before { transient_registration.update_attributes(expires_on: Date.today - Helpers::GraceWindows.current_grace_window) }
          it "does not update the transient registration, including workflow_state, and redirects to the unrenewable error page" do
            transient_reg_before_submitting = transient_registration
            post_form_with_params(form, transient_registration.token)
            expect(transient_registration.reload).to eq(transient_reg_before_submitting)
            expect(response).to redirect_to(page_path("unrenewable"))
          end
        end
      end
      context "when the workflow_state does not match the requested form" do
        before do
          # We need to pick a different but also valid state for the transient_registration
          # 'payment_summary_form' is the default, unless this would actually match!
          different_state = if form == "payment_summary_form"
                              "other_businesses_form"
                            else
                              "payment_summary_form"
                            end
          transient_registration.update_attributes(workflow_state: different_state)
        end
        it "does not update the transient_registration, including workflow_state, and redirects to the correct form for the workflow_state" do
          transient_reg_before_submitting = transient_registration
          workflow_state = transient_registration[:workflow_state]
          post_form_with_params(form, transient_registration.token)
          expect(transient_registration.reload).to eq(transient_reg_before_submitting)
          expect(response).to redirect_to(new_path_for(workflow_state, transient_registration))
        end
      end
    end
  end
end
| 39.96875 | 142 | 0.674746 |
ab66a09da21b9215802bba4a7e2dffa44392d37c | 2,022 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
require "json"
package = JSON.parse(File.read(File.join(__dir__, "..", "package.json")))
version = package['version']
folly_flags = '-DFOLLY_NO_CONFIG -DFOLLY_MOBILE=1 -DFOLLY_USE_LIBCPP=1'
folly_compiler_flags = folly_flags + ' ' + '-Wno-comma -Wno-shorten-64-to-32'
folly_version = '2020.01.13.00'
boost_compiler_flags = '-Wno-documentation'
Pod::Spec.new do |s|
s.name = "ABI39_0_0React-RCTFabric"
s.version = version
s.summary = "RCTFabric for React Native."
s.homepage = "https://reactnative.dev/"
s.license = package["license"]
s.author = "Facebook, Inc. and its affiliates"
s.platforms = { :ios => "10.0", :tvos => "10.0" }
s.source = { :path => "." }
s.source_files = "Fabric/**/*.{c,h,m,mm,S,cpp}"
s.exclude_files = "**/tests/*",
"**/android/*",
s.compiler_flags = folly_compiler_flags + ' ' + boost_compiler_flags
s.header_dir = "ABI39_0_0React"
s.framework = "JavaScriptCore"
s.library = "stdc++"
s.pod_target_xcconfig = { "HEADER_SEARCH_PATHS" => "\"$(PODS_TARGET_SRCROOT)/ReactCommon\" \"$(PODS_ROOT)/boost-for-react-native\" \"$(PODS_ROOT)/DoubleConversion\" \"$(PODS_ROOT)/Folly\" \"$(PODS_ROOT)/Headers/Private/React-Core\"" }
s.xcconfig = { "HEADER_SEARCH_PATHS" => "\"$(PODS_ROOT)/boost-for-react-native\" \"$(PODS_ROOT)/glog\" \"$(PODS_ROOT)/Folly\"",
"OTHER_CFLAGS" => "$(inherited) -DRN_FABRIC_ENABLED" + " " + folly_flags }
s.dependency "ABI39_0_0React-Core", version
s.dependency "ABI39_0_0React-Fabric", version
s.dependency "ABI39_0_0React-RCTImage", version
s.dependency "Folly/Fabric", folly_version
end
| 47.023256 | 239 | 0.601879 |
39c5e11622874e17e0c2d15f55792c6965439c19 | 5,166 | require "language_pack/shell_helpers"
module LanguagePack
class RubyVersion
class BadVersionError < BuildpackError
def initialize(output = "")
msg = ""
msg << output
msg << "Can not parse Ruby Version:\n"
msg << "Valid versions listed on: https://devcenter.heroku.com/articles/ruby-support\n"
super msg
end
end
BOOTSTRAP_VERSION_NUMBER = "3.1.2".freeze
DEFAULT_VERSION_NUMBER = "3.0.3".freeze
DEFAULT_VERSION = "ruby-#{DEFAULT_VERSION_NUMBER}".freeze
LEGACY_VERSION_NUMBER = "1.9.2".freeze
LEGACY_VERSION = "ruby-#{LEGACY_VERSION_NUMBER}".freeze
RUBY_VERSION_REGEX = %r{
(?<ruby_version>\d+\.\d+\.\d+){0}
(?<patchlevel>p-?\d+){0}
(?<engine>\w+){0}
(?<engine_version>.+){0}
ruby-\g<ruby_version>(-\g<patchlevel>)?(-\g<engine>-\g<engine_version>)?
}x
attr_reader :set, :version, :version_without_patchlevel, :patchlevel, :engine, :ruby_version, :engine_version
include LanguagePack::ShellHelpers
def initialize(bundler_output, app = {})
@set = nil
@bundler_output = bundler_output
@app = app
set_version
parse_version
@version_without_patchlevel = @version.sub(/-p-?\d+/, '')
end
def warn_ruby_26_bundler?
return false if Gem::Version.new(self.ruby_version) >= Gem::Version.new("2.6.3")
return false if Gem::Version.new(self.ruby_version) < Gem::Version.new("2.6.0")
return true
end
def ruby_192_or_lower?
Gem::Version.new(self.ruby_version) <= Gem::Version.new("1.9.2")
end
# https://github.com/bundler/bundler/issues/4621
def version_for_download
if rbx?
"rubinius-#{engine_version}"
elsif patchlevel_is_significant? && @patchlevel && @patchlevel.sub(/p/, '').to_i >= 0
@version
else
version_without_patchlevel
end
end
def file_name
file = "#{version_for_download}.tgz"
file.sub!("ruby", "ruby-build") if build?
file
end
# Before Ruby 2.1 patch releases were done via patchlevel i.e. 1.9.3-p426 versus 1.9.3-p448
# With 2.1 and above patches are released in the "minor" version instead i.e. 2.1.0 versus 2.1.1
def patchlevel_is_significant?
!jruby? && Gem::Version.new(self.ruby_version) <= Gem::Version.new("2.1")
end
def rake_is_vendored?
Gem::Version.new(self.ruby_version) >= Gem::Version.new("1.9")
end
def default?
@version == none
end
# determine if we're using jruby
# @return [Boolean] true if we are and false if we aren't
def jruby?
engine == :jruby
end
# determine if we're using rbx
# @return [Boolean] true if we are and false if we aren't
def rbx?
engine == :rbx
end
# determines if a build ruby is required
# @return [Boolean] true if a build ruby is required
def build?
engine == :ruby && %w(1.8.7 1.9.2).include?(ruby_version)
end
# convert to a Gemfile ruby DSL incantation
# @return [String] the string representation of the Gemfile ruby DSL
def to_gemfile
if @engine == :ruby
"ruby '#{ruby_version}'"
else
"ruby '#{ruby_version}', :engine => '#{engine}', :engine_version => '#{engine_version}'"
end
end
# does this vendor bundler
def vendored_bundler?
false
end
# Returns the next logical version in the minor series
# for example if the current ruby version is
# `ruby-2.3.1` then then `next_logical_version(1)`
# will produce `ruby-2.3.2`.
def next_logical_version(increment = 1)
return false if patchlevel_is_significant?
split_version = @version_without_patchlevel.split(".")
teeny = split_version.pop
split_version << teeny.to_i + increment
split_version.join(".")
end
def next_minor_version(increment = 1)
split_version = @version_without_patchlevel.split(".")
split_version[1] = split_version[1].to_i + increment
split_version[2] = 0
split_version.join(".")
end
def next_major_version(increment = 1)
split_version = @version_without_patchlevel.split("-").last.split(".")
split_version[0] = Integer(split_version[0]) + increment
split_version[1] = 0
split_version[2] = 0
return "ruby-#{split_version.join(".")}"
end
private
def none
if @app[:is_new]
DEFAULT_VERSION
elsif @app[:last_version]
@app[:last_version]
else
LEGACY_VERSION
end
end
def set_version
if @bundler_output.empty?
@set = false
@version = none
else
@set = :gemfile
@version = @bundler_output
end
end
def parse_version
md = RUBY_VERSION_REGEX.match(version)
raise BadVersionError.new("'#{version}' is not valid") unless md
@ruby_version = md[:ruby_version]
@patchlevel = md[:patchlevel]
@engine_version = md[:engine_version] || @ruby_version
@engine = (md[:engine] || :ruby).to_sym
end
end
end
| 29.352273 | 113 | 0.618854 |
794194a55b72fb60b8984bb68cb0d03d77f94d92 | 750 | Pod::Spec.new do |s|
# Pod identity and release metadata.
s.name             = 'SwiftSMSAnalyzer'
s.version          = '0.1.7'
s.summary          = 'A small helper class which analyzes the SMS text.'

s.description      = <<-DESC
It analyzes text for details as Encoding - UTF16, GSM 7 bit, Number of messages, Characters per message.
                       DESC

s.homepage         = 'https://github.com/nareshdb/SwiftSMSAnalyzer'
s.license          = { :type => 'MIT', :file => 'LICENSE' }
s.author           = { 'Naresh Bhadresha' => '[email protected]' }
# Source is fetched from git at the tag matching this spec's version.
s.source           = { :git => 'https://github.com/nareshdb/SwiftSMSAnalyzer.git', :tag => s.version.to_s }

s.ios.deployment_target = '10.0'
# Single-file pod: everything lives in this one Swift source.
s.source_files = 'Resources/SwiftSMSAnalyzer.swift'
end
| 41.666667 | 109 | 0.597333 |
e8fb3ff7d4f22321f31830512bf336246fb8e43d | 463 | class Gift
# Randomly assigns a "secret santa" to every member of the given famorg.
#
# Shuffles the members, then pairs each one with the member after it in the
# shuffled order (the last wraps around to the first), so nobody can draw
# themselves. The pairing is persisted on each member's UserFamorg row.
# (season_id is currently unused but kept for interface compatibility.)
def assign(famorg_id, season_id)
  members = Famorg.find(famorg_id).users.shuffle
  # rotate(1) moves the first id to the end — equivalent to shifting the
  # first element off and appending it, i.e. "everyone gets the next person".
  santa_ids = members.map(&:id).rotate(1)

  members.zip(santa_ids).each do |member, santa_id|
    membership = UserFamorg.find_by(user_id: member.id, famorg_id: famorg_id)
    puts "#{member.id} --> #{santa_id}"
    membership.update_attributes(santa_id: santa_id)
  end
end
end
| 28.9375 | 81 | 0.656587 |
f8f8622ab79bebcd1b7ba626f12e4558b230dd46 | 2,020 | class Lubyfisbang::Scraper
# Builds a scraper around an HTTP agent.
#
# FIX: the original accepted an +agent+ parameter but always discarded it and
# built a fresh Mechanize agent, making the parameter dead and the class
# impossible to stub in tests. Honor the injected agent when one is given.
#
# @param agent [Object, nil] Mechanize-compatible agent; defaults to Mechanize.new
def initialize(agent = nil)
  @agent = agent || Mechanize.new
end
# Interactively signs the agent in to Meetup.com: prompts for email on
# stdin and reads the password without echo, then submits the login form.
# Returns self so calls can be chained.
#
# NOTE(review): `page.forms[1]` assumes the login form is always the second
# form on the page — brittle if Meetup changes its markup; confirm.
# STDIN.noecho requires 'io/console' — presumably required elsewhere; verify.
def sign_in
  page = @agent.get('https://secure.meetup.com/login/')
  sign_in = page.forms[1]
  puts "\"Satellite from days of old, lead me to your access code.\""
  puts "Sign in to Meetup.com"
  puts "Email: "
  sign_in.email = gets.chomp
  puts "Password: "
  sign_in.password = STDIN.noecho(&:gets).chomp
  page = @agent.submit(sign_in)
  self
end
end
# Fetches the signed-in user's Meetup API key by scraping the hidden
# #api-key-reveal input on the API-key page. Requires sign_in to have
# happened first — presumably raises NoMethodError on nil otherwise; verify.
def get_api_key
  api_key = @agent.get('https://secure.meetup.com/meetup_api/key/').css("#api-key-reveal").first.attribute("value").text
end
# Signs in, then queries the Meetup v2 events API for past events the user
# RSVP'd "yes" to, returning the parsed "results" array of event hashes.
def parse_meetups
  sign_in
  url = "https://api.meetup.com/2/events?key=#{get_api_key}&sign=true&photo-host=public&rsvp=yes&status=past"
  results = @agent.get(url)
  save = results.content
  parsed_results = JSON.parse(save)
  # Only the event list is needed; pagination metadata is discarded.
  parsed_results["results"]
end
# Materializes domain objects for every fetched event: registers the
# attribute keys with the menu, then builds Meetup plus optional
# GroupDetails/VenueDetails objects (presumably self-registering in their
# classes' collections, since return values are discarded — confirm).
def get_meetups
  menu = Lubyfisbang::Menu.new
  parse_meetups.each do |attributes|
    menu.get_attribute_options(attributes)
    Lubyfisbang::Meetup.new(attributes)
    Lubyfisbang::GroupDetails.new(attributes["group"]) if attributes["group"]
    Lubyfisbang::VenueDetails.new(attributes["venue"]) if attributes["venue"]
  end
end
# Lists known meetups, asks the user to paste one meetup's URL, then fetches
# and prints that event's photos (link, album title, caption, poster) from
# the Meetup photos API. The urlname/event id are parsed from the URL path.
def get_pictures
  Lubyfisbang::Meetup.all.each_with_index do |meetup, index|
    puts "#{index}. #{meetup.name}"
    puts meetup.event_url
    puts
  end
  puts "Copy and paste the url of the meetup you want: "
  input = gets.chomp
  # Expected path shape: /<urlname>/events/<event_id>/ — TODO confirm; a
  # malformed paste would yield nils here.
  uri = URI(input)
  urlname = uri.path.split('/')[1]
  event_id = uri.path.split('/')[3]
  sign_in
  url = "https://api.meetup.com/#{urlname}/events/#{event_id}/photos?key=#{get_api_key}&sign=true&photo-host=public"
  results = @agent.get(url)
  save = results.content
  parsed_results = JSON.parse(save)
  parsed_results.each do |hash|
    puts hash["photo_link"]
    puts hash["photo_album"]["title"]
    puts hash["caption"]
    puts "Posted by: #{hash["member"]["name"]}"
  end
end
end | 30.149254 | 122 | 0.659901 |
edb20c759859eb0e0257835ef12d51dbb400fbf4 | 766 | require 'mkmf'
require 'numo/narray'
require_relative '../mkmf_linalg'

# Object files for the extension: the dispatcher plus one wrapper per
# LAPACK precision (s/d/c/z).
srcs = %w(
  lapack
  lapack_s
  lapack_d
  lapack_c
  lapack_z
)
$objs = srcs.collect{|i| i+".o"}

dir_config("narray")

# Numo::NArray headers are mandatory; bail out with a hint if absent.
find_narray_h
if !have_header("numo/narray.h")
  puts "
Header numo/narray.h was not found. Give pathname as follows:
% ruby extconf.rb --with-narray-include=narray_h_dir"
  exit(1)
end

# On Windows-ish platforms the extension must link against libnarray.a
# explicitly (symbols are not resolved lazily as on ELF platforms).
if RUBY_PLATFORM =~ /cygwin|mingw/
  find_libnarray_a
  unless have_library("narray","nary_new")
    puts "libnarray.a not found"
    exit(1)
  end
end

# LAPACK backends are loaded at runtime: POSIX dlopen or Win32 LoadLibrary.
if have_header("dlfcn.h")
  exit(1) unless have_library("dl")
  exit(1) unless have_func("dlopen")
elsif have_header("windows.h")
  exit(1) unless have_func("LoadLibrary")
end

create_depend(__dir__)
create_makefile('numo/linalg/lapack')
| 18.682927 | 63 | 0.72846 |
628c97d8a2dfe4ef451dd594118dab131b82d8c0 | 4,101 | # frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!
require 'ansible/ruby/modules/base'
module Ansible
  module Ruby
    module Modules
      # This module allows configuring block device partition using the C(parted) command line tool. For a full description of the fields and the options check the GNU parted manual.
      #
      # NOTE: this file is generated (see header) — regenerate rather than
      # hand-editing; comments here will be lost on regeneration.
      class Parted < Base
        # @return [String] The block device (disk) where to operate.
        attribute :device
        validates :device, presence: true, type: String

        # @return [:none, :cylinder, :minimal, :optimal, nil] Set alignment for newly created partitions.
        attribute :align
        validates :align, expression_inclusion: {:in=>[:none, :cylinder, :minimal, :optimal], :message=>"%{value} needs to be :none, :cylinder, :minimal, :optimal"}, allow_nil: true

        # @return [Integer, String, nil] The number of the partition to work with or the number of the partition that will be created. Required when performing any action on the disk, except fetching information.
        attribute :number
        validates :number, type: MultipleTypes.new(Integer, String)

        # @return [:s, :B, :KB, :KiB, :MB, :MiB, :GB, :GiB, :TB, :TiB, :%, :cyl, :chs, :compact, nil] Selects the current default unit that Parted will use to display locations and capacities on the disk and to interpret those given by the user if they are not suffixed by an unit. When fetching information about a disk, it is always recommended to specify a unit.
        attribute :unit
        validates :unit, expression_inclusion: {:in=>[:s, :B, :KB, :KiB, :MB, :MiB, :GB, :GiB, :TB, :TiB, :%, :cyl, :chs, :compact], :message=>"%{value} needs to be :s, :B, :KB, :KiB, :MB, :MiB, :GB, :GiB, :TB, :TiB, :%, :cyl, :chs, :compact"}, allow_nil: true

        # @return [:aix, :amiga, :bsd, :dvh, :gpt, :loop, :mac, :msdos, :pc98, :sun, nil] Creates a new disk label.
        attribute :label
        validates :label, expression_inclusion: {:in=>[:aix, :amiga, :bsd, :dvh, :gpt, :loop, :mac, :msdos, :pc98, :sun], :message=>"%{value} needs to be :aix, :amiga, :bsd, :dvh, :gpt, :loop, :mac, :msdos, :pc98, :sun"}, allow_nil: true

        # @return [:primary, :extended, :logical, nil] Is one of 'primary', 'extended' or 'logical' and may be specified only with 'msdos' or 'dvh' partition tables. A name must be specified for a 'gpt' partition table. Neither part-type nor name may be used with a 'sun' partition table.
        attribute :part_type
        validates :part_type, expression_inclusion: {:in=>[:primary, :extended, :logical], :message=>"%{value} needs to be :primary, :extended, :logical"}, allow_nil: true

        # @return [String, nil] Where the partition will start as offset from the beginning of the disk, that is, the "distance" from the start of the disk. The distance can be specified with all the units supported by parted (except compat) and it is case sensitive. E.g. C(10GiB), C(15%).
        attribute :part_start
        validates :part_start, type: String

        # @return [String, nil] Where the partition will end as offset from the beginning of the disk, that is, the "distance" from the start of the disk. The distance can be specified with all the units supported by parted (except compat) and it is case sensitive. E.g. C(10GiB), C(15%).
        attribute :part_end
        validates :part_end, type: String

        # @return [Object, nil] Sets the name for the partition number (GPT, Mac, MIPS and PC98 only).
        attribute :name

        # @return [Array<String>, String, nil] A list of the flags that has to be set on the partition.
        attribute :flags
        validates :flags, type: TypeGeneric.new(String)

        # @return [:present, :absent, :info, nil] If to create or delete a partition. If set to C(info) the module will only return the device information.
        attribute :state
        validates :state, expression_inclusion: {:in=>[:present, :absent, :info], :message=>"%{value} needs to be :present, :absent, :info"}, allow_nil: true
      end
    end
  end
end
| 71.947368 | 365 | 0.672519 |
1a646309d7bad0726b5480e19bf560c10e950db9 | 2,183 | $:.push File.expand_path("../lib", __FILE__)
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'grape/transformations/version'

# Gem specification for grape-transformations.
Gem::Specification.new do |spec|
  spec.name          = "grape-transformations"
  spec.version       = Grape::Transformations::VERSION
  spec.authors       = ["Johan Tique", "Miguel Diaz"]
  spec.email         = ["[email protected]", "[email protected]"]
  spec.summary       = %q{grape-transformations decouples your entities from your models and also organizes and lets you use multiple entities per model }
  spec.description   = %q{grape-transformations your entities from your models and also organizes and lets you use multiple entities per model }
  spec.homepage      = "https://github.com/codescrum/grape-transformations"
  spec.license       = "MIT"

  # NOTE(review): spec.files is assigned twice. This first (git-based)
  # assignment is still significant because spec.executables is derived
  # from it just below; the Dir[...] assignment further down is the list
  # that actually ships in the packaged gem.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  #spec.test_files    = spec.files.grep(%r{^(test|spec|features|generators)/})
  spec.test_files    = Dir["spec/**/*"]
  spec.require_paths = ["lib"]
  # Final packaged file list -- overrides the git-based listing above.
  spec.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]

  spec.add_development_dependency "rails", ">= 4.1.7"
  spec.add_development_dependency "bundler", ">= 1.5"
  spec.add_development_dependency "rake", ">= 10.3.2"
  spec.add_development_dependency 'rspec', ">= 3.1.0"
  spec.add_development_dependency 'ammeter', ">= 1.1.2"
  spec.add_development_dependency 'sqlite3', ">= 1.3.10"
  spec.add_development_dependency 'virtus', ">= 1.0.3"
  spec.add_development_dependency 'railties', ">= 4.1.7"
  spec.add_development_dependency 'rspec-rails', ">= 3.1.0"
  spec.add_development_dependency 'activesupport', ">= 4.1.7"
  spec.add_development_dependency 'pry-byebug', ">= 2.0.0"
  spec.add_development_dependency 'simplecov', ">= 0.8.2"
  spec.add_development_dependency 'rspec-mocks', ">= 3.1.0"
  spec.add_development_dependency 'codeclimate-test-reporter', '0.4.0'
  spec.add_runtime_dependency 'grape', '>= 0.7.0', '<= 0.9.0'
  spec.add_runtime_dependency 'grape-entity', '~> 0.4.0'
end
d5337c98a6844630cd4c08ed44ad78b63fd196f0 | 6,056 | require 'set'
require 'concurrent/synchronization'
module Concurrent
# A `TVar` is a transactional variable - a single-element container that
# is used as part of a transaction - see `Concurrent::atomically`.
#
# @!macro thread_safe_variable_comparison
#
# {include:file:doc/tvar.md}
class TVar < Synchronization::Object
safe_initialization!
# Create a new `TVar` with an initial value.
def initialize(value)
@value = value
@version = 0
@lock = Mutex.new
end
# Get the value of a `TVar`.
def value
Concurrent::atomically do
Transaction::current.read(self)
end
end
# Set the value of a `TVar`.
def value=(value)
Concurrent::atomically do
Transaction::current.write(self, value)
end
end
# @!visibility private
def unsafe_value # :nodoc:
@value
end
# @!visibility private
def unsafe_value=(value) # :nodoc:
@value = value
end
# @!visibility private
def unsafe_version # :nodoc:
@version
end
# @!visibility private
def unsafe_increment_version # :nodoc:
@version += 1
end
# @!visibility private
def unsafe_lock # :nodoc:
@lock
end
end
# Run a block that reads and writes `TVar`s as a single atomic transaction.
# With respect to the value of `TVar` objects, the transaction is atomic, in
# that it either happens or it does not, consistent, in that the `TVar`
# objects involved will never enter an illegal state, and isolated, in that
# transactions never interfere with each other. You may recognise these
# properties from database transactions.
#
# There are some very important and unusual semantics that you must be aware of:
#
# * Most importantly, the block that you pass to atomically may be executed
# more than once. In most cases your code should be free of
# side-effects, except for via TVar.
#
# * If an exception escapes an atomically block it will abort the transaction.
#
# * It is undefined behaviour to use callcc or Fiber with atomically.
#
# * If you create a new thread within an atomically, it will not be part of
# the transaction. Creating a thread counts as a side-effect.
#
# Transactions within transactions are flattened to a single transaction.
#
# @example
# a = new TVar(100_000)
# b = new TVar(100)
#
# Concurrent::atomically do
# a.value -= 10
# b.value += 10
# end
def atomically
raise ArgumentError.new('no block given') unless block_given?
# Get the current transaction
transaction = Transaction::current
# Are we not already in a transaction (not nested)?
if transaction.nil?
# New transaction
begin
# Retry loop
loop do
# Create a new transaction
transaction = Transaction.new
Transaction::current = transaction
# Run the block, aborting on exceptions
begin
result = yield
rescue Transaction::AbortError => e
transaction.abort
result = Transaction::ABORTED
rescue Transaction::LeaveError => e
transaction.abort
break result
rescue => e
transaction.abort
raise e
end
# If we can commit, break out of the loop
if result != Transaction::ABORTED
if transaction.commit
break result
end
end
end
ensure
# Clear the current transaction
Transaction::current = nil
end
else
# Nested transaction - flatten it and just run the block
yield
end
end
# Abort a currently running transaction - see `Concurrent::atomically`.
def abort_transaction
raise Transaction::AbortError.new
end
# Leave a transaction without committing or aborting - see `Concurrent::atomically`.
def leave_transaction
raise Transaction::LeaveError.new
end
module_function :atomically, :abort_transaction, :leave_transaction
private
class Transaction
ABORTED = ::Object.new
ReadLogEntry = Struct.new(:tvar, :version)
AbortError = Class.new(StandardError)
LeaveError = Class.new(StandardError)
def initialize
@read_log = []
@write_log = {}
end
def read(tvar)
Concurrent::abort_transaction unless valid?
if @write_log.has_key? tvar
@write_log[tvar]
else
@read_log.push(ReadLogEntry.new(tvar, tvar.unsafe_version))
tvar.unsafe_value
end
end
def write(tvar, value)
# Have we already written to this TVar?
unless @write_log.has_key? tvar
# Try to lock the TVar
unless tvar.unsafe_lock.try_lock
# Someone else is writing to this TVar - abort
Concurrent::abort_transaction
end
# If we previously wrote to it, check the version hasn't changed
@read_log.each do |log_entry|
if log_entry.tvar == tvar and tvar.unsafe_version > log_entry.version
Concurrent::abort_transaction
end
end
end
# Record the value written
@write_log[tvar] = value
end
def abort
unlock
end
def commit
return false unless valid?
@write_log.each_pair do |tvar, value|
tvar.unsafe_value = value
tvar.unsafe_increment_version
end
unlock
true
end
def valid?
@read_log.each do |log_entry|
unless @write_log.has_key? log_entry.tvar
if log_entry.tvar.unsafe_version > log_entry.version
return false
end
end
end
true
end
def unlock
@write_log.each_key do |tvar|
tvar.unsafe_lock.unlock
end
end
def self.current
Thread.current[:current_tvar_transaction]
end
def self.current=(transaction)
Thread.current[:current_tvar_transaction] = transaction
end
end
end
| 23.382239 | 86 | 0.630284 |
26f60d1b12bc101bee28784dacc7a64f02961f34 | 1,232 | class SendgridApi
  extend SingleForwardable

  # Expose the instance API as class-level methods, constructing a fresh
  # instance per call (e.g. SendgridApi.bounced?(email)).
  def_single_delegators :new, :spam_reported?, :bounced?,
    :remove_from_bounce_list, :remove_from_spam_list

  # Errors that can occur while retrieving report lists from SendGrid.
  RETRIEVAL_ERRORS = [JSON::ParserError, SendgridToolkit::APIError]

  # True when SendGrid has at least one spam report for the address;
  # false on API/parse errors; nil when credentials are not configured.
  def spam_reported?(email)
    api(RETRIEVAL_ERRORS) { spam_reports.retrieve(email: email).any? }
  end

  # True when SendGrid has at least one bounce recorded for the address.
  def bounced?(email)
    api(RETRIEVAL_ERRORS) { bounces.retrieve(email: email).any? }
  end

  # Error raised when deleting an address that is not on the list.
  REMOVAL_ERRORS = [SendgridToolkit::EmailDoesNotExist]

  # Remove the address from SendGrid's bounce list.
  def remove_from_bounce_list(email)
    api(REMOVAL_ERRORS) { bounces.delete(email: email) }
  end

  # Remove the address from SendGrid's spam-report list.
  def remove_from_spam_list(email)
    api(REMOVAL_ERRORS) { spam_reports.delete(email: email) }
  end

  private

  # Runs the API call only when credentials are configured (returns nil
  # otherwise, since the body is skipped); converts the given error
  # classes into a false return value.
  def api(rescue_from_errors, &_action)
    yield if can_access_sendgrid?
  rescue *rescue_from_errors
    false
  end

  def spam_reports
    SendgridToolkit::SpamReports.new(user_name, password)
  end

  def bounces
    SendgridToolkit::Bounces.new(user_name, password)
  end

  # Truthy when both SMTP credentials are present in the mailer config.
  def can_access_sendgrid?
    user_name && password
  end

  def user_name
    smtp_settings[:user_name]
  end

  def password
    smtp_settings[:password]
  end

  # ActionMailer SMTP settings hash ({} when unset).
  def smtp_settings
    Rails.configuration.action_mailer.smtp_settings || {}
  end
end
| 20.881356 | 70 | 0.741883 |
e2aafc00cbfdecb52c0e9053430dc5541bc7417d | 731 | # Add two numbers represented by linked lists
# Input:
# First List: 7->5->9->4->6 // represents number 64957
# Second List: 8->4 // represents number 48
# Output
# Resultant list: 5->0->0->5->6 // represents number 65005
# Adds two numbers stored as linked lists of decimal digits in reverse
# order (least significant digit at the head). E.g. 7->5->9->4->6 encodes
# 64957 and 8->4 encodes 48; their sum is 65005.
#
# Each list must respond to #head; each node to #value and #next.
#
# @param list1 [Object] first reversed-digit list
# @param list2 [Object] second reversed-digit list
# @return [Integer, false] the Integer sum, or false when either list is nil
def add_reversed_digits(list1, list2)
  return false unless list1 && list2

  list_to_number(list1) + list_to_number(list2)
end

# Converts a reversed-digit linked list into the Integer it encodes.
# Collects digits head-to-tail, then reverses so the most significant
# digit comes first. Tolerates an empty list (nil head), returning 0 --
# the original traversal crashed on that case.
def list_to_number(list)
  digits = []
  node = list.head
  while node
    digits << node.value.to_i
    node = node.next
  end
  digits.reverse.join.to_i
end
01c1f1bb5e2d5c23200c2c180bac0b4a7002b1d7 | 536 | module Hippo_eyeDoc::TransactionSets
module HIPAA_271
class L2115C < Hippo_eyeDoc::TransactionSets::Base
loop_name 'L2115C' #Subscriber Eligibility or Benefit Additional Information
#Subscriber Eligibility or Benefit Additional Information
segment Hippo_eyeDoc::Segments::III,
:name => 'Subscriber Eligibility or Benefit Additional Information',
:minimum => 0,
:maximum => 1,
:position => 1710
end
end
end
| 31.529412 | 94 | 0.613806 |
0816bebde2c3f89a241896f3eeea141ace47d7ae | 325 | # frozen_string_literal: true
module API
  module Entities
    # Grape entity describing where a project's repository is stored.
    # NOTE: exposure order determines serialized field order -- keep it.
    class ProjectRepositoryStorage < Grape::Entity
      include Gitlab::Routing

      # Relative path of the repository on its storage shard.
      expose :disk_path do |project|
        project.repository.disk_path
      end

      expose :id, as: :project_id
      expose :repository_storage, :created_at
    end
  end
end
39af49b683d1140ec0fa4a3de65cc1c0cfaac82e | 731 | require 'spec_helper'
# Specs for Admin::Authenticator#authenticate. Example names are in
# Japanese; English translations are given in trailing comments.
describe Admin::Authenticator do
  describe '#authenticate' do
    example '正しいパスワードならtrueを返す' do # returns true for the correct password
      m = build(:administrator)
      expect(Admin::Authenticator.new(m).authenticate('pw')).to be_truthy
    end

    example '誤ったパスワードならfalseを返す' do # returns false for a wrong password
      m = build(:administrator)
      expect(Admin::Authenticator.new(m).authenticate('xy')).to be_falsey
    end

    example 'パスワード未設定ならfalseを返す' do # returns false when no password is set
      m = build(:administrator, password: nil)
      expect(Admin::Authenticator.new(m).authenticate(nil)).to be_falsey
    end

    example '停止フラグが立っていればfalseを返す' do # returns false when the account is suspended
      m = build(:administrator, suspended: true)
      expect(Admin::Authenticator.new(m).authenticate('pw')).to be_falsey
    end
  end
end
| 28.115385 | 73 | 0.694938 |
184200c64f7318b1fbd805099e7ab83b2b7256cc | 4,214 | =begin
##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
=end
require 'json'

begin
  @log.trace("Started execution of 'flint-util:jenkins:lastfailed_build.rb' flintbit..")

  # ---- Configuration (Jenkins connection details) --------------------------
  @jenkins_username  = @config.global("jenkins_build.username")              # Jenkins user name
  @jenkins_api_token = @config.global("jenkins_build.apitoken")              # Jenkins API token
  @jenkins_host      = @config.global("jenkins_build.jenkins_host")          # Jenkins base URL
  @lastfailedBuild   = @config.global("jenkins_build.last_failed_build_url") # lastFailedBuild API path suffix
  @crumburl          = @config.global("jenkins_build.crumb_url")             # CSRF crumb issuer URL

  # ---- Inputs --------------------------------------------------------------
  @id           = @input.get('id')
  @mention_name = @input.get('mention_name') # chat-tool user to address in the reply
  @build_name   = @input.get('build_name')   # Jenkins job name (may contain spaces)

  # Build the status URL without mutating the config-supplied strings.
  # The original used String#<< and gsub!, which append to / rewrite the
  # receiver in place, silently corrupting @jenkins_host and
  # @jenkins_username for any later use of the same config values.
  encoded_build_name = @build_name.gsub(' ', '%20') # URL-encode spaces in the job name
  @status_url = "#{@jenkins_host}#{encoded_build_name}#{@lastfailedBuild}"

  # HTTP Basic auth header value: "Basic base64(user:token)".
  @concatenate_authorization = "Basic #{@util.encode64("#{@jenkins_username}:#{@jenkins_api_token}")}"

  # ---- Fetch the CSRF crumb ------------------------------------------------
  response_crumb = @call.connector("http")
                        .set("method", "GET")
                        .set("url", @crumburl)
                        .set("headers", "Authorization:#{@concatenate_authorization}")
                        .set("body", "abc")
                        .set("timeout", 300000)
                        .sync

  @exitcode = response_crumb.exitcode
  @crum_message = response_crumb.message
  response_body1 = @util.json(response_crumb.get("body")) # parsed crumb response
  @crumb = response_body1.get("crumb")
  @log.info("crumbLastBuild::: #{@crumb}")

  if @exitcode == 0
    # ---- Query the last failed build --------------------------------------
    response_buildstatus = @call.connector("http")
                                .set("method", "GET")
                                .set("url", @status_url)
                                .set("headers", ["Authorization:#{@concatenate_authorization}", "Jenkins-Crumb:#{@crumb}"])
                                .set("body", "abc")
                                .set("timeout", 300000)
                                .sync
    response_body = response_buildstatus.get("body")
    @log.info("response>>>: #{response_body}")
  else
    # Crumb retrieval failed: stop here instead of falling through and
    # crashing on an undefined response object (the original did the latter,
    # producing a confusing "undefined local variable" reply).
    @log.error("message : #{@crum_message}")
    raise "unable to obtain Jenkins crumb: #{@crum_message}"
  end

  @buildstatus_exitcode = response_buildstatus.exitcode
  @buildstatus_message = response_buildstatus.message

  responseJson = @util.json(response_body)
  @responseResult = responseJson.get('result')
  @responseFullDisplayName = responseJson.get('fullDisplayName')
  @responseUrl = responseJson.get('url')

  if @buildstatus_exitcode == 0
    @log.info("Success in getting last build status ")
    @reply_message = 'Hello @' + @mention_name + ',Last failed Build status is : ' + @responseResult + ' |Full Build-name: ' + @responseFullDisplayName + ' |Build URL: ' + @responseUrl
    @output.set("reply_message", @reply_message)
  else
    @log.info("message : #{@buildstatus_message}| There is no last failed build found for #{@build_name}")
    @reply_message = 'Hello @' + @mention_name + ', There is no last failed build found for build-name: ' + @build_name
    @output.set("reply_message", @reply_message)
  end
rescue Exception => e
  # Intentional catch-all: a flintbit must always report failure through
  # @output rather than let any exception escape the script.
  @log.error(e.message)
  @reply_message = 'Hello @' + @mention_name + ',Failed in getting build status of: ' + @build_name + ' due to ' + e.message + ''
  @output.set('exit-code', 1).set('message', e.message).set("reply_message", @reply_message)
end

@log.trace("Finished execution of 'flint-util:jenkins:lastfailed_build.rb' flintbit..")
62d306db702db3b76b980ea3c8a6d52b21a547b3 | 1,671 | RSpec.describe Gamefic::Tty::Engine do
  # A minimal plot with two actions: :think (prints, then queues "quit")
  # and :quit (prints, then concludes the plot).
  let(:plot) {
    plot = Gamefic::Plot.new
    plot.stage do
      respond :think do |actor|
        actor.tell 'Player thinks'
        actor.queue.push 'quit'
      end
      respond :quit do |actor|
        actor.tell 'Player quit'
        actor.conclude default_conclusion
      end
    end
    plot
  }

  it 'completes a turn' do
    user = Gamefic::Tty::User.new(input: StringIO.new('quit'), output: StringIO.new)
    engine = Gamefic::Tty::Engine.new(plot: plot, user: user)
    engine.turn
    expect(engine.character).to be_concluded
  end

  it 'runs until concluded' do
    user = Gamefic::Tty::User.new(input: StringIO.new('quit'), output: StringIO.new)
    engine = Gamefic::Tty::Engine.new(plot: plot, user: user)
    engine.run
    expect(engine.character).to be_concluded
  end

  it 'runs from the singleton method' do
    user = Gamefic::Tty::User.new(input: StringIO.new('quit'), output: StringIO.new)
    engine = Gamefic::Tty::Engine.run(plot: plot, user: user)
    expect(engine.character).to be_concluded
  end

  # "think" queues a follow-up "quit" command, so both outputs must appear.
  it 'handles multiple commands in queue' do
    user = Gamefic::Tty::User.new(input: StringIO.new("think"), output: StringIO.new)
    engine = Gamefic::Tty::Engine.new(plot: plot, user: user)
    engine.run
    expect(user.output.string).to include('Player thinks')
    expect(user.output.string).to include('Player quit')
  end

  it 'updates after conclusion' do
    user = Gamefic::Tty::User.new(input: StringIO.new('quit'), output: StringIO.new)
    engine = Gamefic::Tty::Engine.new(plot: plot, user: user)
    engine.run
    expect(user.output.string).to include('Player quit')
  end
end
015109a0520014c422897d7bd0507235a2844a52 | 2,386 | require_relative '../spec_helper'
require_relative '../fixtures/classes'
# Specs for UDPSocket#recvfrom_nonblock, run once per IP protocol
# (IPv4 and IPv6) via SocketSpecs.each_ip_protocol.
describe 'UDPSocket#recvfrom_nonblock' do
  SocketSpecs.each_ip_protocol do |family, ip_address, family_name|
    before do
      @server = UDPSocket.new(family)
      @client = UDPSocket.new(family)
    end

    after do
      @client.close
      @server.close
    end

    platform_is_not :windows do
      describe 'using an unbound socket' do
        it 'raises IO::WaitReadable' do
          lambda { @server.recvfrom_nonblock(1) }.should raise_error(IO::WaitReadable)
        end
      end
    end

    describe 'using a bound socket' do
      before do
        # Bind to an ephemeral port and point the client at it.
        @server.bind(ip_address, 0)

        addr = @server.connect_address

        @client.connect(addr.ip_address, addr.ip_port)
      end

      describe 'without any data available' do
        it 'raises IO::WaitReadable' do
          lambda { @server.recvfrom_nonblock(1) }.should raise_error(IO::WaitReadable)
        end
      end

      platform_is_not :windows do
        describe 'with data available' do
          before do
            @client.write('hello')

            # On Darwin/FreeBSD the datagram may not be immediately
            # readable; wait for it so the non-blocking read succeeds.
            platform_is(:darwin, :freebsd) { IO.select([@server]) }
          end

          it 'returns an Array containing the data and an Array' do
            @server.recvfrom_nonblock(1).should be_an_instance_of(Array)
          end

          describe 'the returned Array' do
            before do
              @array = @server.recvfrom_nonblock(1)
            end

            it 'contains the data at index 0' do
              @array[0].should == 'h'
            end

            it 'contains an Array at index 1' do
              @array[1].should be_an_instance_of(Array)
            end
          end

          describe 'the returned address Array' do
            before do
              @addr = @server.recvfrom_nonblock(1)[1]
            end

            it 'uses the correct address family' do
              @addr[0].should == family_name
            end

            it 'uses the port of the client' do
              @addr[1].should == @client.local_address.ip_port
            end

            it 'uses the hostname of the client' do
              @addr[2].should == ip_address
            end

            it 'uses the IP address of the client' do
              @addr[3].should == ip_address
            end
          end
        end
      end
    end
  end
end
| 26.21978 | 86 | 0.565381 |
282b4abada988c0f3e7d3d05597cb1d507dd0eaa | 369 | class TcpProxyNginxModule < Formula
  desc "tcp proxy and health check and status monitor"
  homepage "https://github.com/yaoweibin/nginx_tcp_proxy_module"
  url "https://github.com/yaoweibin/nginx_tcp_proxy_module/archive/v0.4.5.tar.gz"
  sha256 "5225fa70785b14fcdf14a163d01b094c746b70e5ebad7dc35740af4f6d115390"

  # Source-only nginx module: nothing to compile here, just stage the
  # sources into the formula's share directory for use at nginx build time.
  def install
    pkgshare.install Dir["*"]
  end
end
| 33.545455 | 81 | 0.796748 |
1cef97982f650ab0fae70d2c8665a2fc0154f0ac | 373 | module Helpers
include RSpec::Rails::FeatureCheck
def with_isolated_config
original_config = RSpec.configuration
RSpec.configuration = RSpec::Core::Configuration.new
RSpec::Rails.initialize_configuration(RSpec.configuration)
yield RSpec.configuration
ensure
RSpec.configuration = original_config
end
RSpec.configure {|c| c.include self}
end
| 24.866667 | 62 | 0.772118 |
385a604daf7076cefc8131930a23e1d3cbece35e | 54 | module ComfortableMexicanSofa
VERSION = "1.12.1"
end | 18 | 29 | 0.777778 |
b9f24f8c3641535ef1d925d1bdb7727c650d9016 | 47 | require 'spec_helper'
# Placeholder spec for Enchant; no examples have been written yet.
describe Enchant do
end
| 9.4 | 21 | 0.808511 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.