hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
036b6dcb32b06df876ecddbf2973587b86a6761f | 71 | d_hash = { :name => "Grey", :age => 28 }
puts d_hash
puts d_hash[:age]
| 17.75 | 40 | 0.591549 |
bb266f62aca40b738c4991242cedbf7b970e1dbd | 1,403 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v4/resources/ad_group_extension_setting.proto
require 'google/protobuf'
require 'google/ads/google_ads/v4/enums/extension_setting_device_pb'
require 'google/ads/google_ads/v4/enums/extension_type_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/protobuf/wrappers_pb'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v4/resources/ad_group_extension_setting.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v4.resources.AdGroupExtensionSetting" do
optional :resource_name, :string, 1
optional :extension_type, :enum, 2, "google.ads.googleads.v4.enums.ExtensionTypeEnum.ExtensionType"
optional :ad_group, :message, 3, "google.protobuf.StringValue"
repeated :extension_feed_items, :message, 4, "google.protobuf.StringValue"
optional :device, :enum, 5, "google.ads.googleads.v4.enums.ExtensionSettingDeviceEnum.ExtensionSettingDevice"
end
end
end
module Google
module Ads
module GoogleAds
module V4
module Resources
AdGroupExtensionSetting = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v4.resources.AdGroupExtensionSetting").msgclass
end
end
end
end
end
| 40.085714 | 162 | 0.774056 |
263014d229df2dadf11c3bdb372beda86a40db98 | 12,725 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/exceptions'
require 'chef/log'
require 'chef/provider'
require 'fileutils'
class Chef
class Provider
class Git < Chef::Provider
def whyrun_supported?
true
end
def load_current_resource
@resolved_reference = nil
@current_resource = Chef::Resource::Git.new(@new_resource.name)
if current_revision = find_current_revision
@current_resource.revision current_revision
end
end
def define_resource_requirements
# Parent directory of the target must exist.
requirements.assert(:checkout, :sync) do |a|
dirname = ::File.dirname(@new_resource.destination)
a.assertion { ::File.directory?(dirname) }
a.whyrun("Directory #{dirname} does not exist, this run will fail unless it has been previously created. Assuming it would have been created.")
a.failure_message(Chef::Exceptions::MissingParentDirectory,
"Cannot clone #{@new_resource} to #{@new_resource.destination}, the enclosing directory #{dirname} does not exist")
end
requirements.assert(:all_actions) do |a|
a.assertion { !(@new_resource.revision =~ /^origin\//) }
a.failure_message Chef::Exceptions::InvalidRemoteGitReference,
"Deploying remote branches is not supported. " +
"Specify the remote branch as a local branch for " +
"the git repository you're deploying from " +
"(ie: '#{@new_resource.revision.gsub('origin/', '')}' rather than '#{@new_resource.revision}')."
end
requirements.assert(:all_actions) do |a|
# this can't be recovered from in why-run mode, because nothing that
# we do in the course of a run is likely to create a valid target_revision
# if we can't resolve it up front.
a.assertion { target_revision != nil }
a.failure_message Chef::Exceptions::UnresolvableGitReference,
"Unable to parse SHA reference for '#{@new_resource.revision}' in repository '#{@new_resource.repository}'. " +
"Verify your (case-sensitive) repository URL and revision.\n" +
"`git ls-remote` output: #{@resolved_reference}"
end
end
def action_checkout
if target_dir_non_existent_or_empty?
clone
if @new_resource.enable_checkout
checkout
end
enable_submodules
add_remotes
else
Chef::Log.debug "#{@new_resource} checkout destination #{@new_resource.destination} already exists or is a non-empty directory"
end
end
def action_export
action_checkout
converge_by("complete the export by removing #{@new_resource.destination}.git after checkout") do
FileUtils.rm_rf(::File.join(@new_resource.destination,".git"))
end
end
def action_sync
if existing_git_clone?
Chef::Log.debug "#{@new_resource} current revision: #{@current_resource.revision} target revision: #{target_revision}"
unless current_revision_matches_target_revision?
fetch_updates
enable_submodules
Chef::Log.info "#{@new_resource} updated to revision #{target_revision}"
end
add_remotes
else
action_checkout
end
end
def existing_git_clone?
::File.exist?(::File.join(@new_resource.destination, ".git"))
end
def target_dir_non_existent_or_empty?
!::File.exist?(@new_resource.destination) || Dir.entries(@new_resource.destination).sort == ['.','..']
end
def find_current_revision
Chef::Log.debug("#{@new_resource} finding current git revision")
if ::File.exist?(::File.join(cwd, ".git"))
# 128 is returned when we're not in a git repo. this is fine
result = shell_out!('git rev-parse HEAD', :cwd => cwd, :returns => [0,128]).stdout.strip
end
sha_hash?(result) ? result : nil
end
def add_remotes
if (@new_resource.additional_remotes.length > 0)
@new_resource.additional_remotes.each_pair do |remote_name, remote_url|
converge_by("add remote #{remote_name} from #{remote_url}") do
Chef::Log.info "#{@new_resource} adding git remote #{remote_name} = #{remote_url}"
setup_remote_tracking_branches(remote_name, remote_url)
end
end
end
end
def clone
converge_by("clone from #{@new_resource.repository} into #{@new_resource.destination}") do
remote = @new_resource.remote
args = []
args << "-o #{remote}" unless remote == 'origin'
args << "--depth #{@new_resource.depth}" if @new_resource.depth
Chef::Log.info "#{@new_resource} cloning repo #{@new_resource.repository} to #{@new_resource.destination}"
clone_cmd = "git clone #{args.join(' ')} \"#{@new_resource.repository}\" \"#{@new_resource.destination}\""
shell_out!(clone_cmd, run_options)
end
end
def checkout
sha_ref = target_revision
converge_by("checkout ref #{sha_ref} branch #{@new_resource.revision}") do
# checkout into a local branch rather than a detached HEAD
shell_out!("git checkout -b #{@new_resource.checkout_branch} #{sha_ref}", run_options(:cwd => @new_resource.destination))
Chef::Log.info "#{@new_resource} checked out branch: #{@new_resource.revision} onto: #{@new_resource.checkout_branch} reference: #{sha_ref}"
end
end
def enable_submodules
if @new_resource.enable_submodules
converge_by("enable git submodules for #{@new_resource}") do
Chef::Log.info "#{@new_resource} synchronizing git submodules"
command = "git submodule sync"
shell_out!(command, run_options(:cwd => @new_resource.destination))
Chef::Log.info "#{@new_resource} enabling git submodules"
# the --recursive flag means we require git 1.6.5+ now, see CHEF-1827
command = "git submodule update --init --recursive"
shell_out!(command, run_options(:cwd => @new_resource.destination))
end
end
end
def fetch_updates
setup_remote_tracking_branches(@new_resource.remote, @new_resource.repository)
converge_by("fetch updates for #{@new_resource.remote}") do
# since we're in a local branch already, just reset to specified revision rather than merge
fetch_command = "git fetch #{@new_resource.remote} && git fetch #{@new_resource.remote} --tags && git reset --hard #{target_revision}"
Chef::Log.debug "Fetching updates from #{new_resource.remote} and resetting to revision #{target_revision}"
shell_out!(fetch_command, run_options(:cwd => @new_resource.destination))
end
end
def setup_remote_tracking_branches(remote_name, remote_url)
converge_by("set up remote tracking branches for #{remote_url} at #{remote_name}") do
Chef::Log.debug "#{@new_resource} configuring remote tracking branches for repository #{remote_url} "+
"at remote #{remote_name}"
check_remote_command = "git config --get remote.#{remote_name}.url"
remote_status = shell_out!(check_remote_command, run_options(:cwd => @new_resource.destination, :returns => [0,1,2]))
case remote_status.exitstatus
when 0, 2
# * Status 0 means that we already have a remote with this name, so we should update the url
# if it doesn't match the url we want.
# * Status 2 means that we have multiple urls assigned to the same remote (not a good idea)
# which we can fix by replacing them all with our target url (hence the --replace-all option)
if multiple_remotes?(remote_status) || !remote_matches?(remote_url,remote_status)
update_remote_url_command = "git config --replace-all remote.#{remote_name}.url #{remote_url}"
shell_out!(update_remote_url_command, run_options(:cwd => @new_resource.destination))
end
when 1
add_remote_command = "git remote add #{remote_name} #{remote_url}"
shell_out!(add_remote_command, run_options(:cwd => @new_resource.destination))
end
end
end
def multiple_remotes?(check_remote_command_result)
check_remote_command_result.exitstatus == 2
end
def remote_matches?(remote_url, check_remote_command_result)
check_remote_command_result.stdout.strip.eql?(remote_url)
end
def current_revision_matches_target_revision?
(!@current_resource.revision.nil?) && (target_revision.strip.to_i(16) == @current_resource.revision.strip.to_i(16))
end
def target_revision
@target_revision ||= begin
if sha_hash?(@new_resource.revision)
@target_revision = @new_resource.revision
else
@target_revision = remote_resolve_reference
end
end
end
alias :revision_slug :target_revision
def remote_resolve_reference
Chef::Log.debug("#{@new_resource} resolving remote reference")
# The sha pointed to by an annotated tag is identified by the
# '^{}' suffix appended to the tag. In order to resolve
# annotated tags, we have to search for "revision*" and
# post-process. Special handling for 'HEAD' to ignore a tag
# named 'HEAD'.
rev_pattern = case @new_resource.revision
when '', 'HEAD'
'HEAD'
else
@new_resource.revision + '*'
end
command = git("ls-remote \"#{@new_resource.repository}\" \"#{rev_pattern}\"")
@resolved_reference = shell_out!(command, run_options).stdout
ref_lines = @resolved_reference.split("\n")
refs = ref_lines.map { |line| line.split("\t") }
# first try for ^{} indicating the commit pointed to by an
# annotated tag
tagged_commit = refs.find { |m| m[1].end_with?("#{@new_resource.revision}^{}") }
# It is possible for a user to create a tag named 'HEAD'.
# Using such a degenerate annotated tag would be very
# confusing. We avoid the issue by disallowing the use of
# annotated tags named 'HEAD'.
if tagged_commit && rev_pattern != 'HEAD'
tagged_commit[0]
else
found = refs.find { |m| m[1].end_with?(@new_resource.revision) }
if found
found[0]
else
nil
end
end
end
private
def run_options(run_opts={})
env = {}
if @new_resource.user
run_opts[:user] = @new_resource.user
# Certain versions of `git` misbehave if git configuration is
# inaccessible in $HOME. We need to ensure $HOME matches the
# user who is executing `git` not the user running Chef.
env['HOME'] = begin
require 'etc'
Etc.getpwnam(@new_resource.user).dir
rescue ArgumentError # user not found
raise Chef::Exceptions::User, "Could not determine HOME for specified user '#{@new_resource.user}' for resource '#{@new_resource.name}'"
end
end
run_opts[:group] = @new_resource.group if @new_resource.group
env['GIT_SSH'] = @new_resource.ssh_wrapper if @new_resource.ssh_wrapper
run_opts[:log_tag] = @new_resource.to_s
run_opts[:timeout] = @new_resource.timeout if @new_resource.timeout
run_opts[:environment] = env unless env.empty?
run_opts
end
def cwd
@new_resource.destination
end
def git(*args)
["git", *args].compact.join(" ")
end
def sha_hash?(string)
string =~ /^[0-9a-f]{40}$/
end
end
end
end
| 41.584967 | 153 | 0.632456 |
08a0a66ededce532b694d82f4ae934bcca1489fc | 2,361 | module CruiseControl
class CruiseControl::Configuration
@default_polling_interval = 20.seconds
@sleep_after_build_loop_error = 30.seconds
@build_request_checking_interval = 5.seconds
@dashboard_refresh_interval = 30.seconds
@dashboard_url = nil
@email_from = '[email protected]'
@disable_admin_ui = false
@disable_add_project = false
@serialize_builds = false
@serialized_build_timeout = 3.hour
@git_load_new_changesets_timeout = 5.minutes
@build_history_limit = 30
@max_file_display_length = 300.kilobytes
@db_username = "root"
@db_password = ""
@review_changeset_url = nil
class << self
# published configuration options (mentioned in config/site_config.rb.example)
attr_accessor :default_polling_interval, :disable_admin_ui, :email_from,
:dashboard_refresh_interval, :serialize_builds,
:serialized_build_timeout, :git_load_new_changesets_timeout,
:disable_code_browsing, :build_history_limit, :max_file_display_length,
:disable_add_project, :review_changeset_url, :db_username, :db_password
attr_reader :dashboard_url
# non-published configuration options (obscure stuff, mostly useful for http://cruisecontrolrb.thoughtworks.com)
attr_writer :build_request_checking_interval, :sleep_after_build_loop_error
def data_root=(root)
@data_root = Pathname.new(root)
end
def data_root
@data_root ||= CruiseControl.data_root
end
def disable_build_now=(flag)
puts "DEPRECATED: Please use CruiseControl::Configuration.disable_admin_ui instead of disable_build_now to disable administration features in the UI."
@disable_admin_ui = flag
end
def projects_root
self.data_root.join("projects")
end
def plugins_root
self.data_root.join("builder_plugins")
end
def dashboard_url=(value)
@dashboard_url = remove_trailing_slash(value)
end
def sleep_after_build_loop_error
@sleep_after_build_loop_error
end
def build_request_checking_interval
@build_request_checking_interval.to_i
end
private
def remove_trailing_slash(str)
str.sub(/\/$/, '')
end
end
end
end | 31.48 | 158 | 0.695044 |
26e2f1e3ea0277cb04d804243e059945c48d0849 | 15,190 | # frozen_string_literal: true
# Assuming you have not yet modified this file, each configuration option below
# is set to its default value. Note that some are commented out while others
# are not: uncommented lines are intended to protect your configuration from
# breaking changes in upgrades (i.e., in the event that future versions of
# Devise change the default values for those options).
#
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = '92fa5f53ed9851344fc08a70ee57e10ca4576abd792cf83a209c61be877ec450e0cae05be7cf4aac811e9738f75a52f64048fab0c1cc24a94f7811bd3e5dc7b1'
# ==> Controller configuration
# Configure the parent class to the devise controllers.
# config.parent_controller = 'DeviseController'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication.
# For API-only applications to support authentication "out-of-the-box", you will likely want to
# enable this with :database unless you are using a custom strategy.
# The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 12. If
# using other algorithms, it sets how many times you want the password to be hashed.
# The number of stretches used for generating the hashed password are stored
# with the hashed password. This allows you to change the stretches without
# invalidating existing passwords.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 12
# Set up a pepper to generate the hashed password.
# config.pepper = '3ced69bd23b8f540650729eb3f41f550bef43a6487476a537510463ae60da2c339bad0e51b293badca4c3a55f31f6737f21867ddad793c6864a87ae42cac5d34'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day.
# You can also set it to nil, which will allow the user to access the website
# without confirming their account.
# Default is 0.days, meaning the user cannot access the website without
# confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
# ==> Turbolinks configuration
# If your app is using Turbolinks, Turbolinks::Controller needs to be included to make redirection work correctly:
#
# ActiveSupport.on_load(:devise_failure_app) do
# include Turbolinks::Controller
# end
# ==> Configuration for :registerable
# When set to false, does not sign a user in automatically after their password is
# changed. Defaults to true, so a user is signed in automatically after changing a password.
# config.sign_in_after_change_password = true
end
| 48.685897 | 154 | 0.752535 |
03a7cf249cf3627551cc56652edf80bcffc3c736 | 2,668 | shared_context 'simple_check' do |metrics_prefix, check_name, success_result|
describe '#metrics' do
subject { described_class.metrics }
context 'Check is passing' do
before do
allow(described_class).to receive(:check).and_return success_result
end
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_success", value: 1)) }
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_timeout", value: 0)) }
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_latency_seconds", value: be >= 0)) }
end
context 'Check is misbehaving' do
before do
allow(described_class).to receive(:check).and_return 'error!'
end
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_success", value: 0)) }
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_timeout", value: 0)) }
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_latency_seconds", value: be >= 0)) }
end
context 'Check is timeouting' do
before do
allow(described_class).to receive(:check).and_return Timeout::Error.new
end
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_success", value: 0)) }
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_timeout", value: 1)) }
it { is_expected.to include(have_attributes(name: "#{metrics_prefix}_latency_seconds", value: be >= 0)) }
end
end
describe '#readiness' do
subject { described_class.readiness }
context 'Check returns ok' do
before do
allow(described_class).to receive(:check).and_return success_result
end
it { is_expected.to have_attributes(success: true) }
end
context 'Check is misbehaving' do
before do
allow(described_class).to receive(:check).and_return 'error!'
end
it { is_expected.to have_attributes(success: false, message: "unexpected #{described_class.human_name} check result: error!") }
end
context 'Check is timeouting' do
before do
allow(described_class).to receive(:check ).and_return Timeout::Error.new
end
it { is_expected.to have_attributes(success: false, message: "#{described_class.human_name} check timed out") }
end
context 'Check is raising an unhandled exception' do
before do
allow(described_class).to receive(:check ).and_raise "unexpected error"
end
it { is_expected.to have_attributes(success: false, message: "unexpected #{described_class.human_name} check result: unexpected error") }
end
end
end
| 37.055556 | 143 | 0.69078 |
1ae7ea9f25fb5e8a6d325f39a8fc027be0236cbd | 1,807 | HEADERS = %w[domain id name code registrar].freeze
namespace :contacts do
desc 'Starts collect invalid validation contacts'
task scan_org: :environment do
contacts = []
Contact.where(ident_type: 'org').each do |contact|
contacts << contact unless checking_contacts(contact)
end
contacts.select! { |c| c.ident_country_code == 'EE' }
magic_with_contacts(contacts)
end
end
def checking_contacts(contact)
c = BusinessRegistryContact.find_by(registry_code: contact.ident)
return false if c.nil? || c.status == 'N'
true
end
# Writes invalid_business_contacts.csv: a header row followed by one row per
# (domain, contact) pair for every domain attached to each given contact.
def magic_with_contacts(contacts)
  CSV.open('invalid_business_contacts.csv', 'w') do |csv|
    csv << HEADERS
    contacts.each do |contact|
      domain_filter(contact).each do |domain|
        registrar = Registrar.find_by(id: domain.registrar_id)
        csv << [domain.name, contact.id, contact.name, contact.ident, registrar.name]
      end
    end
  end
end
# All domains of the contact, excluding any already flagged for forced deletion.
def domain_filter(contact)
  searching_domains(contact).reject do |domain|
    domain.statuses.include?(DomainStatus::FORCE_DELETE)
  end
end
# Order-preserving, duplicate-free union of the contact's tech, admin and
# registrant domains (tech first, matching the original union order).
def searching_domains(contact)
  by_registrant = Domain.where(registrant_id: contact.id)
  collect_tech_domains(contact) | collect_admin_domains(contact) | by_registrant
end
# Domains on which the contact is registered as an admin contact.
# NOTE(review): issues one Domain.find per join row -- presumably acceptable
# for a one-off rake task.
def collect_admin_domains(contact)
  AdminDomainContact.where(contact_id: contact.id).map { |link| Domain.find(link.domain_id) }
end
# Domains on which the contact is registered as a tech contact.
# NOTE(review): same per-row Domain.find pattern as collect_admin_domains.
def collect_tech_domains(contact)
  TechDomainContact.where(contact_id: contact.id).map { |link| Domain.find(link.domain_id) }
end
| 24.753425 | 85 | 0.73658 |
8752da0f77aa5f1928d4dcce2b687796049b258c | 9,808 | module ActiveMerchant #:nodoc:
module Billing #:nodoc:
class BarclaysEpdqGateway < Gateway
TEST_URL = 'https://secure2.mde.epdq.co.uk:11500'
LIVE_URL = 'https://secure2.epdq.co.uk:11500'
self.supported_countries = ['GB']
self.default_currency = 'GBP'
self.supported_cardtypes = [:visa, :master, :american_express, :maestro, :switch ]
self.money_format = :cents
self.homepage_url = 'http://www.barclaycard.co.uk/business/accepting-payments/epdq-mpi/'
self.display_name = 'Barclays ePDQ'
def initialize(options = {})
requires!(options, :login, :password, :client_id)
@options = options
super
end
def authorize(money, creditcard, options = {})
document = Document.new(self, @options) do
add_order_form(options[:order_id]) do
add_consumer(options) do
add_creditcard(creditcard)
end
add_transaction(:PreAuth, money)
end
end
commit(document)
end
def purchase(money, creditcard, options = {})
# disable fraud checks if this is a repeat order:
if options[:payment_number] && (options[:payment_number] > 1)
no_fraud = true
else
no_fraud = options[:no_fraud]
end
document = Document.new(self, @options, :no_fraud => no_fraud) do
add_order_form(options[:order_id], options[:group_id]) do
add_consumer(options) do
add_creditcard(creditcard)
end
add_transaction(:Auth, money, options)
end
end
commit(document)
end
# authorization is your unique order ID, not the authorization
# code returned by ePDQ
def capture(money, authorization, options = {})
document = Document.new(self, @options) do
add_order_form(authorization) do
add_transaction(:PostAuth, money)
end
end
commit(document)
end
# authorization is your unique order ID, not the authorization
# code returned by ePDQ
def credit(money, creditcard_or_authorization, options = {})
if creditcard_or_authorization.is_a?(String)
deprecated CREDIT_DEPRECATION_MESSAGE
refund(money, creditcard_or_authorization, options)
else
credit_new_order(money, creditcard_or_authorization, options)
end
end
def refund(money, authorization, options = {})
credit_existing_order(money, authorization, options)
end
def void(authorization, options = {})
document = Document.new(self, @options) do
add_order_form(authorization) do
add_transaction(:Void)
end
end
commit(document)
end
private
def credit_new_order(money, creditcard, options)
document = Document.new(self, @options) do
add_order_form do
add_consumer(options) do
add_creditcard(creditcard)
end
add_transaction(:Credit, money)
end
end
commit(document)
end
def credit_existing_order(money, authorization, options)
order_id, _ = authorization.split(":")
document = Document.new(self, @options) do
add_order_form(order_id) do
add_transaction(:Credit, money)
end
end
commit(document)
end
def parse(body)
parser = Parser.new(body)
response = parser.parse
Response.new(response[:success], response[:message], response,
:test => test?,
:authorization => response[:authorization],
:avs_result => response[:avsresponse],
:cvv_result => response[:cvv_result],
:order_id => response[:order_id],
:raw_response => response[:raw_response]
)
end
def commit(document)
url = (test? ? TEST_URL : LIVE_URL)
data = ssl_post(url, document.to_xml)
parse(data)
end
class Parser
def initialize(response)
@response = response
end
def parse
doc = REXML::Document.new(@response)
auth_type = find(doc, "//Transaction/Type").to_s
message = find(doc, "//Message/Text")
if message.blank?
message = find(doc, "//Transaction/CardProcResp/CcReturnMsg")
end
case auth_type
when 'Credit', 'Void'
success = find(doc, "//CcReturnMsg") == "Approved."
else
success = find(doc, "//Transaction/AuthCode").present?
end
{
:success => success,
:message => message,
:transaction_id => find(doc, "//Transaction/Id"),
:avs_result => find(doc, "//Transaction/AvsRespCode"),
:cvv_result => find(doc, "//Transaction/Cvv2Resp"),
:authorization => find(doc, "//OrderFormDoc/Id"),
:raw_response => @response
}
end
def find(doc, xpath)
REXML::XPath.first(doc, xpath).try(:text)
end
end
class Document
attr_reader :type, :xml
PAYMENT_INTERVALS = {
:days => 'D',
:months => 'M'
}
EPDQ_CARD_TYPES = {
:visa => 1,
:master => 2,
:switch => 9,
:maestro => 10,
}
def initialize(gateway, options = {}, document_options = {}, &block)
@gateway = gateway
@options = options
@document_options = document_options
@xml = Builder::XmlMarkup.new(:indent => 2)
build(&block)
end
def to_xml
@xml.target!
end
def build(&block)
xml.instruct!(:xml, :version => '1.0')
xml.EngineDocList do
xml.DocVersion "1.0"
xml.EngineDoc do
xml.ContentType "OrderFormDoc"
xml.User do
xml.Name(@options[:login])
xml.Password(@options[:password])
xml.ClientId({ :DataType => "S32" }, @options[:client_id])
end
xml.Instructions do
if @document_options[:no_fraud]
xml.Pipeline "PaymentNoFraud"
else
xml.Pipeline "Payment"
end
end
instance_eval(&block)
end
end
end
def add_order_form(order_id=nil, group_id=nil, &block)
xml.OrderFormDoc do
xml.Mode 'P'
xml.Id(order_id) if order_id
xml.GroupId(group_id) if group_id
instance_eval(&block)
end
end
def add_consumer(options=nil, &block)
xml.Consumer do
if options
xml.Email(options[:email]) if options[:email]
billing_address = options[:billing_address] || options[:address]
if billing_address
xml.BillTo do
xml.Location do
xml.Address do
xml.Street1 billing_address[:address1]
xml.Street2 billing_address[:address2]
xml.City billing_address[:city]
xml.StateProv billing_address[:state]
xml.PostalCode billing_address[:zip]
xml.Country billing_address[:country_code]
end
end
end
end
end
instance_eval(&block)
end
end
def add_creditcard(creditcard)
xml.PaymentMech do
xml.CreditCard do
xml.Type({ :DataType => 'S32' }, EPDQ_CARD_TYPES[creditcard.brand.to_sym])
xml.Number creditcard.number
xml.Expires({ :DataType => 'ExpirationDate', :Locale => 826 }, format_expiry_date(creditcard))
if creditcard.verification_value.present?
xml.Cvv2Indicator 1
xml.Cvv2Val creditcard.verification_value
else
xml.Cvv2Indicator 5
end
xml.IssueNum(creditcard.issue_number) if creditcard.issue_number.present?
end
end
end
def add_transaction(auth_type, amount = nil, options = {})
@auth_type = auth_type
xml.Transaction do
xml.Type @auth_type.to_s
if options[:payment_number] && options[:payment_number] > 1
xml.CardholderPresentCode({ :DataType => 'S32' }, 8)
else
xml.CardholderPresentCode({ :DataType => 'S32' }, 7)
end
if options[:payment_number]
xml.PaymentNumber({ :DataType => 'S32' }, options[:payment_number])
end
if options[:total_payments]
xml.TotalNumberPayments({ :DataType => 'S32' }, options[:total_payments])
end
if amount
xml.CurrentTotals do
xml.Totals do
xml.Total({ :DataType => 'Money', :Currency => 826 }, amount)
end
end
end
end
end
# date must be formatted MM/YY
def format_expiry_date(creditcard)
month_str = "%02d" % creditcard.month
if match = creditcard.year.to_s.match(/^\d{2}(\d{2})$/)
year_str = "%02d" % match[1].to_i
else
year_str = "%02d" % creditcard.year
end
"#{month_str}/#{year_str}"
end
end
end
end
end
| 31.7411 | 108 | 0.533442 |
d5b1a0b2ce18ff9db2ea4a6fcb5bedc35b412331 | 1,660 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Storage::Mgmt::V2017_10_01
  module Models
    #
    # The response from the ListKeys operation.
    #
    # NOTE: this file is auto-generated by AutoRest (see file header);
    # manual edits will be lost on regeneration.
    class StorageAccountListKeysResult
      include MsRestAzure
      # @return [Array<StorageAccountKey>] Gets the list of storage account
      # keys and their properties for the specified storage account.
      attr_accessor :keys
      #
      # Mapper for StorageAccountListKeysResult class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # 'keys' is declared read_only: it is populated when deserializing
      # service responses and is never serialized back to the service.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'StorageAccountListKeysResult',
          type: {
            name: 'Composite',
            class_name: 'StorageAccountListKeysResult',
            model_properties: {
              keys: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'keys',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'StorageAccountKeyElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'StorageAccountKey'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 28.62069 | 75 | 0.528916 |
01a1397bc6a16030ab87174de81898d7a1f4a3f4 | 154 | module EventsHelper
def link_to_map title, address
link_to h(title), "http://maps.google.com/maps?q=#{url_encode address.gsub(/\s/,' ')}"
end
end
| 25.666667 | 90 | 0.694805 |
5d84f4c934195710bdba1817ad4b7a1a539c66e0 | 1,965 | class UsersController < ApplicationController
# GET /users
# GET /users.json
def index
ActiveSupport::Deprecation.warn("from index page")
@users = User.all
respond_to do |format|
format.html # index.html.erb
format.json { render json: @users }
end
end
# GET /users/1
# GET /users/1.json
def show
ActiveSupport::Deprecation.warn("from show page")
@user = User.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @user }
end
end
# GET /users/new
# GET /users/new.json
def new
@user = User.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @user }
end
end
# GET /users/1/edit
def edit
@user = User.find(params[:id])
end
# POST /users
# POST /users.json
def create
@user = User.new(params[:user])
respond_to do |format|
if @user.save
format.html { redirect_to @user, notice: 'User was successfully created.' }
format.json { render json: @user, status: :created, location: @user }
else
format.html { render action: "new" }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
# PUT /users/1
# PUT /users/1.json
def update
@user = User.find(params[:id])
respond_to do |format|
if @user.update_attributes(params[:user])
format.html { redirect_to @user, notice: 'User was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
# DELETE /users/1
# DELETE /users/1.json
def destroy
@user = User.find(params[:id])
@user.destroy
respond_to do |format|
format.html { redirect_to users_url }
format.json { head :no_content }
end
end
end
| 22.329545 | 83 | 0.617303 |
e21299705a7fb985ef222e432a82c324291146da | 15,553 | require "rubygems"
require "bundler"
require "shellwords"
require "set"
module GemfileUtil
  #
  # Adds `override: true`, which allows your statement to override any other
  # gem statement about the same gem in the Gemfile.
  #
  def gem(name, *args)
    options = args[-1].is_a?(Hash) ? args[-1] : {}
    # Unless we're finished with everything, ignore gems that are being overridden
    unless overridden_gems == :finished
      # If it's a path or override gem, it overrides whatever else is there.
      if options[:path] || options[:override]
        options.delete(:override)
        warn_if_replacing(name, overridden_gems[name], args)
        overridden_gems[name] = args
        return
      # If there's an override gem, and we're *not* an override gem, don't do anything
      elsif overridden_gems[name]
        warn_if_replacing(name, args, overridden_gems[name])
        return
      end
    end
    # Otherwise, add the gem normally
    super
  rescue
    # Debugging aid: dump the backtrace before re-raising the same error.
    puts $!.backtrace
    raise
  end
  # Map of gem name => gem() args for gems declared with :path/:override.
  # Becomes :finished once the overrides have been flushed (complete_overrides).
  def overridden_gems
    @overridden_gems ||= {}
  end
  #
  # Just before we finish the Gemfile, finish up the override gems
  #
  def to_definition(*args)
    complete_overrides
    super
  end
  # Flush all pending override gems into the Gemfile exactly once; after this
  # runs, gem() behaves normally (no more override interception).
  def complete_overrides
    to_override = overridden_gems
    unless to_override == :finished
      @overridden_gems = :finished
      to_override.each do |name, args|
        gem name, *args
      end
    end
  end
  #
  # Include all gems in the locked gemfile.
  #
  # @param gemfile_path Path to the Gemfile to load (relative to your Gemfile)
  # @param lockfile_path Path to the Gemfile to load (relative to your Gemfile).
  #        Defaults to <gemfile_path>.lock.
  # @param groups A list of groups to include (whitelist). If not passed (or set
  #        to nil), all gems will be selected.
  # @param without_groups A list of groups to ignore. Gems will be excluded from
  #        the results if all groups they belong to are ignored. This matches
  #        bundler's `without` behavior.
  # @param gems A list of gems to include above and beyond the given groups.
  #        Gems in this list must be explicitly included in the Gemfile
  #        with a `gem "gem_name", ...` line or they will be silently
  #        ignored.
  # @param copy_groups Whether to copy the groups over from the old lockfile to
  #        the new. Use this when the new lockfile has the same convention for
  #        groups as the old. Defaults to `false`.
  #
  def include_locked_gemfile(gemfile_path, lockfile_path = "#{gemfile_path}.lock", groups: nil, without_groups: nil, gems: [], copy_groups: false)
    # Parse the desired lockfile
    gemfile_path = Pathname.new(gemfile_path).expand_path(Bundler.default_gemfile.dirname).realpath
    lockfile_path = Pathname.new(lockfile_path).expand_path(Bundler.default_gemfile.dirname).realpath
    # Calculate relative_to
    relative_to = Bundler.default_gemfile.dirname.realpath
    # Call out to create-override-gemfile to read the Gemfile+Gemfile.lock (bundler does not work well if you do two things in one process)
    create_override_gemfile_bin = File.expand_path("../bin/create-override-gemfile", __FILE__)
    arguments = [
      "--gemfile", gemfile_path,
      "--lockfile", lockfile_path,
      "--override"
    ]
    arguments += [ "--relative-to", relative_to ] if relative_to != "."
    arguments += Array(groups).flat_map { |group| [ "--group", group ] }
    arguments += Array(without_groups).flat_map { |without| [ "--without", without ] }
    arguments += Array(gems).flat_map { |name| [ "--gem", name ] }
    arguments << "--copy-groups" if copy_groups
    cmd = Shellwords.join([ Gem.ruby, "-S", create_override_gemfile_bin, *arguments ])
    output = nil
    Bundler.ui.info("> #{cmd}")
    Bundler.with_clean_env do
      # NOTE(review): the subprocess exit status is not checked; a failure
      # would only surface later as an instance_eval error on its output.
      output = `#{cmd}`
    end
    # Evaluate the generated Gemfile fragment in the context of this Gemfile.
    instance_eval(output, cmd, 1)
  end
  #
  # Include all gems in the locked gemfile.
  #
  # @param current_gemfile The Gemfile you are currently loading (`self`).
  # @param gemfile_path Path to the Gemfile to load (relative to your Gemfile)
  # @param lockfile_path Path to the Gemfile to load (relative to your Gemfile).
  #        Defaults to <gemfile_path>.lock.
  # @param groups A list of groups to include (whitelist). If not passed (or set
  #        to nil), all gems will be selected.
  # @param without_groups A list of groups to ignore. Gems will be excluded from
  #        the results if all groups they belong to are ignored. This matches
  #        bundler's `without` behavior.
  # @param gems A list of gems to include above and beyond the given groups.
  #        Gems in this list must be explicitly included in the Gemfile
  #        with a `gem "gem_name", ...` line or they will be silently
  #        ignored.
  # @param copy_groups Whether to copy the groups over from the old lockfile to
  #        the new. Use this when the new lockfile has the same convention for
  #        groups as the old. Defaults to `false`.
  #
  def self.include_locked_gemfile(current_gemfile, gemfile_path, lockfile_path = "#{gemfile_path}.lock", groups: nil, without_groups: nil, gems: [], copy_groups: false)
    current_gemfile.instance_eval do
      extend GemfileUtil
      include_locked_gemfile(gemfile_path, lockfile_path, groups: groups, without_groups: without_groups, gems: gems, copy_groups: copy_groups)
    end
  end
  # Logs when one Gemfile declaration replaces another for the same gem:
  # debug level if the two dependencies are equivalent, warn otherwise.
  def warn_if_replacing(name, old_args, new_args)
    return if !old_args || !new_args
    if args_to_dep(name, *old_args) =~ args_to_dep(name, *new_args)
      Bundler.ui.debug "Replaced Gemfile dependency #{name} (#{old_args}) with (#{new_args})"
    else
      Bundler.ui.warn "Replaced Gemfile dependency #{name} (#{old_args}) with (#{new_args})"
    end
  end
  # Builds a Bundler::Dependency from gem()-style arguments (defaulting the
  # version requirement to ">= 0").
  def args_to_dep(name, *version, **options)
    version = [">= 0"] if version.empty?
    Bundler::Dependency.new(name, version, options)
  end
  #
  # Reads a bundle, including a gemfile and lockfile.
  #
  # Does no validation, does not update the lockfile or its gems in any way.
  #
  class Bundle
    #
    # Parse the given gemfile/lockfile pair.
    #
    # @return [Bundle] The parsed bundle.
    #
    def self.parse(gemfile_path, lockfile_path = "#{gemfile_path}.lock")
      result = new(gemfile_path, lockfile_path)
      result.gems
      result
    end
    #
    # Create a new Bundle to parse the given gemfile/lockfile pair.
    #
    def initialize(gemfile_path, lockfile_path = "#{gemfile_path}.lock")
      @gemfile_path = gemfile_path
      @lockfile_path = lockfile_path
    end
    #
    # The path to the Gemfile
    #
    attr_reader :gemfile_path
    #
    # The path to the Lockfile
    #
    attr_reader :lockfile_path
    #
    # The list of gems.
    #
    # @return [Hash<String, Hash>] The resulting gems, where key = gem_name, and the
    #    hash has:
    #    - version: version of the gem.
    #    - source info (:source/:git/:ref/:path) from the lockfile
    #    - dependencies: A list of gem names this gem has a runtime
    #      dependency on. Dependencies are transitive: if A depends on B,
    #      and B depends on C, then A has C in its :dependencies list.
    #    - development_dependencies: - A list of gem names this gem has a
    #      development dependency on. Dependencies are transitive: if A
    #      depends on B, and B depends on C, then A has C in its
    #      :development_dependencies list. development dependencies *include*
    #      runtime dependencies.
    #    - groups: The list of groups (symbols) this gem is in. Groups
    #      are transitive: if A has a runtime dependency on B, and A is
    #      in group X, then B is also in group X.
    #    - declared_groups: The list of groups (symbols) this gem was
    #      declared in the Gemfile.
    #
    def gems
      @gems ||= begin
        gems = locks.dup
        gems.each do |name, g|
          if gem_declarations.has_key?(name)
            g[:declared_groups] = gem_declarations[name][:groups]
          else
            g[:declared_groups] = []
          end
          g[:groups] = g[:declared_groups].dup
        end
        # Transitivize groups (since dependencies are already transitive, this is easy)
        gems.each do |name, g|
          g[:dependencies].each do |dep|
            gems[dep][:groups] |= gems[name][:declared_groups].dup
          end
        end
        gems
      end
    end
    #
    # Get the gems (and their deps) in the given group.
    #
    # @param groups A list of groups to include (whitelist). If not passed (or set
    #        to nil), all gems will be selected.
    # @param without_groups A list of groups to ignore. Gems will be excluded from
    #        the results if all groups they belong to are ignored.
    #        This matches bundler's `without` behavior.
    # @param gems A list of gems to include regardless of what groups are included.
    #
    # @return Hash[String, Hash] The resulting gems, where key = gem_name, and the
    #    hash has:
    #    - version: version of the gem.
    #    - source info (:source/:git/:ref/:path) from the lockfile
    #    - dependencies: A list of gem names this gem has a runtime
    #      dependency on. Dependencies are transitive: if A depends on B,
    #      and B depends on C, then A has C in its :dependencies list.
    #    - development_dependencies: - A list of gem names this gem has a
    #      development dependency on. Dependencies are transitive: if A
    #      depends on B, and B depends on C, then A has C in its
    #      :development_dependencies list. development dependencies
    #      *include* runtime dependencies.
    #    - groups: The list of groups (symbols) this gem is in. Groups
    #      are transitive: if A has a runtime dependency on B, and A is
    #      in group X, then B is also in group X.
    #    - declared_groups: The list of groups (symbols) this gem was
    #      declared in the Gemfile.
    #
    def select_gems(groups: nil, without_groups: nil)
      # First, select the gems that match
      result = {}
      gems.each do |name, g|
        dep_groups = g[:declared_groups] - [ :only_a_runtime_dependency_of_other_gems ]
        dep_groups = dep_groups & groups if groups
        dep_groups = dep_groups - without_groups if without_groups
        if dep_groups.any?
          result[name] ||= g
          # Pull in all (already transitive) runtime deps of a selected gem.
          g[:dependencies].each do |dep|
            result[dep] ||= gems[dep]
          end
        end
      end
      result
    end
    #
    # Get all locks from the given lockfile.
    #
    # @return Hash[String, Hash] The resulting gems, where key = gem_name, and the
    #    hash has:
    #    - version: version of the gem.
    #    - source info (:source/:git/:ref/:path)
    #    - dependencies: A list of gem names this gem has a runtime
    #      dependency on. Dependencies are transitive: if A depends on B,
    #      and B depends on C, then A has C in its :dependencies list.
    #    - development_dependencies: - A list of gem names this gem has a
    #      development dependency on. Dependencies are transitive: if A
    #      depends on B, and B depends on C, then A has C in its
    #      :development_dependencies list. development dependencies *include*
    #      runtime dependencies.
    #
    def locks
      @locks ||= begin
        # Grab all the specs from the lockfile
        locks = {}
        parsed_lockfile = Bundler::LockfileParser.new(IO.read(lockfile_path))
        parsed_lockfile.specs.each do |spec|
          # Never include bundler, it can't be bundled and doesn't put itself in
          # the lockfile correctly anyway
          next if spec.name == "bundler"
          # Skip platform-specific locks for now -- only generic (RUBY
          # platform) specs are kept (TODO make it possible to emit all locks)
          next if spec.platform && spec.platform != Gem::Platform::RUBY
          lock = lock_source_metadata(spec)
          lock[:version] = spec.version.to_s
          runtime = spec.dependencies.select { |dep| dep.type == :runtime }
          lock[:dependencies] = Set.new(runtime.map { |dep| dep.name })
          lock[:development_dependencies] = Set.new(spec.dependencies.map { |dep| dep.name })
          lock[:dependencies].delete("bundler")
          lock[:development_dependencies].delete("bundler")
          locks[spec.name] = lock
        end
        # Transitivize the deps.
        locks.each do |name, lock|
          # Not all deps were brought over (platform-specific ones) so weed them out
          lock[:dependencies] &= locks.keys
          lock[:development_dependencies] &= locks.keys
          lock[:dependencies] = transitive_dependencies(locks, name, :dependencies)
          lock[:development_dependencies] = transitive_dependencies(locks, name, :development_dependencies)
        end
        locks
      end
    end
    #
    # Get all desired gems, sans dependencies, from the gemfile.
    #
    # @param gemfile Path to the Gemfile to load
    #
    # @return Hash<String, Hash> An array of hashes where key = gem name and value
    #         has :groups (an array of symbols representing the groups the gem
    #         is in). :groups are not transitive, since we don't know the
    #         dependency tree yet.
    #
    def gem_declarations
      @gem_declarations ||= begin
        Bundler.with_clean_env do
          # Set BUNDLE_GEMFILE to the new gemfile temporarily so all bundler's things work
          # This works around some issues in bundler 1.11.2.
          # (with_clean_env restores the original ENV after the block.)
          ENV["BUNDLE_GEMFILE"] = gemfile_path
          parsed_gemfile = Bundler::Dsl.new
          parsed_gemfile.eval_gemfile(gemfile_path)
          parsed_gemfile.complete_overrides if parsed_gemfile.respond_to?(:complete_overrides)
          result = {}
          parsed_gemfile.dependencies.each do |dep|
            groups = dep.groups.empty? ? [:default] : dep.groups
            result[dep.name] = { groups: groups, platforms: dep.platforms }
          end
          result
        end
      end
    end
    private
    #
    # Given a bunch of locks (name -> { dependencies: [name,name] }) and a
    # dependency name, add its dependencies to the result transitively.
    #
    def transitive_dependencies(locks, name, dep_key, result = Set.new)
      locks[name][dep_key].each do |dep|
        # Only ever add a dep once, so we don't infinitely recurse
        if result.add?(dep)
          transitive_dependencies(locks, dep, dep_key, result)
        end
      end
      result
    end
    #
    # Get source and version metadata for the given Bundler spec (coming from a lockfile).
    #
    # @return Hash { version: <version>, git: <git>, path: <path>, source: <source>, ref: <ref> }
    #
    def lock_source_metadata(spec)
      # Copy source information from included Gemfile
      result = {}
      case spec.source
      when Bundler::Source::Rubygems
        result[:source] = spec.source.remotes.first.to_s
      when Bundler::Source::Git
        result[:git] = spec.source.uri.to_s
        result[:ref] = spec.source.revision
      when Bundler::Source::Path
        result[:path] = spec.source.path.to_s
      else
        raise "Unknown source #{spec.source} for gem #{spec.name}"
      end
      result
    end
  end
end
| 39.777494 | 168 | 0.630168 |
28232d8c7697db1302e5344292d70d4d99f30f99 | 134 | # frozen_string_literal: true
FactoryBot.define do
  # Architecture record: associated to a platform, with a random word as name.
  factory :architecture do
    platform
    name { Faker::Lorem.word }
  end
end
| 13.4 | 30 | 0.716418 |
1c96e6769acc76914ca55d17b50b82eab7bf9b18 | 11,003 | =begin
* Puppet Module : Provder: netdev
* Author : Jeremy Schulman
* File : junos_l2_interface.rb
* Version : 2012-11-07
* Platform : EX | QFX | SRX
* Description :
*
* This file contains the Junos specific code to control basic
* Layer 2 interface configuration on platforms that support
* the [edit vlans] hierarchy. L2 interfaces are assumed
* to be at [edit interface <name> unit 0 family ethernet-switching]
*
* Copyright (c) 2012 Juniper Networks. All Rights Reserved.
*
* YOU MUST ACCEPT THE TERMS OF THIS DISCLAIMER TO USE THIS SOFTWARE,
* IN ADDITION TO ANY OTHER LICENSES AND TERMS REQUIRED BY JUNIPER NETWORKS.
*
* JUNIPER IS WILLING TO MAKE THE INCLUDED SCRIPTING SOFTWARE AVAILABLE TO YOU
* ONLY UPON THE CONDITION THAT YOU ACCEPT ALL OF THE TERMS CONTAINED IN THIS
* DISCLAIMER. PLEASE READ THE TERMS AND CONDITIONS OF THIS DISCLAIMER
* CAREFULLY.
*
* THE SOFTWARE CONTAINED IN THIS FILE IS PROVIDED "AS IS." JUNIPER MAKES NO
* WARRANTIES OF ANY KIND WHATSOEVER WITH RESPECT TO SOFTWARE. ALL EXPRESS OR
* IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY WARRANTY
* OF NON-INFRINGEMENT OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR A
* PARTICULAR PURPOSE, ARE HEREBY DISCLAIMED AND EXCLUDED TO THE EXTENT
* ALLOWED BY APPLICABLE LAW.
*
* IN NO EVENT WILL JUNIPER BE LIABLE FOR ANY DIRECT OR INDIRECT DAMAGES,
* INCLUDING BUT NOT LIMITED TO LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, SPECIAL, INDIRECT, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES
* HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY ARISING OUT OF THE
* USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF JUNIPER HAS BEEN ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGES.
=end
require 'puppet/provider/junos/junos_parent'
# Junos provider for basic Layer 2 interface configuration on platforms that
# support [edit vlans]. L2 config lives at
# [edit interfaces <name> unit 0 family ethernet-switching].
class Puppet::Provider::Junos::L2Interface < Puppet::Provider::Junos

  ### ---------------------------------------------------------------
  ### triggered from Provider #exists?
  ### ---------------------------------------------------------------

  # Loads current device state into @ndev_res and reports whether the
  # L2 (family ethernet-switching) configuration exists on the device.
  def netdev_res_exists?
    self.class.init_class_vars

    return false unless (ifl_config = init_resource)

    @ndev_res[:description] = ifl_config.xpath('description').text.chomp

    fam_eth_cfg = ifl_config.xpath('family/ethernet-switching')
    return false if fam_eth_cfg.empty?

    netdev_retrieve_fam_eth_info( fam_eth_cfg )
    return true
  end

  ### ---------------------------------------------------------------
  ### called from #netdev_exists?
  ### ---------------------------------------------------------------

  # Initializes @ndev_res defaults, normalizes the manifest resource values,
  # and fetches the interface's unit-0 config from the device.
  # Returns the unit config node, or false if the interface is absent.
  def init_resource
    @ndev_res ||= NetdevJunos::Resource.new( self, "interfaces" )

    @ndev_res[:description] = ''
    @ndev_res[:vlan_tagging] = :disable
    @ndev_res[:untagged_vlan] = ''
    @ndev_res[:tagged_vlans] = []

    resource[:description] ||= default_description
    resource[:tagged_vlans] = resource[:tagged_vlans].to_a || []
    resource[:untagged_vlan] ||= ''  # if not set in manifest, it is nil
    # any tagged vlans in the manifest implies a trunk port:
    resource[:vlan_tagging] = :enable unless resource[:tagged_vlans].empty?

    ndev_config = @ndev_res.getconfig
    return false unless (ifl_config = ndev_config.xpath('//interface/unit')[0])

    @ndev_res.set_active_state( ifl_config )
    return ifl_config
  end

  def default_description
    "Puppet created netdev_l2_interface: #{resource[:name]}"
  end

  # Populates @ndev_res port-mode/vlan state from the device's
  # family ethernet-switching config node.
  def netdev_retrieve_fam_eth_info( fam_eth_cfg )
    @ndev_res[:vlan_tagging] = fam_eth_cfg.xpath('port-mode').text.chomp == 'trunk' ? :enable : :disable

    # --- access port
    if @ndev_res[:vlan_tagging] == :disable
      @ndev_res[:untagged_vlan] = fam_eth_cfg.xpath('vlan/members').text.chomp || ''
      return
    end

    # --- trunk port
    @ndev_res[:untagged_vlan] = fam_eth_cfg.xpath('native-vlan-id').text.chomp
    @ndev_res[:tagged_vlans] = fam_eth_cfg.xpath('vlan/members').collect { |v| v.text.chomp }
  end

  # Current device state: is the port a trunk?
  def is_trunk?
    @ndev_res[:vlan_tagging] == :enable
  end

  # Desired manifest state: should the port be a trunk?
  def should_trunk?
    resource[:vlan_tagging] == :enable
  end

  # True when the port-mode is changing (or the device had no prior state).
  def mode_changed?
    @ndev_res[:name].nil? or (resource[:vlan_tagging] != @ndev_res[:vlan_tagging])
  end

  ##### ------------------------------------------------------------
  ##### XML Resource Building
  ##### ------------------------------------------------------------

  # override default 'top' method to create the unit sub-interface
  def netdev_resxml_top( xml )
    xml.interface {
      xml.name resource[:name]
      xml.unit {
        xml.name '0'
        return xml
      }
    }
  end

  # override default 'edit' method to place 'dot' inside
  # the family ethernet-switching stanza
  def netdev_resxml_edit( xml )
    xml.family {
      xml.send(:'ethernet-switching') {
        return xml
      }
    }
  end

  ###
  ### :description
  ###

  # Description lives on the unit, not under family ethernet-switching,
  # so climb back up to the ancestor <unit> node before writing it.
  def xml_change_description( xml )
    par = xml.instance_variable_get(:@parent)
    Nokogiri::XML::Builder.with(par.at_xpath('ancestor::unit')) {
      |dot|
      dot.description resource[:description]
    }
  end

  ####
  #### :vlan_tagging
  ####

  def xml_change_vlan_tagging( xml )
    port_mode = should_trunk? ? 'trunk' : 'access'
    xml.send(:'port-mode', port_mode )

    # when the vlan_tagging value changes then this method
    # will trigger updates to the untagged_vlan and tagged_vlans
    # resource values as well.
    upd_untagged_vlan( xml )
    upd_tagged_vlans( xml )
  end

  ### ---------------------------------------------------------------
  ### XML:tagged_vlans
  ### ---------------------------------------------------------------

  def xml_change_tagged_vlans( xml )
    # if the mode changed, xml_change_vlan_tagging already handled this.
    return if mode_changed?
    upd_tagged_vlans( xml )
  end

  # Computes the add/delete set of trunk vlan members against device state.
  def upd_tagged_vlans( xml )
    return unless should_trunk?

    should = resource[:tagged_vlans] || []
    if should.empty?
      xml.vlan Netconf::JunosConfig::DELETE
      return
    end

    has = @ndev_res[:tagged_vlans] || []
    has = has.map(&:to_s)
    should = should.map(&:to_s)

    del = has - should
    add = should - has

    if add or del
      Puppet.debug "#{resource[:name]}: Adding VLANS: [#{add.join(',')}]" unless add.empty?
      Puppet.debug "#{resource[:name]}: Deleting VLANS: [#{del.join(',')}]" unless del.empty?
      xml.vlan {
        del.each { |v| xml.members v, Netconf::JunosConfig::DELETE }
        add.each { |v| xml.members v }
      }
    end
  end

  ### ---------------------------------------------------------------
  ### XML:untagged_vlan
  ### ---------------------------------------------------------------

  def xml_change_untagged_vlan( xml )
    # if the mode changed, xml_change_vlan_tagging already handled this.
    return if mode_changed?
    upd_untagged_vlan( xml )
  end

  def upd_untagged_vlan( xml )
    self.class.change_untagged_vlan( self, xml )
  end

  class << self

    # creating some class definitions ...
    # this is a bit complicated because we need to handle port-mode
    # change transitions; basically dealing with the fact that
    # trunk ports use 'native-vlan-id' and access ports have a
    # vlan member definition; i.e. they don't use native-vlan-id, ugh.
    # Rather than doing all this logic as if/then/else statements,
    # I've opted to using a proc jump-table technique. Lessons
    # learned from lots of embedded systems programming :-)

    def initcvar_jmptbl_untagged_vlan
      # auto-hash table
      hash = Hash.new(&(p=lambda{|h,k| h[k] = Hash.new(&p)}))

      # ------------------------------------------------------------------
      # - jump table for handling various untagged vlan change use-cases
      # ------------------------------------------------------------------
      # There are three criteria for selection:
      # | is_trunk | will_trunk | no_untg |
      # ------------------------------------------------------------------

      # - will not have untagged vlan
      hash[false][false][true] = self.method(:ac_ac_nountg)
      hash[false][true][true] = self.method(:ac_tr_nountg)
      hash[true][false][true] = self.method(:tr_ac_nountg)
      hash[true][true][true] = self.method(:tr_tr_nountg)

      # - will have untagged vlan
      hash[false][false][false] = self.method(:ac_ac_untg)
      hash[false][true][false] = self.method(:ac_tr_untg)
      hash[true][false][false] = self.method(:tr_ac_untg)
      hash[true][true][false] = self.method(:tr_tr_untg)

      hash
    end

    ### initialize the jump table once as a class variable
    ### this is called from #init_resource
    def init_class_vars
      @@untgv_jmptbl ||= initcvar_jmptbl_untagged_vlan
    end

    ### invoke the correct method from the jump table
    ### based on the three criteria to select the action
    def change_untagged_vlan( this, xml )
      proc = @@untgv_jmptbl[this.is_trunk?][this.should_trunk?][this.resource[:untagged_vlan].empty?]
      proc.call( this, xml )
    end

    ### -------------------------------------------------------------
    ### The following are all the change transition functions for
    ### each of the use-cases (named <from>_<to>_<untagged?>, where
    ### ac = access, tr = trunk)
    ### -------------------------------------------------------------

    # access -> access, no untagged vlan: drop the vlan membership.
    def ac_ac_nountg( this, xml )
      xml.vlan Netconf::JunosConfig::DELETE
    end

    # access -> trunk, no untagged vlan: delete the old access vlan member.
    def ac_tr_nountg( this, xml )
      # FIX: read the port's previous *untagged* vlan. This previously read
      # :tagged_vlans, which init_resource defaults to [] for an access port,
      # so the old access-vlan membership was never deleted on the
      # access->trunk transition. (Compare ac_tr_untg below.)
      unless (untg_vlan = this.ndev_res[:untagged_vlan]).empty?
        xml.vlan {
          xml.members untg_vlan, Netconf::JunosConfig::DELETE
        }
      end
    end

    # trunk -> access, no untagged vlan: remove native-vlan-id and members.
    def tr_ac_nountg( this, xml )
      xml.send :'native-vlan-id', Netconf::JunosConfig::DELETE
      xml.vlan( Netconf::JunosConfig::DELETE ) if this.ndev_res[:tagged_vlans]
    end

    # trunk -> trunk, no untagged vlan: just remove native-vlan-id.
    def tr_tr_nountg( this, xml )
      xml.send :'native-vlan-id', Netconf::JunosConfig::DELETE
    end

    # access -> access, with untagged vlan: replace the vlan membership.
    def ac_ac_untg( this, xml )
      xml.vlan( Netconf::JunosConfig::REPLACE ) {
        xml.members this.resource[:untagged_vlan]
      }
    end

    # access -> trunk, with untagged vlan: the old access vlan member is
    # deleted and the new untagged vlan becomes the trunk's native-vlan-id.
    def ac_tr_untg( this, xml )
      was_untg_vlan = this.ndev_res[:untagged_vlan]
      xml.vlan( Netconf::JunosConfig::REPLACE ) {
        xml.members was_untg_vlan, Netconf::JunosConfig::DELETE if was_untg_vlan
      }
      xml.send :'native-vlan-id', this.resource[:untagged_vlan]
    end

    # trunk -> access, with untagged vlan: drop native-vlan-id and set the
    # access vlan membership.
    def tr_ac_untg( this, xml )
      xml.send :'native-vlan-id', Netconf::JunosConfig::DELETE
      xml.vlan( Netconf::JunosConfig::REPLACE ) {
        xml.members this.resource[:untagged_vlan]
      }
    end

    # trunk -> trunk, with untagged vlan: update native-vlan-id.
    def tr_tr_untg( this, xml )
      xml.send :'native-vlan-id', this.resource[:untagged_vlan]
    end

  end # class methods for changing untagged_vlan
end
| 32.553254 | 104 | 0.574389 |
f863c0f999f6ec15e10f1c0c90fad870a8ca02c0 | 365 | # encoding: utf-8
require 'capybara/rspec'
require 'capybara/rails'
module TestServer
module SpecHelper
module Capybara
include ::Capybara::DSL
end
end
end
RSpec.configure do |c|
c.include TestServer::SpecHelper::Capybara
c.before(:all) do
Capybara.default_driver = :rack_test
Capybara.javascript_driver = :poltergeist
end
end
| 17.380952 | 45 | 0.720548 |
3305b1bd4260a4c58c26031ad58a7cfbd645f5a9 | 1,863 | require 'rails_helper'
describe Users::OmniauthCallbacksController, type: :controller do
describe '#twitter' do
let(:twitter_auth_hash) { OmniAuth.config.mock_auth[:twitter] }
let(:github_auth_hash) { OmniAuth.config.mock_auth[:github] }
let(:user) { build(:user) }
let(:invitation) { build(:invitation, slug: "abc123", email: '[email protected]') }
let(:other_invitation) { build(:invitation, slug: "def456", email: '[email protected]') }
before :each do
session[:invitation_slug] = invitation.slug
request.env['omniauth.auth'] = twitter_auth_hash
request.env['devise.mapping'] = Devise.mappings[:user]
allow(controller).to receive(:current_user).and_return(User.last)
end
it "allows twitter login" do
get :twitter
expect(response).to redirect_to(edit_profile_url)
end
it "adds any pending invitations to the new user record" do
pending "This is broken and hard to read intended test"
allow(User).to receive(:from_omniauth).with(twitter_auth_hash).and_return(user)
allow(Invitation).to receive(:find_by).and_return(invitation)
allow(Invitation).to receive(:where).and_return([other_invitation])
expect(other_invitation).to receive(:update_column).and_return(true)
get :twitter
expect(response).to redirect_to(events_url)
end
end
describe "GET #new" do
let(:github_auth_hash) { OmniAuth.config.mock_auth[:github] }
before :each do
request.env['omniauth.auth'] = github_auth_hash
request.env['devise.mapping'] = Devise.mappings[:user]
end
it "redirects a user if they are currently logged in" do
organizer = create(:organizer)
sign_in(organizer)
get :github
expect(response).to redirect_to(events_url)
expect(controller.current_user).to eq(organizer)
end
end
end
| 32.684211 | 91 | 0.697262 |
0857c784b416cfe7b1ae4acd72dad32e6fe0428d | 967 | # This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause this
# file to always be loaded, without a need to explicitly require it in any files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, make a
# separate helper file that requires this one and then use it only in the specs
# that actually need it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure(&:raise_errors_for_deprecations!)
| 53.722222 | 92 | 0.770424 |
26823f96c59400365354f33b6720b5535074d234 | 23,763 | # frozen_string_literal: true
require 'spec_helper'
require 'bolt_spec/bolt_server'
require 'bolt_spec/conn'
require 'bolt_spec/file_cache'
require 'bolt_server/config'
require 'bolt_server/transport_app'
require 'json'
require 'rack/test'
require 'puppet/environments'
require 'digest'
describe "BoltServer::TransportApp" do
include BoltSpec::BoltServer
include BoltSpec::Conn
include BoltSpec::FileCache
include Rack::Test::Methods
def app
moduledir = File.join(__dir__, '..', 'fixtures', 'modules')
mock_file_cache(moduledir)
config = BoltServer::Config.new(default_config)
BoltServer::TransportApp.new(config)
end
def file_data(file)
{ 'uri' => {
'path' => "/tasks/#{File.basename(file)}",
'params' => { 'param' => 'val' }
},
'filename' => File.basename(file),
'sha256' => Digest::SHA256.file(file),
'size' => File.size(file) }
end
it 'responds ok' do
get '/'
expect(last_response).to be_ok
expect(last_response.status).to eq(200)
end
context 'when raising errors' do
it 'returns non-html 404 when the endpoint is not found' do
post '/ssh/run_tasksss', JSON.generate({}), 'CONTENT_TYPE' => 'text/json'
expect(last_response).not_to be_ok
expect(last_response.status).to eq(404)
result = JSON.parse(last_response.body)
expect(result['msg']).to eq("Could not find route /ssh/run_tasksss")
expect(result['kind']).to eq("boltserver/not-found")
end
it 'returns non-html 500 when the request times out' do
get '/500_error'
expect(last_response).not_to be_ok
expect(last_response.status).to eq(500)
result = JSON.parse(last_response.body)
expect(result['msg']).to eq('500: Unknown error: Unexpected error')
expect(result['kind']).to eq('boltserver/server-error')
end
end
describe 'transport routes' do
def mock_plan_info(full_name)
module_name, _plan_name = full_name.split('::', 2)
{
'name' => full_name,
'description' => 'foo',
'parameters' => {},
'module' => "/opt/puppetlabs/puppet/modules/#{module_name}"
}
end
let(:action) { 'run_task' }
let(:result) { double(Bolt::Result, to_data: { 'status': 'test_status' }) }
before(:each) do
allow_any_instance_of(BoltServer::TransportApp)
.to receive(action.to_sym).and_return(
Bolt::ResultSet.new([result])
)
end
describe '/plans/:module_name/:plan_name' do
let(:fake_pal) { instance_double('BoltServer::PE::PAL') }
context 'with module_name::plan_name' do
let(:path) { '/plans/foo/bar?environment=production' }
let(:plan_name) { 'foo::bar' }
let(:metadata) { mock_plan_info(plan_name) }
let(:expected_response) {
{
'name' => metadata['name'],
'description' => metadata['description'],
'parameters' => metadata['parameters']
}
}
it '/plans/:module_name/:plan_name handles module::plan_name' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:get_plan_info).with(plan_name).and_return(metadata)
get(path)
resp = JSON.parse(last_response.body)
expect(resp).to eq(expected_response)
end
end
context 'with module_name' do
let(:init_plan) { '/plans/foo/init?environment=production' }
let(:plan_name) { 'foo' }
let(:metadata) { mock_plan_info(plan_name) }
let(:expected_response) {
{
'name' => metadata['name'],
'description' => metadata['description'],
'parameters' => metadata['parameters']
}
}
it '/plans/:module_name/:plan_name handles plan name = module name (init.pp) plan' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:get_plan_info).with(plan_name).and_return(metadata)
get(init_plan)
resp = JSON.parse(last_response.body)
expect(resp).to eq(expected_response)
end
end
context 'with non-existant plan' do
let(:path) { '/plans/foo/bar?environment=production' }
it 'returns 400 if an unknown plan error is thrown' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:get_plan_info).with('foo::bar').and_raise(Bolt::Error.unknown_plan('foo::bar'))
get(path)
expect(last_response.status).to eq(400)
end
end
end
describe '/plans' do
let(:fake_pal) { instance_double('BoltServer::PE::PAL') }
describe 'when metadata=false' do
let(:path) { "/plans?environment=production" }
it 'returns just the list of plan names when metadata=false' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:list_plans).and_return([['abc'], ['def']])
get(path)
metadata = JSON.parse(last_response.body)
expect(metadata).to eq([{ 'name' => 'abc' }, { 'name' => 'def' }])
end
it 'returns 400 if an environment not found error is thrown' do
# Actually creating the EnvironmentNotFound error with puppet is difficult to do without
# puppet actually loaded with settings, so just stub out the error type
stub_const("Puppet::Environments::EnvironmentNotFound", StandardError)
expect(BoltServer::PE::PAL).to receive(:new).and_raise(Puppet::Environments::EnvironmentNotFound)
get(path)
expect(last_response.status).to eq(400)
end
end
describe 'when metadata=true' do
let(:path) { '/plans?environment=production&metadata=true' }
let(:plan_name) { 'abc' }
let(:metadata) { mock_plan_info(plan_name) }
let(:expected_response) {
{
metadata['name'] => {
'name' => metadata['name'],
'description' => metadata['description'],
'parameters' => metadata['parameters']
}
}
}
it 'returns all metadata for each plan when metadata=true' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:list_plans).and_return([plan_name])
expect(fake_pal).to receive(:get_plan_info).with(plan_name).and_return(metadata)
get(path)
resp = JSON.parse(last_response.body)
expect(resp).to eq(expected_response)
end
end
end
describe '/tasks' do
let(:fake_pal) { instance_double('BoltServer::PE::PAL') }
let(:path) { "/tasks?environment=production" }
it 'returns just the list of plan names when metadata=false' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:list_tasks).and_return([%w[abc abc_description], %w[def def_description]])
get(path)
metadata = JSON.parse(last_response.body)
expect(metadata).to eq([{ 'name' => 'abc' }, { 'name' => 'def' }])
end
it 'returns 400 if an environment not found error is thrown' do
stub_const("Puppet::Environments::EnvironmentNotFound", StandardError)
expect(BoltServer::PE::PAL).to receive(:new).and_raise(Puppet::Environments::EnvironmentNotFound)
get(path)
expect(last_response.status).to eq(400)
end
end
describe '/tasks/:module_name/:task_name' do
let(:fake_pal) { instance_double('BoltServer::PE::PAL') }
context 'with module_name::task_name' do
let(:path) { '/tasks/foo/bar?environment=production' }
let(:mock_task) {
Bolt::Task.new(task_name, {}, [{ 'name' => 'bar.rb', 'path' => File.expand_path(__FILE__) }])
}
let(:task_name) { 'foo::bar' }
let(:expected_response) {
{
"metadata" => {},
"name" => "foo::bar",
"files" => [
{
"filename" => "bar.rb",
"sha256" => Digest::SHA256.hexdigest(File.read(__FILE__)),
"size_bytes" => File.size(__FILE__),
"uri" => {
"path" => "/puppet/v3/file_content/tasks/foo/bar.rb",
"params" => { "environment" => "production" }
}
}
]
}
}
it '/tasks/:module_name/:task_name handles module::task_name' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:get_task).with(task_name).and_return(mock_task)
get(path)
resp = JSON.parse(last_response.body)
expect(resp).to eq(expected_response)
end
end
context 'with module_name' do
let(:path) { '/tasks/foo/init?environment=production' }
let(:mock_task) {
Bolt::Task.new(task_name, {}, [{ 'name' => 'init.rb', 'path' => File.expand_path(__FILE__) }])
}
let(:task_name) { 'foo' }
let(:expected_response) {
{
"metadata" => {},
"name" => "foo",
"files" => [
{
"filename" => "init.rb",
"sha256" => Digest::SHA256.hexdigest(File.read(__FILE__)),
"size_bytes" => File.size(__FILE__),
"uri" => {
"path" => "/puppet/v3/file_content/tasks/foo/init.rb",
"params" => { "environment" => "production" }
}
}
]
}
}
it '/tasks/:module_name/:task_name handles task name = module name (init.rb) task' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:get_task).with(task_name).and_return(mock_task)
get(path)
resp = JSON.parse(last_response.body)
expect(resp).to eq(expected_response)
end
end
context 'with non-existant task' do
let(:path) { '/tasks/foo/bar?environment=production' }
it 'returns 400 if an unknown plan error is thrown' do
expect(BoltServer::PE::PAL).to receive(:new).and_return(fake_pal)
expect(fake_pal).to receive(:get_task).with('foo::bar').and_raise(Bolt::Error.unknown_task('foo::bar'))
get(path)
expect(last_response.status).to eq(400)
end
end
end
describe '/ssh/*' do
let(:path) { "/ssh/#{action}" }
let(:target) { conn_info('ssh') }
it 'returns a non-html 404 if the action does not exist' do
post('/ssh/not_an_action', JSON.generate({}), 'CONTENT_TYPE' => 'text/json')
expect(last_response).not_to be_ok
expect(last_response.status).to eq(404)
result = JSON.parse(last_response.body)
expect(result['kind']).to eq('boltserver/not-found')
end
it 'errors if both password and private-key-content are present' do
body = { target: {
password: 'password',
'private-key-content': 'private-key-content'
} }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).not_to be_ok
expect(last_response.status).to eq(400)
result = JSON.parse(last_response.body)
regex = %r{The property '#/target' of type object matched more than one of the required schemas}
expect(result['value']['_error']['details'].join).to match(regex)
expect(result['status']).to eq('failure')
end
it 'fails if no authorization is present' do
body = { target: {
hostname: target[:host],
user: target[:user],
port: target[:port]
} }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).not_to be_ok
expect(last_response.status).to eq(400)
result = last_response.body
expect(result).to match(%r{The property '#/target' of type object did not match any of the required schemas})
end
it 'performs the action when using a password and scrubs any stack traces' do
body = { 'target': {
'hostname': target[:host],
'user': target[:user],
'password': target[:password],
'port': target[:port]
} }
expect_any_instance_of(BoltServer::TransportApp)
.to receive(:scrub_stack_trace).with(result.to_data).and_return({})
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).to be_ok
expect(last_response.status).to eq(200)
end
it 'performs an action when using a private key and scrubs any stack traces' do
private_key = ENV['BOLT_SSH_KEY'] || Dir["spec/fixtures/keys/id_rsa"][0]
private_key_content = File.read(private_key)
body = { 'target': {
'hostname': target[:host],
'user': target[:user],
'private-key-content': private_key_content,
'port': target[:port]
} }
expect_any_instance_of(BoltServer::TransportApp)
.to receive(:scrub_stack_trace).with(result.to_data).and_return({})
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).to be_ok
expect(last_response.status).to eq(200)
end
it 'expects either a single target or a set of targets, but not both' do
single_target = {
hostname: target[:host],
user: target[:user],
password: target[:password],
port: target[:port]
}
body = { target: single_target }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(200)
body = { targets: [single_target] }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(200)
body = { target: single_target, targets: single_target }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(400)
end
end
describe '/winrm/*' do
let(:path) { "/winrm/#{action}" }
let(:target) { conn_info('winrm') }
it 'returns a non-html 404 if the action does not exist' do
post('/winrm/not_an_action', JSON.generate({}), 'CONTENT_TYPE' => 'text/json')
expect(last_response).not_to be_ok
expect(last_response.status).to eq(404)
result = JSON.parse(last_response.body)
expect(result['kind']).to eq('boltserver/not-found')
end
it 'fails if no authorization is present' do
body = { target: {
hostname: target[:host],
user: target[:user],
port: target[:port]
} }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).not_to be_ok
expect(last_response.status).to eq(400)
result = last_response.body
expect(result).to match(%r{The property '#/target' did not contain a required property of 'password'})
end
it 'fails if either port or connect-timeout is a string' do
body = { target: {
hostname: target[:host],
uaser: target[:user],
password: target[:password],
port: 'port',
'connect-timeout': 'timeout'
} }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).not_to be_ok
expect(last_response.status).to eq(400)
result = last_response.body
[
%r{The property '#/target/port' of type string did not match the following type: integer},
%r{The property '#/target/connect-timeout' of type string did not match the following type: integer}
].each do |re|
expect(result).to match(re)
end
end
it 'performs the action and scrubs any stack traces from the result' do
body = { target: {
hostname: target[:host],
user: target[:user],
password: target[:password],
port: target[:port]
} }
expect_any_instance_of(BoltServer::TransportApp)
.to receive(:scrub_stack_trace).with(result.to_data).and_return({})
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response).to be_ok
expect(last_response.status).to eq(200)
end
it 'expects either a single target or a set of targets, but not both' do
single_target = {
hostname: target[:host],
user: target[:user],
password: target[:password],
port: target[:port]
}
body = { target: single_target }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(200)
body = { targets: [single_target] }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(200)
body = { target: single_target, targets: single_target }
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(400)
end
end
end
describe 'action endpoints' do
# Helper to set the transport on a body hash, and then post to an action
# endpoint (/ssh/<action> or /winrm/<action>) Set `:multiple` to send
# a list of `targets` rather than a single `target` with the request.
def post_over_transport(transport, action, body_content, multiple: false)
path = "/#{transport}/#{action}"
target_data = conn_info(transport)
target = {
hostname: target_data[:host],
user: target_data[:user],
password: target_data[:password],
port: target_data[:port]
}
body = if multiple
body_content.merge(targets: [target])
else
body_content.merge(target: target)
end
post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
end
describe 'check_node_connections' do
it 'checks node connections over SSH', :ssh do
post_over_transport('ssh', 'check_node_connections', {}, multiple: true)
expect(last_response.status).to eq(200)
result = JSON.parse(last_response.body)
expect(result['status']).to eq('success')
end
it 'checks node connections over WinRM', :winrm do
post_over_transport('winrm', 'check_node_connections', {}, multiple: true)
expect(last_response.status).to eq(200)
result = JSON.parse(last_response.body)
expect(result['status']).to eq('success')
expect(result['result']).to be_a(Array)
expect(result['result'].length).to eq(1)
expect(result['result'].first['status']).to eq('success')
end
context 'when the checks succeed, but at least one node failed' do
let(:successful_target) {
target_data = conn_info('ssh')
{
hostname: target_data[:host],
user: target_data[:user],
password: target_data[:password],
port: target_data[:port]
}
}
let(:failed_target) {
target = successful_target.clone
target[:hostname] = 'not-a-real-host'
target
}
it 'returns 200 but reports a "failure" status', :ssh do
body = { targets: [successful_target, failed_target] }
post('/ssh/check_node_connections', JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
expect(last_response.status).to eq(200)
response_body = JSON.parse(last_response.body)
expect(response_body['status']).to eq('failure')
end
end
end
describe 'run_task' do
describe 'over SSH', :ssh do
let(:simple_ssh_task) {
{
task: { name: 'sample::echo',
metadata: {
description: 'Echo a message',
parameters: { message: 'Default message' }
},
files: [{ filename: "echo.sh", sha256: "foo",
uri: { path: 'foo', params: { environment: 'foo' } } }] },
parameters: { message: "Hello!" }
}
}
it 'runs a simple echo task', :ssh do
post_over_transport('ssh', 'run_task', simple_ssh_task)
expect(last_response).to be_ok
expect(last_response.status).to eq(200)
result = JSON.parse(last_response.body)
expect(result).to include('status' => 'success')
expect(result['value']['_output']).to match(/got passed the message: Hello!/)
end
it 'overrides host-key-check default', :ssh do
target = conn_info('ssh')
body = {
target: {
hostname: target[:host],
user: target[:user],
password: target[:password],
port: target[:port],
'host-key-check': true
},
task: { name: 'sample::echo',
metadata: {
description: 'Echo a message',
parameters: { message: 'Default message' }
},
files: [{ filename: "echo.sh", sha256: "foo",
uri: { path: 'foo', params: { environment: 'foo' } } }] },
parameters: { message: "Hello!" }
}
post('ssh/run_task', JSON.generate(body), 'CONTENT_TYPE' => 'text/json')
result = last_response.body
expect(result).to match(/Host key verification failed for localhost/)
end
it 'errors if multiple targets are supplied', :ssh do
post_over_transport('ssh', 'run_task', simple_ssh_task, multiple: true)
expect(last_response.status).to eq(400)
expect(last_response.body)
.to match(%r{The property '#/' did not contain a required property of 'target'})
expect(last_response.body)
.to match(%r{The property '#/' contains additional properties \[\\"targets\\"\]})
end
end
describe 'over WinRM' do
let(:simple_winrm_task) {
{
task: {
name: 'sample::wininput',
metadata: {
description: 'Echo a message',
input_method: 'stdin'
},
files: [{ filename: 'wininput.ps1', sha256: 'foo',
uri: { path: 'foo', params: { environment: 'foo' } } }]
},
parameters: { input: 'Hello!' }
}
}
it 'runs a simple echo task', :winrm do
post_over_transport('winrm', 'run_task', simple_winrm_task)
expect(last_response).to be_ok
expect(last_response.status).to eq(200)
result = JSON.parse(last_response.body)
expect(result).to include('status' => 'success')
expect(result['value']['_output']).to match(/INPUT.*Hello!/)
end
it 'errors if multiple targets are supplied', :winrm do
post_over_transport('winrm', 'run_task', simple_winrm_task, multiple: true)
expect(last_response.status).to eq(400)
expect(last_response.body)
.to match(%r{The property '#/' did not contain a required property of 'target'})
expect(last_response.body)
.to match(%r{The property '#/' contains additional properties \[\\"targets\\"\]})
end
end
end
end
end
| 36.004545 | 118 | 0.581703 |
5d2195e5cc8e8c6899cbd6734b4e26e06fe51f98 | 1,306 | class TestMapScreen < PM::MapScreen
attr_accessor :infinite_loop_points
start_position latitude: 35.090648651123, longitude: -82.965972900391, radius: 4
title "Gorges State Park, NC"
def get_title
self.title
end
def promotion_annotation_data
@promotion_annotation_data
end
def annotation_data
# Partial set of data from "GPS Map of Gorges State Park": http://www.hikewnc.info/maps/gorges-state-park/gps-map
@data ||= [{
longitude: -82.965972900391,
latitude: 35.090648651123,
title: "Rainbow Falls",
subtitle: "Nantahala National Forest",
},{
longitude: -82.966093558105,
latitude: 35.092520895652,
title: "Turtleback Falls",
subtitle: "Nantahala National Forest",
},{
longitude: -82.95916,
latitude: 35.07496,
title: "Windy Falls"
},{
longitude: -82.943031505056,
latitude: 35.102516828489,
title: "Upper Bearwallow Falls",
subtitle: "Gorges State Park",
},{
longitude: -82.956244328014,
latitude: 35.085548421623,
title: "Stairway Falls",
subtitle: "Gorges State Park",
}]
end
def lookup_infinite_loop
look_up_address address: "1 Infinite Loop" do |points, error|
self.infinite_loop_points = points
end
end
end
| 25.115385 | 117 | 0.662328 |
212d319f0eb87d43c9c0d91bbe75b2588d3ee7d2 | 4,967 | module Windows
module ServiceConstants
SC_MANAGER_ALL_ACCESS = 0xF003F
SC_MANAGER_CREATE_SERVICE = 0x0002
SC_MANAGER_CONNECT = 0x0001
SC_MANAGER_ENUMERATE_SERVICE = 0x0004
SC_MANAGER_LOCK = 0x0008
SC_MANAGER_MODIFY_BOOT_CONFIG = 0x0020
SC_MANAGER_QUERY_LOCK_STATUS = 0x0010
SC_STATUS_PROCESS_INFO = 0
SC_ENUM_PROCESS_INFO = 0
# Service control action types
SC_ACTION_NONE = 0
SC_ACTION_RESTART = 1
SC_ACTION_REBOOT = 2
SC_ACTION_RUN_COMMAND = 3
# Service access rights
SERVICE_ALL_ACCESS = 0xF01FF
SERVICE_CHANGE_CONFIG = 0x0002
SERVICE_ENUMERATE_DEPENDENTS = 0x0008
SERVICE_INTERROGATE = 0x0080
SERVICE_PAUSE_CONTINUE = 0x0040
SERVICE_QUERY_CONFIG = 0x0001
SERVICE_QUERY_STATUS = 0x0004
SERVICE_START = 0x0010
SERVICE_STOP = 0x0020
SERVICE_USER_DEFINED_CONTROL = 0x0100
# Service types
SERVICE_KERNEL_DRIVER = 0x00000001
SERVICE_FILE_SYSTEM_DRIVER = 0x00000002
SERVICE_ADAPTER = 0x00000004
SERVICE_RECOGNIZER_DRIVER = 0x00000008
SERVICE_WIN32_OWN_PROCESS = 0x00000010
SERVICE_WIN32_SHARE_PROCESS = 0x00000020
SERVICE_WIN32 = 0x00000030
SERVICE_INTERACTIVE_PROCESS = 0x00000100
SERVICE_DRIVER = 0x0000000B
SERVICE_TYPE_ALL = 0x0000013F
# Error control
SERVICE_ERROR_IGNORE = 0x00000000
SERVICE_ERROR_NORMAL = 0x00000001
SERVICE_ERROR_SEVERE = 0x00000002
SERVICE_ERROR_CRITICAL = 0x00000003
# Start types
SERVICE_BOOT_START = 0x00000000
SERVICE_SYSTEM_START = 0x00000001
SERVICE_AUTO_START = 0x00000002
SERVICE_DEMAND_START = 0x00000003
SERVICE_DISABLED = 0x00000004
# Service control
SERVICE_CONTROL_STOP = 0x00000001
SERVICE_CONTROL_PAUSE = 0x00000002
SERVICE_CONTROL_CONTINUE = 0x00000003
SERVICE_CONTROL_INTERROGATE = 0x00000004
SERVICE_CONTROL_SHUTDOWN = 0x00000005
SERVICE_CONTROL_PARAMCHANGE = 0x00000006
SERVICE_CONTROL_NETBINDADD = 0x00000007
SERVICE_CONTROL_NETBINDREMOVE = 0x00000008
SERVICE_CONTROL_NETBINDENABLE = 0x00000009
SERVICE_CONTROL_NETBINDDISABLE = 0x0000000A
SERVICE_CONTROL_DEVICEEVENT = 0x0000000B
SERVICE_CONTROL_HARDWAREPROFILECHANGE = 0x0000000C
SERVICE_CONTROL_POWEREVENT = 0x0000000D
SERVICE_CONTROL_SESSIONCHANGE = 0x0000000E
SERVICE_CONTROL_PRESHUTDOWN = 0x0000000F
SERVICE_CONTROL_TIMECHANGE = 0x00000010
SERVICE_CONTROL_TRIGGEREVENT = 0x00000020
# Service controls accepted
SERVICE_ACCEPT_STOP = 0x00000001
SERVICE_ACCEPT_PAUSE_CONTINUE = 0x00000002
SERVICE_ACCEPT_SHUTDOWN = 0x00000004
SERVICE_ACCEPT_PARAMCHANGE = 0x00000008
SERVICE_ACCEPT_NETBINDCHANGE = 0x00000010
SERVICE_ACCEPT_HARDWAREPROFILECHANGE = 0x00000020
SERVICE_ACCEPT_POWEREVENT = 0x00000040
SERVICE_ACCEPT_SESSIONCHANGE = 0x00000080
SERVICE_ACCEPT_PRESHUTDOWN = 0x00000100
SERVICE_ACCEPT_TIMECHANGE = 0x00000200
SERVICE_ACCEPT_TRIGGEREVENT = 0x00000400
# Service states
SERVICE_ACTIVE = 0x00000001
SERVICE_INACTIVE = 0x00000002
SERVICE_STATE_ALL = 0x00000003
# Service current states
SERVICE_STOPPED = 0x00000001
SERVICE_START_PENDING = 0x00000002
SERVICE_STOP_PENDING = 0x00000003
SERVICE_RUNNING = 0x00000004
SERVICE_CONTINUE_PENDING = 0x00000005
SERVICE_PAUSE_PENDING = 0x00000006
SERVICE_PAUSED = 0x00000007
# Info levels
SERVICE_CONFIG_DESCRIPTION = 1
SERVICE_CONFIG_FAILURE_ACTIONS = 2
SERVICE_CONFIG_DELAYED_AUTO_START_INFO = 3
SERVICE_CONFIG_FAILURE_ACTIONS_FLAG = 4
SERVICE_CONFIG_SERVICE_SID_INFO = 5
SERVICE_CONFIG_REQUIRED_PRIVILEGES_INFO = 6
SERVICE_CONFIG_PRESHUTDOWN_INFO = 7
# Configuration
SERVICE_NO_CHANGE = 0xffffffff
# Misc
WAIT_OBJECT_0 = 0
WAIT_TIMEOUT = 0x00000102
INFINITE = 0xFFFFFFFF
IDLE_CONTROL_CODE = 0
DELETE = 0x00010000
FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000
FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200
NO_ERROR = 0
SE_PRIVILEGE_ENABLED = 0x00000002
TOKEN_ADJUST_PRIVILEGES = 0x0020
TOKEN_QUERY = 0x0008
# Errors
ERROR_INSUFFICIENT_BUFFER = 122
ERROR_MORE_DATA = 234
ERROR_FILE_NOT_FOUND = 2
ERROR_RESOURCE_TYPE_NOT_FOUND = 1813
ERROR_RESOURCE_NAME_NOT_FOUND = 1814
WAIT_FAILED = 0xFFFFFFFF
end
end
| 34.493056 | 55 | 0.674854 |
01bcdeac9775c353329ad3302ac913aa77ca5134 | 551 | # == Schema Information
#
# Table name: provider_item_images
#
# id :integer not null, primary key
# provider_item_id :integer not null
# imagen :string not null
# created_at :datetime not null
# updated_at :datetime not null
#
class ProviderItemImage < ActiveRecord::Base
has_paper_trail
belongs_to :provider_item
mount_uploader :imagen, ProviderItemImageUploader
validates :imagen,
presence: true,
if: "imagen_cache.blank?"
end
| 23.956522 | 59 | 0.618875 |
f81620a338718f6285e0822728a59143c4a8a17b | 478 | module Worker
class EmailNotification
def process(payload, metadata, delivery_info)
payload.symbolize_keys!
set_locale(payload)
mailer = payload[:mailer_class].constantize
action = payload[:method]
args = payload[:args]
message = mailer.send(:new, action, *args).message
message.deliver
end
private
def set_locale(payload)
locale = payload[:locale]
I18n.locale = locale if locale
end
end
end
| 19.12 | 56 | 0.654812 |
1a341bdbdebb8daedd11537e9711c9b5080bcbc8 | 1,025 | cask "amethyst" do
if MacOS.version <= :el_capitan
version "0.10.1"
sha256 "9fd1ac2cfb8159b2945a4482046ee6d365353df617f4edbabc4e8cadc448c1e7"
url "https://ianyh.com/amethyst/versions/Amethyst-#{version}.zip"
else
version "0.15.5"
sha256 "a3d39e9c36ff13ac6f7e0c656c741acd285124ef53a03264fe03efc5906ce683"
url "https://github.com/ianyh/Amethyst/releases/download/v#{version}/Amethyst.zip",
verified: "github.com/ianyh/Amethyst/"
end
name "Amethyst"
desc "Automatic tiling window manager similar to xmonad"
homepage "https://ianyh.com/amethyst/"
livecheck do
url :url
strategy :github_latest
end
auto_updates true
app "Amethyst.app"
zap trash: [
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.amethyst.amethyst.sfl*",
"~/Library/Caches/com.amethyst.Amethyst",
"~/Library/Cookies/com.amethyst.Amethyst.binarycookies",
"~/Library/Preferences/com.amethyst.Amethyst.plist",
]
end
| 31.060606 | 142 | 0.742439 |
79248fcd40fca434a49d609567978b2289c9c40a | 1,110 | # frozen_string_literal: true
module Curator
module DescriptiveFieldSets
extend Curator::NamespaceAccessor
IDENTIFIER_TYPES = %w(local-accession local-other local-call local-barcode iiif-manifest internet-archive isbn ismn
isrc issn issue-number lccn matrix-number music-plate music-publisher sici uri videorecording uri-preview).freeze
NOTE_TYPES = ['date', 'language', 'acquisition', 'ownership', 'funding', 'biographical/historical',
'citation/reference', 'preferred citation', 'bibliography', 'exhibitions', 'publications',
'creation/production credits', 'performers', 'physical description', 'venue', 'arrangement',
'statement of responsibility'].freeze
LOCAL_ORIGINAL_IDENTIFIER_TYPES = {
'internet-archive' => 'barcode',
'local-barcode' => 'barcode',
'local-accession' => 'id_local-accession',
'local-other' => 'id_local-other'
}.freeze
namespace_klass_accessors :cartographic, :date, :identifier, :note, :publication, :related, :subject, :title_set, :title
end
end
| 44.4 | 139 | 0.684685 |
4a8987be12357b15d35a4b398237e36773071dee | 987 | # frozen_string_literal: true
module UiRules
class BinLoadPurposeRule < Base
def generate_rules
@repo = RawMaterialsApp::BinLoadRepo.new
make_form_object
apply_form_values
common_values_for_fields common_fields
set_show_fields if %i[show reopen].include? @mode
form_name 'bin_load_purpose'
end
def set_show_fields
fields[:purpose_code] = { renderer: :label }
fields[:description] = { renderer: :label }
fields[:active] = { renderer: :label, as_boolean: true }
end
def common_fields
{
purpose_code: { required: true },
description: {}
}
end
def make_form_object
if @mode == :new
make_new_form_object
return
end
@form_object = @repo.find_bin_load_purpose(@options[:id])
end
def make_new_form_object
@form_object = OpenStruct.new(purpose_code: nil,
description: nil)
end
end
end
| 21.933333 | 63 | 0.625127 |
abee7002aab9a3bba1f0cdf1468e00c657849491 | 187 | class AddUserAndPostRefToLikes < ActiveRecord::Migration[5.2]
def change
add_reference :likes, :user, foreign_key: true
add_reference :likes, :post, foreign_key: true
end
end
| 26.714286 | 61 | 0.754011 |
bf1f2bae7daef66d6a1b9936f810bd6dbcec0239 | 10,848 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::Ansi2html do
subject { described_class }
it "prints non-ansi as-is" do
expect(convert_html("Hello")).to eq('<span>Hello</span>')
end
it "strips non-color-changing control sequences" do
expect(convert_html("Hello \e[2Kworld")).to eq('<span>Hello world</span>')
end
it "prints simply red" do
expect(convert_html("\e[31mHello\e[0m")).to eq('<span class="term-fg-red">Hello</span>')
end
it "prints simply red without trailing reset" do
expect(convert_html("\e[31mHello")).to eq('<span class="term-fg-red">Hello</span>')
end
it "prints simply yellow" do
expect(convert_html("\e[33mHello\e[0m")).to eq('<span class="term-fg-yellow">Hello</span>')
end
it "prints default on blue" do
expect(convert_html("\e[39;44mHello")).to eq('<span class="term-bg-blue">Hello</span>')
end
it "prints red on blue" do
expect(convert_html("\e[31;44mHello")).to eq('<span class="term-fg-red term-bg-blue">Hello</span>')
end
it "resets colors after red on blue" do
expect(convert_html("\e[31;44mHello\e[0m world")).to eq('<span class="term-fg-red term-bg-blue">Hello</span><span> world</span>')
end
it "performs color change from red/blue to yellow/blue" do
expect(convert_html("\e[31;44mHello \e[33mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-blue">world</span>')
end
it "performs color change from red/blue to yellow/green" do
expect(convert_html("\e[31;44mHello \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-green">world</span>')
end
it "performs color change from red/blue to reset to yellow/green" do
expect(convert_html("\e[31;44mHello\e[0m \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello</span><span> </span><span class="term-fg-yellow term-bg-green">world</span>')
end
it "ignores unsupported codes" do
expect(convert_html("\e[51mHello\e[0m")).to eq('<span>Hello</span>')
end
it "prints light red" do
expect(convert_html("\e[91mHello\e[0m")).to eq('<span class="term-fg-l-red">Hello</span>')
end
it "prints default on light red" do
expect(convert_html("\e[101mHello\e[0m")).to eq('<span class="term-bg-l-red">Hello</span>')
end
it "performs color change from red/blue to default/blue" do
expect(convert_html("\e[31;44mHello \e[39mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>')
end
it "performs color change from light red/blue to default/blue" do
expect(convert_html("\e[91;44mHello \e[39mworld")).to eq('<span class="term-fg-l-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>')
end
it "prints bold text" do
expect(convert_html("\e[1mHello")).to eq('<span class="term-bold">Hello</span>')
end
it "resets bold text" do
expect(convert_html("\e[1mHello\e[21m world")).to eq('<span class="term-bold">Hello</span><span> world</span>')
expect(convert_html("\e[1mHello\e[22m world")).to eq('<span class="term-bold">Hello</span><span> world</span>')
end
it "prints italic text" do
expect(convert_html("\e[3mHello")).to eq('<span class="term-italic">Hello</span>')
end
it "resets italic text" do
expect(convert_html("\e[3mHello\e[23m world")).to eq('<span class="term-italic">Hello</span><span> world</span>')
end
it "prints underlined text" do
expect(convert_html("\e[4mHello")).to eq('<span class="term-underline">Hello</span>')
end
it "resets underlined text" do
expect(convert_html("\e[4mHello\e[24m world")).to eq('<span class="term-underline">Hello</span><span> world</span>')
end
it "prints concealed text" do
expect(convert_html("\e[8mHello")).to eq('<span class="term-conceal">Hello</span>')
end
it "resets concealed text" do
expect(convert_html("\e[8mHello\e[28m world")).to eq('<span class="term-conceal">Hello</span><span> world</span>')
end
it "prints crossed-out text" do
expect(convert_html("\e[9mHello")).to eq('<span class="term-cross">Hello</span>')
end
it "resets crossed-out text" do
expect(convert_html("\e[9mHello\e[29m world")).to eq('<span class="term-cross">Hello</span><span> world</span>')
end
it "can print 256 xterm fg colors" do
expect(convert_html("\e[38;5;16mHello")).to eq('<span class="xterm-fg-16">Hello</span>')
end
it "can print 256 xterm fg colors on normal magenta background" do
expect(convert_html("\e[38;5;16;45mHello")).to eq('<span class="xterm-fg-16 term-bg-magenta">Hello</span>')
end
it "can print 256 xterm bg colors" do
expect(convert_html("\e[48;5;240mHello")).to eq('<span class="xterm-bg-240">Hello</span>')
end
it "can print 256 xterm fg bold colors" do
expect(convert_html("\e[38;5;16;1mHello")).to eq('<span class="xterm-fg-16 term-bold">Hello</span>')
end
it "can print 256 xterm bg colors on normal magenta foreground" do
expect(convert_html("\e[48;5;16;35mHello")).to eq('<span class="term-fg-magenta xterm-bg-16">Hello</span>')
end
it "prints bold colored text vividly" do
expect(convert_html("\e[1;31mHello\e[0m")).to eq('<span class="term-fg-l-red term-bold">Hello</span>')
end
it "prints bold light colored text correctly" do
expect(convert_html("\e[1;91mHello\e[0m")).to eq('<span class="term-fg-l-red term-bold">Hello</span>')
end
it "prints <" do
expect(convert_html("<")).to eq('<span><</span>')
end
it "replaces newlines with line break tags" do
expect(convert_html("\n")).to eq('<span><br/></span>')
end
it "groups carriage returns with newlines" do
expect(convert_html("\r\n")).to eq('<span><br/></span>')
end
describe "incremental update" do
shared_examples 'stateable converter' do
let(:pass1_stream) { StringIO.new(pre_text) }
let(:pass2_stream) { StringIO.new(pre_text + text) }
let(:pass1) { subject.convert(pass1_stream) }
let(:pass2) { subject.convert(pass2_stream, pass1.state) }
it "to returns html to append" do
expect(pass2.append).to be_truthy
expect(pass2.html).to eq(html)
expect(pass1.html + pass2.html).to eq(pre_html + html)
end
end
context "with split word" do
let(:pre_text) { "\e[1mHello" }
let(:pre_html) { "<span class=\"term-bold\">Hello</span>" }
let(:text) { "\e[1mWorld" }
let(:html) { "<span class=\"term-bold\">World</span>" }
it_behaves_like 'stateable converter'
end
context "with split sequence" do
let(:pre_text) { "\e[1m" }
let(:pre_html) { "" }
let(:text) { "Hello" }
let(:html) { "<span class=\"term-bold\">Hello</span>" }
it_behaves_like 'stateable converter'
end
context "with partial sequence" do
let(:pre_text) { "Hello\e" }
let(:pre_html) { "<span>Hello</span>" }
let(:text) { "[1m World" }
let(:html) { "<span class=\"term-bold\"> World</span>" }
it_behaves_like 'stateable converter'
end
context 'with new line' do
let(:pre_text) { "Hello\r" }
let(:pre_html) { "<span>Hello\r</span>" }
let(:text) { "\nWorld" }
let(:html) { "<span><br/>World</span>" }
it_behaves_like 'stateable converter'
end
end
context "with section markers" do
let(:section_name) { 'test_section' }
let(:section_start_time) { Time.new(2017, 9, 20).utc }
let(:section_duration) { 3.seconds }
let(:section_end_time) { section_start_time + section_duration }
let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K"}
let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K"}
let(:section_start_html) do
'<div class="section-start"' \
" data-timestamp=\"#{section_start_time.to_i}\" data-section=\"#{class_name(section_name)}\"" \
' role="button"></div>'
end
let(:section_end_html) do
"<div class=\"section-end\" data-section=\"#{class_name(section_name)}\"></div>"
end
shared_examples 'forbidden char in section_name' do
it 'ignores sections' do
text = "#{section_start}Some text#{section_end}"
class_name_start = section_start.gsub("\033[0K", '').gsub('<', '<')
class_name_end = section_end.gsub("\033[0K", '').gsub('<', '<')
html = %{<span>#{class_name_start}Some text#{class_name_end}</span>}
expect(convert_html(text)).to eq(html)
end
end
shared_examples 'a legit section' do
let(:text) { "#{section_start}Some text#{section_end}" }
it 'prints light red' do
text = "#{section_start}\e[91mHello\e[0m\nLine 1\nLine 2\nLine 3\n#{section_end}"
header = %{<span class="term-fg-l-red section section-header js-s-#{class_name(section_name)}">Hello</span>}
line_break = %{<span class="section section-header js-s-#{class_name(section_name)}"><br/></span>}
output_line = %{<span class="section line js-s-#{class_name(section_name)}">Line 1<br/>Line 2<br/>Line 3<br/></span>}
html = "#{section_start_html}#{header}#{line_break}#{output_line}#{section_end_html}"
expect(convert_html(text)).to eq(html)
end
it 'begins with a section_start html marker' do
expect(convert_html(text)).to start_with(section_start_html)
end
it 'ends with a section_end html marker' do
expect(convert_html(text)).to end_with(section_end_html)
end
end
it_behaves_like 'a legit section'
context 'section name includes $' do
let(:section_name) { 'my_$ection'}
it_behaves_like 'forbidden char in section_name'
end
context 'section name includes <' do
let(:section_name) { '<a_tag>'}
it_behaves_like 'forbidden char in section_name'
end
context 'section name contains .-_' do
let(:section_name) { 'a.Legit-SeCtIoN_namE' }
it_behaves_like 'a legit section'
end
it 'do not allow XSS injections' do
text = "#{section_start}section_end:1:2<script>alert('XSS Hack!');</script>#{section_end}"
expect(convert_html(text)).not_to include('<script>')
end
end
describe "truncates" do
let(:text) { "Hello World" }
let(:stream) { StringIO.new(text) }
let(:subject) { described_class.convert(stream) }
before do
stream.seek(3, IO::SEEK_SET)
end
it "returns truncated output" do
expect(subject.truncated).to be_truthy
end
it "does not append output" do
expect(subject.append).to be_falsey
end
end
def convert_html(data)
stream = StringIO.new(data)
subject.convert(stream).html
end
def class_name(section)
subject::Converter.new.section_to_class_name(section)
end
end
| 35.220779 | 192 | 0.655697 |
3966b646829b93b6b14e26ead28991290d891024 | 1,972 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "git_fetcher"
s.version = "0.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Yuri Kovalov"]
s.date = "2013-11-14"
s.description = "Perform git fetch on all remote sources in multiple git repos."
s.email = "[email protected]"
s.executables = ["git-fetcher"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"bin/git-fetcher",
"git_fetcher.gemspec",
"lib/git_fetcher.rb",
"lib/git_fetcher/runner.rb",
"lib/git_fetcher/version.rb",
"test/helper.rb",
"test/test_git_fetcher.rb"
]
s.homepage = "http://blog.yurikoval.com/git_fetcher"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.7"
s.summary = "Run git fetch on all your projects."
s.test_files = ["test/helper.rb", "test/test_git_fetcher.rb"]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, "~> 6.2")
s.add_development_dependency(%q<bundler>, ["~> 1.0"])
s.add_development_dependency(%q<jeweler>, "~> 2.3.9")
else
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, "~> 6.2")
s.add_dependency(%q<bundler>, ["~> 1.0"])
s.add_dependency(%q<jeweler>, "~> 2.3.9")
end
else
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, "~> 6.2")
s.add_dependency(%q<bundler>, ["~> 1.0"])
s.add_dependency(%q<jeweler>, "~> 2.3.9")
end
end
| 30.338462 | 105 | 0.625254 |
ff9e2403754be180e7157a853f17a486fe2375c2 | 253 | class User < ActiveRecord::Base
has_secure_password
has_many :donations
has_many :charities
validates :username, presence: true, uniqueness: true, length: {minimum: 5}
validates :password, presence: true, length: {minimum: 6}
end | 28.111111 | 79 | 0.715415 |
f84b618060b676b5b9998f64a0e818249d430ce0 | 3,737 | # frozen_string_literal: true
describe 'GraphQL API Query' do
describe 'searchEpisodes' do
let!(:work) { create(:work, :with_current_season) }
let!(:episode1) { create(:episode, work: work, sort_number: 1) }
let!(:episode2) { create(:episode, work: work, sort_number: 3) }
let!(:episode3) { create(:episode, work: work, sort_number: 2) }
context 'when `anikutoIds` argument is specified' do
let(:result) do
query_string = <<~QUERY
query {
searchEpisodes(anikutoIds: [#{episode1.id}]) {
edges {
node {
anikutoId
title
}
}
}
}
QUERY
res = Beta::AnikutoSchema.execute(query_string)
pp(res) if res['errors']
res
end
it 'shows episode' do
expect(result.dig('data', 'searchEpisodes', 'edges')).to match_array(
[
{
'node' => {
'anikutoId' => episode1.id,
'title' => episode1.title
}
}
]
)
end
end
context 'when `orderBy` argument is specified' do
let(:result) do
query_string = <<~QUERY
query {
searchEpisodes(orderBy: { field: SORT_NUMBER, direction: DESC }) {
edges {
node {
anikutoId
title
sortNumber
}
}
}
}
QUERY
res = Beta::AnikutoSchema.execute(query_string)
pp(res) if res['errors']
res
end
it 'shows ordered episodes' do
expect(result.dig('data', 'searchEpisodes', 'edges')).to match_array(
[
{
'node' => {
'anikutoId' => episode2.id,
'title' => episode2.title,
'sortNumber' => 3
}
},
{
'node' => {
'anikutoId' => episode3.id,
'title' => episode3.title,
'sortNumber' => 2
}
},
{
'node' => {
'anikutoId' => episode1.id,
'title' => episode1.title,
'sortNumber' => 1
}
}
]
)
end
end
context 'when `recodes` are fetched' do
let!(:record) { create(:episode_record, episode: episode1) }
let(:result) do
query_string = <<~QUERY
query {
searchEpisodes(orderBy: { field: SORT_NUMBER, direction: ASC }, first: 1) {
edges {
node {
anikutoId
records {
edges {
node {
anikutoId
comment
}
}
}
}
}
}
}
QUERY
res = Beta::AnikutoSchema.execute(query_string)
pp(res) if res['errors']
res
end
it 'shows records' do
expect(result.dig('data', 'searchEpisodes', 'edges')).to match_array(
[
{
'node' => {
'anikutoId' => episode1.id,
'records' => {
'edges' => [
{
'node' => {
'anikutoId' => record.id,
'comment' => record.body
}
}
]
}
}
}
]
)
end
end
end
end
| 25.59589 | 87 | 0.385871 |
2651403a0fe085ceed5df4daccf1dce3a8f84143 | 38 | module Qcloud
VERSION = "0.1.0"
end
| 9.5 | 19 | 0.657895 |
33a283187d8f57b2d4748b0d7020bf2ef086d5fb | 2,353 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
host = 'localhost:3000'
config.action_mailer.default_url_options = { host: host, protocol: 'http' }
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.119403 | 87 | 0.759881 |
610b05523aa5f1789e862685cf0dae8fd7d2e0f5 | 1,668 | require 'spec_helper'
RSpec.describe "Creates a new project with default configuration" do
before(:all) do
drop_dummy_database
remove_project_directory
run_kerosene
setup_app_dependencies
end
it "uses custom Gemfile" do
gemfile_file = IO.read("#{project_path}/Gemfile")
expect(gemfile_file).to match(
/^ruby '#{Kerosene::RUBY_VERSION}'$/,
)
expect(gemfile_file).to match(
/^gem 'autoprefixer-rails'$/,
)
expect(gemfile_file).to match(
/^gem 'sidekiq'$/,
)
expect(gemfile_file).to match(
/^gem 'rails', '#{Kerosene::RAILS_VERSION}'$/,
)
end
it "adds bin/setup file" do
expect(File).to exist("#{project_path}/bin/setup")
end
it "makes bin/setup executable" do
pending('fix test')
expect("bin/setup").to be_executable
end
it "creates .ruby-version from Kerosene .ruby-version" do
ruby_version_file = IO.read("#{project_path}/.ruby-version")
expect(ruby_version_file).to eq "#{RUBY_VERSION}\n"
end
it "doesn't generate test directory" do
expect(File).not_to exist("#{project_path}/test")
end
it "adds sassc-rails" do
gemfile = read_project_file("Gemfile")
expect(gemfile).to match(/sassc-rails/)
end
def development_config
@_development_config ||=
read_project_file %w(config environments development.rb)
end
def test_config
@_test_config ||= read_project_file %w(config environments test.rb)
end
def production_config
@_production_config ||=
read_project_file %w(config environments production.rb)
end
def read_project_file(path)
IO.read(File.join(project_path, *path))
end
end
| 23.828571 | 71 | 0.684652 |
1dfbf551eca7d96ec1e7887f81c0220372583261 | 1,301 | class Nanomsgxx < Formula
desc "Nanomsg binding for C++11"
homepage "https://achille-roussel.github.io/nanomsgxx/doc/nanomsgxx.7.html"
url "https://github.com/achille-roussel/nanomsgxx/archive/0.2.tar.gz"
sha256 "116ad531b512d60ea75ef21f55fd9d31c00b172775548958e5e7d4edaeeedbaa"
revision 1
bottle do
cellar :any
sha256 "b35ef1c194aea9a8b1c59495dadec535d748ad21003843caf1d520743d4e6a88" => :high_sierra
sha256 "31944634bba1c194586658fd0d7ab9bc5c2564f334a9fbbea3d1af57dc43ef55" => :sierra
sha256 "e70ca4633486bd83259989bf62041e5e140401fbecc7bb4e855375229b016312" => :el_capitan
end
depends_on "pkg-config" => :build
depends_on "python" => :build if MacOS.version <= :snow_leopard
depends_on "nanomsg"
def install
args = %W[
--static
--shared
--prefix=#{prefix}
]
system "python", "./waf", "configure", *args
system "python", "./waf", "build"
system "python", "./waf", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <iostream>
int main(int argc, char **argv) {
std::cout << "Hello Nanomsgxx!" << std::endl;
}
EOS
system ENV.cxx, "-std=c++11", "-L#{lib}", "-lnnxx", "test.cpp"
assert_equal "Hello Nanomsgxx!\n", shell_output("#{testpath}/a.out")
end
end
| 28.911111 | 93 | 0.679477 |
392b4047dfd10bc9494cc8d909d53f0794835e21 | 94 | require 'test_helper'
class QuickComplimentsComplimentsHelperTest < ActionView::TestCase
end
| 18.8 | 66 | 0.861702 |
5d62be34de188d5f232642b692bba1d131e5e979 | 1,778 | #--
# Copyright (c) 2011 Scott Steadman, Michael Berkovich, Geni Inc
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
class Tr8n::IpAddress
def self.non_routable_networks
@non_routable_networks ||= [
Tr8n::IpAddress.new('10.0.0.0/8'),
Tr8n::IpAddress.new('127.0.0.0/8'),
Tr8n::IpAddress.new('172.16.0.0/12'),
Tr8n::IpAddress.new('192.168.0.0/16'),
]
end
def self.routable?(ip)
not non_routable?(ip)
end
def self.non_routable?(ip)
return true if ip.blank?
ip = new(ip.to_s) unless ip.is_a?(Tr8n::IpAddress)
ip.non_routable?
rescue ArgumentError
return true
end
def non_routable?
self.class.non_routable_networks.each {|network| return true if network.include?(self)}
false
end
end
| 33.54717 | 91 | 0.730034 |
ffe7ed57a09f634ff2b2593fe3012352e3fea6ee | 2,620 | # frozen_string_literal: true
require "project_types/script/test_helper"
describe Script::Layers::Application::BuildScript do
include TestHelpers::FakeFS
describe ".call" do
let(:language) { "assemblyscript" }
let(:extension_point_type) { "discount" }
let(:script_name) { "name" }
let(:op_failed_msg) { "msg" }
let(:content) { "content" }
let(:compiled_type) { "wasm" }
let(:metadata) { Script::Layers::Domain::Metadata.new("1", "0", false) }
let(:config_ui) { Script::Layers::Domain::ConfigUi.new(filename: "filename", content: "content") }
let(:task_runner) { stub(compiled_type: compiled_type, metadata: metadata) }
let(:script_project) { stub }
subject do
Script::Layers::Application::BuildScript.call(
ctx: @context,
task_runner: task_runner,
script_project: script_project,
config_ui: config_ui
)
end
describe "when build succeeds" do
it "should return normally" do
CLI::UI::Frame.expects(:with_frame_color_override).never
task_runner.expects(:build).returns(content)
Script::Layers::Infrastructure::PushPackageRepository.any_instance.expects(:create_push_package).with(
script_project: script_project,
script_content: content,
compiled_type: "wasm",
metadata: metadata,
config_ui: config_ui
)
capture_io { subject }
end
end
describe "when build raises" do
it "should output message and raise BuildError" do
err_msg = "some error message"
CLI::UI::Frame.expects(:with_frame_color_override).yields.once
task_runner.expects(:build).returns(content)
Script::Layers::Infrastructure::PushPackageRepository
.any_instance
.expects(:create_push_package)
.raises(err_msg)
io = capture_io do
assert_raises(Script::Layers::Infrastructure::Errors::BuildError) { subject }
end
output = io.join
assert_match(err_msg, output)
end
[
Script::Layers::Infrastructure::Errors::InvalidBuildScriptError,
Script::Layers::Infrastructure::Errors::BuildScriptNotFoundError,
Script::Layers::Infrastructure::Errors::WebAssemblyBinaryNotFoundError,
].each do |e|
it "it should re-raise #{e} when the raised error is #{e}" do
CLI::UI::Frame.expects(:with_frame_color_override).yields.once
task_runner.expects(:build).raises(e)
capture_io do
assert_raises(e) { subject }
end
end
end
end
end
end
| 34.025974 | 110 | 0.64771 |
1cf5e920641ac98e7fa4ff973b5cbc1875b4d026 | 1,344 | class Ey::Core::Client
class Real
def get_users(params={})
query = Ey::Core.paging_parameters(params)
url = params.delete("url")
request(
:params => params,
:path => "/users",
:query => query,
:url => url,
)
end
end # Real
class Mock
def get_users(params={})
resources = if url = params.delete("url")
if account_id = path_params(url)["accounts"]
account = self.find(:accounts, account_id)
if url.index("/owners")
account[:account_owners].inject({}){|r,id| r.merge(id => self.data[:users][id])}
elsif url.index("/users")
account[:account_users].inject({}){|r,id| r.merge(id => self.data[:users][id])}
else
raise "Mock doesn't know how to handle url: #{url}"
end
else []
end
else
self.data[:users]
end
headers, users_page = search_and_page(params, :users, search_keys: %w[name email first_name last_name], resources: resources)
response(
:body => {"users" => users_page},
:headers => headers
)
end
end # Mock
end
| 31.255814 | 131 | 0.473958 |
b9919095a06fbe6e47ddbe83630cb97d15292c6f | 7,467 | require 'spreedly/common'
require 'httparty'
raise "Mock Spreedly already required!" if defined?(Spreedly::MOCK)
=begin rdoc
Provides a convenient wrapper around the http://spreedly.com API.
Instead of mucking around with http you can just Spreedly.configure
and Spreedly::Subscriber.find. Much of the functionality is hung off
of the Spreedly::Subscriber class, and there's also a
Spreedly::SubscriptionPlan class.
One of the goals of this wrapper is to keep your tests fast while
also keeping your app working. It does this by providing a drop-in
replacement for the real Spreedly functionality that skips the
network and gives you a simple (some might call it stupid)
implementation that will work for 90% of your tests. At least we
hope so.
Help us make the mock work better by telling us when it doesn't work
so we can improve it. Thanks!
==Example mock usage:
if ENV["SPREEDLY"] == "REAL"
require 'spreedly'
else
require 'spreedly/mock'
end
=end
module Spreedly
REAL = "real" # :nodoc:
include HTTParty
headers 'Accept' => 'text/xml'
headers 'Content-Type' => 'text/xml'
format :xml
# Call this before you start using the API to set things up.
def self.configure(site_name, token)
base_uri "https://spreedly.com/api/v4/#{site_name}"
basic_auth token, 'X'
@site_name = site_name
end
def self.site_name # :nodoc:
@site_name
end
def self.to_xml_params(hash) # :nodoc:
hash.collect do |key, value|
tag = key.to_s.tr('_', '-')
result = "<#{tag}>"
if value.is_a?(Hash)
result << to_xml_params(value)
else
result << value.to_s
end
result << "</#{tag}>"
result
end.join('')
end
class Resource # :nodoc: all
def initialize(data)
@data = data
end
def id
@data["id"]
end
def method_missing(method, *args, &block)
if method.to_s =~ /\?$/
send(method.to_s[0..-2])
elsif @data.include?(method.to_s)
@data[method.to_s]
else
super
end
end
end
class Subscriber < Resource
# This will DELETE all the subscribers from the site.
#
# Only works for test sites (enforced on the Spreedly side).
def self.wipe!
Spreedly.delete('/subscribers.xml')
end
# This will DELETE individual subscribers from the site. Pass in the customer_id.
#
# Only works for test sites (enforced on the Spreedly side).
def self.delete!(id)
Spreedly.delete("/subscribers/#{id}.xml")
end
# Creates a new subscriber on Spreedly. The subscriber will NOT
# be active - they have to pay or you have to comp them for that
# to happen.
#
# Usage:
# Spreedly.Subscriber.create!(id, email)
# Spreedly.Subscriber.create!(id, email, screen_name)
# Spreedly.Subscriber.create!(id, :email => email, :screen_name => screen_name)
# Spreedly.Subscriber.create!(id, email, screen_name, :billing_first_name => first_name)
def self.create!(id, *args)
optional_attrs = args.last.is_a?(::Hash) ? args.pop : {}
email, screen_name = args
subscriber = {:customer_id => id, :email => email, :screen_name => screen_name}.merge(optional_attrs)
result = Spreedly.post('/subscribers.xml', :body => Spreedly.to_xml_params(:subscriber => subscriber))
case result.code.to_s
when /2../
new(result['subscriber'])
when '403'
raise "Could not create subscriber: already exists."
when '422'
errors = [*result['errors']].collect{|e| e.last}
raise "Could not create subscriber: #{errors.join(', ')}"
else
raise "Could not create subscriber: result code #{result.code}."
end
end
# Looks a subscriber up by id.
def self.find(id)
xml = Spreedly.get("/subscribers/#{id}.xml")
(xml.nil? || xml.empty? ? nil : new(xml['subscriber']))
end
# Returns all the subscribers in your site.
def self.all
Spreedly.get('/subscribers.xml')['subscribers'].collect{|data| new(data)}
end
# Spreedly calls your id for the user the "customer id". This
# gives you a handy alias so you can just call it "id".
def id
customer_id
end
# Allows you to give a complimentary subscription (if the
# subscriber is inactive) or a complimentary time extension (if
# the subscriber is active). Automatically figures out which
# to do.
#
# Note: units must be one of "days" or "months" (Spreedly
# enforced).
def comp(quantity, units, feature_level=nil)
params = {:duration_quantity => quantity, :duration_units => units}
params[:feature_level] = feature_level if feature_level
raise "Feature level is required to comp an inactive subscriber" if !active? and !feature_level
endpoint = (active? ? "complimentary_time_extensions" : "complimentary_subscriptions")
result = Spreedly.post("/subscribers/#{id}/#{endpoint}.xml", :body => Spreedly.to_xml_params(endpoint[0..-2] => params))
case result.code.to_s
when /2../
when '404'
raise "Could not comp subscriber: no longer exists."
when '422'
raise "Could not comp subscriber: validation failed."
when '403'
raise "Could not comp subscriber: invalid comp type (#{endpoint})."
else
raise "Could not comp subscriber: result code #{result.code}."
end
end
# Activates a free trial on the subscriber.
# Requires plan_id of the free trial plan
def activate_free_trial(plan_id)
result = Spreedly.post("/subscribers/#{id}/subscribe_to_free_trial.xml", :body =>
Spreedly.to_xml_params(:subscription_plan => {:id => plan_id}))
case result.code.to_s
when /2../
when '404'
raise "Could not active free trial for subscriber: subscriber or subscription plan no longer exists."
when '422'
raise "Could not activate free trial for subscriber: validation failed. missing subscription plan id"
when '403'
raise "Could not activate free trial for subscriber: subscription plan either 1) isn't a free trial, 2) the subscriber is not eligible for a free trial, or 3) the subscription plan is not enabled."
else
raise "Could not activate free trial for subscriber: result code #{result.code}."
end
end
# Stop the auto renew of the subscriber such that their recurring subscription will no longer be renewed.
# usage: @subscriber.stop_auto_renew
def stop_auto_renew
result = Spreedly.post("/subscribers/#{id}/stop_auto_renew.xml")
case result.code.to_s
when /2../
when '404'
raise "Could not stop auto renew for subscriber: subscriber does not exist."
else
raise "Could not stop auto renew for subscriber: result code #{result.code}."
end
end
def attributes
@data
end
end
# A subscription plan defined on the Spreedly site.
class SubscriptionPlan < Resource
  class << self
    # Returns all of the subscription plans defined in your site.
    def all
      Spreedly.get('/subscription_plans.xml')['subscription_plans'].map { |attrs| new(attrs) }
    end

    # Returns the subscription plan with the given id, or nil when no
    # plan matches.
    def find(id)
      wanted = id.to_s
      all.find { |plan| plan.id.to_s == wanted }
    end
  end

  # True when this plan is a free trial plan.
  def trial?
    plan_type == 'free_trial'
  end
end
end
| 33.186667 | 205 | 0.655417 |
089350ae355cf594ccf29d73adc4226f4bcee956 | 513 | module Bookafy
# API service wrapping the /company endpoint.
class Company < BaseService
  # Resource path for this service.
  def uri
    'company'
  end

  # Fetches the company record for the authenticated account.
  #
  # @return [Bookafy::Model::Company, nil] the company, or nil when the
  #   request fails or the response cannot be parsed
  def info
    response = get(uri)
    return nil unless response.code == 200

    response_json = JSON.parse(response.body)['response']
    Bookafy::Model::Company.new(response_json['company'])
  rescue => e
    # Fixed: the original message said "fetching customers" although this
    # method fetches company info.
    puts "Error at fetching company info: #{e.message}"
    nil
  end
end
end | 17.689655 | 59 | 0.604288 |
26d10d64af4f8a3bc629ff5c987e7f293ace7c9a | 1,178 | class Gsoap < Formula
desc "SOAP stub and skeleton compiler for C and C++"
homepage "https://www.genivia.com/products.html"
url "https://downloads.sourceforge.net/project/gsoap2/gsoap-2.8/gsoap_2.8.109.zip"
sha256 "27de421db6f8abfc443f2e1d5ccdcbd7e6373095b2c72df2b17f50a9c5f80d14"
license any_of: ["GPL-2.0-or-later", "gSOAP-1.3b"]

# Upstream publishes versioned zips on SourceForge; pull the version out
# of the download URL on the stable page.
livecheck do
  url :stable
  regex(%r{url=.*?/gsoap[._-]v?(\d+(?:\.\d+)+)\.zip}i)
end

bottle do
  sha256 "93fe2a7aa626ca66e67bba197f1fffa04fd43715b8ac44234c21d753db0a3f2c" => :big_sur
  sha256 "c2bf0fe4477d58fbbd507b01892e8e52df4b79a905ed1c4c7a3fbbe3b4f4e13f" => :catalina
  sha256 "3947a0297c9b3ec635ce7951bcd622b1c2ae8f2c03a7149be2c001569c304961" => :mojave
end

# autoconf is only needed at build time; OpenSSL is a runtime dependency.
depends_on "autoconf" => :build
depends_on "[email protected]"

uses_from_macos "bison"
uses_from_macos "flex"
uses_from_macos "zlib"
def install
  # Standard autotools flow; --prefix points the install at this keg.
  system "./configure", "--prefix=#{prefix}"
  system "make"
  system "make", "install"
end
test do
  # Generate stubs from a public WSDL, compile them with soapcpp2, and
  # treat the generated request XML appearing on disk as success.
  # NOTE(review): this test fetches the WSDL over the network — confirm
  # that is acceptable for CI.
  system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
  system "#{bin}/soapcpp2", "calc.h"
  assert_predicate testpath/"calc.add.req.xml", :exist?
end
end
| 31 | 90 | 0.712224 |
621285f496c192b3ba14d3cba919684419f9f6c0 | 249 |
# Mad-libs style prompt script: reads three words from stdin and prints a
# short rhyme built from them.
puts "Enter a color: "
color = gets.chomp
puts "Enter a plural noun: "
plural_noun = gets.chomp
puts "Enter a celebrity: "
celebrity = gets.chomp
# Fixed: a stray bare `s` sat here in the original and raised NameError
# at runtime before any output was printed.
puts "Roses are #{color}"
puts "#{plural_noun} are blue"
puts "I admire #{celebrity}"
| 19.153846 | 33 | 0.666667 |
7a9bbbc19d04b7232967c15e89048cdb75b671a8 | 2,333 | # frozen_string_literal: true
module Cards
# Card UI component for a single work: cover picture, linked title with an
# optional caption, an optional popover note, and (optionally) the work's
# action button group.
class WorkCardComponent < ApplicationV6Component
  # @param view_context Rails view context forwarded to the base component
  # @param work the work to render
  # @param width picture width passed through to WorkPictureComponent
  # @param show_button whether the button group may be rendered
  # @param show_note NOTE(review): stored but never read below — `render`
  #   keys off @note.present? instead; confirm whether this flag is dead
  # @param user viewer used to decide button visibility
  # @param caption optional small text shown under the title
  # @param note optional popover note text
  def initialize(view_context, work:, width:, show_button: true, show_note: false, user: nil, caption: "", note: "")
    super view_context
    @work = work
    @width = width
    @show_button = show_button
    @show_note = show_note
    @user = user
    @caption = caption
    @note = note
  end

  # Builds the card markup: linked cover image, linked title (+ caption),
  # then a footer with the optional note popover and the button group.
  def render
    build_html do |h|
      h.tag :div, class: "align-items-end border-0 c-work-card card flex-column h-100" do
        # Cover image linking to the work page.
        h.tag :div, class: "text-center w-100" do
          h.tag :a, href: view_context.work_path(@work) do
            h.html Pictures::WorkPictureComponent.new(
              view_context,
              work: @work,
              width: @width,
              alt: @work.local_title
            ).render
          end
        end
        # Title (and optional caption) linking to the work page.
        h.tag :div, class: "mt-2 w-100" do
          h.tag :a, href: view_context.work_path(@work), class: "text-body" do
            h.tag :div, class: "c-work-card__work-title fw-bold h5 mb-0", title: @work.local_title do
              h.text @work.local_title
            end
            if @caption.present?
              h.tag :div, class: "small text-muted text-truncate", title: @caption do
                h.text @caption
              end
            end
          end
        end
        # Footer: optional note popover and/or the work button group.
        h.tag :div, class: "mt-auto text-center w-100" do
          if @note.present?
            h.tag :div, class: "mb-2" do
              # Bootstrap popover shown on focus; tabindex makes the
              # fake link focusable.
              h.tag :a, {
                class: "px-2 small u-fake-link",
                data_bs_toggle: "popover",
                data_bs_trigger: "focus",
                data_bs_placement: "top",
                data_bs_content: @note,
                tabindex: "0"
              } do
                h.tag :i, class: "far fa-comment-dots me-1"
                h.text t("noun.note_alt")
              end
            end
          end
          # Buttons appear for anonymous viewers, when no owner was given,
          # or when the viewer owns the card.
          if @show_button && (!current_user || !@user || current_user.id == @user.id)
            h.tag :div, class: "mt-2" do
              h.html ButtonGroups::WorkButtonGroupComponent.new(view_context, work: @work).render
            end
          end
        end
      end
    end
  end
end
end
| 32.402778 | 118 | 0.497214 |
2686738f57d40c4efe771b5b810062303134b7f9 | 1,143 | module Bretels
# Thor/Rails-generator helper actions shared by the app template.
module Actions
  # Appends the contents of a source template file onto +destination+.
  def concat_file(source, destination)
    contents = IO.read(find_in_source_paths(source))
    append_file destination, contents
  end

  # Replaces +find+ with +replace+ inside the project file at
  # +relative_path+, raising when the pattern is absent so a silent
  # no-op edit cannot slip through.
  def replace_in_file(relative_path, find, replace)
    path = File.join(destination_root, relative_path)
    contents = IO.read(path)
    unless contents.gsub!(find, replace)
      raise "#{find.inspect} not found in #{relative_path}"
    end
    File.open(path, "w") { |file| file.write(contents) }
  end

  # Injects an ActionMailer default_url_options host into the given
  # environment file, just before its closing `end`.
  def action_mailer_host(rails_env, host)
    inject_into_file(
      "config/environments/#{rails_env}.rb",
      "\n\n  config.action_mailer.default_url_options = { :host => '#{host}' }",
      :before => "\nend"
    )
  end

  # Downloads +uri_string+ and writes the response body to +destination+
  # (relative to destination_root).
  def download_file(uri_string, destination)
    uri = URI.parse(uri_string)
    http = Net::HTTP.new(uri.host, uri.port)
    # Fixed: check the parsed scheme rather than regex-matching the raw
    # string with an unanchored /^https/.
    http.use_ssl = true if uri.scheme == 'https'
    # Fixed: request_uri keeps any query string; uri.path alone dropped it,
    # breaking downloads of URLs with query parameters.
    request = Net::HTTP::Get.new(uri.request_uri)
    contents = http.request(request).body
    path = File.join(destination_root, destination)
    File.open(path, "w") { |file| file.write(contents) }
  end
end
end
| 31.75 | 82 | 0.64392 |
bb8016506e9060d6f07cdb7c781e41299101214a | 87 | class CommentSerializer < ActiveModel::Serializer
# Fields exposed when a Comment is rendered to JSON.
attributes :id, :author, :text
end
| 21.75 | 49 | 0.781609 |
d534ed1be2c14bf3a541e81a2c1d20bf62465204 | 3,690 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2017_09_01_preview
module Models
#
# Google BigQuery service dataset.
#
class GoogleBigQueryObjectDataset < Dataset
include MsRestAzure
def initialize
@type = "GoogleBigQueryObject"
end
attr_accessor :type
#
# Mapper for GoogleBigQueryObjectDataset class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'GoogleBigQueryObject',
type: {
name: 'Composite',
class_name: 'GoogleBigQueryObjectDataset',
model_properties: {
additional_properties: {
client_side_validation: true,
required: false,
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
description: {
client_side_validation: true,
required: false,
serialized_name: 'description',
type: {
name: 'String'
}
},
structure: {
client_side_validation: true,
required: false,
serialized_name: 'structure',
type: {
name: 'Object'
}
},
linked_service_name: {
client_side_validation: true,
required: true,
serialized_name: 'linkedServiceName',
default_value: {},
type: {
name: 'Composite',
class_name: 'LinkedServiceReference'
}
},
parameters: {
client_side_validation: true,
required: false,
serialized_name: 'parameters',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ParameterSpecificationElementType',
type: {
name: 'Composite',
class_name: 'ParameterSpecification'
}
}
}
},
annotations: {
client_side_validation: true,
required: false,
serialized_name: 'annotations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
type: {
client_side_validation: true,
required: true,
serialized_name: 'type',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 29.758065 | 75 | 0.423848 |
2171e2cd9f7a1a8da431820430c0c4338058d23a | 1,071 | require 'rails_helper'
FactoryBot.define do
factory :user do
first_name { "John" }
last_name { "Doe" }
username { "John_Doe"}
is_admin { false }
password {"VeryStrongPassword123"}
email {"[email protected]"}
end
end
RSpec.describe User, type: :model do
  # Shared attribute values for building users directly.
  # Fixed: the original `id = 1,` trailing comma parsed as a multiple
  # assignment, silently turning `id` into an array; the unused
  # id/created_at/updated_at locals are dropped.
  username = "Attribute_testing"
  first_name = "Attribute"
  last_name = "Tests"
  password = "VeryStrongPassword123"

  # E-mail whitelisting
  it "Outlook email should be whitelisted" do
    user = create(:user)
    expect(user).to be_valid
  end

  it "Gmail email should be whitelisted" do
    user = User.new(id: 3, username: "#{username}2", first_name: first_name, password: password, last_name: last_name, email: "[email protected]")
    expect(user).to be_valid
  end

  # Validation
  it "Should validate uniqueness of username" do
    # Seed one persisted user so the uniqueness matcher has a record to
    # compare against.
    user = create(:user)
    should validate_uniqueness_of(:username)
  end
end
| 24.906977 | 150 | 0.686275 |
28604f7005fb8895e7e2ffee11262937f2f7eae6 | 964 | module Dom
module Admin
  # Domino page object for the admin user form.
  class UserForm < Domino
    selector 'form.user'

    # Fills in and submits the form. Only keys present in +options+ are
    # touched; the role select falls back to "editor" when neither
    # :admin nor :account_manager is set.
    #
    # @param options [Hash] supported keys: :email, :first_name,
    #   :last_name, :account_id, :admin, :account_manager
    def submit_with(options)
      within(node) do
        fill_in('user_email', :with => options[:email]) if options[:email]
        fill_in('user_first_name', :with => options[:first_name]) if options[:first_name]
        fill_in('user_last_name', :with => options[:last_name]) if options[:last_name]
        select(Pageflow::Account.find(options[:account_id]).name, :from => 'user_account_id') if options[:account_id]
        if options[:admin]
          select(I18n.t('pageflow.admin.users.roles.admin'), :from => 'user_role')
        elsif options[:account_manager]
          select(I18n.t('pageflow.admin.users.roles.account_manager'), :from => 'user_role')
        else
          select(I18n.t('pageflow.admin.users.roles.editor'), :from => 'user_role')
        end
        find('[name="commit"]').click
      end
    end
  end
end
end
| 34.428571 | 119 | 0.610996 |
39491a4f5e37ff1d969e17714f5669356a0c549f | 5,714 | require 'watir'
# CRUD controller for Company records, plus a CSV import action and a
# price-comparison `show` that scrapes screwfix.com with Watir.
# NOTE(review): web scraping inside a controller action blocks the
# request thread for several seconds — consider moving it to a
# background job.
class CompaniesController < ApplicationController
  before_action :set_company, only: [:show, :edit, :update, :destroy]

  # GET /companies
  # GET /companies.json
  def index
    @companies = Company.paginate(:page => params[:page], :per_page => 5).search(params[:search])
    #where(["Description LIKE ?","%#{params[:search]}%"])
  end

  # GET /companies/1
  # GET /companies/1.json
  # NOTE(review): dead code — this empty #show is silently overridden by
  # the second `def show` further down; Ruby keeps only the last
  # definition. Remove one of the two.
  def show
  end

  # GET /companies/new
  def new
    @company = Company.new
  end

  # GET /companies/1/edit
  def edit
  end

  # POST /companies
  # POST /companies.json
  def create
    @company = Company.new(company_params)
    respond_to do |format|
      if @company.save
        format.html { redirect_to @company, notice: 'Company was successfully created.' }
        format.json { render :show, status: :created, location: @company }
      else
        format.html { render :new }
        format.json { render json: @company.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /companies/1
  # PATCH/PUT /companies/1.json
  def update
    respond_to do |format|
      if @company.update(company_params)
        format.html { redirect_to @company, notice: 'Company was successfully updated.' }
        format.json { render :show, status: :ok, location: @company }
      else
        format.html { render :edit }
        format.json { render json: @company.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /companies/1
  # DELETE /companies/1.json
  def destroy
    @company.destroy
    respond_to do |format|
      format.html { redirect_to companies_url, notice: 'Company was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  # Second (effective) #show: builds a cleaned search query from the
  # selected company's description, scrapes the first two screwfix.com
  # results, and exposes them plus min/max prices to the view.
  def show
    # added some condition for where clause
    @companies = Company.paginate(:page => params[:page], :per_page => 5)
    #puts @company.inspect # selected item
    query = @company.Description.dup # get descrip. of selected row
    stopwords = ['MAKITA', 'ONLY', 'EACH', 'LITRE', "-", "CLEAN", "CUT", "PTC20", "BODY", "BASIC"] # terms to remove e.g. brand names - JIGSAW BLADE B17 WOOD
    stopwords.each do |item|
      query.slice! item
    end
    # remove terms that begin with numbers
    new_query = ''
    query.gsub(/\s+/m, ' ').strip.split(" ").each do |item|
      first_word = item.initial
      last_word = item.final
      unless first_word.is_number? || last_word.is_number?
        new_query+= ' ' + item
      end
    end
    # NOTE(review): String#strip! returns nil when nothing was stripped —
    # if new_query ends up empty here, query becomes nil and send_keys
    # below will fail. Prefer non-bang strip.
    query = new_query.strip!
    @generated_query = query
    # screwfix
    args = ['--no-sandbox', '--disable-dev-shm-usage']
    browser = Watir::Browser.new :chrome, headless: true, options: {args: args}
    browser.goto("https://www.screwfix.com/")
    browser.input(id: "mainSearch-input").send_keys(query) # enter search term e.g. "Water pump"
    browser.send_keys :enter # hit enter
    @screwfix_results = []
    @current_price = @company.GPH_RRP
    @maxprice = @current_price
    @minprice = @current_price
    begin
      # get first result
      first = Hash.new
      first["title"] = browser.div(id: "product_box_1").a(id: "product_description_1").attribute_value('title')
      first["price"] = browser.div(id: "product_box_1").h4(id: "product_list_price_1").text_content
      first["price"] = first["price"].tr('£', '')
      first["price"] = first["price"].to_f
      first["image_url"] = browser.div(id: "product_box_1").img(id: "product_image").attribute_value('src')
      first["link"] = browser.div(id: "product_box_1").a(id: "product_description_1").attribute_value('href')
      if first["price"] > @maxprice
        @maxprice = first["price"]
      end
      if first["price"] < @minprice
        @minprice = first["price"]
      end
      # get second result
      second = Hash.new
      second["title"] = browser.div(id: "product_box_2").a(id: "product_description_2").attribute_value('title')
      second["price"] = browser.div(id: "product_box_2").h4(id: "product_list_price_2").text_content
      second["price"] = second["price"].tr('£', '')
      second["price"] = second["price"].to_f
      second["image_url"] = browser.div(id: "product_box_2").img(id: "product_image").attribute_value('src')
      second["link"] = browser.div(id: "product_box_2").a(id: "product_description_2").attribute_value('href')
      if second["price"] > @maxprice
        @maxprice = second["price"]
      end
      if second["price"] < @minprice
        @minprice = second["price"]
      end
      @screwfix_results.push(first)
      @screwfix_results.push(second)
      # puts @screwfix_results.inspect
      # puts browser.links
    rescue StandardError => e
      # Scrape failures are logged and swallowed so the page still renders.
      puts e.message
      puts e.backtrace.inspect
    end
    browser.close if browser # close browser
  end

  # Imports companies from an uploaded spreadsheet via Company.import.
  def import
    res = Company.import(params[:file])
    if res
      redirect_to companies_path, notice: "File Uploaded Successfully"
    else
      redirect_to companies_path, notice: "File was not Uploaded"
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_company
    @company = Company.find(params[:id])
  end

  # Only allow a list of trusted parameters through.
  def company_params
    params.require(:company).permit(:GPHProductCode, :SupplierProductCode, :BarCode, :Description, :GPH_RRP)
  end
end
# NOTE(review): monkey-patching Object is invasive — prefer a helper
# module or a Refinement.
class Object
  # True when the receiver's string form round-trips through Float or
  # Integer conversion (i.e. it "looks numeric").
  def is_number?
    to_f.to_s == to_s || to_i.to_s == to_s
  end
end
class String
def initial
self[0,1]
end
def final
self[-1]
end
end
| 31.744444 | 162 | 0.623906 |
08e92804a0266b1d3a205b3efcb2c5b215a3fef5 | 9,148 | # coding: utf-8
module Natto
# Module `Binding` encapsulates methods and behavior
# which are made available via `FFI` bindings to MeCab.
module Binding
require 'ffi'
require 'rbconfig'
extend FFI::Library
MECAB_PATH = 'MECAB_PATH'.freeze
# @private
def self.included(base)
base.extend(ClassMethods)
end
# Returns the absolute pathname to the MeCab library based on
# the runtime environment.
# @return [String] absolute pathname to the MeCab library
# @raise [LoadError] if the library cannot be located
def self.find_library
if ENV[MECAB_PATH]
File.absolute_path(ENV[MECAB_PATH])
else
host_os = RbConfig::CONFIG['host_os']
if host_os =~ /mswin|mingw/i
require 'win32/registry'
begin
base = nil
Win32::Registry::HKEY_CURRENT_USER.open('Software\MeCab') do |r|
base = r['mecabrc'].split('etc').first
end
lib = File.join(base, 'bin/libmecab.dll')
File.absolute_path(lib)
rescue
raise LoadError, "Please set #{MECAB_PATH} to the full path to libmecab.dll"
end
else
require 'open3'
if host_os =~ /darwin/i
ext = 'dylib'
else
ext = 'so'
end
begin
base, lib = nil, nil
cmd = 'mecab-config --libs'
Open3.popen3(cmd) do |stdin,stdout,stderr|
toks = stdout.read.split
base = toks[0][2..-1]
lib = toks[1][2..-1]
end
File.absolute_path(File.join(base, "lib#{lib}.#{ext}"))
rescue
raise LoadError, "Please set #{MECAB_PATH} to the full path to libmecab.#{ext}"
end
end
end
end
ffi_lib find_library
# Model interface
attach_function :mecab_model_new2, [:string], :pointer
attach_function :mecab_model_destroy, [:pointer], :void
attach_function :mecab_model_new_tagger, [:pointer], :pointer
attach_function :mecab_model_new_lattice, [:pointer], :pointer
attach_function :mecab_model_dictionary_info, [:pointer], :pointer
# Tagger interface
attach_function :mecab_destroy, [:pointer], :void
attach_function :mecab_version, [], :string
attach_function :mecab_strerror, [:pointer],:string
attach_function :mecab_format_node, [:pointer, :pointer], :string
# Lattice interface
attach_function :mecab_lattice_destroy, [:pointer], :void
attach_function :mecab_lattice_clear, [:pointer], :void
attach_function :mecab_lattice_is_available, [:pointer], :int
attach_function :mecab_lattice_strerror, [:pointer], :string
attach_function :mecab_lattice_get_sentence, [:pointer], :string
attach_function :mecab_lattice_set_sentence, [:pointer, :string], :void
attach_function :mecab_lattice_get_size, [:pointer], :int
attach_function :mecab_lattice_set_theta, [:pointer, :float], :void
attach_function :mecab_lattice_set_z, [:pointer, :float], :void
attach_function :mecab_lattice_get_request_type, [:pointer], :int
attach_function :mecab_lattice_add_request_type, [:pointer, :int], :void
attach_function :mecab_lattice_set_request_type, [:pointer, :int], :void
attach_function :mecab_lattice_get_boundary_constraint, [:pointer, :int], :int
attach_function :mecab_lattice_set_boundary_constraint, [:pointer, :int, :int], :void
attach_function :mecab_lattice_get_feature_constraint, [:pointer, :int], :string
attach_function :mecab_lattice_set_feature_constraint, [:pointer, :int, :int, :string], :void
attach_function :mecab_parse_lattice, [:pointer, :pointer], :int
attach_function :mecab_lattice_next, [:pointer], :int
attach_function :mecab_lattice_tostr, [:pointer], :string
attach_function :mecab_lattice_nbest_tostr, [:pointer, :int], :string
attach_function :mecab_lattice_get_bos_node, [:pointer], :pointer
# @private
module ClassMethods
def find_library
Natto::Binding.find_library
end
# Model interface ------------------------
def mecab_model_new2(opts_str)
Natto::Binding.mecab_model_new2(opts_str)
end
def mecab_model_destroy(mptr)
Natto::Binding.mecab_model_destroy(mptr)
end
def mecab_model_new_tagger(mptr)
Natto::Binding.mecab_model_new_tagger(mptr)
end
def mecab_model_new_lattice(mptr)
Natto::Binding.mecab_model_new_lattice(mptr)
end
def mecab_model_dictionary_info(mptr)
Natto::Binding.mecab_model_dictionary_info(mptr)
end
# Tagger interface -----------------------
def mecab_destroy(tptr)
Natto::Binding.mecab_destroy(tptr)
end
def mecab_version
Natto::Binding.mecab_version
end
def mecab_strerror(tptr)
Natto::Binding.mecab_strerror(tptr)
end
def mecab_format_node(tptr, nptr)
Natto::Binding.mecab_format_node(tptr, nptr)
end
# Lattice interface ----------------------
def mecab_lattice_destroy(lptr)
Natto::Binding.mecab_lattice_destroy(lptr)
end
def mecab_lattice_clear(lptr)
Natto::Binding.mecab_lattice_clear(lptr)
end
def mecab_lattice_is_available(lptr)
Natto::Binding.mecab_lattice_is_available(lptr)
end
def mecab_lattice_strerror(lptr)
Natto::Binding.mecab_lattice_strerror(lptr)
end
def mecab_lattice_get_sentence(lptr)
Natto::Binding.mecab_lattice_get_sentence(lptr)
end
def mecab_lattice_set_sentence(lptr, str)
Natto::Binding.mecab_lattice_set_sentence(lptr, str)
end
def mecab_lattice_get_size(lptr)
Natto::Binding.mecab_lattice_get_size(lptr)
end
def mecab_lattice_set_theta(lptr, t)
Natto::Binding.mecab_lattice_set_theta(lptr, t)
end
def mecab_lattice_set_z(lptr, z)
Natto::Binding.mecab_lattice_set_z(lptr, z)
end
def mecab_lattice_get_request_type(lptr)
Natto::Binding.mecab_lattice_get_request_type(lptr)
end
def mecab_lattice_add_request_type(lptr, rt)
Natto::Binding.mecab_lattice_add_request_type(lptr, rt)
end
def mecab_lattice_set_request_type(lptr, rtype)
Natto::Binding.mecab_lattice_set_request_type(lptr, rtype)
end
def mecab_lattice_get_boundary_constraint(lptr, pos)
Natto::Binding.mecab_lattice_get_boundary_constraint(lptr, pos)
end
def mecab_lattice_set_boundary_constraint(lptr, pos, btype)
Natto::Binding.mecab_lattice_set_boundary_constraint(lptr, pos, btype)
end
def mecab_lattice_get_feature_constraint(lptr, bpos)
Natto::Binding.mecab_lattice_get_feature_constraint(lptr, bpos)
end
def mecab_lattice_set_feature_constraint(lptr, bpos, epos, feat)
Natto::Binding.mecab_lattice_set_feature_constraint(lptr,
bpos,
epos,
feat)
end
def mecab_parse_lattice(tptr, lptr)
Natto::Binding.mecab_parse_lattice(tptr, lptr)
end
def mecab_lattice_next(lptr)
Natto::Binding.mecab_lattice_next(lptr)
end
def mecab_lattice_tostr(lptr)
Natto::Binding.mecab_lattice_tostr(lptr)
end
def mecab_lattice_nbest_tostr(lptr, n)
Natto::Binding.mecab_lattice_nbest_tostr(lptr, n)
end
def mecab_lattice_get_bos_node(lptr)
Natto::Binding.mecab_lattice_get_bos_node(lptr)
end
end
end
end
# Copyright (c) 2020, Brooke M. Fujita.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| 34.520755 | 97 | 0.669327 |
f876b942b1ff9dd2c8b2df097cf097696b4519d0 | 5,160 | # require_relative '../puppet_x/Automation/Lib/Constants.rb'
# require_relative '../puppet_x/Automation/Lib/Log.rb'
# require_relative '../puppet_x/Automation/Lib/Remote/c_rsh.rb'
#
# ##############################################################################
# name : 'vios' factor
# param : none
# return : hash of vios.
# Two files are generated: "output/facter/vios_skipped.yml" and
# "output/facter/vios_kept.yml" as result log.
# description : this factor builds a fact called 'vios' containing a
# hash with vios names known by the NIM server as value.
# ##############################################################################
# include Automation::Lib
# include Automation::Lib::Remote
#
# Facter.add('vios') do
# setcode do
#
# Log.log_info('Computing "vios" facter')
#
# ##########################################################
# #### Sample of use of log API
# # Log.log_info("Sample info message")
# # Log.log_debug("Sample debug message")
# # Log.log_warning("Sample warning message")
# # Log.log_err("Sample error message")
# ####
# ##########################################################
#
# vioss = {}
#
# vios_hash = {}
# vioss_str = String.new
# vioss_str = Facter::Core::Execution.execute("/usr/sbin/lsnim -t vios | /bin/awk \
# 'NR==FNR{print $1;next}{print $1}' | /bin/awk 'FNR!=1{print l}{l=$0};END{ORS=\"\";print l}' ORS=' '")
# vioss_array = Array.new
# vioss_array = vioss_str.split(' ')
# vioss_array.each do |vios|
# oslevel = ""
# remote_cmd_rc = Remote.c_rsh(vios, "/usr/bin/oslevel -s", oslevel)
# if remote_cmd_rc == 0
# vios_hash['oslevel'] = oslevel.strip
# end
#
# #### ping
# cmd = "/usr/sbin/ping -c1 -w5 #{vios}"
# stdout, stderr, status = Open3.capture3("#{cmd}")
# Log.log_debug("cmd =#{cmd}")
# Log.log_debug("status=#{status}")
# if status.success?
# Log.log_debug("stdout=#{stdout}")
#
# ##### oslevel
# oslevel = ""
# remote_cmd_rc =Remote.c_rsh(vios, "/usr/bin/oslevel -s", oslevel)
# if remote_cmd_rc == 0
# vios_hash['oslevel'] = oslevel.strip
#
# full_facter = true
# if full_facter
#
# ##### /etc/niminfo
# niminfo_str = ""
# remote_cmd_rc = Remote.c_rsh(vios, "/bin/cat /etc/niminfo |\
# /bin/grep '=' | /bin/sed 's/export //g'", niminfo_str)
# if remote_cmd_rc == 0
# niminfo_lines = niminfo_str.split("\n")
# niminfo_lines.each do |envvar|
# key, val = envvar.split('=')
# vios_hash[key] = val
# end
#
# ##### Cstate from lsnim -l
# lsnim_str = Facter::Core::Execution.execute("/usr/sbin/lsnim -l " + vios)
# lsnim_lines = lsnim_str.split("\n")
# lsnim_lines.each do |lsnim_line|
# if lsnim_line =~ /^\s+Cstate\s+=\s+(.*)$/
# # Cstate
# cstate = Regexp.last_match(1)
# vios_hash['cstate'] = cstate
#
# elsif lsnim_line =~ /^\s+mgmt_profile1\s+=\s+(.*)$/
# # For VIOS store the management profile
# match_mgmtprof = Regexp.last_match(1)
# mgmt_elts = match_mgmtprof.split
# if mgmt_elts.size == 3
# vios_hash['mgmt_hmc_id'] = mgmt_elts[0]
# vios_hash['mgmt_vios_id'] = mgmt_elts[1]
# vios_hash['mgmt_cec_serial'] = mgmt_elts[2]
# end
# elsif lsnim_line =~ /^\s+if1\s+=\s+\S+\s+(\S+)\s+.*$/
# # IP
# vios_hash['vios_ip'] = Regexp.last_match(1)
# end
# end
#
# # yeah, we keep it !
# vioss[vios] = vios_hash
# else
# Log.log_err("error while doing c_rsh '/bin/cat /etc/niminfo' on " + vios)
# standalone_error = true
# end
# end
# else
# Log.log_err("error while doing c_rsh '/usr/bin/oslevel -s' on " + vios)
# Log.log_err("stderr=#{stderr}")
# standalone_error = true
# end
#
# else
# Log.log_err("error while doing '/usr/sbin/ping -c1 -w5 ' " + vios)
# Log.log_err("stderr=#{stderr}")
# standalone_error = true
# end
#
# end
# # Failure
# Log.log_err('vios in failure="' +vios_failure.to_s+ '"')
# # persist to yaml
# failure_result_yml_file = ::File.join(Constants.output_dir,
# 'facter',
# 'vios_in_failure.yml')
# File.write(failure_result_yml_file, vios_failure.to_yaml)
# Log.log_info('Refer to "' +failure_result_yml_file+ '" to have results of "vios in failure" facter.')
#
# # Success
# # persist to yaml
# result_yml_file = ::File.join(Constants.output_dir,
# 'facter',
# 'vios.yml')
# File.write(result_yml_file, vios.to_yaml)
# Log.log_info('Refer to "' +result_yml_file+ '" to have results of "vios" facter.')
# standalones
#
# Log.log_info("vioss=" + vioss.to_s)
# vioss
# end
# end
#
| 36.595745 | 103 | 0.516473 |
6a2e5ef9a84c1535d9bfc7fe7525c40753dba8ce | 5,119 | require 'spec_helper'
require 'metasploit/framework/login_scanner/manageengine_desktop_central'
RSpec.describe Metasploit::Framework::LoginScanner::ManageEngineDesktopCentral do
it_behaves_like 'Metasploit::Framework::LoginScanner::Base', has_realm_key: true, has_default_realm: false
it_behaves_like 'Metasploit::Framework::LoginScanner::RexSocket'
let(:session_id) do
'DCJSESSIONID=5628CFEA339C2688D74267B03CDA88BD; '
end
let(:username) do
'username'
end
let(:good_password) do
'good_password'
end
let(:bad_password) do
'bad_password'
end
let(:successful_auth_response) do
Rex::Proto::Http::Response.new(302, 'Moved Temporarily')
end
let(:fail_auth_response) do
Rex::Proto::Http::Response.new(200, 'OK')
end
subject do
described_class.new
end
let(:response) do
Rex::Proto::Http::Response.new(200, 'OK')
end
before(:example) do
allow_any_instance_of(Rex::Proto::Http::Client).to receive(:request_cgi).with(any_args)
allow_any_instance_of(Rex::Proto::Http::Client).to receive(:send_recv).with(any_args).and_return(response)
allow_any_instance_of(Rex::Proto::Http::Client).to receive(:set_config).with(any_args)
allow_any_instance_of(Rex::Proto::Http::Client).to receive(:close)
allow_any_instance_of(Rex::Proto::Http::Client).to receive(:connect)
end
describe '#check_setup' do
context 'when target is ManageEngine Desktop Central' do
let(:response) do
res = Rex::Proto::Http::Response.new(200, 'OK')
res.body = 'ManageEngine Desktop Central'
res
end
it 'returns true' do
expect(subject.check_setup).to be_truthy
end
end
context 'when target is not ManageEngine Desktop Central' do
it 'returns false' do
expect(subject.check_setup).to be_falsey
end
end
end
describe '#get_sid' do
context 'when there is no session ID' do
let(:response) do
res = Rex::Proto::Http::Response.new(200, 'OK')
res.headers['Set-Cookie'] = session_id
res
end
it 'returns a new session ID' do
expect(subject.get_sid(response)).to include('DCJSESSIONID')
end
end
end
describe '#get_hidden_inputs' do
let(:response) do
html = %Q|
<input type="hidden" name="buildNum" id="buildNum" value="90109"/>
<input type="hidden" name="clearCacheBuildNum" id="clearCacheBuildNum" value="-1"/>
|
res = Rex::Proto::Http::Response.new(200, 'OK')
res.body = html
res
end
context 'when there are hidden login inputs' do
it 'returns a Hash' do
expect(subject.get_hidden_inputs(response)).to be_kind_of(Hash)
end
it 'returns the value for buildNum' do
expect(subject.get_hidden_inputs(response)['buildNum']).to eq('90109')
end
it 'returns the value for clearCacheBuildNum' do
expect(subject.get_hidden_inputs(response)['clearCacheBuildNum']).to eq('-1')
end
end
end
describe '#get_login_state' do
context 'when the credential is valid' do
let(:response) { successful_auth_response }
it 'returns a hash indicating a successful login' do
expect(subject.get_login_state(username, good_password)[:status]).to eq(Metasploit::Model::Login::Status::SUCCESSFUL)
end
end
context 'when the creential is invalid' do
let(:response) { fail_auth_response }
it 'returns a hash indicating an incorrect cred' do
expect(subject.get_login_state(username, good_password)[:status]).to eq(Metasploit::Model::Login::Status::INCORRECT)
end
end
end
describe '#attempt_login' do
context 'when the credential is valid' do
let(:response) { successful_auth_response }
let(:cred_obj) { Metasploit::Framework::Credential.new(public: username, private: good_password) }
it 'returns a Result object indicating a successful login' do
result = subject.attempt_login(cred_obj)
expect(result).to be_kind_of(::Metasploit::Framework::LoginScanner::Result)
end
it 'returns successful login' do
result = subject.attempt_login(cred_obj)
expect(result.status).to eq(Metasploit::Model::Login::Status::SUCCESSFUL)
end
end
context 'when the credential is invalid' do
let(:response) { fail_auth_response }
let(:cred_obj) { Metasploit::Framework::Credential.new(public: username, private: bad_password) }
it 'returns a Result object' do
result = subject.attempt_login(cred_obj)
expect(result).to be_kind_of(::Metasploit::Framework::LoginScanner::Result)
end
it 'returns incorrect credential status' do
result = subject.attempt_login(cred_obj)
expect(result.status).to eq(Metasploit::Model::Login::Status::INCORRECT)
end
end
end
end | 32.814103 | 127 | 0.650713 |
212a7a06ba5ca0380b716208e3408c746520e86c | 54 | module Salesforce
class CaseStatus < Base
end
end
| 10.8 | 25 | 0.759259 |
1aefde1f5cb44fd65ed0a598ea4c8d29fb78be10 | 4,770 | class Gitless < Formula
include Language::Python::Virtualenv
desc "Simplified version control system on top of git"
homepage "https://gitless.com/"
url "https://files.pythonhosted.org/packages/9c/2e/457ae38c636c5947d603c84fea1cf51b7fcd0c8a5e4a9f2899b5b71534a0/gitless-0.8.8.tar.gz"
sha256 "590d9636d2ca743fdd972d9bf1f55027c1d7bc2ab1d5e877868807c3359b78ef"
license "MIT"
revision 9
bottle do
sha256 cellar: :any, arm64_big_sur: "cb04d922aef08494a684909d52f60b091c74022fd98cb36e6aa7d20402359c06"
sha256 cellar: :any, big_sur: "c4e4a2afca3b1e30950f86927d914e27d58b0fc43b4bb9cfe7b4038d5740eb79"
sha256 cellar: :any, catalina: "771d687e584bfaae2520f7c00681d0bb2ded0e50618b79db0763b0a011ee23be"
sha256 cellar: :any, mojave: "fd05318ee73242821b23e304dc27a04408f987e0c3eb92653f041f079310c455"
sha256 cellar: :any_skip_relocation, x86_64_linux: "bff8c36580c134e57bbd51232a7c1f96881c951be3df9574690b8dde5ecf4e3c"
end
depends_on "libgit2"
depends_on "[email protected]"
uses_from_macos "libffi"
on_linux do
depends_on "pkg-config" => :build
end
resource "args" do
url "https://files.pythonhosted.org/packages/e5/1c/b701b3f4bd8d3667df8342f311b3efaeab86078a840fb826bd204118cc6b/args-0.1.0.tar.gz"
sha256 "a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"
end
resource "cached-property" do
url "https://files.pythonhosted.org/packages/57/8e/0698e10350a57d46b3bcfe8eff1d4181642fd1724073336079cb13c5cf7f/cached-property-1.5.1.tar.gz"
sha256 "9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/66/6a/98e023b3d11537a5521902ac6b50db470c826c682be6a8c661549cb7717a/cffi-1.14.4.tar.gz"
sha256 "1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"
end
resource "clint" do
url "https://files.pythonhosted.org/packages/3d/b4/41ecb1516f1ba728f39ee7062b9dac1352d39823f513bb6f9e8aeb86e26d/clint-0.5.1.tar.gz"
sha256 "05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/8c/2d/aad7f16146f4197a11f8e91fb81df177adcc2073d36a17b1491fd09df6ed/pycparser-2.18.tar.gz"
sha256 "99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226"
end
resource "pygit2" do
url "https://files.pythonhosted.org/packages/6b/23/a8c5b726a58282fe2cadcc63faaddd4be147c3c8e0bd38b233114adf98fd/pygit2-1.6.1.tar.gz"
sha256 "c3303776f774d3e0115c1c4f6e1fc35470d15f113a7ae9401a0b90acfa1661ac"
# libgit2 1.3 support
# https://github.com/libgit2/pygit2/pull/1089
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/54d3a0d1f241fdd4e9229312ced0d8da85d964b1/pygit2/libgit2-1.3.0.patch"
sha256 "4d501c09d6642d50d89a1a4d691980e3a4a2ebcb6de7b45d22cce16a451b9839"
end
end
resource "sh" do
url "https://files.pythonhosted.org/packages/7c/71/199d27d3e7e78bf448bcecae0105a1d5b29173ffd2bbadaa95a74c156770/sh-1.12.14.tar.gz"
sha256 "b52bf5833ed01c7b5c5fb73a7f71b3d98d48e9b9b8764236237bdc7ecae850fc"
end
resource "six" do
url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
end
# Allow to be dependent on pygit2 1.4.0
# Remove for next version
patch :DATA
def install
virtualenv_install_with_resources
end
test do
system "git", "config", "--global", "user.email", '"[email protected]"'
system "git", "config", "--global", "user.name", '"Test"'
system bin/"gl", "init"
%w[haunted house].each { |f| touch testpath/f }
system bin/"gl", "track", "haunted", "house"
system bin/"gl", "commit", "-m", "Initial Commit"
assert_equal "haunted\nhouse", shell_output("git ls-files").strip
end
end
__END__
diff --git a/requirements.txt b/requirements.txt
index 05f190a..5eb025f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
# make sure to update setup.py
-pygit2==0.28.2 # requires libgit2 0.28
+pygit2==1.4.0 # requires libgit2 0.28
clint==0.5.1
sh==1.12.14;sys_platform!='win32'
pbs==0.110;sys_platform=='win32'
diff --git a/setup.py b/setup.py
index 68a3a87..d1704a8 100755
--- a/setup.py
+++ b/setup.py
@@ -68,7 +68,7 @@ setup(
packages=['gitless', 'gitless.cli'],
install_requires=[
# make sure it matches requirements.txt
- 'pygit2==0.28.2', # requires libgit2 0.28
+ 'pygit2==1.4.0', # requires libgit2 0.28
'clint>=0.3.6',
'sh>=1.11' if sys.platform != 'win32' else 'pbs>=0.11'
],
| 39.75 | 145 | 0.752411 |
38b91a867c45210c9315102101078ee8b2703863 | 1,466 | class Podiff < Formula
desc "Compare textual information in two PO files"
homepage "https://puszcza.gnu.org.ua/software/podiff/"
url "https://download.gnu.org.ua/pub/release/podiff/podiff-1.2.tar.gz"
sha256 "6902c10ceb9bb131d40cb2a7023ebb61df0ee7cf8abf1833129df65120244bd0"
license "GPL-3.0"
livecheck do
url "https://download.gnu.org.ua/pub/release/podiff/"
regex(/href=.*?podiff[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
cellar :any_skip_relocation
sha256 "d1d8236310dae076af3d6324070a7c66888eb51ffc7b80355fe53911584e2355" => :big_sur
sha256 "cdda50f296e87f84f828d09777f90217c98ca4578a00b09307df9dcd830424c2" => :catalina
sha256 "20e29ef344ca1da47dff379a12290150de1540338d49d00043a2093f3a22a6fa" => :mojave
sha256 "71b8f6e4b7935a26b50e32805036593d4fd20e24d4de73023a423a6889e72752" => :high_sierra
sha256 "9ca510db38cc8be5f68f3f93ba431c8da1f8b3fa5465b7b28e154e597ec31978" => :x86_64_linux
end
def install
system "make"
bin.install "podiff"
man1.install "podiff.1"
end
def caveats
<<~EOS
To use with git, add this to your .git/config or global git config file:
[diff "podiff"]
command = #{HOMEBREW_PREFIX}/bin/podiff -D-u
Then add the following line to the .gitattributes file in
the directory with your PO files:
*.po diff=podiff
See `man podiff` for more information.
EOS
end
test do
system "#{bin}/podiff", "-v"
end
end
| 30.541667 | 94 | 0.722374 |
385c060a2084541296bbe7e42ac720cf4b369e07 | 370 | module TricksHelper
def add_trick_to_course(course, trick)
link_to "Add to #{course.name}", add_course_trick_path(course, trick)
end
def trick_link(trick: trick, course: course)
if course
link_to trick.name, course_trick_path(course, trick)
else
link_to trick.name, trick_path(trick)
end
end
end
| 24.666667 | 77 | 0.645946 |
1d3001aa73fb384501a79779d01a963e8747d9fd | 646 | # frozen_string_literal: true
module Cloudtasker
  module UniqueJob
    module ConflictStrategy
      # This strategy raises an error on conflict, both on client and server side.
      class Raise < BaseStrategy
        # NOTE(review): not referenced anywhere in this class — presumably
        # consumed by the base class or a sibling strategy; confirm before removing.
        RESCHEDULE_DELAY = 5 # seconds
        # Raise a Cloudtasker::UniqueJob::LockError
        # (hook invoked when a conflicting job is detected at enqueue time).
        def on_schedule
          raise_lock_error
        end
        # Raise a Cloudtasker::UniqueJob::LockError
        # (hook invoked when a conflicting job is detected at processing time).
        def on_execute
          raise_lock_error
        end
        private
        # Raises LockError with the job id and its uniqueness key so the
        # caller can identify which job hit the conflict.
        def raise_lock_error
          raise(UniqueJob::LockError, id: job.id, unique_id: job.unique_id)
        end
      end
    end
  end
end
| 22.275862 | 82 | 0.637771 |
03b5ba58ed5893e2ecf2388069a4e5dcb9e87b67 | 596 | =begin
STRING PERMUTATIONS
CHALLENGE DESCRIPTION:
Write a program which prints all the permutations of a string in alphabetical order.
We consider that digits < upper case letters < lower case letters.
The sorting should be performed in ascending order.
INPUT SAMPLE:
Your program should accept a file as its first argument. The file contains input strings, one per line.
=end
# Returns every permutation of +text+ in ascending ASCII order
# (digits < upper case < lower case), comma-separated.
def print_permutations(text)
  text.chars.permutation.map(&:join).sort.join(',')
end

# Each non-empty line of the input file (first CLI argument) is one string.
# Fixes vs. original: File.exists? was removed in Ruby 3.2 (use exist?),
# File.open without a block leaked the handle (use foreach), and a missing
# ARGV[0] raised instead of being skipped.
input = ARGV[0]
if input && File.exist?(input)
  File.foreach(input) do |line|
    word = line.strip
    next if word.empty?
    puts print_permutations(word)
  end
end
| 22.923077 | 103 | 0.741611 |
18caaaeb697f0d41619dba253385e0520d8507ed | 3,495 | class User < ApplicationRecord
has_many :microposts, dependent: :destroy
has_many :active_relationships, class_name: "Relationship",
foreign_key: "follower_id",
dependent: :destroy
has_many :passive_relationships, class_name: "Relationship",
foreign_key: "followed_id",
dependent: :destroy
has_many :following, through: :active_relationships, source: :followed
has_many :followers, through: :passive_relationships, source: :follower
attr_accessor :remember_token, :activation_token, :reset_token
before_save :downcase_email
before_create :create_activation_digest
validates :name, presence: true, length: { maximum: 50 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
validates :email, presence: true, length: { maximum: 255 },
format: { with: VALID_EMAIL_REGEX },
uniqueness: { case_sensitive: false }
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true
has_secure_password
# Returns the hash digest of the given string.
def User.digest(string)
cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST : BCrypt::Engine.cost
BCrypt::Password.create(string, cost: cost)
end
# Returns a random token.
def User.new_token
SecureRandom.urlsafe_base64
end
# Remembers a user in the database for use in persistent sessions.
def remember
self.remember_token = User.new_token
update_attribute(:remember_digest, User.digest(remember_token))
end
# Returns true if the given token matches the digest.
def authenticated?(attribute, token)
digest = send("#{attribute}_digest")
return false if digest.nil?
BCrypt::Password.new(digest).is_password?(token)
end
# Forgets a user.
def forget
update_attribute(:remember_digest, nil)
end
# Activates an account
def activate
update_attribute(:activated, true)
update_attribute(:activated_at, Time.zone.now)
end
# Send activation email.
def send_activation_email
UserMailer.account_activation(self).deliver_now
end
# Sets the password reset attributes
def create_reset_digest
self.reset_token = User.new_token
update_attribute(:reset_digest, User.digest(reset_token))
update_attribute(:reset_sent_at, Time.zone.now)
end
# Sends password reset email.
def send_password_reset_email
UserMailer.password_reset(self).deliver_now
end
# Returns true if a password reset has expired.
def password_reset_expired?
reset_sent_at < 2.hours.ago
end
# Returns a user's status feed.
def feed
following_ids = "SELECT followed_id FROM relationships WHERE follower_id = :user_id"
Micropost.where("user_id IN (#{following_ids}) OR user_id = :user_id", user_id: id)
end
# Follows a user.
def follow(other_user)
following << other_user
end
# Unfollows a user.
def unfollow(other_user)
following.delete(other_user)
end
# Returns true if the current user is following the other user.
def following?(other_user)
following.include?(other_user)
end
private
# Converts email to all lower-case.
def downcase_email
self.email.downcase!
end
# Creates and assigns the activation token and digest.
def create_activation_digest
self.activation_token = User.new_token
self.activation_digest = User.digest(activation_token)
end
end
| 30.657895 | 96 | 0.704149 |
1db2d2ccf7bb84de37a398677d83676bb326200f | 3,064 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/monitoring/v3/uptime_check_service"
class ::Google::Cloud::Monitoring::V3::UptimeCheckService::ClientPathsTest < Minitest::Test
def test_folder_path
grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
::Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Monitoring::V3::UptimeCheckService::Client.new do |config|
config.credentials = grpc_channel
end
path = client.folder_path folder: "value0"
assert_equal "folders/value0", path
end
end
def test_organization_path
grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
::Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Monitoring::V3::UptimeCheckService::Client.new do |config|
config.credentials = grpc_channel
end
path = client.organization_path organization: "value0"
assert_equal "organizations/value0", path
end
end
def test_uptime_check_config_path
grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
::Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Monitoring::V3::UptimeCheckService::Client.new do |config|
config.credentials = grpc_channel
end
path = client.uptime_check_config_path project: "value0", uptime_check_config: "value1"
assert_equal "projects/value0/uptimeCheckConfigs/value1", path
path = client.uptime_check_config_path organization: "value0", uptime_check_config: "value1"
assert_equal "organizations/value0/uptimeCheckConfigs/value1", path
path = client.uptime_check_config_path folder: "value0", uptime_check_config: "value1"
assert_equal "folders/value0/uptimeCheckConfigs/value1", path
end
end
def test_workspace_path
grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
::Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Monitoring::V3::UptimeCheckService::Client.new do |config|
config.credentials = grpc_channel
end
path = client.workspace_path project: "value0"
assert_equal "projects/value0", path
path = client.workspace_path workspace: "value0"
assert_equal "workspaces/value0", path
end
end
end
| 36.915663 | 98 | 0.730091 |
e25861917bd9a3917ea53371089bfb0e30708241 | 3,438 | #--
# Copyright (c) 2007-2009, John Mettraux, [email protected]
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
module OpenWFE
#
# The methods of the engine for updating live expressions
# (in flight modifications of process instances)
#
module UpdateExpMethods
#
# Use only when doing "process gardening".
#
# This method updates an expression, the 'data' parameter is expected
# to be a hash. If the expression is an Environment, the variables
# will be merged with the ones found in the data param.
# If the expression is not an Environment, the data will be merged
# into the 'applied_workitem' if any.
#
# If the merge is not possible, an exception will be raised.
#
def update_expression_data (fei, data)
fexp = fetch_exp fei
original = if fexp.is_a?(Environment)
fexp.variables
else
fexp.applied_workitem.attributes
end
original.merge! data
get_expression_pool.update fexp
end
#
# A variant of update_expression() that directly replaces
# the raw representation stored within an expression
#
# Useful for modifying [not yet reached] segments of processes.
#
# If the index argument is set, only the raw child pointed by the index
# will get updated.
#
def update_expression_tree (fei, representation, index=nil)
fexp = fetch_exp(fei)
#raise "cannot update already applied expression" \
# unless fexp.is_a?(RawExpression)
if index
#
# update just one child
#
fexp.raw_representation = fexp.raw_representation.dup
fexp.raw_representation[2] = fexp.raw_representation[2].dup
# those dups are for the InMemory case ...
fexp.raw_representation[2][index] = representation
else
#
# update whole tree
#
fexp.raw_representation = representation
end
fexp.raw_rep_updated = true
get_expression_pool.update(fexp)
end
alias :update_raw_expression :update_expression_tree
#
# Replaces an expression in the pool with a newer version of it.
#
# (useful when fixing processes on the fly)
#
def update_expression (fexp)
fexp.application_context = application_context
fexp.raw_rep_updated = true
get_expression_pool.update(fexp)
end
end
end
| 28.65 | 79 | 0.696626 |
79fe7e34723a9ee0e197ee511567dc6e6742ee58 | 1,081 | class Balance < Formula
desc "Software load balancer"
homepage "https://www.inlab.net/balance/"
url "https://www.inlab.net/wp-content/uploads/2018/05/balance-3.57.tar.gz"
sha256 "b355f98932a9f4c9786cb61012e8bdf913c79044434b7d9621e2fa08370afbe1"
bottle do
cellar :any_skip_relocation
sha256 "77589c441e2c89d6fb3df19b51487fb4684327fe63c5fe768224d10f81868d3c" => :high_sierra
sha256 "02b241cd5085873f6f2e78c99c01b1be6c89a3a2ff9fc12e17600035096dc44e" => :sierra
sha256 "c6af3ec64f795a6ba24400e83b3ab3753564a57f8546f0137368860bd2605421" => :el_capitan
sha256 "07f517fc19b99e5d52f6a90576ccd718650bd6a291d7c808f0d8b8193bce7779" => :yosemite
sha256 "ee916620a28cde87c90824125bf418b61eea80bc99e3aa32936e39af8acf0432" => :mavericks
sha256 "225ecddbc89a491c8ee38988d0a18d175db79d7dec5553ff35d765d2d3ee6638" => :mountain_lion
end
def install
system "make"
bin.install "balance"
man1.install "balance.1"
end
test do
output = shell_output("#{bin}/balance 2>&1", 64)
assert_match "this is balance #{version}", output
end
end
| 38.607143 | 95 | 0.790009 |
08d3e20c61a017dfb27098f4b549fbdef9f6f6e9 | 122 | require 'test_helper'
# NOTE(review): the class name is spelled "Agreemnt" — presumably mirroring a
# typo'd model/file name; confirm against the model before renaming anything.
class AgreemntTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
end
| 15.25 | 44 | 0.704918 |
1dbf24c1aacd0b4a1d8549912449c1b59f68ee35 | 4,113 | require 'netstring'
require 'logger'
require 'benchmark'
module JRPC
class TcpClient < BaseClient
attr_reader :namespace, :transport
attr_accessor :logger
def_delegators :@transport, :close, :closed?, :connect
MAX_LOGGED_MESSAGE_LENGTH = 255
def initialize(uri, options = {})
super
@logger = @options.delete(:logger) || Logger.new($null)
@namespace = @options.delete(:namespace).to_s
timeout = @options.fetch(:timeout, 5)
connect_timeout = @options.fetch(:connect_timeout, timeout)
read_timeout = @options.fetch(:read_timeout, timeout)
write_timeout = @options.fetch(:write_timeout, 60) # default 60
connect_retry_count = @options.fetch(:connect_retry_count, 10) # default 10
@close_after_sent = @options.fetch(:close_after_sent, false)
@transport = JRPC::Transport::SocketTcp.new server: @uri,
connect_retry_count: connect_retry_count,
connect_timeout: connect_timeout,
read_timeout: read_timeout,
write_timeout: write_timeout
connect_transport!
end
private
def connect_transport!
@transport.connect
rescue JRPC::Transport::SocketTcp::Error
raise ConnectionError, "Can't connect to #{@uri}"
end
def ensure_connected
if @transport.closed?
logger.debug { 'Connecting transport...' }
connect_transport!
logger.debug { 'Connected.' }
end
end
def send_command(request, options = {})
ensure_connected
read_timeout = options.fetch(:read_timeout)
write_timeout = options.fetch(:write_timeout)
response = nil
t = Benchmark.realtime do
logger.debug { "Request address: #{uri}" }
logger.debug { "Request message: #{Utils.truncate(request, MAX_LOGGED_MESSAGE_LENGTH)}" }
logger.debug { "Request read_timeout: #{read_timeout}" }
logger.debug { "Request write_timeout: #{write_timeout}" }
send_request(request, write_timeout)
response = receive_response(read_timeout)
end
logger.debug do
"(#{'%.2f' % (t * 1000)}ms) Response message: #{Utils.truncate(response, MAX_LOGGED_MESSAGE_LENGTH)}"
end
response
ensure
@transport.close if @close_after_sent
end
def send_notification(request, options = {})
ensure_connected
write_timeout = options.fetch(:write_timeout)
logger.debug { "Request address: #{uri}" }
logger.debug { "Request message: #{Utils.truncate(request, MAX_LOGGED_MESSAGE_LENGTH)}" }
logger.debug { "Request write_timeout: #{write_timeout}" }
send_request(request, write_timeout)
logger.debug { 'No response required' }
ensure
@transport.close if @close_after_sent
end
def create_message(method, params)
super("#{namespace}#{method}", params)
end
def send_request(request, timeout)
timeout ||= @transport.write_timeout
@transport.write Netstring.dump(request.to_s), timeout
rescue ::SocketError
raise ConnectionError, "Can't send request to #{uri}"
rescue JRPC::ConnectionClosedError
raise ConnectionError, "Connection to #{uri} was closed unexpectedly"
end
def receive_response(timeout)
timeout ||= @transport.read_timeout
length = get_msg_length(timeout)
response = @transport.read(length + 1, timeout)
raise ClientError.new('invalid response. missed comma as terminator') if response[-1] != ','
response.chomp(',')
rescue ::SocketError
raise ConnectionError, "Can't receive response from #{uri}"
rescue JRPC::ConnectionClosedError
raise ConnectionError, "Connection to #{uri} was closed unexpectedly"
end
def get_msg_length(timeout)
length = ''
while true do
character = @transport.read(1, timeout)
break if character == ':'
length += character
end
Integer(length)
end
end
end
| 34.563025 | 109 | 0.641138 |
e26e7bd35491bde226780bb7f415738f90daa051 | 2,446 | require 'fileutils'
require 'sys/filesystem'
require 'digest/sha1'
module Daisy
class Storage
def initialize(config)
config.deep_symbolize_keys!
@name = config[:name]
@mount_point = config[:mount_point]
case config[:quota][:unit].to_sym
when :bytes
@quota = config[:quota][:size].to_f
when :percentage
@quota = (size * config[:quota][:size].to_f).floor
end
end
attr_reader :name, :mount_point, :quota
def write(file)
raise FileNotFound unless File.exist?(file)
raise WriteError unless writable?(write_size: File.size(file))
out_path = path(file)
FileUtils.mkdir_p(File.dirname(out_path)) unless Dir.exist?(File.dirname(out_path))
FileUtils.mv(file, out_path)
end
def alive?
FileUtils.touch('daisy_alive_test')
FileUtils.rm('daisy_alive_test')
end
def permissions
File.stat(@mount_point)
end
def stat
File.stat(@mount_point)
end
def readable?
stat.readable?
end
def writable?(write_size: nil)
return false if out_of_quota?
return false unless stat.writable?
return true if write_size.nil?
return false if out_of_quota?(write_size: write_size)
return true
end
def executable?
stat.executable?
end
def out_of_quota?(write_size: nil)
if write_size.nil?
free <= @quota
else
(free - write_size) <= @quota
end
end
def size(suffix: false)
stat = Sys::Filesystem.stat(@mount_point)
size = (stat.blocks * stat.block_size)
if suffix
size.to_s(:human_size)
else
size
end
end
def free(suffix: false)
stat = Sys::Filesystem.stat(@mount_point)
available_size = (stat.blocks_available * stat.block_size)
if suffix
available_size.to_s(:human_size)
else
available_size
end
end
def exist?(name)
File.exist?(path(name))
end
def path(name)
File.expand_path(File.join(@mount_point, Storage.output_dir(name), File.basename(name)))
end
class << self
def output_dir(name)
filename_hash(File.basename(name)).split(//).each_slice(2).first(2).map do |char|
char.join()
end.join(File::SEPARATOR)
end
def filename_hash(name)
Digest::SHA1.hexdigest(File.basename(name))
end
end
end
end
| 22.036036 | 94 | 0.621832 |
1d9d905e94dc62299b769a34b158c4d9f4223b77 | 1,317 | ##
# NOTE: Hashie does not make it easy to write a custom KeyConversion.
# So here we are. :construction_worker: :arrow_heading_down:
##
# Reopen String to underscore the camel-cased strings. Miss you ActiveSupport.
# Adds String#underscore ("FooBar::BazQux" -> "foo_bar/baz_qux"),
# mirroring ActiveSupport's implementation.
module Underscore
  def underscore
    gsub(/::/, '/')
      .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
      .gsub(/([a-z\d])([A-Z])/, '\1_\2')
      .tr('-', '_')
      .downcase
  end
end
String.send :include, Underscore
##
# Reopen Hash to lowercase the keys.
# Hashie does not have a KeyConversion for that
module KeyConversion
  # Destructively rewrites every key to its underscored, downcased string
  # form, recursing into nested hashes/arrays first. Returns self.
  def downcase_keys!
    keys.each do |k|
      downcase_keys_recursively!(self[k])
      self[k.to_s.underscore.downcase] = delete(k)
    end
    self
  end
  # Return a new hash with all keys converted
  # to downcase strings.
  # NOTE(review): dup is shallow, so nested hashes/arrays are still mutated
  # in place by the recursion — only the top-level hash is a copy.
  def downcase_keys
    dup.downcase_keys!
  end
  protected
  # Downcase all keys recursively within nested
  # hashes and arrays.
  # Non-collection values (and anything not responding to downcase_keys!)
  # are returned untouched.
  def downcase_keys_recursively!(object)
    if self.class === object
      object.downcase_keys!
    elsif ::Array === object
      object.each do |i|
        downcase_keys_recursively!(i)
      end
      object
    elsif object.respond_to?(:downcase_keys!)
      object.downcase_keys!
    else
      object
    end
  end
end
Hash.send :include, KeyConversion
| 22.706897 | 78 | 0.661352 |
7aa6d473f6fc908b51c1d6c1230644847d384e3e | 403 | require 'liquid'
module TackleBox
module Email
class Template
module Tags
class RandomDate < Liquid::Tag
def render(context)
today = Date.today
return today.strftime("%b #{rand(today.day - 1) + 1}")
end
end
end
end
end
end
Liquid::Template.register_tag('random_date', TackleBox::Email::Template::Tags::RandomDate)
| 18.318182 | 90 | 0.598015 |
797759ee95b3b6c79ff971032ada1cfd8c272cb4 | 892 | # encoding: utf-8
module QueryBuilder::CQL
  module Contexts
    # Describes the user-defined trigger for the table
    #
    class Trigger < Base
      attribute :table, required: true
      attribute :name, required: true
      # Returns the fully qualified trigger name ("<keyspace>.<name>"),
      # using the keyspace of the table the trigger is attached to.
      #
      # @return [String]
      #
      def to_s
        [table.keyspace.name, name].join(".")
      end
      # Builds the 'CREATE TRIGGER' CQL statement for the current trigger
      #
      # @return [QueryBuilder::Statements::CreateTrigger]
      #
      def create
        Statements::CreateTrigger.new(context: self)
      end
      # Builds the 'DROP TRIGGER' CQL statement
      #
      # @return [QueryBuilder::Statements::DropTrigger]
      #
      def drop
        Statements::DropTrigger.new(context: self)
      end
    end # class Trigger
  end # module Contexts
end # module QueryBuilder::CQL
| 20.744186 | 73 | 0.610987 |
01d7b7d2e966f88300d0f41740b64ec0b776e38c | 325 | # class ItemSerializer < ActiveModel::Serializer
# attributes :id,:title,:code,:state,:reference,:domain
#
# has_many :kits, :through => :kit_items
# has_many :items, :through => :item_components
#
# has_many :kit_items
# has_many :item_components
#
# has_many :owners
# # has_and_belongs_to_many :owners
# end
| 25 | 57 | 0.707692 |
bb9319aea8bf852c2ffd06445a15e0ecee5b3b02 | 33,552 | require "test/unit"
require "testutil"
require "util"
require "openid/consumer/idres"
require "openid/protocolerror"
require "openid/store/memory"
require "openid/store/nonce"
module OpenID
class Consumer
class IdResHandler
# Subclass of IdResHandler that doesn't do verification upon
# construction. All of the tests call this, except for the ones
# explicitly for id_res.
class IdResHandler < OpenID::Consumer::IdResHandler
def id_res
end
end
class CheckForFieldsTest < Test::Unit::TestCase
include ProtocolErrorMixin
BASE_FIELDS = ['return_to', 'assoc_handle', 'sig', 'signed']
OPENID2_FIELDS = BASE_FIELDS + ['op_endpoint']
OPENID1_FIELDS = BASE_FIELDS + ['identity']
OPENID1_SIGNED = ['return_to', 'identity']
OPENID2_SIGNED =
OPENID1_SIGNED + ['response_nonce', 'claimed_id', 'assoc_handle',
'op_endpoint']
def mkMsg(ns, fields, signed_fields)
msg = Message.new(ns)
fields.each do |field|
msg.set_arg(OPENID_NS, field, "don't care")
end
if fields.member?('signed')
msg.set_arg(OPENID_NS, 'signed', signed_fields.join(','))
end
msg
end
1.times do # so as not to bleed into the outer namespace
n = 0
[[],
['foo'],
['bar', 'baz'],
].each do |signed_fields|
test = lambda do
msg = mkMsg(OPENID2_NS, OPENID2_FIELDS, signed_fields)
idres = IdResHandler.new(msg, nil)
assert_equal(signed_fields, idres.send(:signed_list))
# Do it again to make sure logic for caching is correct
assert_equal(signed_fields, idres.send(:signed_list))
end
define_method("test_signed_list_#{n += 1}", test)
end
end
# test all missing fields for OpenID 1 and 2
1.times do
[["openid1", OPENID1_NS, OPENID1_FIELDS],
["openid1", OPENID11_NS, OPENID1_FIELDS],
["openid2", OPENID2_NS, OPENID2_FIELDS],
].each_with_index do |(ver, ns, all_fields), i|
all_fields.each do |field|
test = lambda do
fields = all_fields.dup
fields.delete(field)
msg = mkMsg(ns, fields, [])
idres = IdResHandler.new(msg, nil)
assert_protocol_error("Missing required field #{field}") {
idres.send(:check_for_fields)
}
end
define_method("test_#{i}_#{ver}_check_missing_#{field}", test)
end
end
end
# Test all missing signed for OpenID 1 and 2
1.times do
[["openid1", OPENID1_NS, OPENID1_FIELDS, OPENID1_SIGNED],
["openid1", OPENID11_NS, OPENID1_FIELDS, OPENID1_SIGNED],
["openid2", OPENID2_NS, OPENID2_FIELDS, OPENID2_SIGNED],
].each_with_index do |(ver, ns, all_fields, signed_fields), i|
signed_fields.each do |signed_field|
test = lambda do
fields = signed_fields.dup
fields.delete(signed_field)
msg = mkMsg(ns, all_fields, fields)
# Make sure the signed field is actually in the request
msg.set_arg(OPENID_NS, signed_field, "don't care")
idres = IdResHandler.new(msg, nil)
assert_protocol_error("#{signed_field.inspect} not signed") {
idres.send(:check_for_fields)
}
end
define_method("test_#{i}_#{ver}_check_missing_signed_#{signed_field}", test)
end
end
end
def test_112
args = {'openid.assoc_handle' => 'fa1f5ff0-cde4-11dc-a183-3714bfd55ca8',
'openid.claimed_id' => 'http://binkley.lan/user/test01',
'openid.identity' => 'http://test01.binkley.lan/',
'openid.mode' => 'id_res',
'openid.ns' => 'http://specs.openid.net/auth/2.0',
'openid.ns.pape' => 'http://specs.openid.net/extensions/pape/1.0',
'openid.op_endpoint' => 'http://binkley.lan/server',
'openid.pape.auth_policies' => 'none',
'openid.pape.auth_time' => '2008-01-28T20:42:36Z',
'openid.pape.nist_auth_level' => '0',
'openid.response_nonce' => '2008-01-28T21:07:04Z99Q=',
'openid.return_to' => 'http://binkley.lan:8001/process?janrain_nonce=2008-01-28T21%3A07%3A02Z0tMIKx',
'openid.sig' => 'YJlWH4U6SroB1HoPkmEKx9AyGGg=',
'openid.signed' => 'assoc_handle,identity,response_nonce,return_to,claimed_id,op_endpoint,pape.auth_time,ns.pape,pape.nist_auth_level,pape.auth_policies'
}
assert_equal(args['openid.ns'], OPENID2_NS)
incoming = Message.from_post_args(args)
assert(incoming.is_openid2)
idres = IdResHandler.new(incoming, nil)
car = idres.send(:create_check_auth_request)
expected_args = args.dup
expected_args['openid.mode'] = 'check_authentication'
expected = Message.from_post_args(expected_args)
assert(expected.is_openid2)
assert_equal(expected, car)
assert_equal(expected_args, car.to_post_args)
end
def test_no_signed_list
msg = Message.new(OPENID2_NS)
idres = IdResHandler.new(msg, nil)
assert_protocol_error("Response missing signed") {
idres.send(:signed_list)
}
end
def test_success_openid1
msg = mkMsg(OPENID1_NS, OPENID1_FIELDS, OPENID1_SIGNED)
idres = IdResHandler.new(msg, nil)
assert_nothing_raised {
idres.send(:check_for_fields)
}
end
def test_success_openid1_1
msg = mkMsg(OPENID11_NS, OPENID1_FIELDS, OPENID1_SIGNED)
idres = IdResHandler.new(msg, nil)
assert_nothing_raised {
idres.send(:check_for_fields)
}
end
end
class ReturnToArgsTest < Test::Unit::TestCase
include OpenID::ProtocolErrorMixin
def check_return_to_args(query)
idres = IdResHandler.new(Message.from_post_args(query), nil)
class << idres
def verify_return_to_base(unused)
end
end
idres.send(:verify_return_to)
end
def assert_bad_args(msg, query)
assert_protocol_error(msg) {
check_return_to_args(query)
}
end
def test_return_to_args_okay
assert_nothing_raised {
check_return_to_args({
'openid.mode' => 'id_res',
'openid.return_to' => 'http://example.com/?foo=bar',
'foo' => 'bar',
})
}
end
def test_unexpected_arg_okay
assert_bad_args("Unexpected parameter", {
'openid.mode' => 'id_res',
'openid.return_to' => 'http://example.com/',
'foo' => 'bar',
})
end
def test_return_to_mismatch
assert_bad_args('Message missing ret', {
'openid.mode' => 'id_res',
'openid.return_to' => 'http://example.com/?foo=bar',
})
assert_bad_args("Parameter 'foo' val", {
'openid.mode' => 'id_res',
'openid.return_to' => 'http://example.com/?foo=bar',
'foo' => 'foos',
})
end
end
# Tests for the URL-matching half of verify_return_to: which
# differences between the message's return_to and the expected base
# URL are fatal, and which normalizations are tolerated.
class ReturnToVerifyTest < Test::Unit::TestCase
# Each URL below differs from return_to in exactly one component and
# must be rejected.
def test_bad_return_to
return_to = "http://some.url/path?foo=bar"
m = Message.new(OPENID1_NS)
m.set_arg(OPENID_NS, 'mode', 'cancel')
m.set_arg(BARE_NS, 'foo', 'bar')
# Scheme, authority, and path differences are checked by
# IdResHandler.verify_return_to_base. Query args checked by
# IdResHandler.verify_return_to_args.
[
# Scheme only
"https://some.url/path?foo=bar",
# Authority only
"http://some.url.invalid/path?foo=bar",
# Path only
"http://some.url/path_extra?foo=bar",
# Query args differ
"http://some.url/path?foo=bar2",
"http://some.url/path?foo2=bar",
].each do |bad|
m.set_arg(OPENID_NS, 'return_to', bad)
idres = IdResHandler.new(m, return_to)
assert_raises(ProtocolError) {
idres.send(:verify_return_to)
}
end
end
# Equivalent-after-normalization URLs (case of scheme/host, default
# port, percent-encoding, dot-segments, extra query args, fragments)
# must all be accepted.
def test_good_return_to
base = 'http://example.janrain.com/path'
[ [base, {}],
[base + "?another=arg", {'another' => 'arg'}],
[base + "?another=arg#frag", {'another' => 'arg'}],
['HTTP'+base[4..-1], {}],
[base.sub('com', 'COM'), {}],
['http://example.janrain.com:80/path', {}],
['http://example.janrain.com/p%61th', {}],
['http://example.janrain.com/./path',{}],
].each do |return_to, args|
args['openid.return_to'] = return_to
msg = Message.from_post_args(args)
idres = IdResHandler.new(msg, base)
assert_nothing_raised {
idres.send(:verify_return_to)
}
end
end
end
# Minimal stand-in for an OpenID service endpoint used by the tests
# below: it exposes nothing but the server URL that the handler under
# test reads and writes.
class DummyEndpoint
  # Remember the URL of the (fake) OpenID server.
  def initialize(url)
    @server_url = url
  end

  attr_accessor :server_url
end
# Tests for IdResHandler's signature checking: a good stored
# association verifies locally; a dumb/unknown handle falls back to a
# check_authentication round trip with the server.
class CheckSigTest < Test::Unit::TestCase
include ProtocolErrorMixin
include TestUtil
def setup
@assoc = GoodAssoc.new('{not_dumb}')
@store = Store::Memory.new
@server_url = 'http://server.url/'
@endpoint = DummyEndpoint.new(@server_url)
@store.store_association(@server_url, @assoc)
@message = Message.from_post_args({
'openid.mode' => 'id_res',
'openid.identity' => '=example',
'openid.sig' => GOODSIG,
'openid.assoc_handle' => @assoc.handle,
'openid.signed' => 'mode,identity,assoc_handle,signed',
'frobboz' => 'banzit',
})
end
# Build a handler for @message, let the caller patch it via the block
# (InstanceDefExtension#instance_def), then invoke +method_name+.
def call_idres_method(method_name)
idres = IdResHandler.new(@message, nil, @store, @endpoint)
idres.extend(InstanceDefExtension)
yield idres
idres.send(method_name)
end
def call_check_sig(&proc)
call_idres_method(:check_signature, &proc)
end
# Patch check_auth to blow up: used when the fallback must NOT happen.
def no_check_auth(idres)
idres.instance_def(:check_auth) { fail "Called check_auth" }
end
def test_sign_good
assert_nothing_raised {
call_check_sig(&method(:no_check_auth))
}
end
def test_bad_sig
@message.set_arg(OPENID_NS, 'sig', 'bad sig!')
assert_protocol_error('Bad signature') {
call_check_sig(&method(:no_check_auth))
}
end
# An unknown handle must trigger the check_auth fallback.
def test_check_auth_ok
@message.set_arg(OPENID_NS, 'assoc_handle', 'dumb-handle')
check_auth_called = false
call_check_sig do |idres|
idres.instance_def(:check_auth) do
check_auth_called = true
end
end
assert(check_auth_called)
end
# Without a store there is no association to verify against, so
# check_auth is always used.
def test_check_auth_ok_no_store
@store = nil
check_auth_called = false
call_check_sig do |idres|
idres.instance_def(:check_auth) do
check_auth_called = true
end
end
assert(check_auth_called)
end
def test_expired_assoc
@assoc.expires_in = -1
@store.store_association(@server_url, @assoc)
assert_protocol_error('Association with') {
call_check_sig(&method(:no_check_auth))
}
end
def call_check_auth(&proc)
assert_log_matches("Using 'check_authentication'") {
call_idres_method(:check_auth, &proc)
}
end
def test_check_auth_create_fail
assert_protocol_error("Could not generate") {
call_check_auth do |idres|
idres.instance_def(:create_check_auth_request) do
raise Message::KeyNotFound, "Testing"
end
end
}
end
def test_check_auth_okay
OpenID.extend(OverrideMethodMixin)
me = self
send_resp = Proc.new do |req, server_url|
me.assert_equal(:req, req)
:expected_response
end
OpenID.with_method_overridden(:make_kv_post, send_resp) do
call_check_auth do |idres|
idres.instance_def(:create_check_auth_request) {
:req
}
idres.instance_def(:process_check_auth_response) do |resp|
me.assert_equal(:expected_response, resp)
end
end
end
end
def test_check_auth_process_fail
OpenID.extend(OverrideMethodMixin)
me = self
send_resp = Proc.new do |req, server_url|
me.assert_equal(:req, req)
:expected_response
end
OpenID.with_method_overridden(:make_kv_post, send_resp) do
assert_protocol_error("Testing") do
call_check_auth do |idres|
idres.instance_def(:create_check_auth_request) { :req }
idres.instance_def(:process_check_auth_response) do |resp|
me.assert_equal(:expected_response, resp)
raise ProtocolError, "Testing"
end
end
end
end
end
# The 1.times block gives the loop its own scope while still running at
# class-definition time, so define_method can generate one test per
# required signed field.
1.times do
# Fields from the signed list
['mode', 'identity', 'assoc_handle'
].each do |field|
test = lambda do
@message.del_arg(OPENID_NS, field)
assert_raises(Message::KeyNotFound) {
call_idres_method(:create_check_auth_request) {}
}
end
define_method("test_create_check_auth_missing_#{field}", test)
end
end
def test_create_check_auth_request_success
ca_msg = call_idres_method(:create_check_auth_request) {}
expected = @message.copy
expected.set_arg(OPENID_NS, 'mode', 'check_authentication')
assert_equal(expected, ca_msg)
end
end
# Tests for IdResHandler#process_check_auth_response: how the consumer
# reacts to the server's reply to a check_authentication request.
class CheckAuthResponseTest < Test::Unit::TestCase
  include TestUtil
  include ProtocolErrorMixin

  def setup
    @message = Message.from_openid_args({
      'is_valid' => 'true',
    })
    @assoc = GoodAssoc.new
    @store = Store::Memory.new
    @server_url = 'http://invalid/'
    @endpoint = DummyEndpoint.new(@server_url)
    @idres = IdResHandler.new(nil, nil, @store, @endpoint)
  end

  # Feed @message to the private response processor.
  def call_process
    @idres.send(:process_check_auth_response, @message)
  end

  def test_valid
    assert_log_matches() { call_process }
  end

  # Any is_valid value other than 'true' must be rejected.  The loop
  # previously ignored its element and tested 'false' twice; each
  # candidate invalid value is now actually exercised.
  def test_invalid
    ['false', 'monkeys'].each do |invalid_value|
      @message.set_arg(OPENID_NS, 'is_valid', invalid_value)
      assert_protocol_error("Server #{@server_url} responds") {
        assert_log_matches() { call_process }
      }
    end
  end

  def test_valid_invalidate
    @message.set_arg(OPENID_NS, 'invalidate_handle', 'cheese')
    assert_log_matches("Received 'invalidate_handle'") { call_process }
  end

  # invalidate_handle must still be honored (and logged) even when the
  # response itself is invalid.  Same loop-variable fix as test_invalid.
  def test_invalid_invalidate
    @message.set_arg(OPENID_NS, 'invalidate_handle', 'cheese')
    ['false', 'monkeys'].each do |invalid_value|
      @message.set_arg(OPENID_NS, 'is_valid', invalid_value)
      assert_protocol_error("Server #{@server_url} responds") {
        assert_log_matches("Received 'invalidate_handle'") {
          call_process
        }
      }
    end
  end

  # With no store, an invalidate_handle cannot be acted upon, which is
  # logged as unexpected.
  def test_invalidate_no_store
    @idres.instance_variable_set(:@store, nil)
    @message.set_arg(OPENID_NS, 'invalidate_handle', 'cheese')
    assert_log_matches("Received 'invalidate_handle'",
                       'Unexpectedly got "invalidate_handle"') {
      call_process
    }
  end
end
# Tests for IdResHandler#check_nonce: replay protection for id_res
# responses.  OpenID 1 carries the nonce in the bare 'rp_nonce' query
# argument; OpenID 2 uses the 'openid.response_nonce' field.
class NonceTest < Test::Unit::TestCase
  include TestUtil
  include ProtocolErrorMixin

  def setup
    # Stub store that records every use_nonce call and returns a
    # configurable freshness flag.
    @store = Object.new
    class << @store
      attr_accessor :nonces, :succeed
      def use_nonce(server_url, time, extra)
        @nonces << [server_url, time, extra]
        @succeed
      end
    end
    @store.nonces = []
    @nonce = Nonce.mk_nonce
  end

  # Run check_nonce against a response built from +post_args+.
  # +succeed+ is what the stub store reports for nonce freshness.
  def call_check_nonce(post_args, succeed=false)
    response = Message.from_post_args(post_args)
    # @store may legitimately be nil (see test_no_store); only configure
    # the stub when one is present.  The original line had been mangled
    # by an e-mail scrubber into "[email protected]?"; it read
    # "if !@store.nil?".
    @store.succeed = succeed unless @store.nil?
    idres = IdResHandler.new(response, nil, @store, nil)
    idres.send(:check_nonce)
  end

  def test_openid1_success
    [{},
     {'openid.ns' => OPENID1_NS},
     {'openid.ns' => OPENID11_NS}
    ].each do |args|
      assert_nothing_raised {
        call_check_nonce({'rp_nonce' => @nonce}.merge(args), true)
      }
    end
  end

  def test_openid1_missing
    [{},
     {'openid.ns' => OPENID1_NS},
     {'openid.ns' => OPENID11_NS}
    ].each do |args|
      assert_protocol_error('Nonce missing') { call_check_nonce(args) }
    end
  end

  # An OpenID 2 response must not satisfy the nonce check via the
  # OpenID 1-style bare rp_nonce argument.
  def test_openid2_ignore_rp_nonce
    assert_protocol_error('Nonce missing') {
      call_check_nonce({'rp_nonce' => @nonce,
                        'openid.ns' => OPENID2_NS})
    }
  end

  def test_openid2_success
    assert_nothing_raised {
      call_check_nonce({'openid.response_nonce' => @nonce,
                        'openid.ns' => OPENID2_NS}, true)
    }
  end

  # Conversely, OpenID 1 responses must ignore openid.response_nonce.
  def test_openid1_ignore_response_nonce
    [{},
     {'openid.ns' => OPENID1_NS},
     {'openid.ns' => OPENID11_NS}
    ].each do |args|
      assert_protocol_error('Nonce missing') {
        call_check_nonce({'openid.response_nonce' => @nonce}.merge(args))
      }
    end
  end

  # Stateless mode: with no store, nonces cannot be tracked, so the
  # check is skipped entirely.
  def test_no_store
    @store = nil
    assert_nothing_raised {
      call_check_nonce({'rp_nonce' => @nonce})
    }
  end

  def test_already_used
    assert_protocol_error('Nonce already used') {
      call_check_nonce({'rp_nonce' => @nonce}, false)
    }
  end

  def test_malformed_nonce
    assert_protocol_error('Malformed nonce') {
      call_check_nonce({'rp_nonce' => 'whee!'})
    }
  end
end
# Tests for IdResHandler#verify_discovery_results: matching the
# assertion in an id_res message against the pre-discovered endpoint,
# falling back to fresh discovery when they disagree or when no
# endpoint was supplied.
class DiscoveryVerificationTest < Test::Unit::TestCase
include ProtocolErrorMixin
include TestUtil
def setup
@endpoint = OpenIDServiceEndpoint.new
end
def call_verify(msg_args)
call_verify_modify(msg_args){}
end
# Build a handler for +msg_args+, let the caller patch it via the
# block, run verification, and return the endpoint it settled on.
def call_verify_modify(msg_args)
msg = Message.from_openid_args(msg_args)
idres = IdResHandler.new(msg, nil, nil, @endpoint)
idres.extend(InstanceDefExtension)
yield idres
idres.send(:verify_discovery_results)
idres.instance_variable_get(:@endpoint)
end
def assert_verify_protocol_error(error_prefix, openid_args)
assert_protocol_error(error_prefix) {call_verify(openid_args)}
end
def test_openid1_no_local_id
@endpoint.claimed_id = 'http://invalid/'
assert_verify_protocol_error("Missing required field: "\
"<#{OPENID1_NS}>identity", {})
end
def test_openid1_no_endpoint
@endpoint = nil
assert_raises(ProtocolError) {
call_verify({'identity' => 'snakes on a plane'})
}
end
def test_openid1_fallback_1_0
[OPENID1_NS, OPENID11_NS].each do |openid1_ns|
claimed_id = 'http://claimed.id/'
@endpoint = nil
resp_mesg = Message.from_openid_args({
'ns' => openid1_ns,
'identity' => claimed_id,
})
# Pass the OpenID 1 claimed_id this way since we're
# passing nil for the endpoint.
resp_mesg.set_arg(BARE_NS, 'openid1_claimed_id', claimed_id)
# We expect the OpenID 1 discovery verification to try
# matching the discovered endpoint against the 1.1 type
# and fall back to 1.0.
expected_endpoint = OpenIDServiceEndpoint.new
expected_endpoint.type_uris = [OPENID_1_0_TYPE]
expected_endpoint.local_id = nil
expected_endpoint.claimed_id = claimed_id
hacked_discover = Proc.new {
|_claimed_id| ['unused', [expected_endpoint]]
}
idres = IdResHandler.new(resp_mesg, nil, nil, @endpoint)
assert_log_matches('Performing discovery') {
OpenID.with_method_overridden(:discover, hacked_discover) {
idres.send(:verify_discovery_results)
}
}
actual_endpoint = idres.instance_variable_get(:@endpoint)
assert_equal(actual_endpoint, expected_endpoint)
end
end
def test_openid2_no_op_endpoint
assert_protocol_error("Missing required field: "\
"<#{OPENID2_NS}>op_endpoint") {
call_verify({'ns'=>OPENID2_NS})
}
end
def test_openid2_local_id_no_claimed
assert_verify_protocol_error('openid.identity is present without',
{'ns' => OPENID2_NS,
'op_endpoint' => 'Phone Home',
'identity' => 'Jorge Lius Borges'})
end
def test_openid2_no_local_id_claimed
assert_log_matches() {
assert_protocol_error('openid.claimed_id is present without') {
call_verify({'ns' => OPENID2_NS,
'op_endpoint' => 'Phone Home',
'claimed_id' => 'Manuel Noriega'})
}
}
end
# With neither identity nor claimed_id, the response asserts only an
# OP identifier.
def test_openid2_no_identifiers
op_endpoint = 'Phone Home'
result_endpoint = assert_log_matches() {
call_verify({'ns' => OPENID2_NS,
'op_endpoint' => op_endpoint})
}
assert(result_endpoint.is_op_identifier)
assert_equal(op_endpoint, result_endpoint.server_url)
assert(result_endpoint.claimed_id.nil?)
end
# No pre-discovered endpoint: discover_and_verify must be invoked and
# its result adopted.
def test_openid2_no_endpoint_does_disco
endpoint = OpenIDServiceEndpoint.new
endpoint.claimed_id = 'monkeysoft'
@endpoint = nil
result = assert_log_matches('No pre-discovered') {
call_verify_modify({'ns' => OPENID2_NS,
'identity' => 'sour grapes',
'claimed_id' => 'monkeysoft',
'op_endpoint' => 'Phone Home'}) do |idres|
idres.instance_def(:discover_and_verify) do |claimed_id, endpoints|
@endpoint = endpoint
end
end
}
assert_equal(endpoint, result)
end
# A pre-discovered endpoint that doesn't match the assertion forces
# re-discovery.
def test_openid2_mismatched_does_disco
@endpoint.claimed_id = 'nothing special, but different'
@endpoint.local_id = 'green cheese'
endpoint = OpenIDServiceEndpoint.new
endpoint.claimed_id = 'monkeysoft'
result = assert_log_matches('Error attempting to use stored',
'Attempting discovery') {
call_verify_modify({'ns' => OPENID2_NS,
'identity' => 'sour grapes',
'claimed_id' => 'monkeysoft',
'op_endpoint' => 'Green Cheese'}) do |idres|
idres.instance_def(:discover_and_verify) do |claimed_id, endpoints|
@endpoint = endpoint
end
end
}
assert(endpoint.equal?(result))
end
def test_verify_discovery_single_claimed_id_mismatch
idres = IdResHandler.new(nil, nil)
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = 'http://i-am-sam/'
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_2_0_TYPE]
to_match = @endpoint.dup
to_match.claimed_id = 'http://something.else/'
e = assert_raises(ProtocolError) {
idres.send(:verify_discovery_single, @endpoint, to_match)
}
assert(e.to_s =~ /different subjects/)
end
# OpenID 1.x responses don't carry op_endpoint, so a nil server_url on
# the to_match endpoint must be tolerated.
def test_openid1_1_verify_discovery_single_no_server_url
idres = IdResHandler.new(nil, nil)
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = 'http://i-am-sam/'
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_1_1_TYPE]
to_match = @endpoint.dup
to_match.claimed_id = 'http://i-am-sam/'
to_match.type_uris = [OPENID_1_1_TYPE]
to_match.server_url = nil
idres.send(:verify_discovery_single, @endpoint, to_match)
end
def test_openid2_use_pre_discovered
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = 'http://i-am-sam/'
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_2_0_TYPE]
result = assert_log_matches() {
call_verify({'ns' => OPENID2_NS,
'identity' => @endpoint.local_id,
'claimed_id' => @endpoint.claimed_id,
'op_endpoint' => @endpoint.server_url
})
}
assert(result.equal?(@endpoint))
end
# Pre-discovered endpoint has the wrong protocol type: verification
# falls back to discovery, whose failure propagates.
def test_openid2_use_pre_discovered_wrong_type
text = "verify failed"
me = self
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = 'i am sam'
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_1_1_TYPE]
endpoint = @endpoint
msg = Message.from_openid_args({'ns' => OPENID2_NS,
'identity' => @endpoint.local_id,
'claimed_id' =>
@endpoint.claimed_id,
'op_endpoint' =>
@endpoint.server_url})
idres = IdResHandler.new(msg, nil, nil, @endpoint)
idres.extend(InstanceDefExtension)
idres.instance_def(:discover_and_verify) { |claimed_id, to_match|
me.assert_equal(endpoint.claimed_id, to_match[0].claimed_id)
me.assert_equal(claimed_id, endpoint.claimed_id)
raise ProtocolError, text
}
assert_log_matches('Error attempting to use stored',
'Attempting discovery') {
assert_protocol_error(text) {
idres.send(:verify_discovery_results)
}
}
end
def test_openid1_use_pre_discovered
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = 'http://i-am-sam/'
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_1_1_TYPE]
result = assert_log_matches() {
call_verify({'ns' => OPENID1_NS,
'identity' => @endpoint.local_id})
}
assert(result.equal?(@endpoint))
end
def test_openid1_use_pre_discovered_wrong_type
verified_error = Class.new(Exception)
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = 'i am sam'
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_2_0_TYPE]
assert_log_matches('Error attempting to use stored',
'Attempting discovery') {
assert_raises(verified_error) {
call_verify_modify({'ns' => OPENID1_NS,
'identity' => @endpoint.local_id}) { |idres|
idres.instance_def(:discover_and_verify) do |claimed_id, endpoints|
raise verified_error
end
}
}
}
end
# A fragment on the asserted claimed_id is preserved in the result
# while everything else still matches the pre-discovered endpoint.
def test_openid2_fragment
claimed_id = "http://unittest.invalid/"
claimed_id_frag = claimed_id + "#fragment"
@endpoint.local_id = 'my identity'
@endpoint.claimed_id = claimed_id
@endpoint.server_url = 'Phone Home'
@endpoint.type_uris = [OPENID_2_0_TYPE]
result = assert_log_matches() {
call_verify({'ns' => OPENID2_NS,
'identity' => @endpoint.local_id,
'claimed_id' => claimed_id_frag,
'op_endpoint' => @endpoint.server_url})
}
[:local_id, :server_url, :type_uris].each do |sym|
assert_equal(@endpoint.send(sym), result.send(sym))
end
assert_equal(claimed_id_frag, result.claimed_id)
end
def test_endpoint_without_local_id
# An endpoint like this with no local_id is generated as a result of
# e.g. Yadis discovery with no LocalID tag.
@endpoint.server_url = "http://localhost:8000/openidserver"
@endpoint.claimed_id = "http://localhost:8000/id/id-jo"
to_match = OpenIDServiceEndpoint.new
to_match.server_url = "http://localhost:8000/openidserver"
to_match.claimed_id = "http://localhost:8000/id/id-jo"
to_match.local_id = "http://localhost:8000/id/id-jo"
idres = IdResHandler.new(nil, nil)
assert_log_matches() {
idres.send(:verify_discovery_single, @endpoint, to_match)
}
end
end
# End-to-end construction of an IdResHandler from a fully signed
# OpenID 2 response; checks that signed_fields reports the
# 'openid.'-prefixed field names.
class IdResTopLevelTest < Test::Unit::TestCase
def test_id_res
endpoint = OpenIDServiceEndpoint.new
endpoint.server_url = 'http://invalid/server'
endpoint.claimed_id = 'http://my.url/'
endpoint.local_id = 'http://invalid/username'
endpoint.type_uris = [OPENID_2_0_TYPE]
assoc = GoodAssoc.new
store = Store::Memory.new
store.store_association(endpoint.server_url, assoc)
signed_fields =
[
'response_nonce',
'op_endpoint',
'assoc_handle',
'identity',
'claimed_id',
'ns',
'return_to',
]
return_to = 'http://return.to/'
args = {
'ns' => OPENID2_NS,
'return_to' => return_to,
'claimed_id' => endpoint.claimed_id,
'identity' => endpoint.local_id,
'assoc_handle' => assoc.handle,
'op_endpoint' => endpoint.server_url,
'response_nonce' => Nonce.mk_nonce,
'signed' => signed_fields.join(','),
'sig' => GOODSIG,
}
msg = Message.from_openid_args(args)
idres = OpenID::Consumer::IdResHandler.new(msg, return_to,
store, endpoint)
assert_equal(idres.signed_fields,
signed_fields.map {|f|'openid.' + f})
end
end
# discover_and_verify must fail with a ProtocolError when discovery on
# the claimed identifier yields no services.
class DiscoverAndVerifyTest < Test::Unit::TestCase
include ProtocolErrorMixin
include TestUtil
def test_no_services
me = self
# Stubbed OpenID.discover: asserts it was called with the expected
# identifier and returns an empty service list.
disco = Proc.new do |e|
me.assert_equal(e, :sentinel)
[:undefined, []]
end
endpoint = OpenIDServiceEndpoint.new
endpoint.claimed_id = :sentinel
idres = IdResHandler.new(nil, nil)
assert_log_matches('Performing discovery on') do
assert_protocol_error('No OpenID information found') do
OpenID.with_method_overridden(:discover, disco) do
idres.send(:discover_and_verify, :sentinel, [endpoint])
end
end
end
end
end
# verify_discovered_services must fail when none of the discovered
# services matches any of the endpoints asserted by the response.
class VerifyDiscoveredServicesTest < Test::Unit::TestCase
include ProtocolErrorMixin
include TestUtil
def test_no_services
endpoint = OpenIDServiceEndpoint.new
endpoint.claimed_id = :sentinel
idres = IdResHandler.new(nil, nil)
assert_log_matches('Discovery verification failure') do
assert_protocol_error('No matching endpoint') do
idres.send(:verify_discovered_services,
'http://bogus.id/', [], [endpoint])
end
end
end
end
end
end
end
| 34.804979 | 171 | 0.548343 |
793a6c3626a0efc33cbb2465e4037dff782e4f46 | 1,593 | Pod::Spec.new do |s|
# CocoaPods spec for the commercial PSPDFKit binary framework.
s.name = "PSPDFKit"
s.version = "3.5.2"
s.homepage = "http://pspdfkit.com"
s.license = { :type => 'Commercial', :file => 'PSPDFKit.embeddedframework/PSPDFKit.framework/Resources/LICENSE' }
s.author = { "PSPDFKit GmbH" => "[email protected]" }
s.summary = "The leading framework for displaying and annotating PDFs in your iOS apps."
s.screenshots = "http://pspdfkit.com/images/frontpage/heroshot_pspdfkit_ipadmini.jpg", "http://pspdfkit.com/images/frontpage/heroshot_pspdfkit_iphone5.png"
s.platform = :ios, '6.0'
# Binary distribution: fetched from the customer portal, not built from source.
s.source = { :http => "https://customers.pspdfkit.com/download/3.5.2.zip" }
s.preserve_paths = 'PSPDFKit.embeddedframework/PSPDFKit.framework'
s.public_header_files = 'PSPDFKit.embeddedframework/PSPDFKit.framework/Versions/A/Headers/*.h'
s.resource = 'PSPDFKit.embeddedframework/PSPDFKit.framework/Versions/A/Resources/PSPDFKit.bundle'
s.vendored_frameworks = 'PSPDFKit.embeddedframework/PSPDFKit.framework'
# System libraries and frameworks the prebuilt binary links against.
s.library = 'z', 'sqlite3', 'xml2'
s.xcconfig = { 'FRAMEWORK_SEARCH_PATHS' => '"$(PODS_ROOT)/PSPDFKit/**"',
'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libxml2' }
s.frameworks = 'QuartzCore', 'CoreText', 'CoreMedia', 'MediaPlayer', 'AVFoundation', 'ImageIO', 'MessageUI',
'CoreGraphics', 'Foundation', 'CFNetwork', 'MobileCoreServices', 'SystemConfiguration',
'AssetsLibrary', 'Security', 'UIKit', 'AudioToolbox', 'QuickLook', 'CoreTelephony'
s.requires_arc = true
end
| 59 | 158 | 0.666667 |
61ec8a39cb7d10b2e0a110f07fcf481a82c9f4fc | 1,662 | class Provider < ApplicationRecord
include NewWithTypeStiMixin
include AuthenticationMixin
include ReportableMixin
include AsyncDeleteMixin
include EmsRefresh::Manager
include TenancyMixin
belongs_to :tenant
belongs_to :zone
has_many :managers, :class_name => "ExtManagementSystem"
has_many :endpoints, :through => :managers
# SSL verification settings live on the default endpoint; expose them
# directly on the provider.
delegate :verify_ssl,
:verify_ssl=,
:verify_ssl?,
:to => :default_endpoint
virtual_column :verify_ssl, :type => :integer
# Subclasses of Provider that have no further subclasses of their own.
def self.leaf_subclasses
descendants.select { |d| d.subclasses.empty? }
end
# Recursively collect the leaf subclasses reachable from this class.
def self.supported_subclasses
subclasses.flat_map do |s|
s.subclasses.empty? ? s : s.supported_subclasses
end
end
# Short identifier derived from the enclosing module name
# (ActiveSupport's Module#parent / String#demodulize).
def self.short_token
parent.name.demodulize
end
def image_name
self.class.short_token.underscore
end
# The endpoint with role "default"; builds an unsaved one if missing.
def default_endpoint
default = endpoints.detect { |e| e.role == "default" }
default || endpoints.build(:role => "default")
end
# Yields a connection to the provider; requires a block.
def with_provider_connection(options = {})
raise _("no block given") unless block_given?
_log.info("Connecting through #{self.class.name}: [#{name}]")
yield connect(options)
end
def my_zone
zone.try(:name).presence || MiqServer.my_zone
end
alias_method :zone_name, :my_zone
# Queue a refresh for every manager, after validating credentials.
def refresh_ems
if missing_credentials?
raise _("no %{table} credentials defined") % {:table => ui_lookup(:table => "provider")}
end
unless authentication_status_ok?
raise _("%{table} failed last authentication check") % {:table => ui_lookup(:table => "provider")}
end
managers.each { |manager| EmsRefresh.queue_refresh(manager) }
end
end
| 25.181818 | 104 | 0.694946 |
f8021993c01223450490111261ce12e9115f5588 | 1,073 | class Service::Firebase < Service
string :firebase, :secret
white_list :firebase
url 'https://www.firebase.com'
logo_url 'https://www.firebase.com/images/logo.png'
maintained_by :github => 'anantn'
supported_by :email => '[email protected]'
# POST each pushed commit as JSON to the configured Firebase URL.
def receive_push
url = data['firebase'].to_s
url.gsub! /\s/, ''
if url.empty?
raise_config_error 'Invalid URL.'
end
if url !~ /^https\:\/\//
raise_config_error 'Invalid URL (did you include the https prefix?)'
end
# Firebase's REST API addresses nodes as <path>.json.
if url !~ /^.*\.json$/
url = url + '.json'
end
# Optional auth token is passed as a query parameter.
# NOTE(review): the secret is appended without URL-encoding — confirm
# tokens can never contain reserved characters.
secret = data['secret'].to_s
if secret.length > 0
url = url + '?auth=' + secret
end
payload['commits'].each do |commit|
http_post url, JSON.generate(commit)
end
rescue Addressable::URI::InvalidURIError, Errno::EHOSTUNREACH
raise_missing_error $!.to_s
rescue SocketError
# Only DNS-resolution failures are reported to the user; anything
# else is re-raised untouched.
if $!.to_s =~ /getaddrinfo:/
raise_missing_error 'Invalid host name.'
else
raise
end
rescue EOFError
raise_config_error 'Invalid server response.'
end
end
| 23.326087 | 74 | 0.640261 |
62f197948a17ac3aac7c609817cc9b4f16c1ac22 | 124 | module Startbootstrap
module Freelancer
module Rails
# Empty Rails engine: its only job is to put this gem's assets on the
# host application's asset-pipeline load path.
class Engine < ::Rails::Engine
end
end
end
end
| 13.777778 | 36 | 0.653226 |
acdc27926f3e88440747350922b6d11a12da96d4 | 1,449 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20111111143014) do
# Audit trail used by the rails_admin gem.
create_table "rails_admin_histories", :force => true do |t|
t.text "message"
t.string "username"
t.integer "item"
t.string "table"
t.integer "month", :limit => 2
t.integer "year", :limit => 5
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "rails_admin_histories", ["item", "table", "month", "year"], :name => "index_rails_admin_histories"
# Search strings with an occurrence counter.
create_table "searches", :force => true do |t|
t.string "string"
t.integer "count", :default => 0
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "searches", ["string"], :name => "index_searches_on_string"
end
| 38.131579 | 111 | 0.710145 |
ac5c6b6f48108fd7df7e332051932b5413e0e2cc | 231 | # frozen_string_literal: true
# Converts car_models.year from string to integer (reversible).
# NOTE(review): on PostgreSQL a string->integer change_column may need
# an explicit USING cast — confirm the target adapter before running.
class ChangeYearToBeIntegerInCarModels < ActiveRecord::Migration[6.0]
def up
change_column :car_models, :year, :integer
end
def down
change_column :car_models, :year, :string
end
end
| 19.25 | 69 | 0.748918 |
ffb33892de0138f034680a6f7dd78ecbc2372d81 | 2,360 | # Helper class for arguments validation
# Assert-style argument validators: each method raises ArgumentError
# when the given object is not of the expected kind.  Defined with
# nested module declarations so this file no longer depends on the
# SequelDM namespace being created elsewhere first.
module SequelDM
  module ArgsValidator
    class << self
      # Checks that specified +obj+ is a Symbol
      # @param obj some object
      # @param obj_name object's name, used to clarify error causer in exception
      def is_symbol!(obj, obj_name)
        unless obj.is_a?(Symbol)
          raise ArgumentError, "#{obj_name} should be a Symbol"
        end
      end

      # Checks that specified +obj+ is an Array
      # @param obj some object
      # @param obj_name object's name, used to clarify error causer in exception
      def is_array!(obj, obj_name)
        unless obj.is_a?(Array)
          raise ArgumentError, "#{obj_name} should be an Array"
        end
      end

      # Checks that specified +obj+ is a Hash
      # @param obj some object
      # @param obj_name object's name, used to clarify error causer in exception
      def is_hash!(obj, obj_name)
        unless obj.is_a?(Hash)
          raise ArgumentError, "#{obj_name} should be a Hash"
        end
      end

      # Checks that specified +obj+ is a Class
      # @param obj some object
      # @param obj_name object's name, used to clarify error causer in exception
      def is_class!(obj, obj_name)
        unless obj.is_a?(Class)
          raise ArgumentError, "#{obj_name} should be a Class"
        end
      end

      # Checks that specified +obj+ is a Proc
      # @param obj some object
      # @param obj_name object's name, used to clarify error causer in exception
      def is_proc!(obj, obj_name)
        unless obj.is_a?(Proc)
          raise ArgumentError, "#{obj_name} should be a Proc"
        end
      end

      # Checks that specified +obj+ is a Symbol or Class
      # @param obj some object
      # @param obj_name object's name, used to clarify error causer in exception
      def is_symbol_or_class!(obj, obj_name)
        if !obj.is_a?(Symbol) && !obj.is_a?(Class)
          raise ArgumentError, "#{obj_name} should be a Symbol or Class"
        end
      end

      # Checks that specified +hash+ has the specified +key+.
      # (Error message grammar fixed: "should has" -> "should have".)
      # @param hash some hash
      # @param key hash's key
      def has_key!(hash, key)
        unless hash.has_key?(key)
          raise ArgumentError, "#{hash} should have #{key} key"
        end
      end

      # Checks that specified +block+ is given
      # @param block some block
      def block_given!(block)
        unless block
          raise ArgumentError, "Block should be given"
        end
      end
    end
  end
end
| 30.649351 | 78 | 0.649576 |
4a88bd273cbb11287cc52935db02fc98a79d2822 | 1,806 | test_name "Validate Sign Cert" do
# Only run when at least one host is freshly created for this test run.
need_to_run = false
hosts.each do |host|
need_to_run ||= !host['use_existing_container']
end
skip_test 'No new hosts to create, skipping' unless need_to_run
skip_test 'not testing with puppetserver' unless @options['is_puppetserver']
hostname = on(master, 'facter hostname').stdout.strip
fqdn = on(master, 'facter fqdn').stdout.strip
puppet_version = on(master, puppet("--version")).stdout.chomp
if master.use_service_scripts?
step "Ensure puppet is stopped"
# Passenger, in particular, must be shutdown for the cert setup steps to work,
# but any running puppet master will interfere with webrick starting up and
# potentially ignore the puppet.conf changes.
on(master, puppet('resource', 'service', master['puppetservice'], "ensure=stopped"))
end
step "Clear SSL on all hosts"
hosts.each do |host|
ssldir = on(host, puppet('agent --configprint ssldir')).stdout.chomp
# preserve permissions for master's ssldir so puppetserver can read it
on(host, "rm -rf '#{ssldir}/'*")
end
step "Set 'server' setting"
hosts.each do |host|
on(host, puppet("config set server #{master.hostname} --section main"))
end
step "Start puppetserver" do
master_opts = {
main: {
dns_alt_names: "puppet,#{hostname},#{fqdn}",
server: fqdn,
autosign: true
},
}
# In Puppet 6, we want to be using an intermediate CA
unless version_is_less(puppet_version, "5.99")
on master, 'puppetserver ca setup' unless master['use_existing_container']
end
with_puppet_running_on(master, master_opts) do
step "Agents: Run agent --test with autosigning enabled to get cert"
on agents, puppet("agent --test"), :acceptable_exit_codes => [0,2]
end
end
end
| 35.411765 | 88 | 0.689922 |
ac3a2c4d803056e5e1b98b60ee3d1d6c28d0797b | 2,971 | class Vte3 < Formula
desc "Terminal emulator widget used by GNOME terminal"
homepage "https://developer.gnome.org/vte/"
url "https://download.gnome.org/sources/vte/0.48/vte-0.48.1.tar.xz"
sha256 "d3b7d6a13e6b850f3ea1c35af2746ef78039493ed4bb87cb7a36f29b260861f6"
bottle do
sha256 "7021496e3792f5c6b6a5e3c65dd4c1401fb01d575e4e4c38ed3ce6d0de05c049" => :sierra
sha256 "834190450a00b3a8fc72d1c6303d374848d42fdccd3d0790c1b81aec2cb09c90" => :el_capitan
sha256 "0561e660b4eca8f2a4ac32126eaaf3669b43bcf75cc9981224899df71621d93b" => :yosemite
end
depends_on "pkg-config" => :build
depends_on "intltool" => :build
depends_on "gettext"
depends_on "gtk+3"
depends_on "gnutls"
depends_on "vala"
depends_on "gobject-introspection"
depends_on "pcre2"
# Standard autotools build.
def install
args = [
"--disable-dependency-tracking",
"--prefix=#{prefix}",
"--disable-Bsymbolic",
"--enable-introspection=yes",
"--enable-gnome-pty-helper",
]
system "./configure", *args
system "make", "install"
end
# Compile and run a tiny C program against the installed library,
# assembling include/lib search paths from every linked dependency.
test do
(testpath/"test.c").write <<-EOS.undent
#include <vte/vte.h>
int main(int argc, char *argv[]) {
guint v = vte_get_major_version();
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
fontconfig = Formula["fontconfig"]
freetype = Formula["freetype"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gettext = Formula["gettext"]
glib = Formula["glib"]
gnutls = Formula["gnutls"]
gtkx3 = Formula["gtk+3"]
libepoxy = Formula["libepoxy"]
libpng = Formula["libpng"]
libtasn1 = Formula["libtasn1"]
nettle = Formula["nettle"]
pango = Formula["pango"]
pixman = Formula["pixman"]
flags = %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{fontconfig.opt_include}
-I#{freetype.opt_include}/freetype2
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{gettext.opt_include}
-I#{glib.opt_include}/gio-unix-2.0/
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{gnutls.opt_include}
-I#{gtkx3.opt_include}/gtk-3.0
-I#{include}/vte-2.91
-I#{libepoxy.opt_include}
-I#{libpng.opt_include}/libpng16
-I#{libtasn1.opt_include}
-I#{nettle.opt_include}
-I#{pango.opt_include}/pango-1.0
-I#{pixman.opt_include}/pixman-1
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{gnutls.opt_lib}
-L#{gtkx3.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-latk-1.0
-lcairo
-lcairo-gobject
-lgdk-3
-lgdk_pixbuf-2.0
-lgio-2.0
-lglib-2.0
-lgnutls
-lgobject-2.0
-lgtk-3
-lintl
-lpango-1.0
-lpangocairo-1.0
-lvte-2.91
-lz
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
| 27.509259 | 92 | 0.623023 |
bb6cbe3540ae6e4a3765b2eec65dd3a506afe597 | 297 | module FormtasticBootstrap
module Inputs
# Bootstrap-styled variant of Formtastic's range input: renders the
# same range_field but inside the Bootstrap generic input wrapper.
class RangeInput < Formtastic::Inputs::RangeInput
include Base
include Base::Stringish
def to_html
generic_input_wrapping do
builder.range_field(method, input_html_options)
end
end
end
end
end | 19.8 | 57 | 0.673401 |
7945070cd4a7b6ebc77fc3a4418c5098bde174ba | 1,695 | require "test_helper"
# Smoke test: the gem defines a VERSION constant.
class KenkenizerTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Kenkenizer::VERSION
end
end
# Behavior of the Kenken wrapper class: ^ repeats as "ken" regardless
# of the wrapped string, - removes characters, to_s returns the
# original string unchanged.
class KenkenTest < Minitest::Test
def test_pow
ken = Kenken.new("ken")
assert_equal (ken ^ 2), "kenken"
end
def test_pow_when_str_is_hayapi
ken = Kenken.new("hayapi")
assert_equal (ken ^ 2), "kenken"
end
def test_to_s
ken = Kenken.new("ken")
assert_equal ken.to_s, "ken"
end
def test_to_s_when_str_is_hayapi
ken = Kenken.new("hayapi")
assert_equal ken.to_s, "hayapi"
end
def test_subtract
kenken = Kenken.new("ken")
assert_equal (kenken - "e"), "knkn"
end
def test_subtract_when_str_is_hayapi
kenken = Kenken.new("hayapi")
assert_equal (kenken - "e"), "knkn"
end
end
# The gem's String monkey patches mirror Kenken's ^ and - operators,
# for both String.new and string literals.
class StringTest < Minitest::Test
def test_string_pow
ken = String.new("ken")
assert_equal (ken ^ 2), "kenken"
end
def test_string_pow_when_str_is_hayapi
ken = String.new("hayapi")
assert_equal (ken ^ 2), "kenken"
end
def test_string_literal_pow
ken = "ken"
assert_equal (ken ^ 2), "kenken"
end
def test_string_literal_pow_when_str_is_hayapi
ken = "hayapi"
assert_equal (ken ^ 2), "kenken"
end
def test_string_subtact
kenken = String.new("ken")
assert_equal (kenken - "e"), "knkn"
end
def test_string_subtact_when_str_is_hayapi
kenken = String.new("hayapi")
assert_equal (kenken - "e"), "knkn"
end
def test_string_literal_subtact
kenken = "ken"
assert_equal (kenken - "e"), "knkn"
end
def test_string_literal_subtact_when_str_is_hayapi
kenken = "hayapi"
assert_equal (kenken - "e"), "knkn"
end
end
| 20.670732 | 52 | 0.680236 |
d580e5f49a860262d45217aa483a6837a0f6df76 | 1,343 | # -*- encoding: utf-8 -*-
# stub: angularjs-rails 1.3.15 ruby lib
Gem::Specification.new do |s|
s.name = "angularjs-rails"
s.version = "1.3.15"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Hirav Gandhi"]
s.date = "2015-03-26"
s.description = "Injects Angular.js into your asset pipeline as well as other Angular modules."
s.email = "[email protected]"
s.homepage = "https://github.com/hiravgandhi/angularjs-rails/"
s.licenses = ["MIT"]
s.rubygems_version = "2.4.5"
s.summary = "Angular.js on Rails"
s.installed_by_version = "2.4.5" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake>, [">= 0"])
s.add_development_dependency(%q<versionomy>, [">= 0"])
s.add_development_dependency(%q<nokogiri>, [">= 0"])
else
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<versionomy>, [">= 0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
end
else
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<versionomy>, [">= 0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
end
end
| 34.435897 | 105 | 0.647803 |
e2ddf1bf80b641ff64370b5df1b08934370249a6 | 216 | class Class
def descendants
descendants = []
ObjectSpace.each_object(singleton_class) do |k|
next if k.singleton_class?
descendants.unshift k unless k == self
end
descendants
end
end
| 18 | 51 | 0.680556 |
5df1cd29e35d997ed7e8824fc52acb05904aa7c1 | 5,265 | require 'test_helper'
class ExternalAuthorsControllerTest < ActionController::TestCase
# Test create POST /:locale/users/:user_id/external_authors
# def test_should_create_external_author
# user = create_user
# @request.session[:user] = user
# assert_difference('ExternalAuthor.count') do
# post :create, :user_id => user.login,
# :external_author => { :email => '[email protected]' }
# end
# assert_redirected_to user_external_authors_path(user)
# end
#
# # Test destroy DELETE /:locale/users/:user_id/external_authors/:id
# def test_should_destroy_external_author
# user = create_user
# external_author = create_external_author(:user => user)
# user = User.find(user.id)
# @request.session[:user] = user
# assert_difference('ExternalAuthor.count', -1) do
# delete :destroy,
# :user_id => user.login,
# :id => ExternalAuthor.find(external_author.id)
# end
#
# assert_redirected_to user_external_authors_path(user)
# end
# Test edit GET /:locale/users/:user_id/external_authors/:id/edit (named path: edit_user_external_author)
def test_should_get_edit
user = create_user
external_author = create_external_author(:user => user)
@request.session[:user] = user
get :edit, :user_id => user.login,
:id => external_author.id
assert_response :success
end
# Test index GET /:locale/users/:user_id/external_authors (named path: user_external_authors)
def test_user_external_authors_path
user = create_user
external_author = create_external_author(:user => user)
@request.session[:user] = user
get :index, :user_id => user.login
assert_response :success
assert_not_nil assigns(:external_authors)
end
# Test update PUT /:locale/users/:user_id/external_authors/:id
# We don't allow you to change the name of the fallback external_author, so this tests a new external_author
def test_update_external_author
user = create_user
external_author = create_external_author(:user => user, :email => '[email protected]')
@request.session[:user] = user
put :update, :user_id => user.login,
:id => external_author.id,
:external_author => { :do_not_email => 'true' }
assert_redirected_to user_external_authors_path(user)
end
context "when claiming an external author" do
context "without a valid invitation" do
setup do
get :claim
end
should_set_the_flash_to /You need an invitation/
should_redirect_to("the home page") {root_path}
end
context "with a valid invitation" do
setup do
@invitation = create_invitation
@invitation.save
end
context "that has no external author" do
setup do
get :claim, :invitation_token => @invitation.token
end
should_set_the_flash_to /no stories to claim/
should_redirect_to("the signup page") {signup_path(@invitation.token)}
end
context "with an external author attached" do
setup do
@external_author = create_external_author
@test_work = create_work
@test_work.add_default_tags
creatorship = create_external_creatorship(:external_author_name => @external_author.names.first, :creation => @test_work)
@test_work.save
@invitation.external_author = @external_author
@invitation.save
get :claim, :invitation_token => @invitation.token
end
should_respond_with :success
should "display a form" do
assert_select "form", true
end
should_render_template :claim
context "and completing a claim" do
context "without being logged in" do
setup do
get :complete_claim, :locale => 'en', :invitation_token => @invitation.token
end
should_set_the_flash_to /Please log in/
should_redirect_to("the login page") {new_session_path}
end
end
end
end
end
context "completing a claim when logged in" do
setup do
@user = create_user
@request.session[:user] = @user
@external_author = create_external_author
archivist = create_user(:login => "archivist")
@test_work = create_work(:authors => [archivist.default_pseud], :chapters => [new_chapter(:authors => [archivist.default_pseud])])
@test_work.add_default_tags
creatorship = create_external_creatorship(:external_author_name => @external_author.names.first, :creation => @test_work)
@test_work.save
@invitation = create_invitation(:external_author => @external_author)
get :complete_claim, :locale => 'en', :invitation_token => @invitation.token
end
should_set_the_flash_to /have added the stories imported under/
should_redirect_to("the user's external authors page") {user_external_authors_path(@user)}
should "claim the external author for the user" do
@user.reload
@external_author.reload
assert @user.external_authors.include?(@external_author)
assert @external_author.user == @user
assert @user.works.include?(@test_work)
end
end
end
| 37.340426 | 137 | 0.670465 |
87bb124338479758e07e384b31eda63b169d9ff4 | 19,346 | require 'securerandom'
require 'mittsu/core/event_dispatcher'
module Mittsu
class BufferGeometry
include EventDispatcher
DrawCall = Struct.new(:start, :count, :index)
attr_reader :id, :name, :type, :uuid, :attributes, :draw_calls, :bounding_box, :bounding_sphere
def initialize
@id = (@@id ||= 1).tap { @@id += 1 }
@uuid = SecureRandom.uuid
@name = ''
@type = 'BufferGeometry'
@attributes = {}
@draw_calls = []
@_listeners = {}
end
def keys
@attributes.keys
end
def []=(key, value)
@attributes[key] = value
end
def [](key)
@attributes[key]
end
def add_draw_call(start, count, index_offset = 0)
@draw_calls << DrawCall.new(start, count, index_offset)
end
def apply_matrix(matrix)
position = @attributes[:position]
if position
matrix.apply_to_vector3_array(position.array)
position.needs_update = true
end
normal = @attributes[:normal]
if normal
normal_matrix = Mittsu::Matrix3.new.normal_matrix(matrix)
normal_matrix.apply_to_vector3_array(normal.array)
normal.needs_update = true
end
if @bounding_box
self.compute_bounding_box
end
if @bounding_sphere
self.compute_bounding_sphere
end
nil
end
def center
self.computer_bounding_box
@bounding_boc.center.negate.tap do |offset|
self.apply_matrix(Mittsu::Matrix4.new.set_position(offset))
end
end
def from_geometry(geometry, settings = {})
vertices = geometry.vertices
faces = geometry.faces
face_vertex_uvs = geometry.face_vertex_uvs
vertex_colors = settings.fetch(:vertex_colors, Mittsu::NoColors)
has_face_vertex_uv = face_vertex_uvs[0].length > 0
has_face_vertex_normals = faces[0].vertex_normals.length == 3
positions = Array.new(faces.length * 3 * 3)
self[:position] = Mittsu::BufferAttribute.new(positions, 3)
normals = Array.new(faces.length * 3 * 3)
self[:normal] = Mittsu::BufferAttribute.new(normals, 3)
if vertex_colors != Mittsu::NoColors
colors = Array.new(faces.length * 3 * 3)
self[:color] = Mittsu::BufferAttribute.new(colors, 3)
end
if has_face_vertex_uv
uvs = Array.new(faces.length * 3 * 2)
self[:uv] = Mittsu::BufferAttribute.new(uvs, 2)
end
faces.each_with_index do |face, i|
i2 = i * 6
i3 = i * 9
set_array3(positions, i3, vertices[face.a], vertices[face.b], vertices[face.b])
if has_face_vertex_normals
set_array3(normals, i3, face.vertex_normals[0], face.vertex_normals[1], face.vertex_normals[2])
else
set_array3(normals, i3, face.normal)
end
if vertex_colors == Mittsu::FaceColors
set_array3(colors, i3, face,color)
elsif vertex_colors == Mittsu::VertexColors
set_array3(colors, i3, face.vertex_colors[0], face.vertex_colors[1], face.vertex_colors[2])
end
if has_face_vertex_uv
set_array2(uvs, i2, face_vertex_uvs[0][i][0], face_vertex_uvs[0][i][1], face_vertex_uvs[0][i][2])
end
end
self.compute_bounding_sphere
self
end
def compute_bounding_box
vector = Mittsu::Vector3.new
@bounding_box ||= Mittsu::Box3.new
positions = self[:position].array
if positions
@bounding_box.make_empty
positions.each_slice(3) do |p|
vector.set(*p)
@bounding_box.expand_by_point(vector)
end
end
if positions.nil? || positions.empty?
@bounding_box.min.set(0, 0, 0)
@bounding_box.max.set(0, 0, 0)
end
if @bounding_box.min.x.nan? || @bounding_box.min.y.nan? || @bounding_box.min.z.nan?
puts 'ERROR: Mittsu::BufferGeometry#compute_bounding_box: Computed min/max have NaN values. The "position" attribute is likely to have NaN values.'
end
end
def compute_bounding_sphere
box = Mittsu::Box3.new
vector = Mittsu::Vector3.new
@bounding_sphere ||= Mittsu::Sphere.new
positions = self[:position].array
if positions
box.make_empty
center = @bounding_sphere.center
positions.each_slice(3) do |p|
vector.set(*p)
box.expand_by_point(vector)
end
box.center(center)
# hoping to find a boundingSphere with a radius smaller than the
# boundingSphere of the boundingBox: sqrt(3) smaller in the best case
max_radius_sq = 0
positions.each_slice(3) do |p|
vector.set(*p)
max_radius_sq = [max_radius_sq, center.distance_to_squared(vector)].max
end
@bounding_sphere.radius = ::Math.sqrt(max_radius_sq)
if @bounding_radius.nan?
puts 'ERROR: Mittsu::BufferGeometry#computeBoundingSphere: Computed radius is NaN. The "position" attribute is likely to have NaN values.'
end
end
end
def compute_vertex_normals
if self[:position]
positions = self[:position].array
if self[:normal].nil?
self[:normal] = Mittsu::BufferAttribute.new(Array.new(positions.length), 3)
else
# reset existing normals to zero
normals = self[:normal].array
normals.each_index { |i| normals[i] = 0 }
end
normals = self[:normal].array
p_a = Mittsu::Vector3.new
p_b = Mittsu::Vector3.new
p_c = Mittsu::Vector3.new
cb = Mittsu::Vector3.new
ab = Mittsu::Vector3.new
# indexed elements
if self[:index]
indices = self[:index].array
draw_calls = @draw_calls.length > 0 ? @draw_calls : [DrawCall.new(0, indices.length, 0)]
draw_calls.each do |draw_call|
start = draw_call.start
count = draw_call.count
index = draw_call.index
i = start
il = start + count
while i < il
v_a = (index + indices[i ]) * 3
v_b = (index + indices[i + 1]) * 3
v_c = (index + indices[i + 2]) * 3
p_a.from_array(positions, v_a)
p_b.from_array(positions, v_a)
p_c.from_array(positions, v_c)
cb.sub_vectors(p_c, p_b)
ab.sub_vectors(p_a, p_b)
cb.cross(ab)
normals[v_a ] += cb.x
normals[v_a + 1] += cb.y
normals[v_a + 2] += cb.z
normals[v_b ] += cb.x
normals[v_b + 1] += cb.y
normals[v_b + 2] += cb.z
normals[v_c ] += cb.x
normals[v_c + 1] += cb.y
normals[v_c + 2] += cb.z
i += 3
end
end
else
# non-indexed elements (unconnected triangle soup)
positions.each_slice(9).with_index do |p, i|
i *= 9
p_a.from_array(positions, i)
p_a.from_array(positions, i + 3)
p_a.from_array(positions, i + 6)
cb.sub_vectors(p_c, p_b)
ab.sub_vectors(p_a, p_b)
set_array3(normals, i, cb)
end
end
self.normalize_normals
self[:normal].needs_update = true
end
end
def compute_tangents
# based on http://www.terathon.com/code/tangent.html
# (per vertex tangents)
if [:index, :position, :normal, :uv].any { |s| [email protected]_key?}
puts 'WARNING: Mittsu::BufferGeometry: Missing required attributes (index, position, normal or uv) in BufferGeometry#computeTangents'
return
end
indices = self[:index].array
positions = self[:position].array
normals = self[:normal].array
uvs = self[:uv].array
n_vertices = position.length / 3
if self[:tangent].nil?
self[:tangent] = Mittsu::BufferAttribute.new(Array.new(4 * n_vertices), 4)
end
tangents = self[:tangent].array
tan1 = []; tan2 = []
n_vertices.times do |k|
tan1[k] = Mittsu::Vector3.new
tan2[k] = Mittsu::Vector3.new
end
v_a = Mittsu::Vector3.new
v_b = Mittsu::Vector3.new
v_c = Mittsu::Vector3.new
uv_a = Mittsu::Vectoe3.new
uv_b = Mittsu::Vector3.new
uv_c = Mittsu::Vector3.new
sdir = Mittsu::Vector3.new
tdir = Mittsu::Vector3.new
handle_triangle = -> (a, b, c) {
v_a.from_array(positions, a * 3)
v_b.from_array(positions, b * 3)
v_c.from_array(positions, c * 3)
uv_a.from_array(uvs, a * 2)
uv_b.from_array(uvs, b * 2)
uv_c.from_array(uvs, c * 2)
x1 = v_b.x - v_a.x
x2 = v_c.x - v_a.x
y1 = v_b.y - v_a.y
y2 = v_c.y - v_a.y
z1 = v_b.z - v_a.z
z2 = v_c.z - v_a.z
s1 = uv_b.x - uv_a.x
s2 = uv_c.x - uv_a.x
t1 = uv_b.y - uv_a.y
t2 = uv_c.y - uv_a.y
r = 1.0 / (s1 * t2 - s2 * t1)
sdir.set(
(t2 * x1 - t1 * x2) * r,
(t2 * y1 - t1 * y2) * r,
(t2 * z1 - t1 * z2) * r
)
tdir.set(
(s2 * x2 - s2 * x1) * r,
(s2 * y2 - s2 * y1) * r,
(s2 * z2 - s2 * z1) * r
)
tan1[a].add(sdir)
tan1[b].add(sdir)
tan1[c].add(sdir)
tan2[a].add(tdir)
tan2[b].add(tdir)
tan2[c].add(tdir)
}
if @draw_calls.empty?
self.add_draw_call(0, indices.length, 0)
end
@draw_calls.each do |draw_call|
start = draw_call.start
count = draw_call.count
index = draw_call.index
i = start
il = start + count
while i < il
i_a = index + indices[i]
i_b = index + indices[i + 1]
i_c = index + indices[i + 2]
handle_triangle[i_a, i_b, i_c]
i += 3
end
end
tmp = Mittsu::Vector3.new
tmp2 = Mittsu::Vector3.new
n = Mittsu::Vector3.new
n2 = Mittsu::Vector3.new
handle_vertex = -> (v) {
n.from_array(normals, v * 3)
n2.copy(n)
t = tan1[v]
# Gram-Schmidt orthogonalize
tmp.copy(t)
tmp.sub(n.multiply_scalar(n.dot(t))).normalize
# Calculate handedness
tmp2.cross_vectors(n2, t)
test = tmp2.dot(tan2[v])
w = (test < 0.0) ? -1.0 : 1.0
tangents[v * 4 ] = tmp.x
tangents[v * 4 + 1] = tmp.y
tangents[v * 4 + 2] = tmp.z
tangents[v * 4 + 3] = w
}
draw_calls.each do |draw_call|
start = draw_call.start
count = draw_call.count
index = draw_call.index
i = start
il = start + count
while i < il
i_a = index + indices[i]
i_b = index + indices[i + 1]
i_c = index + indices[i + 2]
handle_vertex[i_a]
handle_vertex[i_b]
handle_vertex[i_c]
i += 3
end
end
end
# Compute the draw offset for large models by chunking the index buffer into chunks of 65k addressable vertices.
# This method will effectively rewrite the index buffer and remap all attributes to match the new indices.
# WARNING: This method will also expand the vertex count to prevent sprawled triangles across draw offsets.
# size - Defaults to 65535, but allows for larger or smaller chunks.
def compute_offsets(size = 65535)
# WebGL limits type of index buffer values to 16-bit.
# TODO: check what the limit is for OpenGL, as we aren't using WebGL here
indices = self[:index].array
vertices = self[:position].array
faces_count = indices.length / 3
# puts "Computing buffers in offsets of #{size} -> indices:#{indices.length} vertices:#{vertices.length}"
# puts "Faces to process: #{(indices.length/3)}"
# puts "Reordering #{verticesCount} vertices."
sorted_indices = Array.new(indices.length) # 16-bit (Uint16Array in THREE.js)
index_ptr = 0
vertex_ptr = 0
offsets = [DrawCall.new(0, 0, 0)]
offset = offsets.first
duplicated_vertices = 0
new_vertice_maps = 0
face_vertices = Array.new(6) # (Int32Array)
vertex_map = Array.new(vertices.length) # (Int32Array)
rev_vertex_map = Array.new(vertices.length) # (Int32Array)
vertices.each_index do |j|
vertex_map[j] = -1
rev_vertex_map[j] = -1
end
# Traverse every face and reorder vertices in the proper offsets of 65k.
# We can have more than 65k entries in the index buffer per offset, but only reference 65k values.
faces_count.times do |findex|
new_vertice_maps = 0
3.times do |vo|
vid = indices[findex * 3 * vo]
if vertex_map[vid] == -1
# unmapped vertice
face_vertices[vo * 2] = vid
face_vertices[vo * 2 + 1] = -1
new_vertice_maps += 1
elsif vertex_map[vid] < offset.index
# reused vertices from previous block (duplicate)
face_vertices[vo * 2] = vid
face_vertices[vo * 2 + 1] = -1
duplicated_vertices += 1
else
# reused vertice in the current block
face_vertices[vo * 2] =vid
face_vertices[vo * 2 + 1] = vertec_map[vid]
end
end
face_max = vertex_ptr + new_vertex_maps
if face_max > offset.index + size
new_offset = DrawCall.new(index_ptr, 0, vertex_ptr)
offsets << new_offset
offset = new_offset
# Re-evaluate reused vertices in light of new offset.
(0...6).step(2) do |v|
new_vid = face_vertices[v + 1]
if (new_vid > -1 && new_vid < offset.index)
faceVertices[v + 1] = -1
end
end
# Reindex the face.
(0...6).step(2) do |v|
vid = face_vertices[v]
new_vid = face_vertices[v + 1]
if new_vid == -1
new_vid = vertex_ptr
vertex_ptr += 1
end
vertex_map[vid] = new_vid
rev_vertex_map[new_vid] = vid
sorted_indices [index_ptr] = new_vid - offset.index # XXX: overflows at 16bit
index_ptr += 1
offset.count += 1
end
end
# Move all attribute values to map to the new computed indices , also expand the vertice stack to match our new vertexPtr.
self.reorder_buffers(sorted_indices, rev_vertex_map, vertex_ptr)
@draw_calls = offsets
# order_time = Time.now
# puts "Reorder time: #{(order_time - s)}ms"
# puts "Duplicated #{duplicated_vertices} vertices."
# puts "Compute Buffers time: #{(Time.now - s)}ms"
# puts "Draw offsets: #{offsets.length}"
offsets
end
end
def merge(geometry, offset = 0)
if !geometry.is_a? Mittsu::BufferGeometry
puts "ERROR: Mittsu::BufferGeometry#merge: geometry not an instance of Mittsu::BufferGeometry. #{geometry.inspect}"
return
end
@attributes.each_key do |key, attribute1|
next if attribute1.nil?
attribute_array1 = attribute1.array
attribute2 = geometry[key]
attribute_array2 = attribute2.array
attribute_size = attribute2.item_size
i, j = 0, attribute_size * offset
while i < attribute_array2.length
attribute_array1[j] = attribute_array2[i]
i += 1; j += 1
end
end
self
end
def normalize_normals
normals = self[:normal].array
normals.each_slice(3).with_index do |normal, i|
x, y, z = *normal
n = 1.0 / ::Math.sqrt(x * x + y * y + z * z)
i *= 3
normals[i] *= n
normals[i + 1] *= n
normals[i + 2] *= n
end
end
# reoderBuffers:
# Reorder attributes based on a new indexBuffer and indexMap.
# indexBuffer - Uint16Array of the new ordered indices.
# indexMap - Int32Array where the position is the new vertex ID and the value the old vertex ID for each vertex.
# vertexCount - Amount of total vertices considered in this reordering (in case you want to grow the vertice stack).
def reorder_buffers(index_buffer, index_map, vertex_count)
# Create a copy of all attributes for reordering
sorted_attributes = {}
@attributes.each do |key, attribute|
next if key == :index
source_array = attribute.array
sorted_attributes[key] = source_array.class.new(attribute.item_size * vertex_count)
end
# move attribute positions based on the new index map
vertex_count.times do |new_vid|
vid = index_map[new_vid]
@attributes.each do |key, attribute|
next if key == :index
attr_array = attribute.array
attr_size = attribute.item_size
sorted_attr = sorted_attributes[key]
attr_size.times do |k|
sorted_attr[new_vid * attr_size + k] = attr_array[vid * attr_size + k]
end
end
end
# Carry the new sorted buffers locally
@attributes[:index].array = index_buffer
@attributes.each do |key, attribute|
next if key == :index
attribute.array = sorted_attributes[key]
attribute.num_items = attribute.item_size * vertex_count
end
end
def to_json
output = {
metadata: {
version: 4.0,
type: 'BufferGeometry',
generator: 'BufferGeometryExporter'
},
uuid: @uuid,
type: @type,
data: {
attributes: {}
}
}
offsets = @draw_calls
@attributes.each do |key, attribute|
array = attribute.array.dup
output[:data][:attributes][key] = {
itemSize: attribute.itemSize,
type: attribute.array.class.name,
array: array
}
end
if !offsets.empty?
output[:data][:offsets] = offsets.map do |offset|
{ start: offset.start, count: offset.count, index: offet.index}
end
end
if !bounding_sphere.nil?
output[:data][:boundingSphere] = {
center: bounding_sphere.center.to_a,
radius: bounding_sphere.radius
}
end
output
end
def clone
geometry = Mittsu::BufferGeometry.news
@attributes.each do |key, attribute|
geometry[key] = attribute.clone
end
@draw_calls.each do |draw_call|
geometry.draw_calls << DrawCall.new(draw_call.start, draw_call.count, draw_call.index)
end
geometry
end
def dispose
self.dispatch_event type: :dispose
end
private
def set_array3(array, i3, a, b = a, c = b)
array[i3 ] = a.x
array[i3 + 1] = a.y
array[i3 + 2] = a.z
array[i3 + 3] = b.x
array[i3 + 4] = b.y
array[i3 + 5] = b.z
array[i3 + 6] = c.x
array[i3 + 7] = c.y
array[i3 + 8] = c.z
end
def set_array2(array, i2, a, b = a, c = b)
array[i2 ] = a.x
array[i2 + 1] = a.y
array[i2 + 2] = b.x
array[i2 + 3] = b.y
array[i2 + 4] = c.x
array[i2 + 5] = c.y
end
end
end
| 27.835971 | 155 | 0.5688 |
ed91ac648d9d9fd9f4c7fe558cd47fbe3970bbed | 1,485 | require 'rack'
require 'grape'
module Grape
# An Endpoint is the proxy scope in which all routing
# blocks are executed. In other words, any methods
# on the instance level of this class may be called
# from inside a `get`, `post`, etc. block.
class Endpoint
def initialize(&block)
@block = block
end
attr_reader :env, :request
def params
@params ||= request.params.merge(env['rack.routing_args'] || {}).inject({}) do |h,(k,v)|
h[k.to_s] = v
h[k.to_sym] = v
h
end
end
def version; env['api.version'] end
def error!(message, status=403)
throw :error, :message => message, :status => status
end
# Set or retrieve the HTTP status code.
def status(status = nil)
if status
@status = status
else
return @status if @status
case request.request_method.to_s.upcase
when 'POST'
201
else
200
end
end
end
# Set an individual header or retrieve
# all headers that have been set.
def header(key = nil, val = nil)
if key
val ? @header[key.to_s] = val : @header.delete(key.to_s)
else
@header
end
end
def call(env)
@env = env
@request = Rack::Request.new(@env)
@header = {}
response_text = instance_eval &@block
[status, header, [response_text]]
end
end
end
| 22.5 | 94 | 0.553535 |
abfe7de0024b73a6f8e0ccb4f1b1ccf9116ed2c4 | 1,768 | # Encoding: utf-8
#
# This is auto-generated code, changes will be overwritten.
#
# Copyright:: Copyright 2019, Google Inc. All Rights Reserved.
# License:: Licensed under the Apache License, Version 2.0.
#
# Code generated by AdsCommon library 1.0.2 on 2019-05-06 15:08:25.
require 'ads_common/savon_service'
require 'ad_manager_api/v201905/base_rate_service_registry'
module AdManagerApi; module V201905; module BaseRateService
class BaseRateService < AdsCommon::SavonService
def initialize(config, endpoint)
namespace = 'https://www.google.com/apis/ads/publisher/v201905'
super(config, endpoint, namespace, :v201905)
end
def create_base_rates(*args, &block)
return execute_action('create_base_rates', args, &block)
end
def create_base_rates_to_xml(*args)
return get_soap_xml('create_base_rates', args)
end
def get_base_rates_by_statement(*args, &block)
return execute_action('get_base_rates_by_statement', args, &block)
end
def get_base_rates_by_statement_to_xml(*args)
return get_soap_xml('get_base_rates_by_statement', args)
end
def perform_base_rate_action(*args, &block)
return execute_action('perform_base_rate_action', args, &block)
end
def perform_base_rate_action_to_xml(*args)
return get_soap_xml('perform_base_rate_action', args)
end
def update_base_rates(*args, &block)
return execute_action('update_base_rates', args, &block)
end
def update_base_rates_to_xml(*args)
return get_soap_xml('update_base_rates', args)
end
private
def get_service_registry()
return BaseRateServiceRegistry
end
def get_module()
return AdManagerApi::V201905::BaseRateService
end
end
end; end; end
| 28.063492 | 72 | 0.732466 |
03436ee1cbdea33acc02d6f916432241e8528363 | 5,828 | # frozen_string_literal: true
require_relative '../../support/helpers/stub_env'
require_relative '../../support/time_travel'
require_relative '../../../tooling/rspec_flaky/flaky_example'
RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do
include ActiveSupport::Testing::TimeHelpers
include StubENV
let(:flaky_example_attrs) do
{
example_id: 'spec/foo/bar_spec.rb:2',
file: 'spec/foo/bar_spec.rb',
line: 2,
description: 'hello world',
first_flaky_at: 1234,
last_flaky_at: 2345,
last_flaky_job: 'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/12',
last_attempts_count: 2,
flaky_reports: 1
}
end
let(:example_attrs) do
{
uid: 'abc123',
example_id: flaky_example_attrs[:example_id],
file: flaky_example_attrs[:file],
line: flaky_example_attrs[:line],
description: flaky_example_attrs[:description],
status: 'passed',
exception: 'BOOM!',
attempts: flaky_example_attrs[:last_attempts_count]
}
end
before do
# Stub these env variables otherwise specs don't behave the same on the CI
stub_env('CI_JOB_URL', nil)
end
describe '#initialize', :freeze_time do
shared_examples 'a valid FlakyExample instance' do
let(:flaky_example) { described_class.new(args) }
it 'returns valid attributes' do
attrs = flaky_example.to_h
expect(attrs[:uid]).to eq(flaky_example_attrs[:uid])
expect(attrs[:file]).to eq(flaky_example_attrs[:file])
expect(attrs[:line]).to eq(flaky_example_attrs[:line])
expect(attrs[:description]).to eq(flaky_example_attrs[:description])
expect(attrs[:first_flaky_at]).to eq(expected_first_flaky_at)
expect(attrs[:last_flaky_at]).to eq(expected_last_flaky_at)
expect(attrs[:last_attempts_count]).to eq(flaky_example_attrs[:last_attempts_count])
expect(attrs[:flaky_reports]).to eq(expected_flaky_reports)
end
end
context 'when given an Example hash' do
it_behaves_like 'a valid FlakyExample instance' do
let(:args) { example_attrs }
let(:expected_first_flaky_at) { Time.now }
let(:expected_last_flaky_at) { Time.now }
let(:expected_flaky_reports) { 0 }
end
end
context 'when given a FlakyExample hash' do
it_behaves_like 'a valid FlakyExample instance' do
let(:args) { flaky_example_attrs }
let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] }
let(:expected_first_flaky_at) { flaky_example_attrs[:first_flaky_at] }
let(:expected_last_flaky_at) { flaky_example_attrs[:last_flaky_at] }
end
end
end
describe '#update_flakiness!' do
shared_examples 'an up-to-date FlakyExample instance' do
let(:flaky_example) { described_class.new(args) }
it 'sets the first_flaky_at if none exists' do
args[:first_flaky_at] = nil
freeze_time do
flaky_example.update_flakiness!
expect(flaky_example.to_h[:first_flaky_at]).to eq(Time.now)
end
end
it 'maintains the first_flaky_at if exists' do
flaky_example.update_flakiness!
expected_first_flaky_at = flaky_example.to_h[:first_flaky_at]
travel_to(Time.now + 42) do
flaky_example.update_flakiness!
expect(flaky_example.to_h[:first_flaky_at]).to eq(expected_first_flaky_at)
end
end
it 'updates the last_flaky_at' do
travel_to(Time.now + 42) do
the_future = Time.now
flaky_example.update_flakiness!
expect(flaky_example.to_h[:last_flaky_at]).to eq(the_future)
end
end
it 'updates the flaky_reports' do
expected_flaky_reports = flaky_example.to_h[:first_flaky_at] ? flaky_example.to_h[:flaky_reports] + 1 : 1
expect { flaky_example.update_flakiness! }.to change { flaky_example.to_h[:flaky_reports] }.by(1)
expect(flaky_example.to_h[:flaky_reports]).to eq(expected_flaky_reports)
end
context 'when passed a :last_attempts_count' do
it 'updates the last_attempts_count' do
flaky_example.update_flakiness!(last_attempts_count: 42)
expect(flaky_example.to_h[:last_attempts_count]).to eq(42)
end
end
context 'when run on the CI' do
let(:job_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/42' }
before do
stub_env('CI_JOB_URL', job_url)
end
it 'updates the last_flaky_job' do
flaky_example.update_flakiness!
expect(flaky_example.to_h[:last_flaky_job]).to eq(job_url)
end
end
end
context 'when given an Example hash' do
it_behaves_like 'an up-to-date FlakyExample instance' do
let(:args) { example_attrs }
end
end
context 'when given a FlakyExample hash' do
it_behaves_like 'an up-to-date FlakyExample instance' do
let(:args) { flaky_example_attrs }
end
end
end
describe '#to_h', :freeze_time do
shared_examples 'a valid FlakyExample hash' do
let(:additional_attrs) { {} }
it 'returns a valid hash' do
flaky_example = described_class.new(args)
final_hash = flaky_example_attrs.merge(additional_attrs)
expect(flaky_example.to_h).to eq(final_hash)
end
end
context 'when given an Example hash' do
let(:args) { example_attrs }
it_behaves_like 'a valid FlakyExample hash' do
let(:additional_attrs) do
{ first_flaky_at: Time.now, last_flaky_at: Time.now, last_flaky_job: nil, flaky_reports: 0 }
end
end
end
context 'when given a FlakyExample hash' do
let(:args) { flaky_example_attrs }
it_behaves_like 'a valid FlakyExample hash'
end
end
end
| 31.333333 | 113 | 0.670041 |
0182ae82e928b7a87ae1b21730b0bbd4982ffc72 | 843 | # https://www.hackerrank.com/challenges/mars-exploration/problem?h_r=next-challenge&h_v=zen&h_r=next-challenge&h_v=zen
# Letters in some of the SOS messages are altered by cosmic radiation during transmission.
# Given the signal received by Earth as a string, s , determine how many letters of the SOS message have been changed by radiation.
# Counts how many characters of a repeated "SOS" transmission were corrupted
# by radiation. The signal is inspected in 3-character triplets, each compared
# position-by-position against the expected "SOS".
def marsExploration(s)
  expected = %w[S O S]
  s.scan(/.{3}/).sum do |triplet|
    triplet.chars.zip(expected).count { |received, wanted| received != wanted }
  end
end
# puts marsExploration('SOSSPSSQSSOR') == 3
# puts marsExploration('SOSSOT') == 1
# puts marsExploration('SPS') == 1
# puts marsExploration('POR') == 2
puts marsExploration('SOSSOSSSSOSOIEISOSPOISOSIUSISO')
puts marsExploration('SOSSOSSSSOSOIEISOSPOISOSIUSISO') == 14
| 38.318182 | 131 | 0.748517 |
e990e425cb6bf659aa2798f270c306bb36a107f6 | 6,515 | # frozen_string_literal: true
module EventsHelper
ICON_NAMES_BY_EVENT_TYPE = {
'pushed to' => 'commit',
'pushed new' => 'commit',
'created' => 'status_open',
'opened' => 'status_open',
'closed' => 'status_closed',
'accepted' => 'fork',
'commented on' => 'comment',
'deleted' => 'remove',
'imported' => 'import',
'joined' => 'users'
}.freeze
def link_to_author(event, self_added: false)
author = event.author
if author
name = self_added ? 'You' : author.name
link_to name, user_path(author.username), title: name
else
escape_once(event.author_name)
end
end
def event_action_name(event)
target = if event.target_type
if event.note?
event.note_target_type
else
event.target_type.titleize.downcase
end
else
'project'
end
[event.action_name, target].join(" ")
end
def event_filter_link(key, text, tooltip)
key = key.to_s
active = 'active' if @event_filter.active?(key)
link_opts = {
class: "event-filter-link",
id: "#{key}_event_filter",
title: tooltip
}
content_tag :li, class: active do
link_to request.path, link_opts do
content_tag(:span, ' ' + text)
end
end
end
def event_filter_visible(feature_key)
return true unless @project
@project.feature_available?(feature_key, current_user)
end
def comments_visible?
event_filter_visible(:repository) ||
event_filter_visible(:merge_requests) ||
event_filter_visible(:issues)
end
def event_preposition(event)
if event.push_action? || event.commented_action? || event.target
"at"
elsif event.milestone?
"in"
end
end
def event_feed_title(event)
words = []
words << event.author_name
words << event_action_name(event)
if event.push_action?
words << event.ref_type
words << event.ref_name
words << "at"
elsif event.commented_action?
words << event.note_target_reference
words << "at"
elsif event.milestone?
words << "##{event.target_iid}" if event.target_iid
words << "in"
elsif event.target
prefix =
if event.merge_request?
MergeRequest.reference_prefix
else
Issue.reference_prefix
end
words << "#{prefix}#{event.target_iid}:" if event.target_iid
words << event.target.title if event.target.respond_to?(:title)
words << "at"
end
words << event.project_name
words.join(" ")
end
def event_feed_url(event)
if event.issue?
project_issue_url(event.project,
event.issue)
elsif event.merge_request?
project_merge_request_url(event.project, event.merge_request)
elsif event.commit_note?
project_commit_url(event.project,
event.note_target)
elsif event.note?
if event.note_target
event_note_target_url(event)
end
elsif event.push_action?
push_event_feed_url(event)
elsif event.created_project_action?
project_url(event.project)
end
end
def push_event_feed_url(event)
if event.push_with_commits? && event.md_ref?
if event.commits_count > 1
project_compare_url(event.project,
from: event.commit_from, to:
event.commit_to)
else
project_commit_url(event.project,
id: event.commit_to)
end
else
project_commits_url(event.project,
event.ref_name)
end
end
def event_feed_summary(event)
if event.issue?
render "events/event_issue", issue: event.issue
elsif event.push_action?
render "events/event_push", event: event
elsif event.merge_request?
render "events/event_merge_request", merge_request: event.merge_request
elsif event.note?
render "events/event_note", note: event.note
end
end
def event_note_target_url(event)
if event.commit_note?
project_commit_url(event.project, event.note_target, anchor: dom_id(event.target))
elsif event.project_snippet_note?
project_snippet_url(event.project, event.note_target, anchor: dom_id(event.target))
elsif event.issue_note?
project_issue_url(event.project, id: event.note_target, anchor: dom_id(event.target))
elsif event.merge_request_note?
project_merge_request_url(event.project, id: event.note_target, anchor: dom_id(event.target))
else
polymorphic_url([event.project.namespace.becomes(Namespace),
event.project, event.note_target],
anchor: dom_id(event.target))
end
end
def event_note_title_html(event)
if event.note_target
capture do
concat content_tag(:span, event.note_target_type, class: "event-target-type append-right-4")
concat link_to(event.note_target_reference, event_note_target_url(event), title: event.target_title, class: 'has-tooltip event-target-link append-right-4')
end
else
content_tag(:strong, '(deleted)')
end
end
def event_commit_title(message)
message ||= ''
(message.split("\n").first || "").truncate(70)
rescue
"--broken encoding"
end
def icon_for_event(note, size: 24)
icon_name = ICON_NAMES_BY_EVENT_TYPE[note]
sprite_icon(icon_name, size: size) if icon_name
end
def icon_for_profile_event(event)
if current_path?('users#show')
content_tag :div, class: "system-note-image #{event.action_name.parameterize}-icon" do
icon_for_event(event.action_name)
end
else
content_tag :div, class: 'system-note-image user-avatar' do
author_avatar(event, size: 40)
end
end
end
def inline_event_icon(event)
unless current_path?('users#show')
content_tag :span, class: "system-note-image-inline d-none d-sm-flex append-right-4 #{event.action_name.parameterize}-icon align-self-center" do
icon_for_event(event.action_name, size: 14)
end
end
end
def event_user_info(event)
content_tag(:div, class: "event-user-info") do
concat content_tag(:span, link_to_author(event), class: "author_name")
concat " ".html_safe
concat content_tag(:span, event.author.to_reference, class: "username")
end
end
end
| 28.827434 | 163 | 0.639908 |
1cffd7a803ad4879a6cc52c86ba47b91352a004a | 1,501 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
module Queries::Operators
  # Filter operator selecting work packages whose status is flagged closed.
  class ClosedWorkPackages < Base
    label 'closed_work_packages'
    set_symbol 'c'
    require_value false
    # SQL predicate for the filter. The supplied values/table/column are
    # ignored because "closed" is derived from the joined status row.
    def self.sql_for_field(_values, _db_table, _db_field)
      format('%s.is_closed=%s', Status.table_name, connection.quoted_true)
    end
  end
end
| 36.609756 | 91 | 0.754164 |
b92e35a051d172779d20e7937b009e0e8ec99a27 | 3,991 | # frozen_string_literal: false
require "rss/utils"
module RSS
  # Encoding converter used by the RSS library. On modern Rubies (where
  # String#encode exists) it simply delegates to String#encode; on legacy
  # Rubies it metaprograms a #convert method at initialization time using
  # iconv/uconv/NKF, picking the cheapest available conversion path.
  class Converter
    include Utils
    # @param to_enc   [String] target encoding name (e.g. "UTF-8")
    # @param from_enc [String, nil] source encoding; defaults to UTF-8
    def initialize(to_enc, from_enc=nil)
      # Modern Ruby: String#encode handles everything; remember the target.
      if "".respond_to?(:encode)
        @to_encoding = to_enc
        return
      end
      # Legacy path: normalize names ("UTF-8" -> "utf_8") and pick a
      # def_to_X_from_Y specialization if one exists, else fall back to iconv.
      normalized_to_enc = to_enc.downcase.gsub(/-/, '_')
      from_enc ||= 'utf-8'
      normalized_from_enc = from_enc.downcase.gsub(/-/, '_')
      if normalized_to_enc == normalized_from_enc
        def_same_enc()
      else
        def_diff_enc = "def_to_#{normalized_to_enc}_from_#{normalized_from_enc}"
        if respond_to?(def_diff_enc)
          __send__(def_diff_enc)
        else
          def_else_enc(to_enc, from_enc)
        end
      end
    end
    # Default #convert for modern Rubies; legacy paths redefine this on the
    # singleton via def_convert. Non-string values pass through untouched.
    def convert(value)
      if value.is_a?(String) and value.respond_to?(:encode)
        value.encode(@to_encoding)
      else
        value
      end
    end
    # Redefines #convert on this instance. The block must return a Ruby
    # expression (as a string) converting the variable named by its argument;
    # the expression is spliced into the eval'd method body below.
    def def_convert(depth=0)
      instance_eval(<<-EOC, *get_file_and_line_from_caller(depth))
      def convert(value)
        if value.kind_of?(String)
          #{yield('value')}
        else
          value
        end
      end
      EOC
    end
    # Defines #convert backed by the iconv library; raises
    # UnknownConversionMethodError when iconv is unusable for this pair.
    def def_iconv_convert(to_enc, from_enc, depth=0)
      begin
        require "iconv"
        @iconv = Iconv.new(to_enc, from_enc)
        def_convert(depth+1) do |value|
          <<-EOC
          begin
            @iconv.iconv(#{value})
          rescue Iconv::Failure
            raise ConversionError.new(#{value}, "#{to_enc}", "#{from_enc}")
          end
          EOC
        end
      rescue LoadError, ArgumentError, SystemCallError
        raise UnknownConversionMethodError.new(to_enc, from_enc)
      end
    end
    # Fallback when no specialized def_to_X_from_Y method exists.
    def def_else_enc(to_enc, from_enc)
      def_iconv_convert(to_enc, from_enc, 0)
    end
    # Source and target encodings match: identity conversion.
    def def_same_enc()
      def_convert do |value|
        value
      end
    end
    # Prefers the uconv library; if unavailable, falls back to NKF (when it
    # supports UTF-8) and finally to iconv.
    def def_uconv_convert_if_can(meth, to_enc, from_enc, nkf_arg)
      begin
        require "uconv"
        def_convert(1) do |value|
          <<-EOC
          begin
            Uconv.#{meth}(#{value})
          rescue Uconv::Error
            raise ConversionError.new(#{value}, "#{to_enc}", "#{from_enc}")
          end
          EOC
        end
      rescue LoadError
        require 'nkf'
        if NKF.const_defined?(:UTF8)
          def_convert(1) do |value|
            "NKF.nkf(#{nkf_arg.dump}, #{value})"
          end
        else
          def_iconv_convert(to_enc, from_enc, 1)
        end
      end
    end
    # --- Specialized conversion pairs (selected by name in #initialize) ---
    def def_to_euc_jp_from_utf_8
      def_uconv_convert_if_can('u8toeuc', 'EUC-JP', 'UTF-8', '-We')
    end
    def def_to_utf_8_from_euc_jp
      def_uconv_convert_if_can('euctou8', 'UTF-8', 'EUC-JP', '-Ew')
    end
    def def_to_shift_jis_from_utf_8
      def_uconv_convert_if_can('u8tosjis', 'Shift_JIS', 'UTF-8', '-Ws')
    end
    def def_to_utf_8_from_shift_jis
      def_uconv_convert_if_can('sjistou8', 'UTF-8', 'Shift_JIS', '-Sw')
    end
    def def_to_euc_jp_from_shift_jis
      require "nkf"
      def_convert do |value|
        "NKF.nkf('-Se', #{value})"
      end
    end
    def def_to_shift_jis_from_euc_jp
      require "nkf"
      def_convert do |value|
        "NKF.nkf('-Es', #{value})"
      end
    end
    def def_to_euc_jp_from_iso_2022_jp
      require "nkf"
      def_convert do |value|
        "NKF.nkf('-Je', #{value})"
      end
    end
    def def_to_iso_2022_jp_from_euc_jp
      require "nkf"
      def_convert do |value|
        "NKF.nkf('-Ej', #{value})"
      end
    end
    # Latin-1 bytes map 1:1 onto the first 256 Unicode codepoints.
    def def_to_utf_8_from_iso_8859_1
      def_convert do |value|
        "#{value}.unpack('C*').pack('U*')"
      end
    end
    # Codepoints above 0xFF cannot be expressed in Latin-1 and are emitted
    # as numeric character references (&#NNN;) instead.
    def def_to_iso_8859_1_from_utf_8
      def_convert do |value|
        <<-EOC
        array_utf8 = #{value}.unpack('U*')
        array_enc = []
        array_utf8.each do |num|
          if num <= 0xFF
            array_enc << num
          else
            array_enc.concat "&\#\#{num};".unpack('C*')
          end
        end
        array_enc.pack('C*')
        EOC
      end
    end
  end
end
| 23.203488 | 80 | 0.567276 |
62d71abc214f53413ac8b07add176e747f16922d | 7,092 | require File.dirname(__FILE__) + '/../../test_helper.rb'
# Functional tests for Authlogic's acts_as_authentic ActiveRecord adapter:
# validations, named scopes, token generation, crypto providers, and the
# interplay between password changes and active sessions. Relies on the
# User/Employee fixtures and the http_basic_auth_for test helper.
module ORMAdaptersTests
  module ActiveRecordAdapterTests
    class ActsAsAuthenticTest < ActiveSupport::TestCase
      # Login format/length and password+confirmation validations on User.
      def test_user_validations
        user = User.new
        assert !user.valid?
        assert user.errors.on(:login)
        assert user.errors.on(:password)
        user.login = "a"
        assert !user.valid?
        assert user.errors.on(:login)
        assert user.errors.on(:password)
        user.login = "%ben*"
        assert !user.valid?
        assert user.errors.on(:login)
        assert user.errors.on(:password)
        user.login = "bjohnson"
        assert !user.valid?
        assert user.errors.on(:login)
        assert user.errors.on(:password)
        user.login = "my login"
        assert !user.valid?
        assert !user.errors.on(:login)
        assert user.errors.on(:password)
        user.password = "my pass"
        assert !user.valid?
        assert !user.errors.on(:password)
        assert user.errors.on(:confirm_password)
        user.confirm_password = "my pizass"
        assert !user.valid?
        assert !user.errors.on(:password)
        assert user.errors.on(:confirm_password)
        user.confirm_password = "my pass"
        assert user.valid?
      end
      # Email format validation on Employee (login field is :email).
      def test_employee_validations
        employee = Employee.new
        employee.password = "pass"
        employee.confirm_password = "pass"
        assert !employee.valid?
        assert employee.errors.on(:email)
        employee.email = "fdsf"
        assert !employee.valid?
        assert employee.errors.on(:email)
        employee.email = "[email protected]"
        assert !employee.valid?
        assert employee.errors.on(:email)
        employee.email = "[email protected]"
        assert employee.valid?
      end
      # logged_in/logged_out scopes track session activity.
      def test_named_scopes
        assert_equal 0, User.logged_in.count
        assert_equal User.count, User.logged_out.count
        http_basic_auth_for(users(:ben)) { UserSession.find }
        assert_equal 1, User.logged_in.count
        assert_equal User.count - 1, User.logged_out.count
      end
      # unique_token is 128 chars and collision-free across many draws.
      def test_unique_token
        assert_equal 128, User.unique_token.length
        assert_equal 128, Employee.unique_token.length # make sure encryptions use hashes also
        unique_tokens = []
        1000.times { unique_tokens << User.unique_token }
        unique_tokens.uniq!
        assert_equal 1000, unique_tokens.size
      end
      def test_crypto_provider
        assert_equal Authlogic::CryptoProviders::Sha512, User.crypto_provider
        assert_equal AES128CryptoProvider, Employee.crypto_provider
      end
      # forget_all! invalidates every persisted session.
      def test_forget_all
        http_basic_auth_for(users(:ben)) { UserSession.find }
        http_basic_auth_for(users(:zack)) { UserSession.find(:ziggity_zack) }
        assert UserSession.find
        assert UserSession.find(:ziggity_zack)
        User.forget_all!
        assert !UserSession.find
        assert !UserSession.find(:ziggity_zack)
      end
      def test_logged_in
        ben = users(:ben)
        assert !ben.logged_in?
        http_basic_auth_for(ben) { UserSession.find }
        assert ben.reload.logged_in?
      end
      # Setting password populates the crypted fields and clears the plain one.
      def test_password
        user = User.new
        user.password = "sillywilly"
        assert user.crypted_password
        assert user.password_salt
        assert user.remember_token
        assert_equal true, user.tried_to_set_password
        assert_nil user.password
        employee = Employee.new
        employee.password = "awesome"
        assert employee.crypted_password
        assert employee.remember_token
        assert_equal true, employee.tried_to_set_password
        assert_nil employee.password
      end
      # valid_password? accepts both the raw password and the crypted value.
      def test_valid_password
        ben = users(:ben)
        assert ben.valid_password?("benrocks")
        assert ben.valid_password?(ben.crypted_password)
        drew = employees(:drew)
        assert drew.valid_password?("drewrocks")
        assert drew.valid_password?(drew.crypted_password)
      end
      # forget! only invalidates the target user's own session.
      def test_forget
        ben = users(:ben)
        zack = users(:zack)
        http_basic_auth_for(ben) { UserSession.find }
        http_basic_auth_for(zack) { UserSession.find(:ziggity_zack) }
        assert ben.reload.logged_in?
        assert zack.reload.logged_in?
        ben.forget!
        assert !UserSession.find
        assert UserSession.find(:ziggity_zack)
      end
      # reset_password! rotates all credentials and kills the session.
      def test_reset_password
        ben = users(:ben)
        UserSession.create(ben)
        old_password = ben.crypted_password
        old_salt = ben.password_salt
        old_remember_token = ben.remember_token
        ben.reset_password!
        ben.reload
        assert_not_equal old_password, ben.crypted_password
        assert_not_equal old_salt, ben.password_salt
        assert_not_equal old_remember_token, ben.remember_token
        assert !UserSession.find
      end
      def test_login_after_create
        assert User.create(:login => "awesome", :password => "saweet", :confirm_password => "saweet")
        assert UserSession.find
      end
      # Changing one's own password rotates the session/cookie credentials.
      def test_update_session_after_password_modify
        ben = users(:ben)
        UserSession.create(ben)
        old_session_key = @controller.session["user_credentials"]
        old_cookie_key = @controller.cookies["user_credentials"]
        ben.password = "newpass"
        ben.confirm_password = "newpass"
        ben.save
        assert @controller.session["user_credentials"]
        assert @controller.cookies["user_credentials"]
        assert_not_equal @controller.session["user_credentials"], old_session_key
        assert_not_equal @controller.cookies["user_credentials"], old_cookie_key
      end
      # Non-credential attribute changes leave session credentials alone.
      def test_no_session_update_after_modify
        ben = users(:ben)
        UserSession.create(ben)
        old_session_key = @controller.session["user_credentials"]
        old_cookie_key = @controller.cookies["user_credentials"]
        ben.first_name = "Ben"
        ben.save
        assert_equal @controller.session["user_credentials"], old_session_key
        assert_equal @controller.cookies["user_credentials"], old_cookie_key
      end
      # Changing ANOTHER user's password must not touch my session.
      def test_updating_other_user
        ben = users(:ben)
        UserSession.create(ben)
        old_session_key = @controller.session["user_credentials"]
        old_cookie_key = @controller.cookies["user_credentials"]
        zack = users(:zack)
        zack.password = "newpass"
        zack.confirm_password = "newpass"
        zack.save
        assert_equal @controller.session["user_credentials"], old_session_key
        assert_equal @controller.cookies["user_credentials"], old_cookie_key
      end
      # Saving a new password while logged out establishes a session.
      def test_resetting_password_when_logged_out
        ben = users(:ben)
        assert !UserSession.find
        ben.password = "newpass"
        ben.confirm_password = "newpass"
        ben.save
        assert UserSession.find
        assert_equal ben, UserSession.find.record
      end
    end
  end
end | 32.682028 | 101 | 0.643965 |
08403a6340b8790a64c373d8397bebe85e55e24c | 769 | require_relative '../../jenkins/type'
require_relative '../../jenkins/config'
# Mixin for declaring Puppet types that talk to Jenkins via the CLI jar.
module Puppet::X::Jenkins::Type::Cli
  # Thin wrapper around Puppet::Type.newtype that attaches the autorequire
  # relationships every CLI-backed type needs.
  def self.newtype(*args, &block)
    cli_type = Puppet::Type.newtype(*args, &block)
    # The jenkins master needs to be available in order to interact with it
    # via the cli jar.
    cli_type.autorequire(:service) { ['jenkins'] }
    # If a file resource is declared for file path params, make sure that
    # it's converged so we can read it off disk.
    cli_type.autorequire(:file) do
      config = Puppet::X::Jenkins::Config.new(catalog)
      %w[ssh_private_key puppet_helper cli_jar]
        .map { |param| config[param.to_sym] }
        .compact
    end
  end
end
| 26.517241 | 78 | 0.644993 |
edd30e51ff1a8cf09185147064450d0daf1497bf | 657 | cask "font-mali" do
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/trunk/ofl/mali",
using: :svn,
trust_cert: true
name "Mali"
homepage "https://fonts.google.com/specimen/Mali"
font "Mali-Bold.ttf"
font "Mali-BoldItalic.ttf"
font "Mali-ExtraLight.ttf"
font "Mali-ExtraLightItalic.ttf"
font "Mali-Italic.ttf"
font "Mali-Light.ttf"
font "Mali-LightItalic.ttf"
font "Mali-Medium.ttf"
font "Mali-MediumItalic.ttf"
font "Mali-Regular.ttf"
font "Mali-SemiBold.ttf"
font "Mali-SemiBoldItalic.ttf"
end
| 26.28 | 87 | 0.704718 |
39aabb5965ab82e7005ad9020a6d972739d419b6 | 288 | # frozen_string_literal: true
module TaxCloud #:nodoc:
  module Responses #:nodoc:
    # Response to a TaxCloud Authorized API call.
    #
    # See https://api.taxcloud.net/1.0/TaxCloud.asmx?op=Authorized.
    class Authorized < Generic
      # Parse the payload out of the "authorized" element of the SOAP reply.
      response_key :authorized
    end
  end
end
| 22.153846 | 67 | 0.697917 |
1c720c5a8e2c4bace6ba8065501e58688fd253e6 | 6,195 | require "action_dispatch/http/request"
require "action_dispatch/middleware/exception_wrapper"
require "action_dispatch/routing/inspector"
require "action_view"
require "action_view/base"
require "pp"
module ActionDispatch
  # This middleware is responsible for logging exceptions and
  # showing a debugging page in case the request is local.
  class DebugExceptions
    RESCUES_TEMPLATE_PATH = File.expand_path("../templates", __FILE__)
    # View used to render the HTML debug/rescue pages.
    class DebugView < ActionView::Base
      # Pretty-prints request params minus the routing internals.
      def debug_params(params)
        clean_params = params.clone
        clean_params.delete("action")
        clean_params.delete("controller")
        if clean_params.empty?
          "None"
        else
          PP.pp(clean_params, "", 200)
        end
      end
      # One header per line for readability.
      def debug_headers(headers)
        if headers.present?
          headers.inspect.gsub(",", ",\n")
        else
          "None"
        end
      end
      # Sorted key/value dump; inline rescue guards values whose #inspect
      # itself raises.
      def debug_hash(object)
        object.to_hash.sort_by { |k, _| k.to_s }.map { |k, v| "#{k}: #{v.inspect rescue $!.message}" }.join("\n")
      end
      # Silence view logging while rendering rescue templates, when the
      # logger supports it.
      def render(*)
        logger = ActionView::Base.logger
        if logger && logger.respond_to?(:silence)
          logger.silence { super }
        else
          super
        end
      end
    end
    # @param app             [#call] downstream Rack app
    # @param routes_app      [Object, nil] app exposing #routes for the routing-error page
    # @param response_format [Symbol] :default (HTML pages) or :api (serialized errors)
    def initialize(app, routes_app = nil, response_format = :default)
      @app = app
      @routes_app = routes_app
      @response_format = response_format
    end
    # Rack entry point. Converts an X-Cascade "pass" into a RoutingError and
    # turns any raised exception into a rendered error response (when the
    # request allows showing exceptions).
    def call(env)
      request = ActionDispatch::Request.new env
      _, headers, body = response = @app.call(env)
      if headers["X-Cascade"] == "pass"
        body.close if body.respond_to?(:close)
        raise ActionController::RoutingError, "No route matches [#{env['REQUEST_METHOD']}] #{env['PATH_INFO'].inspect}"
      end
      response
    rescue Exception => exception
      raise exception unless request.show_exceptions?
      render_exception(request, exception)
    end
    private
      # Logs the exception and renders either the detailed debug response or
      # re-raises for the public ShowExceptions middleware to handle.
      def render_exception(request, exception)
        backtrace_cleaner = request.get_header("action_dispatch.backtrace_cleaner")
        wrapper = ExceptionWrapper.new(backtrace_cleaner, exception)
        log_error(request, wrapper)
        if request.get_header("action_dispatch.show_detailed_exceptions")
          content_type = request.formats.first
          if api_request?(content_type)
            render_for_api_request(content_type, wrapper)
          else
            render_for_browser_request(request, wrapper)
          end
        else
          raise exception
        end
      end
      # HTML (or plain text for XHR) rescue page.
      def render_for_browser_request(request, wrapper)
        template = create_template(request, wrapper)
        file = "rescues/#{wrapper.rescue_template}"
        if request.xhr?
          body = template.render(template: file, layout: false, formats: [:text])
          format = "text/plain"
        else
          body = template.render(template: file, layout: "rescues/layout")
          format = "text/html"
        end
        render(wrapper.status_code, body, format)
      end
      # Serialized error payload in the request's format, defaulting to JSON
      # when the format has no to_* serializer.
      def render_for_api_request(content_type, wrapper)
        body = {
          status: wrapper.status_code,
          error: Rack::Utils::HTTP_STATUS_CODES.fetch(
            wrapper.status_code,
            Rack::Utils::HTTP_STATUS_CODES[500]
          ),
          exception: wrapper.exception.inspect,
          traces: wrapper.traces
        }
        to_format = "to_#{content_type.to_sym}"
        if content_type && body.respond_to?(to_format)
          formatted_body = body.public_send(to_format)
          format = content_type
        else
          formatted_body = body.to_json
          format = Mime[:json]
        end
        render(wrapper.status_code, formatted_body, format)
      end
      # Builds the DebugView preloaded with traces; prefers the application
      # trace unless it is empty (routing errors always show the full trace).
      def create_template(request, wrapper)
        traces = wrapper.traces
        trace_to_show = "Application Trace"
        if traces[trace_to_show].empty? && wrapper.rescue_template != "routing_error"
          trace_to_show = "Full Trace"
        end
        if source_to_show = traces[trace_to_show].first
          source_to_show_id = source_to_show[:id]
        end
        DebugView.new([RESCUES_TEMPLATE_PATH],
          request: request,
          exception: wrapper.exception,
          traces: traces,
          show_source_idx: source_to_show_id,
          trace_to_show: trace_to_show,
          routes_inspector: routes_inspector(wrapper.exception),
          source_extracts: wrapper.source_extracts,
          line_number: wrapper.line_number,
          file: wrapper.file
        )
      end
      # Assembles the final Rack triplet.
      def render(status, body, format)
        [status, { "Content-Type" => "#{format}; charset=#{Response.default_charset}", "Content-Length" => body.bytesize.to_s }, [body]]
      end
      # Logs class, message, annotated source (if available) and trace,
      # preferring the application trace over the framework trace.
      def log_error(request, wrapper)
        logger = logger(request)
        return unless logger
        exception = wrapper.exception
        trace = wrapper.application_trace
        trace = wrapper.framework_trace if trace.empty?
        ActiveSupport::Deprecation.silence do
          logger.fatal " "
          logger.fatal "#{exception.class} (#{exception.message}):"
          log_array logger, exception.annoted_source_code if exception.respond_to?(:annoted_source_code)
          logger.fatal " "
          log_array logger, trace
        end
      end
      # Joins trace lines, preserving tagged-logging prefixes when present.
      def log_array(logger, array)
        if logger.formatter && logger.formatter.respond_to?(:tags_text)
          logger.fatal array.join("\n#{logger.formatter.tags_text}")
        else
          logger.fatal array.join("\n")
        end
      end
      def logger(request)
        request.logger || ActionView::Base.logger || stderr_logger
      end
      def stderr_logger
        @stderr_logger ||= ActiveSupport::Logger.new($stderr)
      end
      # RoutesInspector for the routing-error page, when a routes app was
      # configured and the error is routing/template related.
      def routes_inspector(exception)
        if @routes_app.respond_to?(:routes) && (exception.is_a?(ActionController::RoutingError) || exception.is_a?(ActionView::Template::Error))
          ActionDispatch::Routing::RoutesInspector.new(@routes_app.routes.routes)
        end
      end
      def api_request?(content_type)
        @response_format == :api && !content_type.html?
      end
  end
end
| 30.367647 | 144 | 0.626634 |
62be66974fa7dc3497683d4875dbb32b228883bd | 17,864 | require 'spec_helper'
# rspec-puppet coverage for the metricbeat module, exercised on every
# supported OS factset: config file management, package/archive install,
# apt/yum/zypper repo setup, service management, and end-to-end contexts.
describe 'metricbeat' do
  on_supported_os(facterversion: '2.4').each do |os, os_facts|
    context "on #{os}" do
      let(:facts) { os_facts }
      it { is_expected.to compile }
      # metricbeat.yml management, incl. Windows vs. POSIX validate_cmd.
      describe 'metricbeat::config' do
        if os_facts[:kernel] == 'windows'
          it do
            is_expected.to contain_file('metricbeat.yml').with(
              ensure: 'present',
              path: 'C:/Program Files/Metricbeat/metricbeat.yml',
              validate_cmd: "\"C:\\Program Files\\Metricbeat\\metricbeat.exe\" -N configtest -c \"%\"", # rubocop:disable StringLiterals
            )
          end
        else
          it do
            is_expected.to contain_file('metricbeat.yml').with(
              ensure: 'present',
              owner: 'root',
              group: 'root',
              mode: '0600',
              path: '/etc/metricbeat/metricbeat.yml',
              validate_cmd: '/usr/share/metricbeat/bin/metricbeat -configtest -c %',
            )
          end
        end
        describe 'with ensure = absent' do
          let(:params) { { 'ensure' => 'absent' } }
          if os_facts[:kernel] == 'windows'
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'absent',
                path: 'C:/Program Files/Metricbeat/metricbeat.yml',
                validate_cmd: "\"C:\\Program Files\\Metricbeat\\metricbeat.exe\" -N configtest -c \"%\"", # rubocop:disable StringLiterals
              )
            end
          else
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'absent',
                path: '/etc/metricbeat/metricbeat.yml',
                validate_cmd: '/usr/share/metricbeat/bin/metricbeat -configtest -c %',
              )
            end
          end
        end
        describe 'with disable_configtest = true' do
          let(:params) { { 'disable_configtest' => true } }
          if os_facts[:kernel] == 'windows'
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'present',
                path: 'C:/Program Files/Metricbeat/metricbeat.yml',
                validate_cmd: nil,
              )
            end
          else
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'present',
                owner: 'root',
                group: 'root',
                mode: '0600',
                path: '/etc/metricbeat/metricbeat.yml',
                validate_cmd: nil,
              )
            end
          end
        end
        describe 'with config_mode = 0644' do
          let(:params) { { 'config_mode' => '0644' } }
          if os_facts[:kernel] != 'windows'
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'present',
                owner: 'root',
                group: 'root',
                mode: '0644',
                path: '/etc/metricbeat/metricbeat.yml',
                validate_cmd: '/usr/share/metricbeat/bin/metricbeat -configtest -c %',
              )
            end
          end
        end
        describe 'with config_mode = 9999' do
          let(:params) { { 'config_mode' => '9999' } }
          it { is_expected.to raise_error(Puppet::Error) }
        end
        # Beats 6.x replaced "-configtest" with "test config".
        describe 'with major_version = 6 for new config test flag' do
          let(:params) { { 'major_version' => '6' } }
          if os_facts[:kernel] == 'windows'
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'present',
                path: 'C:/Program Files/Metricbeat/metricbeat.yml',
                validate_cmd: "\"C:\\Program Files\\Metricbeat\\metricbeat.exe\" --path.config \"C:/Program Files/Metricbeat\" test config", # rubocop:disable StringLiterals
              )
            end
          else
            it do
              is_expected.to contain_file('metricbeat.yml').with(
                ensure: 'present',
                owner: 'root',
                group: 'root',
                mode: '0600',
                path: '/etc/metricbeat/metricbeat.yml',
                validate_cmd: '/usr/share/metricbeat/bin/metricbeat --path.config /etc/metricbeat test config',
              )
            end
          end
        end
      end
      # Package install on POSIX; zip download/unpack/service dance on Windows.
      describe 'metricbeat::install' do
        if os_facts[:kernel] == 'windows'
          it do
            is_expected.to contain_file('C:/Program Files').with(ensure: 'directory')
            is_expected.to contain_archive('C:/Windows/Temp/metricbeat-6.6.1-windows-x86_64.zip').with(
              creates: 'C:/Program Files/Metricbeat/metricbeat-6.6.1-windows-x86_64',
              source: 'https://artifacts.elastic.co/downloads/beats/metricbeat/metricbeat-6.6.1-windows-x86_64.zip',
            )
            is_expected.to contain_exec('unzip metricbeat-6.6.1-windows-x86_64').with(
              command: "\$sh=New-Object -COM Shell.Application;\$sh.namespace((Convert-Path 'C:/Program Files')).Copyhere(\$sh.namespace((Convert-Path 'C:/Windows/Temp/metricbeat-6.6.1-windows-x86_64.zip')).items(), 16)", # rubocop:disable LineLength
              creates: 'C:/Program Files/Metricbeat/metricbeat-6.6.1-windows-x86_64',
            )
            is_expected.to contain_exec('stop service metricbeat-6.6.1-windows-x86_64').with(
              creates: 'C:/Program Files/Metricbeat/metricbeat-6.6.1-windows-x86_64',
              command: 'Set-Service -Name metricbeat -Status Stopped',
              onlyif: 'if(Get-WmiObject -Class Win32_Service -Filter "Name=\'metricbeat\'") {exit 0} else {exit 1}',
            )
            is_expected.to contain_exec('rename metricbeat-6.6.1-windows-x86_64').with(
              creates: 'C:/Program Files/Metricbeat/metricbeat-6.6.1-windows-x86_64',
              command: "Remove-Item 'C:/Program Files/Metricbeat' -Recurse -Force -ErrorAction SilentlyContinue;Rename-Item 'C:/Program Files/metricbeat-6.6.1-windows-x86_64' 'C:/Program Files/Metricbeat'", # rubocop:disable LineLength
            )
            is_expected.to contain_exec('mark metricbeat-6.6.1-windows-x86_64').with(
              creates: 'C:/Program Files/Metricbeat/metricbeat-6.6.1-windows-x86_64',
              command: "New-Item 'C:/Program Files/Metricbeat/metricbeat-6.6.1-windows-x86_64' -ItemType file",
            )
            is_expected.to contain_exec('install metricbeat-6.6.1-windows-x86_64').with(
              command: './install-service-metricbeat.ps1',
              cwd: 'C:/Program Files/Metricbeat',
              refreshonly: true,
            )
          end
        else
          it { is_expected.to contain_package('metricbeat').with(ensure: 'present') }
        end
        describe 'with ensure = absent' do
          let(:params) { { 'ensure' => 'absent' } }
          if os_facts[:kernel] != 'windows'
            it { is_expected.to contain_package('metricbeat').with(ensure: 'absent') }
          end
        end
        describe 'with package_ensure to a specific version' do
          let(:params) { { 'package_ensure' => '6.6.1' } }
          if os_facts[:kernel] != 'windows'
            it { is_expected.to contain_package('metricbeat').with(ensure: '6.6.1') }
          end
        end
        describe 'with package_ensure = latest' do
          let(:params) { { 'package_ensure' => 'latest' } }
          if os_facts[:kernel] != 'windows'
            it { is_expected.to contain_package('metricbeat').with(ensure: 'latest') }
          end
        end
      end
      # Elastic package repository per OS family and per major version.
      describe 'metricbeat::repo' do
        case os_facts[:osfamily]
        when 'RedHat'
          it do
            is_expected.to contain_yumrepo('beats').with(
              baseurl: 'https://artifacts.elastic.co/packages/5.x/yum',
              enabled: 1,
              gpgcheck: 1,
              gpgkey: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
            )
          end
        when 'Debian'
          it { is_expected.to contain_class('apt') }
          it { is_expected.to contain_class('apt::update').that_comes_before('Package[metricbeat]') }
          it do
            is_expected.to contain_apt__source('beats').with(
              location: 'https://artifacts.elastic.co/packages/5.x/apt',
              release: 'stable',
              repos: 'main',
              key: {
                'id' => '46095ACC8548582C1A2699A9D27D666CD88E42B4',
                'source' => 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
              },
            )
          end
        when 'SuSe'
          it do
            is_expected.to contain_zypprepo('beats').with(
              baseurl: 'https://artifacts.elastic.co/packages/5.x/yum',
              autorefresh: 1,
              enabled: 1,
              gpgcheck: 1,
              gpgkey: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
              name: 'beats',
              type: 'yum',
            )
          end
        end
        describe 'with major_version = 6' do
          let(:params) { { 'major_version' => '6' } }
          case os_facts[:osfamily]
          when 'RedHat'
            it do
              is_expected.to contain_yumrepo('beats').with(
                baseurl: 'https://artifacts.elastic.co/packages/6.x/yum',
                enabled: 1,
                gpgcheck: 1,
                gpgkey: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
              )
            end
          when 'Debian'
            it { is_expected.to contain_class('apt') }
            it do
              is_expected.to contain_apt__source('beats').with(
                location: 'https://artifacts.elastic.co/packages/6.x/apt',
                release: 'stable',
                repos: 'main',
                key: {
                  'id' => '46095ACC8548582C1A2699A9D27D666CD88E42B4',
                  'source' => 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
                },
              )
            end
          when 'SuSe'
            it do
              is_expected.to contain_zypprepo('beats').with(
                baseurl: 'https://artifacts.elastic.co/packages/6.x/yum',
                autorefresh: 1,
                enabled: 1,
                gpgcheck: 1,
                gpgkey: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
                name: 'beats',
                type: 'yum',
              )
            end
          end
        end
        describe 'with major_version = 7' do
          let(:params) { { 'major_version' => '7' } }
          case os_facts[:osfamily]
          when 'RedHat'
            it do
              is_expected.to contain_yumrepo('beats').with(
                baseurl: 'https://artifacts.elastic.co/packages/7.x/yum',
                enabled: 1,
                gpgcheck: 1,
                gpgkey: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
              )
            end
          when 'Debian'
            it { is_expected.to contain_class('apt') }
            it do
              is_expected.to contain_apt__source('beats').with(
                location: 'https://artifacts.elastic.co/packages/7.x/apt',
                release: 'stable',
                repos: 'main',
                key: {
                  'id' => '46095ACC8548582C1A2699A9D27D666CD88E42B4',
                  'source' => 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
                },
              )
            end
          when 'SuSe'
            it do
              is_expected.to contain_zypprepo('beats').with(
                baseurl: 'https://artifacts.elastic.co/packages/7.x/yum',
                autorefresh: 1,
                enabled: 1,
                gpgcheck: 1,
                gpgkey: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
                name: 'beats',
                type: 'yum',
              )
            end
          end
        end
        describe 'with major_version = idontknow' do
          let(:params) { { 'major_version' => 'idontknow' } }
          it { is_expected.to raise_error(Puppet::Error) }
        end
      end
      # Service resource for each supported service_ensure mode.
      describe 'metricbeat::service' do
        it do
          is_expected.to contain_service('metricbeat').with(
            ensure: 'running',
            enable: true,
            hasrestart: true,
          )
        end
        describe 'with ensure = absent' do
          let(:params) { { 'ensure' => 'absent' } }
          it do
            is_expected.to contain_service('metricbeat').with(
              ensure: 'stopped',
              enable: false,
              hasrestart: true,
            )
          end
        end
        describe 'with service_has_restart = false' do
          let(:params) { { 'service_has_restart' => false } }
          it do
            is_expected.to contain_service('metricbeat').with(
              ensure: 'running',
              enable: true,
              hasrestart: false,
            )
          end
        end
        describe 'with service_ensure = disabled' do
          let(:params) { { 'service_ensure' => 'disabled' } }
          it do
            is_expected.to contain_service('metricbeat').with(
              ensure: 'stopped',
              enable: false,
              hasrestart: true,
            )
          end
        end
        describe 'with service_ensure = running' do
          let(:params) { { 'service_ensure' => 'running' } }
          it do
            is_expected.to contain_service('metricbeat').with(
              ensure: 'running',
              enable: false,
              hasrestart: true,
            )
          end
        end
        describe 'with service_ensure = unmanaged' do
          let(:params) { { 'service_ensure' => 'unmanaged' } }
          it do
            is_expected.to contain_service('metricbeat').with(
              ensure: nil,
              enable: false,
              hasrestart: true,
            )
          end
        end
      end
      # End-to-end class ordering/notification contexts.
      context 'with elasticsearch output' do
        let(:params) do
          {
            'modules' => [{ 'module' => 'system', 'metricsets' => ['cpu', 'memory'], 'period' => '10s' }],
            'outputs' => { 'elasticsearch' => { 'hosts' => ['http://localhost:9200'] } },
          }
        end
        it { is_expected.to compile }
        it { is_expected.to contain_class('metricbeat::config').that_notifies('Class[metricbeat::service]') }
        it { is_expected.to contain_class('metricbeat::install').that_comes_before('Class[metricbeat::config]').that_notifies('Class[metricbeat::service]') }
        it { is_expected.to contain_class('metricbeat::repo').that_comes_before('Class[metricbeat::install]') }
        it { is_expected.to contain_class('metricbeat::service') }
      end
      context 'with manage_repo = false' do
        let(:params) do
          {
            'manage_repo' => false,
            'modules' => [{ 'module' => 'system', 'metricsets' => ['cpu', 'memory'], 'period' => '10s' }],
            'outputs' => { 'elasticsearch' => { 'hosts' => ['http://localhost:9200'] } },
          }
        end
        it { is_expected.to compile }
        it { is_expected.to contain_class('metricbeat::config').that_notifies('Class[metricbeat::service]') }
        it { is_expected.to contain_class('metricbeat::install').that_comes_before('Class[metricbeat::config]').that_notifies('Class[metricbeat::service]') }
        it { is_expected.not_to contain_class('metricbeat::repo') }
        it { is_expected.to contain_class('metricbeat::service') }
      end
      context 'with ensure = absent' do
        let(:params) do
          {
            'ensure' => 'absent',
            'modules' => [{ 'module' => 'system', 'metricsets' => ['cpu', 'memory'], 'period' => '10s' }],
            'outputs' => { 'elasticsearch' => { 'hosts' => ['http://localhost:9200'] } },
          }
        end
        it { is_expected.to compile }
        it { is_expected.to contain_class('metricbeat::config') }
        it { is_expected.to contain_class('metricbeat::install') }
        it { is_expected.to contain_class('metricbeat::repo').that_comes_before('Class[metricbeat::install]') }
        it { is_expected.to contain_class('metricbeat::service').that_comes_before('Class[metricbeat::install]') }
      end
      context 'with ensure = idontknow' do
        let(:params) { { 'ensure' => 'idontknow' } }
        it { is_expected.to raise_error(Puppet::Error) }
      end
      context 'with service_ensure = thisisnew' do
        let(:params) { { 'ensure' => 'thisisnew' } }
        it { is_expected.to raise_error(Puppet::Error) }
      end
      context 'with multiple modules' do
        let(:params) do
          {
            'ensure' => 'absent',
            'modules' => [
              { 'module' => 'system', 'metricsets' => ['cpu', 'memory'], 'period' => '10s' },
              { 'module' => 'apache', 'metricsets' => ['status'], 'period' => '10s', 'hosts' => ['http://127.0.0.1'] },
            ],
            'outputs' => { 'elasticsearch' => { 'hosts' => ['http://localhost:9200'] } },
          }
        end
        it { is_expected.to compile }
      end
      context 'with multiple processors' do
        let(:params) do
          {
            'ensure' => 'absent',
            'modules' => [{ 'module' => 'system', 'metricsets' => ['cpu', 'memory'], 'period' => '10s' }],
            'outputs' => { 'elasticsearch' => { 'hosts' => ['http://localhost:9200'] } },
            'processors' => [
              { 'add_cloud_metadata' => { 'timeout' => '3s' } },
              { 'drop_fields' => { 'fields' => ['field1', 'field2'] } },
            ],
          }
        end
        it { is_expected.to compile }
      end
    end
  end
end
| 37.139293 | 250 | 0.517241 |
9186b00280dfa4b8283869548104abe447f30e85 | 3,342 | require_relative "boot"
require "dotenv"
require "rails/all"
# Load .env plus the environment-specific .env.<env>, overriding any existing
# environment variables; Supermarket refuses to boot without them.
Dotenv.overload(".env", ".env.#{Rails.env}").tap do |env|
  if env.empty?
    raise "Cannot run Supermarket without a .env file."
  end
end
# Require the railties we depend on; the block-level rescue (Ruby 2.6+)
# logs and continues when one is unavailable.
# NOTE(review): assumes Rails.logger is usable this early in boot — confirm.
%w{
  active_record
  action_controller
  action_mailer
  sprockets
}.each do |framework|
  require "#{framework}/railtie"
rescue LoadError
  Rails.logger.info "Unable to load #{framework}."
end
require_relative "../app/lib/supermarket/host"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Supermarket
  # Rails application definition for Supermarket. Runtime configuration is
  # sourced almost entirely from ENV (populated from the .env files loaded
  # above), which this class wires into the framework settings.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.0

    # This flag needs to be set to false from rails 6
    # onwards as we are currently not using cache versioning.
    config.active_record.cache_versioning = false

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Include vendor fonts in the asset pipeline
    config.assets.paths << Rails.root.join("vendor/assets/fonts")

    # Include vendor images in the asset pipeline
    config.assets.paths << Rails.root.join("vendor/assets/images")

    # Ensure fonts and images are precompiled during asset compilation
    config.assets.precompile += %w{*.svg *.eot *.woff *.woff2 *.ttf *.gif *.png}

    # Ensure mailer assets are precompiled during asset compilation
    config.assets.precompile += %w{mailers.css}

    # Use a custom exception handling application
    config.exceptions_app = proc do |env|
      ExceptionsController.action(:show).call(env)
    end

    # Define the status codes for rescuing our custom exceptions
    config.action_dispatch.rescue_responses.merge!(
      "Supermarket::Authorization::NoAuthorizerError" => :not_implemented,
      "Supermarket::Authorization::NotAuthorizedError" => :unauthorized
    )

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    config.active_record.default_timezone = :utc

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Skip locale validation.
    # Note: if the time comes to support locales, this will want to be set to
    # true.
    config.i18n.enforce_available_locales = false

    # Give application URL info so it can build full links back to itself.
    # NOTE(review): FQDN/PORT/PROTOCOL are read with ENV[] (nil when unset);
    # presumably guaranteed by the .env check above — confirm.
    self.default_url_options = {
      host: ENV["FQDN"],
      port: ENV["PORT"],
      protocol: ENV["PROTOCOL"],
    }

    # Configure the email renderer for building links back to the site
    config.action_mailer.default_url_options = default_url_options
    config.action_mailer.asset_host = Supermarket::Host.full_url

    # Set default from email for ActionMailer
    ActionMailer::Base.default from: ENV["FROM_EMAIL"]

    config.autoload_paths += %W{#{config.root}/lib}
  end
end
| 35.553191 | 99 | 0.720527 |
28aa34d09e4b2a29552ac67dac6829691b3567a7 | 3,410 | Capybara::SpecHelper.spec '#click_link' do
# Every example starts from the fixture page containing the links
# exercised below.
before do
  @session.visit('/with_html')
end

# The driver must wait for a link that appears asynchronously; only runs
# on drivers advertising JS support (:requires => [:js]).
it "should wait for asynchronous load", :requires => [:js] do
  @session.visit('/with_js')
  @session.click_link('Click me')
  @session.click_link('Has been clicked')
end

# A symbol locator is accepted and treated like the equivalent string.
it "casts to string" do
  @session.click_link(:'foo')
  @session.should have_content('Another World')
end
# Locating a link by its id attribute.
context "with id given" do
  it "should take user to the linked page" do
    @session.click_link('foo')
    @session.should have_content('Another World')
  end
end

# Locating a link by its visible text, including substring matches.
context "with text given" do
  it "should take user to the linked page" do
    @session.click_link('labore')
    @session.should have_content('Bar')
  end

  it "should accept partial matches" do
    @session.click_link('abo')
    @session.should have_content('Bar')
  end
end

# Locating a link by its title attribute, including substring matches.
context "with title given" do
  it "should take user to the linked page" do
    @session.click_link('awesome title')
    @session.should have_content('Bar')
  end

  it "should accept partial matches" do
    @session.click_link('some tit')
    @session.should have_content('Bar')
  end
end
# Locating a link by the alt text of an image nested inside it.
context "with alternative text given to a contained image" do
  it "should take user to the linked page" do
    @session.click_link('awesome image')
    @session.should have_content('Bar')
  end

  # The description was a copy-paste duplicate of the example above; this
  # one actually verifies substring matching on the alt text, mirroring
  # the "should accept partial matches" examples in the sibling contexts.
  it "should accept partial matches" do
    @session.click_link('some imag')
    @session.should have_content('Bar')
  end
end
# An unmatchable locator raises Capybara::ElementNotFound with a message
# that names the locator.
context "with a locator that doesn't exist" do
  it "should raise an error" do
    msg = "Unable to find link \"does not exist\""
    running do
      @session.click_link('does not exist')
    end.should raise_error(Capybara::ElementNotFound, msg)
  end
end

it "should follow relative links" do
  @session.visit('/')
  @session.click_link('Relative')
  @session.should have_content('This is a test')
end

# Protocol-relative href (//host/path).
it "should follow protocol relative links" do
  @session.click_link('Protocol')
  @session.should have_content('Another World')
end

it "should follow redirects" do
  @session.click_link('Redirect')
  @session.should have_content('You landed')
end
# The description was identical to the previous example's ("should follow
# redirects"); this one specifically covers a redirect chain that leads
# back to the page the link was clicked on.
it "should follow redirects back to the same page" do
  @session.click_link('BackToMyself')
  @session.should have_content('This is a test')
end
it "should add query string to current URL with naked query string" do
  @session.click_link('Naked Query String')
  @session.should have_content('Query String sent')
end

# Pure in-page anchors (named and blank) must not trigger navigation:
# the previously filled-in field still holds its value afterwards.
it "should do nothing on anchor links" do
  @session.fill_in("test_field", :with => 'blah')
  @session.click_link('Normal Anchor')
  @session.find_field("test_field").value.should == 'blah'
  @session.click_link('Blank Anchor')
  @session.find_field("test_field").value.should == 'blah'
end

# URL + anchor pointing at the current page behaves like an in-page anchor.
it "should do nothing on URL+anchor links for the same page" do
  @session.fill_in("test_field", :with => 'blah')
  @session.click_link('Anchor on same page')
  @session.find_field("test_field").value.should == 'blah'
end

it "should follow link on URL+anchor links for a different page" do
  @session.click_link('Anchor on different page')
  @session.should have_content('Bar')
end

# Links without an href attribute are not considered matches.
it "raise an error with links with no href" do
  running do
    @session.click_link('No Href')
  end.should raise_error(Capybara::ElementNotFound)
end
end
| 28.416667 | 72 | 0.678592 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.