hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
87a02231b366c2d5c73eaf7b19856e656eff8fec | 1,323 | # coding: utf-8
# Prepend this gem's lib/ directory to the load path so the version
# constant can be required before the gem is installed.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'portal_module/version'

# Gem specification for portal_module, a Thor-based command line interface.
Gem::Specification.new do |spec|
  spec.name          = "portal_module"
  spec.version       = PortalModule::VERSION
  spec.authors       = ["Jeff McAffee"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Portal Module CLI}
  spec.description   = %q{Command line interface for Portal Module}
  spec.homepage      = "https://github.com/jmcaffee/portal_module"
  spec.license       = "MIT"

  # Ship every git-tracked file; executables are whatever lives in bin/.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Development-only dependencies.
  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "cucumber", "~> 1.3.9"
  spec.add_development_dependency "guard"
  spec.add_development_dependency "guard-rspec"
  #spec.add_development_dependency "pry-byebug", "~> 1.3.3"
  spec.add_development_dependency "pry", "~> 0.10"

  # Runtime dependencies.
  spec.add_runtime_dependency "nokogiri"
  spec.add_runtime_dependency "page-object"
  spec.add_runtime_dependency "thor"
  spec.add_runtime_dependency "ktutils"
end
| 38.911765 | 74 | 0.693878 |
f7a8978ac530ac843374db340a91c9e6506103e2 | 137 | json.array!(@users) do |user|
  # Render each user's attributes plus a JSON URL for the record.
  # NOTE(review): :password looks like a sensitive field to expose in an
  # API response - confirm this is intentional.
  json.extract! user, :id, :name, :password, :email, :address
  json.url user_url(user, format: :json)
end
| 27.4 | 61 | 0.678832 |
# Register a custom GlobalID locator for the :pvb app namespace.
0135bb2fc6a2cb3c4e80c3215e9ee6177d3198ac | 45 | GlobalID::Locator.use :pvb, ModelLocator.new
| 22.5 | 44 | 0.8 |
5dca37486cce2d37fcdf6a863c0eec7b9b177bfe | 578 | module UniversalAr::Concerns::Commentable
  # Mixin giving a model a polymorphic `comments` association and a helper
  # for creating comments on it.
  extend ActiveSupport::Concern

  included do
    has_many :comments, class_name: 'UniversalAr::Comment', as: :subject

    # Creates a comment on this record and returns it.
    #
    # content      - comment body
    # user         - authoring user
    # passed_scope - optional scope; falls back to self.scope when nil
    # kind         - optional comment kind, assigned (not saved) when given
    # title        - optional comment title
    #
    # NOTE(review): despite the bang name this uses `create`, not `create!`,
    # so validation failures return an unsaved record instead of raising -
    # confirm which behavior callers expect.
    def save_comment!(content, user, passed_scope=nil, kind=nil, title=nil)
      comment = self.comments.create content: content,
                                     user: user,
                                     scope: (passed_scope.nil? ? self.scope : passed_scope),
                                     title: title
      comment.kind = kind if !kind.nil?
      return comment
    end
  end
end
| 28.9 | 83 | 0.562284 |
5d3542098340822a4926cafd58290d8cb6d74e94 | 6,417 | class FfmpegAT28 < Formula
  # Homebrew formula for the legacy FFmpeg 2.8 series.
  desc "Play, record, convert, and stream audio and video"
  homepage "https://ffmpeg.org/"
  url "https://ffmpeg.org/releases/ffmpeg-2.8.15.tar.bz2"
  sha256 "35647f6c1f6d4a1719bc20b76bf4c26e4ccd665f46b5676c0e91c5a04622ee21"
  revision 2

  bottle do
    sha256 "6e6ac30c4fb35f630d0cf34fc746047b44ae67de3b529cded1ad21ee012b6576" => :mojave
    sha256 "7241f4c1b83dbc84e4fd98b6022d071c89e51ce68c1f3825eb52d2dc41f6e629" => :high_sierra
    sha256 "cb7c8914935839055089fb551ae26cd19e8814601937a8c94e030e85b8f6a5f2" => :sierra
  end

  # Versioned formula: installed keg-only so it never shadows the main ffmpeg.
  keg_only :versioned_formula

  # User-selectable build options; each maps to a configure flag in #install.
  option "with-rtmpdump", "Enable RTMP protocol"
  option "with-libass", "Enable ASS/SSA subtitle format"
  option "with-opencore-amr", "Enable Opencore AMR NR/WB audio format"
  option "with-openjpeg", "Enable JPEG 2000 image format"
  option "with-openssl", "Enable SSL support"
  option "with-libssh", "Enable SFTP protocol via libssh"
  option "with-schroedinger", "Enable Dirac video format"
  option "with-fdk-aac", "Enable the Fraunhofer FDK AAC library"
  option "with-libvidstab", "Enable vid.stab support for video stabilization"
  option "with-libsoxr", "Enable the soxr resample library"
  option "with-webp", "Enable using libwebp to encode WEBP images"
  option "with-zeromq", "Enable using libzeromq to receive commands sent through a libzeromq client"
  option "with-dcadec", "Enable dcadec library"

  # Build-time tools.
  depends_on "pkg-config" => :build
  depends_on "texi2html" => :build
  depends_on "yasm" => :build

  # Mandatory codec/runtime libraries.
  depends_on "lame"
  depends_on "libvo-aacenc"
  depends_on "libvorbis"
  depends_on "libvpx"
  depends_on "opus"
  depends_on "sdl"
  depends_on "snappy"
  depends_on "theora"
  depends_on "x264"
  depends_on "x265"
  depends_on "xvid"

  # Optional libraries, enabled only with the matching --with-* option.
  depends_on "dcadec" => :optional
  depends_on "faac" => :optional
  depends_on "fdk-aac" => :optional
  depends_on "fontconfig" => :optional
  depends_on "freetype" => :optional
  depends_on "frei0r" => :optional
  depends_on "libass" => :optional
  depends_on "libbluray" => :optional
  depends_on "libbs2b" => :optional
  depends_on "libcaca" => :optional
  depends_on "libquvi" => :optional
  depends_on "libsoxr" => :optional
  depends_on "libssh" => :optional
  depends_on "libvidstab" => :optional
  depends_on "opencore-amr" => :optional
  depends_on "openjpeg" => :optional
  depends_on "openssl" => :optional
  depends_on "rtmpdump" => :optional
  depends_on "schroedinger" => :optional
  depends_on "speex" => :optional
  depends_on "webp" => :optional
  depends_on "zeromq" => :optional
def install
# Fixes "dyld: lazy symbol binding failed: Symbol not found: _clock_gettime"
if MacOS.version == "10.11" && MacOS::Xcode.installed? && MacOS::Xcode.version >= "8.0"
inreplace %w[libavdevice/v4l2.c libavutil/time.c], "HAVE_CLOCK_GETTIME",
"UNDEFINED_GIBBERISH"
end
args = %W[
--prefix=#{prefix}
--enable-shared
--enable-pthreads
--enable-gpl
--enable-version3
--enable-hardcoded-tables
--enable-avresample
--cc=#{ENV.cc}
--host-cflags=#{ENV.cflags}
--host-ldflags=#{ENV.ldflags}
--enable-ffplay
--enable-libmp3lame
--enable-libopus
--enable-libsnappy
--enable-libtheora
--enable-libvo-aacenc
--enable-libvorbis
--enable-libvpx
--enable-libx264
--enable-libx265
--enable-libxvid
]
args << "--enable-opencl" if MacOS.version > :lion
args << "--enable-libfontconfig" if build.with? "fontconfig"
args << "--enable-libfreetype" if build.with? "freetype"
args << "--enable-librtmp" if build.with? "rtmpdump"
args << "--enable-libopencore-amrnb" << "--enable-libopencore-amrwb" if build.with? "opencore-amr"
args << "--enable-libfaac" if build.with? "faac"
args << "--enable-libass" if build.with? "libass"
args << "--enable-libssh" if build.with? "libssh"
args << "--enable-libspeex" if build.with? "speex"
args << "--enable-libschroedinger" if build.with? "schroedinger"
args << "--enable-libfdk-aac" if build.with? "fdk-aac"
args << "--enable-openssl" if build.with? "openssl"
args << "--enable-frei0r" if build.with? "frei0r"
args << "--enable-libcaca" if build.with? "libcaca"
args << "--enable-libsoxr" if build.with? "libsoxr"
args << "--enable-libquvi" if build.with? "libquvi"
args << "--enable-libvidstab" if build.with? "libvidstab"
args << "--enable-libwebp" if build.with? "webp"
args << "--enable-libzmq" if build.with? "zeromq"
args << "--enable-libbs2b" if build.with? "libbs2b"
args << "--enable-libdcadec" if build.with? "dcadec"
if build.with? "openjpeg"
args << "--enable-libopenjpeg"
args << "--disable-decoder=jpeg2000"
args << "--extra-cflags=" + `pkg-config --cflags libopenjpeg`.chomp
end
# These librares are GPL-incompatible, and require ffmpeg be built with
# the "--enable-nonfree" flag, which produces unredistributable libraries
if %w[faac fdk-aac openssl].any? { |f| build.with? f }
args << "--enable-nonfree"
end
# A bug in a dispatch header on 10.10, included via CoreFoundation,
# prevents GCC from building VDA support. GCC has no problems on
# 10.9 and earlier.
# See: https://github.com/Homebrew/homebrew/issues/33741
if MacOS.version < :yosemite || ENV.compiler == :clang
args << "--enable-vda"
else
args << "--disable-vda"
end
# For 32-bit compilation under gcc 4.2, see:
# https://trac.macports.org/ticket/20938#comment:22
ENV.append_to_cflags "-mdynamic-no-pic" if Hardware::CPU.is_32_bit? && Hardware::CPU.intel? && ENV.compiler == :clang
system "./configure", *args
if MacOS.prefer_64_bit?
inreplace "config.mak" do |s|
shflags = s.get_make_var "SHFLAGS"
if shflags.gsub!(" -Wl,-read_only_relocs,suppress", "")
s.change_make_var! "SHFLAGS", shflags
end
end
end
system "make", "install"
# Build and install additional FFmpeg tools
system "make", "alltools"
bin.install Dir["tools/*"].select { |f| File.executable? f }
end
test do
# Create an example mp4 file
system "#{bin}/ffmpeg", "-y", "-filter_complex",
"testsrc=rate=1:duration=1", "#{testpath}/video.mp4"
assert_predicate testpath/"video.mp4", :exist?
end
end
| 37.092486 | 121 | 0.666355 |
e9d7a47f11b1e597842bfba77e920fb20644806a | 7,874 | require 'active_support/xml_mini'
require 'active_support/time'
require 'active_support/core_ext/object/blank'
require 'active_support/core_ext/object/to_param'
require 'active_support/core_ext/object/to_query'
require 'active_support/core_ext/array/wrap'
require 'active_support/core_ext/hash/reverse_merge'
require 'active_support/core_ext/string/inflections'
class Hash
# Returns a string containing an XML representation of its receiver:
#
# { foo: 1, bar: 2 }.to_xml
# # =>
# # <?xml version="1.0" encoding="UTF-8"?>
# # <hash>
# # <foo type="integer">1</foo>
# # <bar type="integer">2</bar>
# # </hash>
#
# To do so, the method loops over the pairs and builds nodes that depend on
# the _values_. Given a pair +key+, +value+:
#
# * If +value+ is a hash there's a recursive call with +key+ as <tt>:root</tt>.
#
# * If +value+ is an array there's a recursive call with +key+ as <tt>:root</tt>,
# and +key+ singularized as <tt>:children</tt>.
#
# * If +value+ is a callable object it must expect one or two arguments. Depending
# on the arity, the callable is invoked with the +options+ hash as first argument
# with +key+ as <tt>:root</tt>, and +key+ singularized as second argument. The
# callable can add nodes by using <tt>options[:builder]</tt>.
#
# 'foo'.to_xml(lambda { |options, key| options[:builder].b(key) })
# # => "<b>foo</b>"
#
# * If +value+ responds to +to_xml+ the method is invoked with +key+ as <tt>:root</tt>.
#
# class Foo
# def to_xml(options)
# options[:builder].bar 'fooing!'
# end
# end
#
# { foo: Foo.new }.to_xml(skip_instruct: true)
# # =>
# # <hash>
# # <bar>fooing!</bar>
# # </hash>
#
# * Otherwise, a node with +key+ as tag is created with a string representation of
# +value+ as text node. If +value+ is +nil+ an attribute "nil" set to "true" is added.
# Unless the option <tt>:skip_types</tt> exists and is true, an attribute "type" is
# added as well according to the following mapping:
#
# XML_TYPE_NAMES = {
# "Symbol" => "symbol",
# "Integer" => "integer",
# "BigDecimal" => "decimal",
# "Float" => "float",
# "TrueClass" => "boolean",
# "FalseClass" => "boolean",
# "Date" => "date",
# "DateTime" => "dateTime",
# "Time" => "dateTime"
# }
#
# By default the root node is "hash", but that's configurable via the <tt>:root</tt> option.
#
# The default XML builder is a fresh instance of <tt>Builder::XmlMarkup</tt>. You can
# configure your own builder with the <tt>:builder</tt> option. The method also accepts
# options like <tt>:dasherize</tt> and friends, they are forwarded to the builder.
def to_xml(options = {})
require 'active_support/builder' unless defined?(Builder)
options = options.dup
options[:indent] ||= 2
options[:root] ||= 'hash'
options[:builder] ||= Builder::XmlMarkup.new(indent: options[:indent])
builder = options[:builder]
builder.instruct! unless options.delete(:skip_instruct)
root = ActiveSupport::XmlMini.rename_key(options[:root].to_s, options)
builder.tag!(root) do
each { |key, value| ActiveSupport::XmlMini.to_tag(key, value, options) }
yield builder if block_given?
end
end
class << self
# Returns a Hash containing a collection of pairs when the key is the node name and the value is
# its content
#
# xml = <<-XML
# <?xml version="1.0" encoding="UTF-8"?>
# <hash>
# <foo type="integer">1</foo>
# <bar type="integer">2</bar>
# </hash>
# XML
#
# hash = Hash.from_xml(xml)
# # => {"hash"=>{"foo"=>1, "bar"=>2}}
#
# +DisallowedType+ is raised if the XML contains attributes with <tt>type="yaml"</tt> or
# <tt>type="symbol"</tt>. Use <tt>Hash.from_trusted_xml</tt> to parse this XML.
def from_xml(xml, disallowed_types = nil)
ActiveSupport::XMLConverter.new(xml, disallowed_types).to_h
end
# Builds a Hash from XML just like <tt>Hash.from_xml</tt>, but also allows Symbol and YAML.
def from_trusted_xml(xml)
from_xml xml, []
end
end
end
module ActiveSupport
class XMLConverter # :nodoc:
class DisallowedType < StandardError
def initialize(type)
super "Disallowed type attribute: #{type.inspect}"
end
end
DISALLOWED_TYPES = %w(symbol yaml)
def initialize(xml, disallowed_types = nil)
@xml = normalize_keys(XmlMini.parse(xml))
@disallowed_types = disallowed_types || DISALLOWED_TYPES
end
def to_h
deep_to_h(@xml)
end
private
def normalize_keys(params)
case params
when Hash
Hash[params.map { |k,v| [k.to_s.tr('-', '_'), normalize_keys(v)] } ]
when Array
params.map { |v| normalize_keys(v) }
else
params
end
end
def deep_to_h(value)
case value
when Hash
process_hash(value)
when Array
process_array(value)
when String
value
else
raise "can't typecast #{value.class.name} - #{value.inspect}"
end
end
def process_hash(value)
if value.include?('type') && !value['type'].is_a?(Hash) && @disallowed_types.include?(value['type'])
raise DisallowedType, value['type']
end
if become_array?(value)
_, entries = Array.wrap(value.detect { |k,v| not v.is_a?(String) })
if entries.nil? || value['__content__'].try(:empty?)
[]
else
case entries
when Array
entries.collect { |v| deep_to_h(v) }
when Hash
[deep_to_h(entries)]
else
raise "can't typecast #{entries.inspect}"
end
end
elsif become_content?(value)
process_content(value)
elsif become_empty_string?(value)
''
elsif become_hash?(value)
xml_value = Hash[value.map { |k,v| [k, deep_to_h(v)] }]
# Turn { files: { file: #<StringIO> } } into { files: #<StringIO> } so it is compatible with
# how multipart uploaded files from HTML appear
xml_value['file'].is_a?(StringIO) ? xml_value['file'] : xml_value
end
end
def become_content?(value)
value['type'] == 'file' || (value['__content__'] && (value.keys.size == 1 || value['__content__'].present?))
end
def become_array?(value)
value['type'] == 'array'
end
def become_empty_string?(value)
# { "string" => true }
# No tests fail when the second term is removed.
value['type'] == 'string' && value['nil'] != 'true'
end
def become_hash?(value)
!nothing?(value) && !garbage?(value)
end
def nothing?(value)
# blank or nil parsed values are represented by nil
value.blank? || value['nil'] == 'true'
end
def garbage?(value)
# If the type is the only element which makes it then
# this still makes the value nil, except if type is
# an XML node(where type['value'] is a Hash)
value['type'] && !value['type'].is_a?(::Hash) && value.size == 1
end
def process_content(value)
content = value['__content__']
if parser = ActiveSupport::XmlMini::PARSING[value['type']]
parser.arity == 1 ? parser.call(content) : parser.call(content, value)
else
content
end
end
def process_array(value)
value.map! { |i| deep_to_h(i) }
value.length > 1 ? value : value.first
end
end
end
| 32.403292 | 116 | 0.58255 |
1d931640d82b31a89768a2c7d9b5c726aec9b3dd | 1,834 | #
# Be sure to run `pod lib lint hippy.podspec --verbose --use-libraries' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods specification for the Hippy iOS SDK.
Pod::Spec.new do |s|
  s.name             = 'hippy'
  s.version          = '2.2.0'
  s.summary          = 'hippy lib for ios'

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description      = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage         = 'http://hippyjs.org'
  s.license          = { :type => 'Apache2', :file => 'LICENSE' }
  s.author           = { 'mengyanluo' => '[email protected]' }
  s.source           = {:git => 'https://github.com/Tencent/Hippy.git', :tag => s.version}
  s.ios.deployment_target = '8.0'
  s.source_files     = 'ios/sdk/**/*.{h,m,c,mm,s,cpp,cc}'
  s.public_header_files = 'ios/sdk/**/*.h'
  s.default_subspec  = 'core'

  # Shared C++ core; excludes the V8-backed NAPI variant, bundled JS,
  # and Android-only platform sources.
  s.subspec 'core' do |cores|
    cores.source_files = 'core/**/*.{h,cc}'
    cores.exclude_files = ['core/include/core/napi/v8','core/src/napi/v8','core/js','core/third_party/base/src/platform/adr']
    cores.libraries = 'c++'
    cores.header_mappings_dir = 'core/include/'
    cores.pod_target_xcconfig = {'HEADER_SEARCH_PATHS' => '${PODS_ROOT}/hippy/core/third_party/base/include/'}
  end

  # Force-load the static library into consumers unless they opt in to
  # frameworks. (Rewritten from an if/else whose then-branch was empty.)
  unless ENV['hippy_use_frameworks']
    s.user_target_xcconfig = {'OTHER_LDFLAGS' => '-force_load "${PODS_CONFIGURATION_BUILD_DIR}/hippy/libhippy.a"'}
  end
end
| 40.755556 | 125 | 0.648855 |
e2fa6fa845e268f08d9516262a8091ec6efed6c1 | 4,284 | require 'spec_helper'
# Model specs for CclaSignature: associations, validations, signing
# side effects, and the by_organization / earliest_by_user / search scopes.
describe CclaSignature do
  context 'associations' do
    it { should belong_to(:user) }
    it { should belong_to(:ccla) }
    it { should belong_to(:organization) }
  end

  context 'validations' do
    it { should validate_presence_of(:first_name) }
    it { should validate_presence_of(:last_name) }
    it { should validate_presence_of(:email) }
    it { should validate_presence_of(:phone) }
    it { should validate_presence_of(:company) }
    it { should validate_presence_of(:address_line_1) }
    it { should validate_presence_of(:city) }
    it { should validate_presence_of(:state) }
    it { should validate_presence_of(:zip) }
    it { should validate_presence_of(:country) }
    it { should validate_acceptance_of(:agreement) }
  end

  it_behaves_like 'exportable'

  describe '#sign!' do
    let(:ccla_signature) { build(:ccla_signature) }

    before { ccla_signature.sign! }

    it 'creates an associated organization' do
      expect(ccla_signature.organization).to_not be_nil
    end

    it 'creates a contributor for the associated organization' do
      expect(ccla_signature.organization.contributors.count).to eql(1)
    end

    it 'saves the ccla signature' do
      expect(ccla_signature.persisted?).to be true
    end
  end

  describe '.by_organization' do
    context 'when multiple organizations have signed a CCLA' do
      let(:old_org) { create(:organization, ccla_signatures_count: 0) }
      let(:recent_org) { create(:organization, ccla_signatures_count: 0) }
      let!(:recent_org_signature) { create(:ccla_signature, organization: recent_org, signed_at: 1.day.ago) }
      let!(:old_org_signature) { create(:ccla_signature, organization: old_org, signed_at: 1.year.ago) }

      it 'should return the signatures' do
        expect(CclaSignature.by_organization.count).to eql(2)
      end

      it 'should order the signatures ascending by signed at date' do
        expect(CclaSignature.by_organization.first).to eql(old_org_signature)
      end
    end

    context 'when a organization has re-signed a CCLA' do
      let(:organization) { create(:organization, ccla_signatures_count: 0) }
      let!(:recent_signature) { create(:ccla_signature, organization: organization, signed_at: 1.month.ago) }
      let!(:old_signature) { create(:ccla_signature, organization: organization, signed_at: 1.year.ago) }

      it 'should return the latest signature' do
        expect(CclaSignature.by_organization).to match_array([recent_signature])
      end

      it 'should not return older signatures' do
        expect(CclaSignature.by_organization).to_not match_array([old_signature])
      end
    end
  end

  describe '.earliest_by_user' do
    context 'when multiple users from a single organization have signed a CCLA' do
      let(:organization) { create(:organization, ccla_signatures_count: 0) }
      let(:repeat_signer) { create(:user, last_name: 'Repeater') }
      let!(:latest_signature) { create(:ccla_signature, organization: organization, signed_at: 1.day.ago) }
      let!(:recent_signature) do
        create(:ccla_signature, organization: organization, signed_at: 1.month.ago,
                                user: repeat_signer, last_name: repeat_signer.last_name)
      end
      let!(:recent_repeat) do
        create(:ccla_signature, organization: organization, signed_at: 1.week.ago,
                                user: repeat_signer, last_name: repeat_signer.last_name)
      end
      let!(:earliest_signature) { create(:ccla_signature, organization: organization, signed_at: 1.year.ago) }

      it 'returns the earliest signature for each of the users' do
        expect(CclaSignature.earliest_by_user).to match_array([earliest_signature, recent_signature, latest_signature])
      end

      it 'does not return a newer signature by the same user' do
        expect(CclaSignature.earliest_by_user).to_not include(recent_repeat)
      end
    end
  end

  describe '.search' do
    let!(:ihop) { create(:ccla_signature, company: 'International House of Pancakes') }
    let!(:bhop) { create(:ccla_signature, company: "Bob's House of Pancakes") }

    it 'returns ccla signatures with a similar company' do
      expect(CclaSignature.search('pancakes')).to include(ihop, bhop)
    end
  end
end
| 39.666667 | 119 | 0.701914 |
6a59cd0b3c59eab641b34587528b94f99ec176d8 | 4,251 | # Track Chrome stable.
# https://omahaproxy.appspot.com/
# Homebrew formula for the V8 JavaScript engine (Chrome-stable release line).
class V8 < Formula
  desc "Google's JavaScript engine"
  homepage "https://github.com/v8/v8/wiki"
  url "https://github.com/v8/v8-git-mirror/archive/5.1.281.47.tar.gz"
  sha256 "63c9933227d6912689ea6bc012eea6a1fabaf526ac04bc245d9381e3ea238bf6"

  bottle do
    cellar :any
    sha256 "179a8442510eb0a022ea6823cd6a76044c14c4fe18415710cac3d746d432020e" => :high_sierra
    sha256 "8106efc14371982af11a66d8db533dc0589bc240950e0e445467cf6ce8871393" => :sierra
    sha256 "487f2ca72096ee27d13533a6dad2d472a92ba40ef518a45226f19e94d4a79242" => :el_capitan
    sha256 "dc9af3e08eda8a4acd1ff3c6b47a4c5170a92dbab7d2d79958a14d8aa42eefac" => :yosemite
    sha256 "7bcd1bbd66c11305eeea0c36ca472de8a639f511abe0909c8815b1208dbce7b6" => :mavericks
  end

  option "with-readline", "Use readline instead of libedit"

  # not building on Snow Leopard:
  # https://github.com/Homebrew/homebrew/issues/21426
  depends_on :macos => :lion

  # gyp doesn't run under 2.6 or lower
  depends_on "python@2" => :build

  depends_on "readline" => :optional
  needs :cxx11

  # Pinned build-time sources vendored below.
  # Update from "DEPS" file in tarball.
  # Note that we don't require the "test" DEPS because we don't run the tests.
  resource "gyp" do
    url "https://chromium.googlesource.com/external/gyp.git",
        :revision => "4ec6c4e3a94bd04a6da2858163d40b2429b8aad1"
  end

  resource "icu" do
    url "https://chromium.googlesource.com/chromium/deps/icu.git",
        :revision => "c291cde264469b20ca969ce8832088acb21e0c48"
  end

  resource "buildtools" do
    url "https://chromium.googlesource.com/chromium/buildtools.git",
        :revision => "80b5126f91be4eb359248d28696746ef09d5be67"
  end

  resource "common" do
    url "https://chromium.googlesource.com/chromium/src/base/trace_event/common.git",
        :revision => "c8c8665c2deaf1cc749d9f8e153256d4f67bf1b8"
  end

  resource "swarming_client" do
    url "https://chromium.googlesource.com/external/swarming.client.git",
        :revision => "df6e95e7669883c8fe9ef956c69a544154701a49"
  end

  resource "gtest" do
    url "https://chromium.googlesource.com/external/github.com/google/googletest.git",
        :revision => "6f8a66431cb592dad629028a50b3dd418a408c87"
  end

  resource "gmock" do
    url "https://chromium.googlesource.com/external/googlemock.git",
        :revision => "0421b6f358139f02e102c9c332ce19a33faf75be"
  end

  resource "clang" do
    url "https://chromium.googlesource.com/chromium/src/tools/clang.git",
        :revision => "faee82e064e04e5cbf60cc7327e7a81d2a4557ad"
  end
def install
# Bully GYP into correctly linking with c++11
ENV.cxx11
ENV["GYP_DEFINES"] = "clang=#{ENV.compiler == :clang || OS.mac? ? 1 : 0} mac_deployment_target=#{MacOS.version}"
# https://code.google.com/p/v8/issues/detail?id=4511#c3
ENV.append "GYP_DEFINES", "v8_use_external_startup_data=0"
# fix up libv8.dylib install_name
# https://github.com/Homebrew/homebrew/issues/36571
# https://code.google.com/p/v8/issues/detail?id=3871
inreplace "tools/gyp/v8.gyp",
"'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']",
"\\0, 'DYLIB_INSTALL_NAME_BASE': '#{opt_lib}'"
(buildpath/"build/gyp").install resource("gyp")
(buildpath/"third_party/icu").install resource("icu")
(buildpath/"buildtools").install resource("buildtools")
(buildpath/"base/trace_event/common").install resource("common")
(buildpath/"tools/swarming_client").install resource("swarming_client")
(buildpath/"testing/gtest").install resource("gtest")
(buildpath/"testing/gmock").install resource("gmock")
(buildpath/"tools/clang").install resource("clang")
system "make", "native", "library=shared", "snapshot=on",
"console=readline", "i18nsupport=off",
"strictaliasing=off"
include.install Dir["include/*"]
cd "out/native" do
if OS.mac?
rm ["libgmock.a", "libgtest.a"]
lib.install Dir["lib*"]
else
lib.install "lib.target/libv8.so"
end
bin.install "d8", "mksnapshot", "process", "shell" => "v8"
end
end
test do
assert_equal "Hello World!", pipe_output("#{bin}/v8 -e 'print(\"Hello World!\")'").chomp
end
end
| 36.025424 | 116 | 0.70454 |
3816a50ec74db26e45fbd8e437bf6a5a02746aba | 1,601 | require 'test_helper'
# Integration tests for the signup flow: invalid submissions are rejected,
# and valid signups require email-based account activation before login.
class UsersSignupTest < ActionDispatch::IntegrationTest

  def setup
    # Start each test with an empty mail queue.
    ActionMailer::Base.deliveries.clear
  end

  test "invalid signup information" do
    get signup_path
    assert_no_difference 'User.count' do
      post users_path, params: {
        user: {
          name: "",
          email: "user@invalid",
          password: "foo",
          password_confirmation: "bar"
        }
      }
    end
    assert_template 'users/new'
    assert_select 'div#error_explanation'
    assert_select 'div.field_with_errors'
  end

  test "valid signup information with account activation" do
    get signup_path
    assert_difference 'User.count', 1 do
      post users_path, params: {
        user: {
          name: "Example User",
          email: "[email protected]",
          password: "password",
          password_confirmation: "password"
        }
      }
    end
    assert_equal 1, ActionMailer::Base.deliveries.size
    user = assigns(:user)
    assert_not user.activated?
    # Try to log in before activation.
    log_in_as(user)
    assert_not is_logged_in?
    # Invalid activation token
    get edit_account_activation_path("invalid token", email: user.email)
    assert_not is_logged_in?
    # Valid token, wrong email
    get edit_account_activation_path(user.activation_token, email: 'wrong')
    assert_not is_logged_in?
    # Valid activation token
    get edit_account_activation_path(user.activation_token, email: user.email)
    assert user.reload.activated?
    follow_redirect!
    assert_template 'users/show'
    assert is_logged_in?
  end
end
| 27.135593 | 78 | 0.671455 |
21a193af02ba991cfd22075248cdb494013e021c | 479 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the IncluirAtualizarEmpresa request object: endpoint URI,
# HTTP verb, and JSON payload.
RSpec.describe EnotasApi::V1::IncluirAtualizarEmpresa do
  let(:empresa) { EnotasApi::V1::Empresa.new({ id: '123' }) }
  let(:instance) { described_class.new(empresa) }

  it 'have expected uri' do
    expect(instance.uri).to eq('/v1/empresas')
  end

  it 'have expected method' do
    expect(instance.method).to eq(:POST)
  end

  it 'have expected content' do
    expect(instance.content).to eq(empresa.to_json)
  end
end
| 22.809524 | 61 | 0.705637 |
ab4933c0ca2b986802be790d5bd8e6492230a284 | 2,672 | require "rails_helper"
# Feature spec covering plan creation and editing, with the H2020 grant
# lookup stubbed so no network request is made.
RSpec.describe "Plans", type: :feature do

  before do
    @default_template = create(:template, :default, :published)
    @org = create(:org)
    @research_org = create(:org, :organisation, :research_institute,
                           templates: 1)
    @funding_org = create(:org, :funder, templates: 1)
    @template = create(:template, org: @org)
    @user = create(:user, org: @org)
    sign_in(@user)
    # Stub the remote grant-number value-pairs XML normally fetched by OpenURI.
    OpenURI.expects(:open_uri).returns(<<~XML
      <form-value-pairs>
      <value-pairs value-pairs-name="H2020projects" dc-term="relation">
      <pair>
      <displayed-value>
      115797 - INNODIA - Translational approaches to disease modifying therapy of type 1 diabetes: an innovative approach towards understanding and arresting type 1 diabetes – Sofia ref.: 115797
      </displayed-value>
      <stored-value>info:eu-repo/grantAgreement/EC/H2020/115797/EU</stored-value>
      </pair>
      </value-pairs>
      </form-value-pairs>
    XML
                                      )
  end

  scenario "User creates a new Plan", :js do
    # Action
    click_link "Create plan"
    fill_in :plan_title, with: "My test plan"
    fill_in :plan_org_name, with: @research_org.name
    find('#suggestion-2-0').click
    fill_in :plan_funder_name, with: @funding_org.name
    find('#suggestion-3-0').click
    click_button "Create plan"

    # Expectations
    expect(@user.plans).to be_one
    @plan = Plan.last
    expect(current_path).to eql(plan_path(@plan))

    ##
    # User updates plan content...
    # Action
    expect(page).to have_css("input[type=text][value='#{@plan.title}']")
    within "#edit_plan_#{@plan.id}" do
      fill_in "Grant number", with: "Innodia"
      fill_in "Project abstract", with: "Plan abstract..."
      fill_in "ID", with: "ABCDEF"
      fill_in "ORCID iD", with: "My ORCID"
      fill_in "Phone", with: "07787 000 0000"
      click_button "Save"
    end

    # Reload the plan to get the latest from memory
    @plan.reload
    expect(current_path).to eql(overview_plan_path(@plan))
    expect(@plan.title).to eql("My test plan")
    expect(@plan.funder_name).to eql(@funding_org.name)
    # Grant number resolves to the numeric id from the stubbed XML.
    expect(@plan.grant_number).to eql("115797")
    expect(@plan.description).to eql("Plan abstract...")
    expect(@plan.identifier).to eql("ABCDEF")
    name = [@user.firstname, @user.surname].join(" ")
    expect(@plan.principal_investigator).to eql(name)
    expect(@plan.principal_investigator_identifier).to eql("My ORCID")
    expect(@plan.principal_investigator_email).to eql(@user.email)
    expect(@plan.principal_investigator_phone).to eql("07787 000 0000")
  end
end
| 33.822785 | 202 | 0.647829 |
e94af46927f28405fbb544ef450a0a31ce19fbf2 | 451 | cask "pichon" do
  version "1.0.9,11"
  # No stable checksum: the upstream DMG is not versioned per-release.
  sha256 :no_check

  url "https://desktop.icons8.com/updates/pichon/Pichon.dmg"
  name "Pichon"
  desc "Search utility for icons8"
  homepage "https://icons8.com/"

  # New versions are discovered from the Sparkle appcast feed.
  livecheck do
    url "https://desktop.icons8.com/updates/pichon/cast.xml"
    strategy :sparkle
  end

  app "Pichon.app"

  zap trash: [
    "~/Library/Application Support/com.icons8.Pichon",
    "~/Library/Application Support/Pichon",
  ]
end
| 20.5 | 60 | 0.685144 |
f7c21d64c838791836db0b7b9de53951b84f113b | 1,481 | class StatsController < ApplicationController
def speakers_count
@speakers = Speaker.all.sort_by { |s| s.talks.count }.reverse
@total_talks = Talk.all.count
end
def by_year
@year = year_params['year']
redirect_to root_path unless @year =~ /\d{4}/
@speakers = []
Speaker.all_by_year(@year).each do |speaker|
@speakers << {
speaker: speaker,
talks: speaker.talks.by_year(@year)
}
end
@speakers = @speakers.sort_by { |s| s[:talks].count }.reverse
@total_talks = Talk.by_year(@year).count
end
def by_speaker
redirect_to root_path unless speaker_params[:speaker] =~ /\d+/
@speaker = Speaker.find(speaker_params[:speaker])
@talks = @speaker.talks.order(date: :desc)
end
def by_speaker_year
@year = year_params['year']
redirect_to root_path unless @year =~ /\d{4}/
redirect_to root_path unless speaker_params[:speaker] =~ /\d+/
@speaker = Speaker.find(speaker_params[:speaker])
@talks = @speaker.talks.by_year(@year).order(date: :desc)
end
def speakers
speakers_data = Speaker.all
# sort by name
# speakers_data.sort! { |a,b| a.name.downcase <=> b.name.downcase }
speakers_data.order!(:name)
@speakers = []
@speakers_count = []
speakers_data.each do |s|
@speakers << s.name
@speakers_count << s.talks
end
end
def speaker_params
params.permit(:speaker)
end
def year_params
params.permit(:year)
end
end
| 24.278689 | 71 | 0.648886 |
f7caaa7957ffc7cf570ade3bd0daa7648c6bc00e | 1,074 | class YamlCpp < Formula
desc "C++ YAML parser and emitter for YAML 1.2 spec"
homepage "https://github.com/jbeder/yaml-cpp"
url "https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.6.3.tar.gz"
sha256 "77ea1b90b3718aa0c324207cb29418f5bced2354c2e483a9523d98c3460af1ed"
bottle do
cellar :any_skip_relocation
sha256 "e359e13c2fc0564c7500572af0a711d0a9f8b6655f0ab9d214d644ccc855ff68" => :catalina
sha256 "1e43334e4896703dda18ca52e76b4ec8bf850fb253d2553f7a9598b426d81773" => :mojave
sha256 "b4b5fc6d5d29494aa10d9ac75de1514afeda044ef736c5b1bc7953d1ad7162ca" => :high_sierra
end
depends_on "cmake" => :build
def install
system "cmake", ".", *std_cmake_args, "-DBUILD_SHARED_LIBS=ON"
system "make", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <yaml-cpp/yaml.h>
int main() {
YAML::Node node = YAML::Load("[0, 0, 0]");
node[0] = 1;
return 0;
}
EOS
system ENV.cxx, "test.cpp", "-std=c++11", "-L#{lib}", "-lyaml-cpp", "-o", "test"
system "./test"
end
end
| 31.588235 | 93 | 0.681564 |
d52b6f0700079b402a21fb597dc24b939b75b92b | 2,959 | require "spec_helper"
# Exercises chained has_many :through associations over a fixture graph of
# authors, posts and categories:
#   a1: p1>c1    a2: p2>c1, p3>c2    a3: p4>c3
describe 'Commenter use case (a1: p1>c1, a2: p2>c1, p3>c2, a3: p4>c3)' do
  before do
    @c1 = Category.create!
    @c2 = Category.create!
    @c3 = Category.create!
    @a1 = Author.create!
    @a2 = Author.create!
    @a3 = Author.create!
    @p1 = @a1.posts.create! :category => @c1
    @p2 = @a2.posts.create! :category => @c1
    @p3 = @a2.posts.create! :category => @c2
    @p4 = @a3.posts.create! :category => @c3
    # Reload so the authors see their freshly created associations.
    @a1.reload
    @a2.reload
  end

  it "a1.posts should == [p1]" do
    @a1.posts.should == [@p1]
  end

  it "a1.categories should == [c1]" do
    @a1.categories.should == [@c1]
  end

  it "a2.posts should == [p2, p3]" do
    @a2.posts.should == [@p2, @p3]
  end

  it "a2.categories should == [c1, c2]" do
    @a2.categories.should == [@c1, @c2]
  end

  describe "u1 comments on p2" do
    before do
      @u1 = User.create!
      @comment = @p2.comments.create! :user => @u1
    end

    it "u1.comments should == [comment]" do
      @u1.comments.should == [@comment]
    end

    it "a1.commenters should be empty" do
      @a1.commenters.should be_empty
    end

    it "a2.commenters should == [u1]" do
      @a2.commenters.should == [@u1]
    end

    it "u1.commented_posts should == [p2]" do
      @u1.commented_posts.should == [@p2]
    end

    it "u1.commented_posts.find_inflamatory(:all) should be empty" do
      @u1.commented_posts.find_inflamatory(:all).should be_empty
    end

    # Only run where the named_scope API is available.
    if ActiveRecord::Base.respond_to?(:named_scope)
      it "u1.commented_posts.inflamatory should be empty" do
        @u1.commented_posts.inflamatory.should be_empty
      end
    end

    it "u1.commented_authors should == [a2]" do
      @u1.commented_authors.should == [@a2]
    end

    it "u1.posts_of_interest should == [p1, p2, p3]" do
      @u1.posts_of_interest.should == [@p1, @p2, @p3]
    end

    it "u1.categories_of_interest should == [c1, c2]" do
      @u1.categories_of_interest.should == [@c1, @c2]
    end

    describe "when p2 is inflamatory" do
      before do
        @p2.toggle!(:inflamatory)
      end

      it "p2 should be inflamatory" do
        @p2.should be_inflamatory
      end

      it "u1.commented_posts.find_inflamatory(:all) should == [p2]" do
        # uniq ids is here (and next spec) because eager loading changed behaviour 2.0.2 => edge
        @u1.commented_posts.find_inflamatory(:all).collect(&:id).uniq.should == [@p2.id]
      end

      it "u1.posts_of_interest.find_inflamatory(:all).uniq should == [p2]" do
        @u1.posts_of_interest.find_inflamatory(:all).collect(&:id).uniq.should == [@p2.id]
      end

      # Only run where the named_scope API is available.
      if ActiveRecord::Base.respond_to?(:named_scope)
        it "u1.commented_posts.inflamatory should == [p2]" do
          @u1.commented_posts.inflamatory.should == [@p2]
        end

        it "u1.posts_of_interest.inflamatory should == [p2]" do
          @u1.posts_of_interest.inflamatory.should == [@p2]
        end
      end
    end
  end
end
| 27.146789 | 96 | 0.611355 |
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# Source: google/cloud/dialogflow/v2beta1/intent.proto for package 'google.cloud.dialogflow.v2beta1'
# Original file comments:
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/cloud/dialogflow/v2beta1/intent_pb'
module Google
module Cloud
module Dialogflow
module V2beta1
module Intents
# Service for managing [Intents][google.cloud.dialogflow.v2beta1.Intent].
class Service
include ::GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.cloud.dialogflow.v2beta1.Intents'
# Returns the list of all intents in the specified agent.
rpc :ListIntents, ::Google::Cloud::Dialogflow::V2beta1::ListIntentsRequest, ::Google::Cloud::Dialogflow::V2beta1::ListIntentsResponse
# Retrieves the specified intent.
rpc :GetIntent, ::Google::Cloud::Dialogflow::V2beta1::GetIntentRequest, ::Google::Cloud::Dialogflow::V2beta1::Intent
# Creates an intent in the specified agent.
#
# Note: You should always train an agent prior to sending it queries. See the
# [training
# documentation](https://cloud.google.com/dialogflow/es/docs/training).
rpc :CreateIntent, ::Google::Cloud::Dialogflow::V2beta1::CreateIntentRequest, ::Google::Cloud::Dialogflow::V2beta1::Intent
# Updates the specified intent.
#
# Note: You should always train an agent prior to sending it queries. See the
# [training
# documentation](https://cloud.google.com/dialogflow/es/docs/training).
rpc :UpdateIntent, ::Google::Cloud::Dialogflow::V2beta1::UpdateIntentRequest, ::Google::Cloud::Dialogflow::V2beta1::Intent
# Deletes the specified intent and its direct or indirect followup intents.
#
# Note: You should always train an agent prior to sending it queries. See the
# [training
# documentation](https://cloud.google.com/dialogflow/es/docs/training).
rpc :DeleteIntent, ::Google::Cloud::Dialogflow::V2beta1::DeleteIntentRequest, ::Google::Protobuf::Empty
# Updates/Creates multiple intents in the specified agent.
#
# This method is a [long-running
# operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations).
# The returned `Operation` type has the following method-specific fields:
#
# - `metadata`: An empty [Struct
# message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
# - `response`: [BatchUpdateIntentsResponse][google.cloud.dialogflow.v2beta1.BatchUpdateIntentsResponse]
#
# Note: You should always train an agent prior to sending it queries. See the
# [training
# documentation](https://cloud.google.com/dialogflow/es/docs/training).
rpc :BatchUpdateIntents, ::Google::Cloud::Dialogflow::V2beta1::BatchUpdateIntentsRequest, ::Google::Longrunning::Operation
# Deletes intents in the specified agent.
#
# This method is a [long-running
# operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations).
# The returned `Operation` type has the following method-specific fields:
#
# - `metadata`: An empty [Struct
# message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
# - `response`: An [Empty
# message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty)
#
# Note: You should always train an agent prior to sending it queries. See the
# [training
# documentation](https://cloud.google.com/dialogflow/es/docs/training).
rpc :BatchDeleteIntents, ::Google::Cloud::Dialogflow::V2beta1::BatchDeleteIntentsRequest, ::Google::Longrunning::Operation
end
Stub = Service.rpc_stub_class
end
end
end
end
end
| 51.157895 | 145 | 0.659465 |
module BashCompSpecHelpers
module ClassMethods
def basename
BashCompleteFixtures::Main.basename
end
end
def basename
self.class.basename
end
def build_request *words, cword: -1, cur: nil, split: nil, prev: nil
words.map! { |word|
if word == '$0'
basename
else
word
end
}
if cword < 0
cword = words.length + cword
end
# Set split based on the last word ending if it wasn't explicitly
# provided.
if split.nil?
split = words[-1].end_with? '='
end
Thor::Completion::Bash::Request.new \
words: words,
cword: cword,
cur: (cur || words[cword]),
prev: (prev || words[cword - 1]),
split: split
end
def self.included base
base.send :extend, ClassMethods
end
end | 19.238095 | 70 | 0.592822 |
require 'spec_helper'
# Specs for TxmlImporter. All #import examples use the same sample file;
# each description now states the specific property being asserted — they
# previously all shared the description 'imports a .txml file', which made
# failure reports ambiguous.
describe TxmlImporter do
  it 'has a version number' do
    expect(TxmlImporter::VERSION).not_to be nil
  end

  describe '#stats' do
    it 'reports the stats of a .txml file' do
      file_path = File.expand_path('../txml_importer/spec/sample_files/sample_1.txml')
      txml = TxmlImporter::Txml.new(file_path: file_path)
      expect(txml.stats).to eq({:tu_count=>112, :seg_count=>224, :language_pairs=>[["FR-FR", "EN"]]})
    end
  end

  describe '#import' do
    it 'imports the expected number of translation units' do
      file_path = File.expand_path('../txml_importer/spec/sample_files/sample_1.txml')
      txml = TxmlImporter::Txml.new(file_path: file_path).import
      expect(txml[0].length).to eq(112)
    end

    it 'imports the expected number of segments' do
      file_path = File.expand_path('../txml_importer/spec/sample_files/sample_1.txml')
      txml = TxmlImporter::Txml.new(file_path: file_path).import
      expect(txml[1].length).to eq(224)
    end

    it 'keeps the first translation unit and segment aligned' do
      file_path = File.expand_path('../txml_importer/spec/sample_files/sample_1.txml')
      txml = TxmlImporter::Txml.new(file_path: file_path).import
      expect(txml[0][0][0]).to eq(txml[1][0][0])
    end

    it 'keeps the last translation unit and segment aligned' do
      file_path = File.expand_path('../txml_importer/spec/sample_files/sample_1.txml')
      txml = TxmlImporter::Txml.new(file_path: file_path).import
      expect(txml[0][-1][0]).to eq(txml[1][-1][0])
    end

    it "labels the last segment's role as 'target'" do
      file_path = File.expand_path('../txml_importer/spec/sample_files/sample_1.txml')
      txml = TxmlImporter::Txml.new(file_path: file_path)
      expect(txml.import[1][-1][1]).to eq('target')
    end
  end
end
| 35.270833 | 101 | 0.670408 |
=begin
#Beanie ERP API
#An API specification for interacting with the Beanie ERP system
OpenAPI spec version: 0.8
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.0-SNAPSHOT
=end
require 'date'
module Beanie
class StockLocation
attr_accessor :id
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'id' => :'Integer'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @id.nil?
invalid_properties.push('invalid value for "id", id cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @id.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[id].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = Beanie.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 27.962963 | 107 | 0.612867 |
=begin
JRubyFX - Write JavaFX and FXML in Ruby
Copyright (C) 2013 The JRubyFX Team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'jrubyfx/dsl'
# JRubyFX DSL extensions for the JavaFX Rotate transform
class Java::javafx::scene::transform::Rotate
  extend JRubyFX::Utils::CommonConverters

  # Symbol-to-axis mapping shared by the :axis property converter and the
  # constructor converters below.
  AXIS_CONVERSIONS = map_converter(x_axis: X_AXIS,
                                   y_axis: Y_AXIS,
                                   z_axis: Z_AXIS,
                                   x: X_AXIS,
                                   y: Y_AXIS,
                                   z: Z_AXIS)

  converter_for :axis, [AXIS_CONVERSIONS]

  class << self
    extend JRubyFX::Utils::CommonConverters
    # BUG FIX: the original referenced @axis_conversions here, but inside
    # `class << self` that instance variable belongs to the singleton class
    # and was never assigned, so the :new converters silently received nil.
    # A lexically scoped constant is visible in both contexts.
    converter_for :new, [], [:none], [:none, AXIS_CONVERSIONS], [:none, :none, :none],
      [:none, :none, :none, :none], [:none, :none, :none, :none, AXIS_CONVERSIONS]
  end
end
| 34.225 | 95 | 0.639883 |
module FarmEvents
class Update < Mutations::Command
NOT_YOURS = "Not your farm_event."
include FarmEvents::ExecutableHelpers
include FarmEvents::FragmentHelpers
using Sequences::CanonicalCeleryHelpers
has_executable_fields
required do
model :farm_event, class: FarmEvent
model :device, class: Device
end
optional do
integer :repeat, min: 1
string :time_unit, in: FarmEvent::UNITS_OF_TIME
time :start_time, after: Time.now - 20.years
time :end_time, before: Time.now + 20.years
body
end
def validate
validate_executable if (executable_id || executable_type)
validate_ownership
end
def execute
p = inputs.except(:farm_event, :body, :device)
# Keeps cleanup operations on schedule:
p[:end_time] = next_start_time + 1.minute if is_one_time_event
FarmEvent.auto_sync_debounce do
FarmEvent.transaction do
handle_body_field
farm_event.update!(p)
farm_event
end
end
end
def validate_ownership
raise Errors::Forbidden, NOT_YOURS if farm_event.device != device
end
def is_one_time_event
next_time_unit == FarmEvent::NEVER
end
# The FarmEvent's time_unit, after saving completes.
# Defaults to farm_event.time_unit if the user is not updating that field.
def next_time_unit
(time_unit || farm_event.time_unit)
end
# The FarmEvent's start_Time, after saving completes.
# Defaults to farm_event.start_time if the user is not updating that field.
def next_start_time
(start_time || farm_event.start_time)
end
end
end
| 26.52381 | 79 | 0.684022 |
module ProcessSpecs
class Daemonizer
attr_reader :input, :data
def initialize
# Fast feedback for implementations without Process.daemon
raise NotImplementedError, "Process.daemon is not implemented" unless Process.respond_to? :daemon
@script = fixture __FILE__, "daemon.rb"
@input = tmp("process_daemon_input_file")
@data = tmp("process_daemon_data_file")
@args = []
end
def wait_for_daemon
sleep 0.1 until File.exist?(@data) and File.size?(@data)
end
def invoke(behavior, arguments=[])
args = Marshal.dump(arguments).unpack("H*")
args << @input << @data << behavior
ruby_exe @script, args: args
wait_for_daemon
return unless File.exist? @data
File.open(@data, "rb") { |f| return f.read.chomp }
end
end
class Signalizer
attr_reader :pid_file, :pid
def initialize(scenario=nil, ruby_exe=nil)
platform_is :windows do
fail "not supported on windows"
end
@script = fixture __FILE__, "kill.rb"
@pid_file = tmp("process_kill_signal_file")
rm_r @pid_file
@thread = Thread.new do
Thread.current.abort_on_exception = true
args = [@pid_file, scenario, ruby_exe]
@result = ruby_exe @script, args: args
end
Thread.pass while @thread.status and !File.exist?(@pid_file)
while @thread.status && (@pid.nil? || @pid == 0)
@pid = IO.read(@pid_file).chomp.to_i
end
end
def wait_on_result
# Ensure the process exits
begin
Process.kill :TERM, pid if pid
rescue Errno::ESRCH
# Ignore the process not existing
end
@thread.join
end
def cleanup
wait_on_result
rm_r pid_file
end
def result
wait_on_result
@result.chomp if @result
end
end
end
| 24.051948 | 103 | 0.62743 |
# :stopdoc:
ENV['RC_ARCHS'] = '' if RUBY_PLATFORM =~ /darwin/
require 'mkmf'
ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))
#
# functions
#
# True when the build target is Windows (MinGW or MSVC toolchain).
def windows?
  RbConfig::CONFIG['target_os'] =~ /mingw32|mswin/
end

# True when the build target is Solaris.
def solaris?
  RbConfig::CONFIG['target_os'] =~ /solaris/
end

# True when the build target is macOS.
def darwin?
  RbConfig::CONFIG['target_os'] =~ /darwin/
end

# True for every other (generic *nix) build target.
def nix?
  ! (windows? || solaris? || darwin?)
end
# Convert a Windows-style absolute path ("C:/path/to/foo") into the
# sh-compatible form ("/C/path/to/foo").
#
# libxslt 1.1.29's configure.in uses AC_PATH_TOOL, which treats ":" as a
# $PATH separator, so a drive-letter path would otherwise break
# configuration. Non-Windows builds and paths without a drive prefix are
# returned unchanged.
def sh_export_path path
  return path unless windows?

  m = /^([A-Z]):(\/.*)/.match(path)
  m ? File.join("/", m[1], m[2]) : path
end
# Handle --help: print the supported configure options and terminate
# immediately (exit! skips at_exit hooks, so no Makefile is generated).
def do_help
  print <<HELP
usage: ruby #{$0} [options]
    --disable-clean
        Do not clean out intermediate files after successful build.
    --disable-static
        Do not statically link bundled libraries.
    --with-iconv-dir=DIR
        Use the iconv library placed under DIR.
    --with-zlib-dir=DIR
        Use the zlib library placed under DIR.
    --use-system-libraries
        Use system libraries instead of building and using the bundled
        libraries.
    --with-xml2-dir=DIR / --with-xml2-config=CONFIG
    --with-xslt-dir=DIR / --with-xslt-config=CONFIG
    --with-exslt-dir=DIR / --with-exslt-config=CONFIG
        Use libxml2/libxslt/libexslt as specified.
    --enable-cross-build
        Do cross-build.
HELP
  exit! 0
end
# Handle --clean: remove intermediate build artifacts (and, for static
# builds, the whole installed ports tree) then terminate without
# generating a Makefile.
def do_clean
  require 'pathname'
  require 'fileutils'

  root = Pathname(ROOT)
  pwd  = Pathname(Dir.pwd)

  # Skip if this is a development work tree
  unless (root + '.git').exist?
    message "Cleaning files only used during build.\n"

    # (root + 'tmp') cannot be removed at this stage because
    # nokogiri.so is yet to be copied to lib.

    # clean the ports build directory
    Pathname.glob(pwd.join('tmp', '*', 'ports')) do |dir|
      FileUtils.rm_rf(dir, verbose: true)
    end

    if enable_config('static')
      # ports installation can be safely removed if statically linked.
      FileUtils.rm_rf(root + 'ports', verbose: true)
    else
      FileUtils.rm_rf(root + 'ports' + 'archives', verbose: true)
    end
  end

  exit! 0
end
# Locate +pkg+ via mkmf's pkg_config, falling back to the pkg-config gem.
# Returns whatever pkg_config returned, or a [cflags, ldflags, libs]
# triple from the gem, or nil when the package cannot be found.
def package_config pkg, options={}
  package = pkg_config(pkg)
  return package if package

  begin
    require 'rubygems'
    gem 'pkg-config', (gem_ver='~> 1.1.7')
    require 'pkg-config' and message("Using pkg-config gem version #{PKGConfig::VERSION}\n")
  rescue LoadError
    message "pkg-config could not be used to find #{pkg}\nPlease install either `pkg-config` or the pkg-config gem per\n\n gem install pkg-config -v #{gem_ver.inspect}\n\n"
  else
    return nil unless PKGConfig.have_package(pkg)

    cflags  = PKGConfig.cflags(pkg)
    ldflags = PKGConfig.libs_only_L(pkg)
    libs    = PKGConfig.libs_only_l(pkg)

    Logging::message "PKGConfig package configuration for %s\n", pkg
    Logging::message "cflags: %s\nldflags: %s\nlibs: %s\n\n", cflags, ldflags, libs

    [cflags, ldflags, libs]
  end
end
# Smoke-test that the C compiler can build a trivial program (with -Werror,
# so warnings from probed flags count as failures).
def nokogiri_try_compile
  try_compile "int main() {return 0;}", "", {werror: true}
end
# Preprocess a guard against libxml/xmlversion.h. With no argument it only
# checks that the header is present; with "X.Y.Z" it fails when the
# installed libxml2 is older than that version.
def check_libxml_version version=nil
  source = if version.nil?
             <<-SRC
#include <libxml/xmlversion.h>
SRC
           else
             # LIBXML_VERSION packs e.g. 2.9.3 as the integer 20903.
             version_int = sprintf "%d%2.2d%2.2d", *(version.split("."))
             <<-SRC
#include <libxml/xmlversion.h>
#if LIBXML_VERSION < #{version_int}
#error libxml2 is older than #{version}
#endif
SRC
           end

  try_cpp source
end
# Report and test whether the C compiler accepts +flags+; returns
# true/false. The flags are only probed here — with_cflags restores
# $CFLAGS afterwards, so the caller decides whether to keep them.
def add_cflags(flags)
  print "checking if the C compiler accepts #{flags}... "
  with_cflags("#{$CFLAGS} #{flags}") do
    if nokogiri_try_compile
      puts 'yes'
      true
    else
      puts 'no'
      false
    end
  end
end
# Snapshot mkmf's mutable globals, run the block, then restore them — so
# probing calls (dir_config, have_library, ...) leave no side effects.
def preserving_globals
  values = [
    $arg_config,
    $CFLAGS, $CPPFLAGS,
    $LDFLAGS, $LIBPATH, $libs
  ].map(&:dup)
  yield
ensure
  $arg_config,
  $CFLAGS, $CPPFLAGS,
  $LDFLAGS, $LIBPATH, $libs =
    values
end
# Abort the build, pointing the user at mkmf.log, when a required library
# is missing.
def asplode(lib)
  abort "-----\n#{lib} is missing. Please locate mkmf.log to investigate how it is failing.\n-----"
end
# True when an iconv implementation can be linked, either directly or via
# -liconv. An optional block may adjust configuration (e.g. dir_config)
# before each link attempt; globals are restored after every try.
def have_iconv?(using = nil)
  checking_for(using ? "iconv using #{using}" : 'iconv') do
    ['', '-liconv'].any? do |opt|
      preserving_globals do
        yield if block_given?

        try_link(<<-'SRC', opt)
#include <stdlib.h>
#include <iconv.h>

int main(void)
{
    iconv_t cd = iconv_open("", "");
    iconv(cd, NULL, NULL, NULL, NULL);
    return EXIT_SUCCESS;
}
SRC
      end
    end
  end
end
# Compute the ./configure flags needed for libxml2's iconv support.
# Priority: explicit --with-iconv-dir/--with-opt-dir, then a directly
# linkable iconv, then pkg-config's libiconv. Aborts the build when no
# usable iconv is found.
def iconv_configure_flags
  # If --with-iconv-dir or --with-opt-dir is given, it should be
  # the first priority
  %w[iconv opt].each do |name|
    if (config = preserving_globals { dir_config(name) }).any? &&
        have_iconv?("--with-#{name}-* flags") { dir_config(name) }
      idirs, ldirs = config.map do |dirs|
        Array(dirs).flat_map do |dir|
          dir.split(File::PATH_SEPARATOR)
        end if dirs
      end

      return [
        '--with-iconv=yes',
        *("CPPFLAGS=#{idirs.map { |dir| '-I' << dir }.join(' ')}" if idirs),
        *("LDFLAGS=#{ldirs.map { |dir| '-L' << dir }.join(' ')}" if ldirs),
      ]
    end
  end

  if have_iconv?
    return ['--with-iconv=yes']
  end

  if (config = preserving_globals { package_config('libiconv') }) &&
      have_iconv?('pkg-config libiconv') { package_config('libiconv') }
    cflags, ldflags, libs = config

    return [
      '--with-iconv=yes',
      "CPPFLAGS=#{cflags}",
      "LDFLAGS=#{ldflags}",
      "LIBS=#{libs}",
    ]
  end

  asplode "libiconv"
end
# When using rake-compiler-dock on Windows, the underlying Virtualbox shared
# folders don't support symlinks, but libiconv expects it for a build on
# Linux. We work around this limitation by using the temp dir for cooking.
# Runs the block in the chosen build directory: /tmp when cross-building
# for Windows under rake-compiler-dock (see comment above), "." otherwise.
def chdir_for_build
  build_dir = ENV['RCD_HOST_RUBY_PLATFORM'].to_s =~ /mingw|mswin|cygwin/ ? '/tmp' : '.'
  Dir.chdir(build_dir) do
    yield
  end
end
# Download, configure, build and install the named MiniPortile recipe into
# ports/, applying any patches/<name>/*.patch files. Yields the recipe so
# the caller can set files/configure options before cooking. A checkpoint
# file prevents re-cooking a recipe that is already installed.
def process_recipe(name, version, static_p, cross_p)
  MiniPortile.new(name, version).tap do |recipe|
    recipe.target = portsdir = File.join(ROOT, "ports")
    # Prefer host_alias over host in order to use i586-mingw32msvc as
    # correct compiler prefix for cross build, but use host if not set.
    recipe.host = RbConfig::CONFIG["host_alias"].empty? ? RbConfig::CONFIG["host"] : RbConfig::CONFIG["host_alias"]
    recipe.patch_files = Dir[File.join(ROOT, "patches", name, "*.patch")].sort

    yield recipe

    # NAME=VALUE entries in configure_options are pulled out here and
    # re-appended as environment assignments at the end.
    env = Hash.new do |hash, key|
      hash[key] = "#{ENV[key]}" # (ENV[key].dup rescue '')
    end

    recipe.configure_options.flatten!

    recipe.configure_options.delete_if do |option|
      case option
      when /\A(\w+)=(.*)\z/
        env[$1] = $2
        true
      else
        false
      end
    end

    if static_p
      recipe.configure_options += [
        "--disable-shared",
        "--enable-static",
      ]
      env['CFLAGS'] = "-fPIC #{env['CFLAGS']}"
    else
      recipe.configure_options += [
        "--enable-shared",
        "--disable-static",
      ]
    end

    if cross_p
      recipe.configure_options += [
        "--target=#{recipe.host}",
        "--host=#{recipe.host}",
      ]
    end

    if RbConfig::CONFIG['target_cpu'] == 'universal'
      %w[CFLAGS LDFLAGS].each do |key|
        unless env[key].include?('-arch')
          env[key] << ' ' << RbConfig::CONFIG['ARCH_FLAG']
        end
      end
    end

    recipe.configure_options += env.map do |key, value|
      "#{key}=#{value}"
    end

    message <<-"EOS"
************************************************************************
IMPORTANT NOTICE:
Building Nokogiri with a packaged version of #{name}-#{version}#{'.' if recipe.patch_files.empty?}
EOS

    unless recipe.patch_files.empty?
      message "with the following patches applied:\n"

      recipe.patch_files.each do |patch|
        message "\t- %s\n" % File.basename(patch)
      end
    end

    message <<-"EOS"
Team Nokogiri will keep on doing their best to provide security
updates in a timely manner, but if this is a concern for you and want
to use the system library instead; abort this installation process and
reinstall nokogiri as follows:
gem install nokogiri -- --use-system-libraries
[--with-xml2-config=/path/to/xml2-config]
[--with-xslt-config=/path/to/xslt-config]
If you are using Bundler, tell it to use the option:
bundle config build.nokogiri --use-system-libraries
bundle install
EOS

    message <<-"EOS" if name == 'libxml2'
Note, however, that nokogiri is not fully compatible with arbitrary
versions of libxml2 provided by OS/package vendors.
EOS

    message <<-"EOS"
************************************************************************
EOS

    checkpoint = "#{recipe.target}/#{recipe.name}-#{recipe.version}-#{recipe.host}.installed"

    unless File.exist?(checkpoint)
      chdir_for_build do
        recipe.cook
      end
      FileUtils.touch checkpoint
    end

    recipe.activate
  end
end
# Map a "-lfoo" linker flag to its static archive filename
# ("libfoo.<$LIBEXT>"). Returns nil for flags not of the -l form.
def lib_a(ldflag)
  m = /\A-l(.+)/.match(ldflag)
  "lib#{m[1]}.#{$LIBEXT}" if m
end
# Whether to build against system libxml2/libxslt instead of the bundled
# copies; set by --use-system-libraries or NOKOGIRI_USE_SYSTEM_LIBRARIES.
def using_system_libraries?
  arg_config('--use-system-libraries', !!ENV['NOKOGIRI_USE_SYSTEM_LIBRARIES'])
end
#
# main
#
# Dispatch --help/--clean early; both helpers call exit! and never return.
case
when arg_config('--help')
  do_help
when arg_config('--clean')
  do_clean
end
# Honor a user-supplied CC for the extension build itself...
RbConfig::MAKEFILE_CONFIG['CC'] = ENV['CC'] if ENV['CC']

# use same c compiler for libxml and libxslt
ENV['CC'] = RbConfig::MAKEFILE_CONFIG['CC']

$LIBS << " #{ENV["LIBS"]}"

# Read CFLAGS from ENV and make sure compiling works.
add_cflags(ENV["CFLAGS"])

# Platform-specific defines and workarounds follow.
if windows?
  $CFLAGS << " -DXP_WIN -DXP_WIN32 -DUSE_INCLUDED_VASPRINTF"
end

if solaris?
  $CFLAGS << " -DUSE_INCLUDED_VASPRINTF"
end

if darwin?
  # Let Apple LLVM/clang 5.1 ignore unknown compiler flags
  add_cflags("-Wno-error=unused-command-line-argument-hard-error-in-future")
end

if nix?
  $CFLAGS << " -g -DXP_UNIX"
end

if RUBY_PLATFORM =~ /mingw/i
  # Work around a character escaping bug in MSYS by passing an arbitrary
  # double quoted parameter to gcc. See https://sourceforge.net/p/mingw/bugs/2142
  $CPPFLAGS << ' "-Idummypath"'
end

if RbConfig::MAKEFILE_CONFIG['CC'] =~ /gcc/
  $CFLAGS << " -O3" unless $CFLAGS[/-O\d/]
  $CFLAGS << " -Wall -Wcast-qual -Wwrite-strings -Wconversion -Wmissing-noreturn -Winline"
end
case
when using_system_libraries?
message "Building nokogiri using system libraries.\n"
dir_config('zlib')
# Using system libraries means we rely on the system libxml2 with
# regard to the iconv support.
dir_config('xml2').any? or package_config('libxml-2.0')
dir_config('xslt').any? or package_config('libxslt')
dir_config('exslt').any? or package_config('libexslt')
check_libxml_version or abort "ERROR: cannot discover where libxml2 is located on your system. please make sure `pkg-config` is installed."
check_libxml_version("2.6.21") or abort "ERROR: libxml2 version 2.6.21 or later is required!"
check_libxml_version("2.9.3") or warn "WARNING: libxml2 version 2.9.3 or later is highly recommended, but proceeding anyway."
else
message "Building nokogiri using packaged libraries.\n"
# The gem version constraint in the Rakefile is not respected at install time.
# Keep this version in sync with the one in the Rakefile !
require 'rubygems'
gem 'mini_portile2', '~> 2.1.0'
require 'mini_portile2'
message "Using mini_portile version #{MiniPortile::VERSION}\n"
require 'yaml'
static_p = enable_config('static', true) or
message "Static linking is disabled.\n"
dir_config('zlib')
dependencies = YAML.load_file(File.join(ROOT, "dependencies.yml"))
cross_build_p = enable_config("cross-build")
if cross_build_p || windows?
zlib_recipe = process_recipe("zlib", dependencies["zlib"]["version"], static_p, cross_build_p) do |recipe|
recipe.files = [{
url: "http://zlib.net/#{recipe.name}-#{recipe.version}.tar.gz",
md5: dependencies["zlib"]["md5"]
}]
class << recipe
attr_accessor :cross_build_p
def configure
Dir.chdir work_path do
mk = File.read 'win32/Makefile.gcc'
File.open 'win32/Makefile.gcc', 'wb' do |f|
f.puts "BINARY_PATH = #{path}/bin"
f.puts "LIBRARY_PATH = #{path}/lib"
f.puts "INCLUDE_PATH = #{path}/include"
mk.sub!(/^PREFIX\s*=\s*$/, "PREFIX = #{host}-") if cross_build_p
f.puts mk
end
end
end
def configured?
Dir.chdir work_path do
!! (File.read('win32/Makefile.gcc') =~ /^BINARY_PATH/)
end
end
def compile
execute "compile", "make -f win32/Makefile.gcc"
end
def install
execute "install", "make -f win32/Makefile.gcc install"
end
end
recipe.cross_build_p = cross_build_p
end
libiconv_recipe = process_recipe("libiconv", dependencies["libiconv"]["version"], static_p, cross_build_p) do |recipe|
recipe.files = [{
url: "http://ftp.gnu.org/pub/gnu/libiconv/#{recipe.name}-#{recipe.version}.tar.gz",
md5: dependencies["libiconv"]["md5"]
}]
recipe.configure_options += [
"CPPFLAGS=-Wall",
"CFLAGS=-O2 -g",
"CXXFLAGS=-O2 -g",
"LDFLAGS="
]
end
else
if darwin? && !have_header('iconv.h')
abort <<'EOM'.chomp
-----
The file "iconv.h" is missing in your build environment,
which means you haven't installed Xcode Command Line Tools properly.
To install Command Line Tools, try running `xcode-select --install` on
terminal and follow the instructions. If it fails, open Xcode.app,
select from the menu "Xcode" - "Open Developer Tool" - "More Developer
Tools" to open the developer site, download the installer for your OS
version and run it.
-----
EOM
end
end
unless windows?
preserving_globals {
have_library('z', 'gzdopen', 'zlib.h')
} or abort 'zlib is missing; necessary for building libxml2'
end
libxml2_recipe = process_recipe("libxml2", dependencies["libxml2"]["version"], static_p, cross_build_p) do |recipe|
recipe.files = [{
url: "http://xmlsoft.org/sources/#{recipe.name}-#{recipe.version}.tar.gz",
md5: dependencies["libxml2"]["md5"]
}]
recipe.configure_options += [
"--without-python",
"--without-readline",
*(zlib_recipe ? ["--with-zlib=#{zlib_recipe.path}", "CFLAGS=-I#{zlib_recipe.path}/include"] : []),
*(libiconv_recipe ? "--with-iconv=#{libiconv_recipe.path}" : iconv_configure_flags),
"--with-c14n",
"--with-debug",
"--with-threads"
]
end
libxslt_recipe = process_recipe("libxslt", dependencies["libxslt"]["version"], static_p, cross_build_p) do |recipe|
recipe.files = [{
url: "http://xmlsoft.org/sources/#{recipe.name}-#{recipe.version}.tar.gz",
md5: dependencies["libxslt"]["md5"]
}]
recipe.configure_options += [
"--without-python",
"--without-crypto",
"--with-debug",
"--with-libxml-prefix=#{sh_export_path(libxml2_recipe.path)}"
]
end
$CFLAGS << ' ' << '-DNOKOGIRI_USE_PACKAGED_LIBRARIES'
$LIBPATH = ["#{zlib_recipe.path}/lib"] | $LIBPATH if zlib_recipe
$LIBPATH = ["#{libiconv_recipe.path}/lib"] | $LIBPATH if libiconv_recipe
have_lzma = preserving_globals {
have_library('lzma')
}
$libs = $libs.shellsplit.tap do |libs|
[libxml2_recipe, libxslt_recipe].each do |recipe|
libname = recipe.name[/\Alib(.+)\z/, 1]
File.join(recipe.path, "bin", "#{libname}-config").tap do |config|
# call config scripts explicit with 'sh' for compat with Windows
$CPPFLAGS = `sh #{config} --cflags`.strip << ' ' << $CPPFLAGS
`sh #{config} --libs`.strip.shellsplit.each do |arg|
case arg
when /\A-L(.+)\z/
# Prioritize ports' directories
if $1.start_with?(ROOT + '/')
$LIBPATH = [$1] | $LIBPATH
else
$LIBPATH = $LIBPATH | [$1]
end
when /\A-l./
libs.unshift(arg)
else
$LDFLAGS << ' ' << arg.shellescape
end
end
end
# Defining a macro that expands to a C string; double quotes are significant.
$CPPFLAGS << ' ' << "-DNOKOGIRI_#{recipe.name.upcase}_PATH=\"#{recipe.path}\"".inspect
$CPPFLAGS << ' ' << "-DNOKOGIRI_#{recipe.name.upcase}_PATCHES=\"#{recipe.patch_files.map { |path| File.basename(path) }.join(' ')}\"".inspect
case libname
when 'xml2'
# xslt-config --libs or pkg-config libxslt --libs does not include
# -llzma, so we need to add it manually when linking statically.
if static_p && have_lzma
# Add it at the end; GH #988
libs << '-llzma'
end
when 'xslt'
# xslt-config does not have a flag to emit options including
# -lexslt, so add it manually.
libs.unshift('-lexslt')
end
end
end.shelljoin
if static_p
$libs = $libs.shellsplit.map do |arg|
case arg
when '-lxml2'
File.join(libxml2_recipe.path, 'lib', lib_a(arg))
when '-lxslt', '-lexslt'
File.join(libxslt_recipe.path, 'lib', lib_a(arg))
else
arg
end
end.shelljoin
end
end
{
"xml2" => ['xmlParseDoc', 'libxml/parser.h'],
"xslt" => ['xsltParseStylesheetDoc', 'libxslt/xslt.h'],
"exslt" => ['exsltFuncRegister', 'libexslt/exslt.h'],
}.each do |lib, (func, header)|
have_func(func, header) ||
have_library(lib, func, header) ||
have_library("lib#{lib}", func, header) or
asplode("lib#{lib}")
end
have_func('xmlHasFeature') or abort "xmlHasFeature() is missing."
have_func('xmlFirstElementChild')
have_func('xmlRelaxNGSetParserStructuredErrors')
have_func('xmlRelaxNGSetParserStructuredErrors')
have_func('xmlRelaxNGSetValidStructuredErrors')
have_func('xmlSchemaSetValidStructuredErrors')
have_func('xmlSchemaSetParserStructuredErrors')
if ENV['CPUPROFILE']
unless find_library('profiler', 'ProfilerEnable', *LIB_DIRS)
abort "google performance tools are not installed"
end
end
create_makefile('nokogiri/nokogiri')
if enable_config('clean', true)
# Do not clean if run in a development work tree.
File.open('Makefile', 'at') do |mk|
mk.print <<EOF
all: clean-ports
clean-ports: $(DLLIB)
-$(Q)$(RUBY) $(srcdir)/extconf.rb --clean --#{static_p ? 'enable' : 'disable'}-static
EOF
end
end
# :startdoc:
| 27.809524 | 175 | 0.628853 |
$:.push File.expand_path("lib", __dir__)
# Maintain your gem's version:
require "alchemy_i18n/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name        = "alchemy_i18n"
  s.version     = AlchemyI18n::VERSION
  s.authors     = ["Thomas von Deyen"]
  s.email       = ["[email protected]"]
  s.homepage    = "https://alchemy-cms.com"
  s.summary     = "AlchemyCMS translation files"
  s.description = "Translation files for AlchemyCMS"
  s.license     = "MIT"

  # Package locales plus supporting code; no test files are shipped.
  s.files = Dir["{app,locales,lib,vendor}/**/*", "LICENSE", "README.md"]

  s.add_dependency "alchemy_cms", [">= 4.4.0.a", "< 6.0"]
  s.add_dependency "rails-i18n"
end
| 30.772727 | 72 | 0.654357 |
# frozen_string_literal: true
require "spec_helper"
require "dependabot/dependency"
require "dependabot/dependency_file"
require "dependabot/python/update_checker/pipenv_version_resolver"
namespace = Dependabot::Python::UpdateChecker
RSpec.describe namespace::PipenvVersionResolver do
let(:resolver) do
described_class.new(
dependency: dependency,
dependency_files: dependency_files,
credentials: credentials
)
end
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}]
end
let(:dependency_files) { [pipfile, lockfile] }
let(:pipfile) do
Dependabot::DependencyFile.new(
name: "Pipfile",
content: fixture("pipfiles", pipfile_fixture_name)
)
end
let(:pipfile_fixture_name) { "exact_version" }
let(:lockfile) do
Dependabot::DependencyFile.new(
name: "Pipfile.lock",
content: fixture("lockfiles", lockfile_fixture_name)
)
end
let(:lockfile_fixture_name) { "exact_version.lock" }
let(:dependency) do
Dependabot::Dependency.new(
name: dependency_name,
version: dependency_version,
requirements: dependency_requirements,
package_manager: "pip"
)
end
let(:dependency_name) { "requests" }
let(:dependency_version) { "2.18.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==2.18.0",
groups: ["default"],
source: nil
}]
end
  describe "#latest_resolvable_version" do
    # Resolves the dependency after unlocking it to +updated_requirement+;
    # contexts below override the requirement range under test.
    subject do
      resolver.latest_resolvable_version(requirement: updated_requirement)
    end
    let(:updated_requirement) { ">= 2.18.0, <= 2.18.4" }
context "with a lockfile" do
let(:dependency_files) { [pipfile, lockfile] }
let(:dependency_version) { "2.18.0" }
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
context "when not unlocking the requirement" do
let(:updated_requirement) { "== 2.18.0" }
it { is_expected.to be >= Gem::Version.new("2.18.0") }
end
end
context "without a lockfile (but with a latest version)" do
let(:dependency_files) { [pipfile] }
let(:dependency_version) { nil }
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
end
context "when the latest version isn't allowed" do
let(:updated_requirement) { ">= 2.18.0, <= 2.18.3" }
it { is_expected.to eq(Gem::Version.new("2.18.3")) }
end
context "when the latest version is nil" do
let(:updated_requirement) { ">= 2.18.0" }
it { is_expected.to be >= Gem::Version.new("2.19.0") }
end
context "with a dependency with a hard name" do
let(:pipfile_fixture_name) { "hard_names" }
let(:lockfile_fixture_name) { "hard_names.lock" }
let(:dependency_name) { "discord-py" }
let(:dependency_version) { "0.16.1" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==0.16.1",
groups: ["default"],
source: nil
}]
end
let(:updated_requirement) { ">= 0.16.1, <= 1.0.0" }
it { is_expected.to be >= Gem::Version.new("0.16.12") }
end
context "when another dependency has been yanked" do
let(:pipfile_fixture_name) { "yanked" }
let(:lockfile_fixture_name) { "yanked.lock" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to start_with(
"CRITICAL:pipenv.patched.notpip._internal.index:"\
"Could not find a version that satisfies the requirement "\
"pytest==10.4.0"
)
end
end
end
context "when the Python version conflicts with another dependency" do
let(:pipfile_fixture_name) { "unresolvable_python_version" }
let(:dependency_files) { [pipfile] }
let(:dependency_name) { "pytest" }
let(:dependency_version) { "3.4.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==3.4.0",
groups: ["develop"],
source: nil
}]
end
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to eq(
"pipenv.patched.notpip._internal.exceptions."\
"UnsupportedPythonVersion: futures requires Python '>=2.6, <3' "\
"but the running Python is 3.7.9"
)
end
end
end
context "with a subdependency" do
let(:dependency_name) { "py" }
let(:dependency_version) { "1.5.3" }
let(:dependency_requirements) { [] }
let(:updated_requirement) { ">= 1.5.3, <= 1.7.0" }
it { is_expected.to eq(Gem::Version.new("1.7.0")) }
context "that no longer appears in the lockfile after updating" do
let(:lockfile_fixture_name) { "unnecessary_subdependency.lock" }
let(:dependency_name) { "setuptools" }
let(:dependency_version) { "40.2.0" }
let(:updated_requirement) { ">= 40.2.0, <= 41.0.0" }
it { is_expected.to be_nil }
end
end
context "with a dependency that can only be built on a mac" do
let(:pipfile_fixture_name) { "unsupported_dep" }
let(:lockfile_fixture_name) { "unsupported_dep.lock" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to start_with(
"Dependabot detected a dependency that can't be built on linux"
)
end
end
end
context "with a path dependency" do
let(:dependency_files) { [pipfile, lockfile, setupfile] }
let(:setupfile) do
Dependabot::DependencyFile.new(
name: "mydep/setup.py",
content: fixture("setup_files", setupfile_fixture_name)
)
end
let(:setupfile_fixture_name) { "small.py" }
let(:pipfile_fixture_name) { "path_dependency_not_self" }
let(:lockfile_fixture_name) { "path_dependency_not_self.lock" }
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
context "that needs to be sanitized" do
let(:setupfile_fixture_name) { "small_needs_sanitizing.py" }
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
end
context "that imports a setup.cfg" do
let(:dependency_files) { [pipfile, lockfile, setupfile, setup_cfg] }
let(:setupfile_fixture_name) { "with_pbr.py" }
let(:setup_cfg) do
Dependabot::DependencyFile.new(
name: "mydep/setup.cfg",
content: fixture("setup_files", "setup.cfg")
)
end
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
end
end
context "with a required python version" do
let(:pipfile_fixture_name) { "required_python" }
let(:lockfile_fixture_name) { "required_python.lock" }
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
context "that comes from a Poetry file and includes || logic" do
let(:pipfile_fixture_name) { "exact_version" }
let(:dependency_files) { [pipfile, pyproject] }
let(:pyproject) do
Dependabot::DependencyFile.new(
name: "pyproject.toml",
content: fixture("pyproject_files", "pyproject.toml")
)
end
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
end
context "that is invalid" do
let(:pipfile_fixture_name) { "required_python_invalid" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to start_with(
"Pipenv does not support specifying Python ranges"
)
end
end
end
context "that is unsupported" do
let(:pipfile_fixture_name) { "required_python_unsupported" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).
to start_with("Dependabot detected the following Python")
expect(error.message).to include("3.4.*")
expect(error.message).
to include("supported in Dependabot: 3.9.1, 3.9.0, 3.8.6")
end
end
end
context "that is implicit" do
let(:pipfile_fixture_name) { "required_python_implicit" }
let(:lockfile_fixture_name) { "required_python_implicit.lock" }
let(:dependency_name) { "pytest" }
let(:dependency_version) { "3.4.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==3.4.0",
groups: ["develop"],
source: nil
}]
end
let(:updated_requirement) { ">= 3.4.0, <= 3.8.2" }
it { is_expected.to eq(Gem::Version.new("3.8.2")) }
context "when updating a python-2 only dep" do
let(:dependency_name) { "futures" }
let(:dependency_version) { "3.2.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==3.2.0",
groups: ["default"],
source: nil
}]
end
let(:updated_requirement) { ">= 3.2.0, <= 3.3.0" }
it { is_expected.to be >= Gem::Version.new("3.3.0") }
end
context "due to a version in the lockfile" do
let(:pipfile_fixture_name) { "required_python_implicit_2" }
let(:lockfile_fixture_name) { "required_python_implicit_2.lock" }
it { is_expected.to eq(Gem::Version.new("3.8.2")) }
end
end
context "where updating to the latest would break Python compatibility" do
let(:pipfile_fixture_name) { "required_python_blocking" }
let(:dependency_name) { "django" }
let(:dependency_version) { "1.1.14" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==1.1.14",
groups: ["default"],
source: nil
}]
end
let(:updated_requirement) { ">= 1.1.14, <= 2.1.4" }
it "updates to the latest resolvable 1.x version" do
expect(subject.to_s).to start_with("1.")
end
end
context "for a resolution that has caused trouble in the past" do
let(:dependency_files) { [pipfile] }
let(:pipfile_fixture_name) { "problematic_resolution" }
let(:dependency_name) { "twilio" }
let(:dependency_version) { nil }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "*",
groups: ["default"],
source: nil
}]
end
let(:updated_requirement) { ">= 3.4.0, <= 6.14.6" }
it { is_expected.to eq(Gem::Version.new("6.14.6")) }
end
end
context "with an unfetchable requirement" do
let(:dependency_files) { [pipfile] }
let(:pipfile_fixture_name) { "bad_requirement" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to eq(
"packaging.specifiers.InvalidSpecifier: "\
"Invalid specifier: '3.4.0'"
)
end
end
end
context "with extra requirements" do
let(:dependency_name) { "raven" }
let(:dependency_version) { "5.27.1" }
let(:updated_requirement) { ">= 5.27.1, <= 7.0.0" }
let(:pipfile_fixture_name) { "extra_subdependency" }
let(:lockfile_fixture_name) { "extra_subdependency.lock" }
it { is_expected.to be >= Gem::Version.new("6.7.0") }
end
context "with a git source" do
context "for another dependency, that can't be reached" do
let(:pipfile_fixture_name) { "git_source_unreachable" }
let(:lockfile_fixture_name) { "git_source_unreachable.lock" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::GitDependenciesNotReachable) do |error|
expect(error.dependency_urls).
to eq(["https://github.com/user/django.git"])
end
end
end
context "for another dependency, that has a bad ref" do
let(:pipfile_fixture_name) { "git_source_bad_ref" }
let(:lockfile_fixture_name) { "git_source_bad_ref.lock" }
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::GitDependencyReferenceNotFound) do |err|
expect(err.dependency).to eq("pythonfinder")
end
end
end
end
context "with an environment variable source" do
let(:pipfile_fixture_name) { "environment_variable_source" }
let(:lockfile_fixture_name) { "environment_variable_source.lock" }
context "with a matching credential" do
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}, {
"type" => "python_index",
"index-url" => "https://pypi.org/simple"
}]
end
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
end
end
context "with a `nil` requirement" do
let(:dependency_files) { [pipfile] }
let(:dependency_version) { nil }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==2.18.0",
groups: ["default"],
source: nil
}, {
file: "requirements.txt",
requirement: nil,
groups: ["default"],
source: nil
}]
end
it { is_expected.to eq(Gem::Version.new("2.18.4")) }
end
context "with a conflict at the latest version" do
let(:pipfile_fixture_name) { "conflict_at_latest" }
let(:lockfile_fixture_name) { "conflict_at_latest.lock" }
let(:dependency_version) { "2.6.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==2.6.0",
groups: ["default"],
source: nil
}]
end
it { is_expected.to be_nil }
end
context "with a conflict at the current version" do
let(:pipfile_fixture_name) { "conflict_at_current" }
let(:lockfile_fixture_name) { "conflict_at_current.lock" }
let(:dependency_version) { "2.18.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==2.18.0",
groups: ["default"],
source: nil
}]
end
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to include(
"Could not find a version that matches "\
"chardet<3.1.0,==3.0.0,>=3.0.2\n"
)
end
end
end
context "with a missing system libary" do
# NOTE: Attempt to update an unrelated dependency (tensorflow) to cause
# resolution to fail for rtree which has a system dependency on
# libspatialindex which isn't installed in dependabot-core's Dockerfile.
let(:dependency_files) do
project_dependency_files("pipenv/missing-system-library")
end
let(:updated_requirement) { "==2.3.1" }
let(:dependency_name) { "tensorflow" }
let(:dependency_version) { "2.1.0" }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==2.1.0",
groups: ["default"],
source: nil
}]
end
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to include(
"Pipenv failed to install \"rtree\""
)
end
end
end
end
describe "#resolvable?" do
subject { resolver.resolvable?(version: version) }
let(:version) { Gem::Version.new("2.18.4") }
context "that is resolvable" do
let(:version) { Gem::Version.new("2.18.4") }
it { is_expected.to eq(true) }
context "with a subdependency" do
let(:dependency_name) { "py" }
let(:dependency_version) { "1.5.3" }
let(:dependency_requirements) { [] }
let(:version) { Gem::Version.new("1.7.0") }
it { is_expected.to eq(true) }
end
end
context "that is not resolvable" do
let(:version) { Gem::Version.new("99.18.4") }
it { is_expected.to eq(false) }
context "with a subdependency" do
let(:dependency_name) { "py" }
let(:dependency_version) { "1.5.3" }
let(:dependency_requirements) { [] }
it { is_expected.to eq(false) }
end
context "because the original manifest isn't resolvable" do
let(:pipfile_fixture_name) { "conflict_at_current" }
let(:lockfile_fixture_name) { "conflict_at_current.lock" }
let(:version) { Gem::Version.new("99.18.4") }
let(:dependency_requirements) do
[{
file: "Pipfile",
requirement: "==2.18.0",
groups: ["default"],
source: nil
}]
end
it "raises a helpful error" do
expect { subject }.
to raise_error(Dependabot::DependencyFileNotResolvable) do |error|
expect(error.message).to include(
"Could not find a version that matches "\
"chardet<3.1.0,==3.0.0,>=3.0.2\n"
)
end
end
end
end
end
end
| 32.40966 | 80 | 0.578241 |
1129087bb206f6446815d5d1e5d7d318123508f6 | 256 | class CreateProducts < ActiveRecord::Migration
def change
create_table :products do |t|
t.string :title
t.text :description
t.string :picture
t.decimal :price
t.date :date_published
t.timestamps
end
end
end
| 18.285714 | 46 | 0.648438 |
7a220dafaecd167b210807f7cc0f66b17391be87 | 1,058 | require 'rails_helper'
RSpec.describe 'Opening Course from User Dashboard', type: :system do
  # Fixtures: a default learning path containing a Foundations course with
  # two lessons, plus a user to complete them. `let!` forces eager creation.
  let!(:default_path) { create(:path, default_path: true) }
  let!(:foundations_course) { create(:course, title: 'Foundations', path: default_path) }
  let!(:section) { create(:section, course: foundations_course) }
  let!(:first_lesson) { create(:lesson, section:) }
  let!(:second_lesson) { create(:lesson, section:) }
  let!(:user) { create(:user) }
  context 'when user has completed a course' do
    # Drives the UI to mark both lessons complete, then lands on the
    # dashboard where the course tile should show an "Open" action.
    before do
      sign_in(user)
      visit lesson_path(first_lesson)
      find(:test_id, 'complete-button').click
      visit lesson_path(second_lesson)
      find(:test_id, 'complete-button').click
      visit dashboard_path
    end
    it 'has button to open course' do
      expect(find(:test_id, 'foundations-open-btn')).to have_text('Open')
    end
    it 'successfully opens course' do
      find(:test_id, 'foundations-open-btn').click
      expect(page).to have_current_path(path_course_path(default_path, foundations_course))
    end
  end
end
| 31.117647 | 91 | 0.689981 |
5d9bac038b9b0c08e416480d0f0c45f68515519f | 1,112 | require "spec_helper"
RSpec.describe SentenceValidator do
  # Shared subject: the validity verdict for the sentence defined by each
  # context's `let(:sentence)` word list.
  subject(:validity) { described_class.new(sentence).valid_sentence? }

  context 'when the sentence does not include a verb' do
    let(:sentence) { ['abcd', 'e'] }

    it 'the sentence should be invalid' do
      expect(validity).to eq false
    end
  end

  context 'when the sentence includes a verb' do
    context 'when the sentence does not include two articles '\
            'or a noun' do
      let(:sentence) { ['a', 'bcd'] }

      it 'the sentence should be invalid' do
        expect(validity).to eq false
      end
    end

    context 'when the sentence includes a noun but not two articles' do
      let(:sentence) { ['bc', 'def', 'e'] }

      it 'the sentence should be valid' do
        expect(validity).to eq true
      end
    end

    context 'when the sentence includes two articles but not a noun' do
      let(:sentence) { ['a', 'bcd', 'e'] }

      it 'the sentence should be valid' do
        expect(validity).to eq true
      end
    end
  end
end
| 27.8 | 74 | 0.649281 |
ff426b0b67ae702ebe301c9356e0d4bf5ffc96c6 | 115 | # require 'rails_i18n/common_pluralizations/west_slavic'
# ::RailsI18n::Pluralization::WestSlavic.with_locale(:cs) | 38.333333 | 57 | 0.817391 |
e2f46137b5a8594dcf59b87182d26ac977a6c48d | 832 | # frozen_string_literal: true
RSpec.describe Hyrax::GrantEditJob do
  let(:depositor) { create(:user) }
  context "when use_valkyrie is false" do
    # ActiveFedora code path: the FileSet is reloaded in place.
    let(:file_set) { create(:file_set) }
    it 'grants a user edit access to a FileSet' do
      described_class.perform_now(file_set.id, depositor.user_key, use_valkyrie: false)
      file_set.reload
      expect(file_set.edit_users).to include depositor.user_key
    end
  end
  context "when use_valkyrie is true" do
    # Valkyrie code path: the id is passed as a string and the updated
    # resource is re-fetched through the query service.
    let(:file_set) { valkyrie_create(:hyrax_file_set) }
    it 'grants a user edit access to a FileSet' do
      described_class.perform_now(file_set.id.to_s, depositor.user_key, use_valkyrie: true)
      reloaded_file_set = Hyrax.query_service.find_by(id: file_set.id)
      expect(reloaded_file_set.edit_users).to include depositor.user_key
    end
  end
end
6a4925ab510bd67362f72affd522e40e03875a82 | 2,387 | require "stringio"
require 'rexml/document'
require "test/unit"
require "test/unit/ui/junitxml/testrunner"
require_relative "check"
# Verifies that the JUnit XML test runner emits well-formed XML when test
# names and failure/skip messages contain multibyte (Japanese) text.
class TestXmlMultibyteName < Test::Unit::TestCase
  include Check

  # Runs an inline suite (one case per outcome: pass, failure, error,
  # omission, pending) through the JUnit XML runner and parses its output.
  setup do
    test_case = Class.new(Test::Unit::TestCase) do
      test "成功" do
        assert_equal(1, 1)
      end

      def test_failure
        assert_equal(1, 1)
        assert_equal(1, 2, "失敗")
      end

      def test_error
        assert_equal(1, 1)
        assert_equal(1, 1)
        assert_equal(1, 1)
        raise "エラー"
      end

      def test_omission
        omit("除外")
      end

      def test_pending
        pend("保留")
      end
    end
    output = StringIO.new
    runner = Test::Unit::UI::JUnitXml::TestRunner.new(
      test_case.suite, :output => output)
    runner.start
    output.rewind
    @doc = REXML::Document.new(output)
  end

  test "testsuites" do
    sole_element("/testsuites")
  end

  test "testsuite" do
    # 5 tests total: 1 failure, 1 error, 2 skipped (omission + pending).
    check_testsuite(sole_element("/testsuites/testsuite"), "", 5, 1, 1, 2)
  end

  test "testcase success" do
    check_testcase_success(
      sole_element("/testsuites/testsuite/testcase[@name='成功']"), "", 1)
  end

  test "testcase failure" do
    check_testcase_failure(
      sole_element("/testsuites/testsuite/testcase[@name='test_failure()']"),
      "", 2, "失敗")
  end

  test "testcase error" do
    check_testcase_error(
      sole_element("/testsuites/testsuite/testcase[@name='test_error()']"),
      "", 3, "エラー")
  end

  test "testcase omission" do
    check_testcase_skipped(
      sole_element("/testsuites/testsuite/testcase[@name='test_omission()']"),
      "", 0, "除外")
  end

  test "testcase pending" do
    check_testcase_skipped(
      sole_element("/testsuites/testsuite/testcase[@name='test_pending()']"),
      "", 0, "保留")
  end

  private

  # Asserts that exactly one element matches +xpath+ in the parsed report
  # and returns it (replaces the repeated get_elements/size boilerplate).
  def sole_element(xpath)
    elements = @doc.get_elements(xpath)
    assert_equal(1, elements.size)
    elements.first
  end
end
| 25.945652 | 64 | 0.669041 |
397c5a939d3606795e60e97ed8c575d716aeee4d | 43,529 | require "isolation/abstract_unit"
require 'rack/test'
require 'env_helpers'
# Stub interceptor/observer classes registered by the Action Mailer
# configuration tests below; each hook simply echoes the email it receives.
class ::MyMailInterceptor
  def self.delivering_email(email)
    email
  end
end

class ::MyOtherMailInterceptor < ::MyMailInterceptor; end

class ::MyPreviewMailInterceptor
  def self.previewing_email(email)
    email
  end
end

class ::MyOtherPreviewMailInterceptor < ::MyPreviewMailInterceptor; end

class ::MyMailObserver
  def self.delivered_email(email)
    email
  end
end

class ::MyOtherMailObserver < ::MyMailObserver; end
module ApplicationTests
class ConfigurationTest < ActiveSupport::TestCase
include ActiveSupport::Testing::Isolation
include Rack::Test::Methods
include EnvHelpers
def new_app
File.expand_path("#{app_path}/../new_app")
end
def copy_app
FileUtils.cp_r(app_path, new_app)
end
def app(env = 'development')
@app ||= begin
ENV['RAILS_ENV'] = env
# FIXME: shush Sass warning spam, not relevant to testing Railties
Kernel.silence_warnings do
require "#{app_path}/config/environment"
end
Rails.application
ensure
ENV.delete 'RAILS_ENV'
end
end
def setup
build_app
boot_rails
supress_default_config
end
def teardown
teardown_app
FileUtils.rm_rf(new_app) if File.directory?(new_app)
end
def supress_default_config
FileUtils.mv("#{app_path}/config/environments", "#{app_path}/config/__environments__")
end
def restore_default_config
FileUtils.rm_rf("#{app_path}/config/environments")
FileUtils.mv("#{app_path}/config/__environments__", "#{app_path}/config/environments")
end
test "Rails.env does not set the RAILS_ENV environment variable which would leak out into rake tasks" do
require "rails"
switch_env "RAILS_ENV", nil do
Rails.env = "development"
assert_equal "development", Rails.env
assert_nil ENV['RAILS_ENV']
end
end
test "By default logs tags are not set in development" do
restore_default_config
with_rails_env "development" do
app 'development'
assert Rails.application.config.log_tags.blank?
end
end
test "By default logs are tagged with :request_id in production" do
restore_default_config
with_rails_env "production" do
app 'production'
assert_equal [:request_id], Rails.application.config.log_tags
end
end
test "lib dir is on LOAD_PATH during config" do
app_file 'lib/my_logger.rb', <<-RUBY
require "logger"
class MyLogger < ::Logger
end
RUBY
add_to_top_of_config <<-RUBY
require 'my_logger'
config.logger = MyLogger.new STDOUT
RUBY
app 'development'
assert_equal 'MyLogger', Rails.application.config.logger.class.name
end
test "a renders exception on pending migration" do
add_to_config <<-RUBY
config.active_record.migration_error = :page_load
config.consider_all_requests_local = true
config.action_dispatch.show_exceptions = true
RUBY
app_file 'db/migrate/20140708012246_create_user.rb', <<-RUBY
class CreateUser < ActiveRecord::Migration::Current
def change
create_table :users
end
end
RUBY
app 'development'
ActiveRecord::Migrator.migrations_paths = ["#{app_path}/db/migrate"]
begin
get "/foo"
assert_equal 500, last_response.status
assert_match "ActiveRecord::PendingMigrationError", last_response.body
ensure
ActiveRecord::Migrator.migrations_paths = nil
end
end
test "Rails.groups returns available groups" do
require "rails"
Rails.env = "development"
assert_equal [:default, "development"], Rails.groups
assert_equal [:default, "development", :assets], Rails.groups(assets: [:development])
assert_equal [:default, "development", :another, :assets], Rails.groups(:another, assets: %w(development))
Rails.env = "test"
assert_equal [:default, "test"], Rails.groups(assets: [:development])
ENV["RAILS_GROUPS"] = "javascripts,stylesheets"
assert_equal [:default, "test", "javascripts", "stylesheets"], Rails.groups
end
test "Rails.application is nil until app is initialized" do
require 'rails'
assert_nil Rails.application
app 'development'
assert_equal AppTemplate::Application.instance, Rails.application
end
test "Rails.application responds to all instance methods" do
app 'development'
assert_respond_to Rails.application, :routes_reloader
assert_equal Rails.application.routes_reloader, AppTemplate::Application.routes_reloader
end
test "Rails::Application responds to paths" do
app 'development'
assert_respond_to AppTemplate::Application, :paths
assert_equal ["#{app_path}/app/views"], AppTemplate::Application.paths["app/views"].expanded
end
test "the application root is set correctly" do
app 'development'
assert_equal Pathname.new(app_path), Rails.application.root
end
test "the application root can be seen from the application singleton" do
app 'development'
assert_equal Pathname.new(app_path), AppTemplate::Application.root
end
test "the application root can be set" do
copy_app
add_to_config <<-RUBY
config.root = '#{new_app}'
RUBY
use_frameworks []
app 'development'
assert_equal Pathname.new(new_app), Rails.application.root
end
test "the application root is Dir.pwd if there is no config.ru" do
File.delete("#{app_path}/config.ru")
use_frameworks []
Dir.chdir("#{app_path}") do
app 'development'
assert_equal Pathname.new("#{app_path}"), Rails.application.root
end
end
test "Rails.root should be a Pathname" do
add_to_config <<-RUBY
config.root = "#{app_path}"
RUBY
app 'development'
assert_instance_of Pathname, Rails.root
end
test "Rails.public_path should be a Pathname" do
add_to_config <<-RUBY
config.paths["public"] = "somewhere"
RUBY
app 'development'
assert_instance_of Pathname, Rails.public_path
end
test "initialize an eager loaded, cache classes app" do
add_to_config <<-RUBY
config.eager_load = true
config.cache_classes = true
RUBY
app 'development'
assert_equal :require, ActiveSupport::Dependencies.mechanism
end
test "application is always added to eager_load namespaces" do
app 'development'
assert_includes Rails.application.config.eager_load_namespaces, AppTemplate::Application
end
test "the application can be eager loaded even when there are no frameworks" do
FileUtils.rm_rf("#{app_path}/app/models/application_record.rb")
FileUtils.rm_rf("#{app_path}/app/mailers/application_mailer.rb")
FileUtils.rm_rf("#{app_path}/config/environments")
add_to_config <<-RUBY
config.eager_load = true
config.cache_classes = true
RUBY
use_frameworks []
assert_nothing_raised do
app 'development'
end
end
test "filter_parameters should be able to set via config.filter_parameters" do
add_to_config <<-RUBY
config.filter_parameters += [ :foo, 'bar', lambda { |key, value|
value = value.reverse if key =~ /baz/
}]
RUBY
assert_nothing_raised do
app 'development'
end
end
test "filter_parameters should be able to set via config.filter_parameters in an initializer" do
app_file 'config/initializers/filter_parameters_logging.rb', <<-RUBY
Rails.application.config.filter_parameters += [ :password, :foo, 'bar' ]
RUBY
app 'development'
assert_equal [:password, :foo, 'bar'], Rails.application.env_config['action_dispatch.parameter_filter']
end
test "config.to_prepare is forwarded to ActionDispatch" do
$prepared = false
add_to_config <<-RUBY
config.to_prepare do
$prepared = true
end
RUBY
assert !$prepared
app 'development'
get "/"
assert $prepared
end
def assert_utf8
assert_equal Encoding::UTF_8, Encoding.default_external
assert_equal Encoding::UTF_8, Encoding.default_internal
end
test "skipping config.encoding still results in 'utf-8' as the default" do
app 'development'
assert_utf8
end
test "config.encoding sets the default encoding" do
add_to_config <<-RUBY
config.encoding = "utf-8"
RUBY
app 'development'
assert_utf8
end
test "config.paths.public sets Rails.public_path" do
add_to_config <<-RUBY
config.paths["public"] = "somewhere"
RUBY
app 'development'
assert_equal Pathname.new(app_path).join("somewhere"), Rails.public_path
end
test "In production mode, config.public_file_server.enabled is off by default" do
restore_default_config
with_rails_env "production" do
app 'production'
assert_not app.config.public_file_server.enabled
end
end
test "In production mode, config.public_file_server.enabled is enabled when RAILS_SERVE_STATIC_FILES is set" do
restore_default_config
with_rails_env "production" do
switch_env "RAILS_SERVE_STATIC_FILES", "1" do
app 'production'
assert app.config.public_file_server.enabled
end
end
end
test "In production mode, config.public_file_server.enabled is disabled when RAILS_SERVE_STATIC_FILES is blank" do
restore_default_config
with_rails_env "production" do
switch_env "RAILS_SERVE_STATIC_FILES", " " do
app 'production'
assert_not app.config.public_file_server.enabled
end
end
end
test "config.serve_static_files is deprecated" do
make_basic_app do |application|
assert_deprecated do
application.config.serve_static_files = true
end
assert application.config.public_file_server.enabled
end
end
test "config.static_cache_control is deprecated" do
make_basic_app do |application|
assert_deprecated do
application.config.static_cache_control = "public, max-age=60"
end
assert_equal application.config.static_cache_control, "public, max-age=60"
end
end
test "Use key_generator when secret_key_base is set" do
make_basic_app do |application|
application.secrets.secret_key_base = 'b3c631c314c0bbca50c1b2843150fe33'
application.config.session_store :disabled
end
class ::OmgController < ActionController::Base
def index
cookies.signed[:some_key] = "some_value"
render text: cookies[:some_key]
end
end
get "/"
secret = app.key_generator.generate_key('signed cookie')
verifier = ActiveSupport::MessageVerifier.new(secret)
assert_equal 'some_value', verifier.verify(last_response.body)
end
test "application verifier can be used in the entire application" do
make_basic_app do |application|
application.secrets.secret_key_base = 'b3c631c314c0bbca50c1b2843150fe33'
application.config.session_store :disabled
end
message = app.message_verifier(:sensitive_value).generate("some_value")
assert_equal 'some_value', Rails.application.message_verifier(:sensitive_value).verify(message)
secret = app.key_generator.generate_key('sensitive_value')
verifier = ActiveSupport::MessageVerifier.new(secret)
assert_equal 'some_value', verifier.verify(message)
end
test "application message verifier can be used when the key_generator is ActiveSupport::LegacyKeyGenerator" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_token = "b3c631c314c0bbca50c1b2843150fe33"
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base:
YAML
app 'development'
assert_equal app.env_config['action_dispatch.key_generator'], Rails.application.key_generator
assert_equal app.env_config['action_dispatch.key_generator'].class, ActiveSupport::LegacyKeyGenerator
message = app.message_verifier(:sensitive_value).generate("some_value")
assert_equal 'some_value', Rails.application.message_verifier(:sensitive_value).verify(message)
end
test "warns when secrets.secret_key_base is blank and config.secret_token is set" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_token = "b3c631c314c0bbca50c1b2843150fe33"
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base:
YAML
app 'development'
assert_deprecated(/You didn't set `secret_key_base`./) do
app.env_config
end
end
test "raise when secrets.secret_key_base is not a type of string" do
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base: 123
YAML
app 'development'
assert_raise(ArgumentError) do
app.key_generator
end
end
test "prefer secrets.secret_token over config.secret_token" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_token = ""
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_token: 3b7cd727ee24e8444053437c36cc66c3
YAML
app 'development'
assert_equal '3b7cd727ee24e8444053437c36cc66c3', app.secrets.secret_token
end
test "application verifier can build different verifiers" do
make_basic_app do |application|
application.secrets.secret_key_base = 'b3c631c314c0bbca50c1b2843150fe33'
application.config.session_store :disabled
end
default_verifier = app.message_verifier(:sensitive_value)
text_verifier = app.message_verifier(:text)
message = text_verifier.generate('some_value')
assert_equal 'some_value', text_verifier.verify(message)
assert_raises ActiveSupport::MessageVerifier::InvalidSignature do
default_verifier.verify(message)
end
assert_equal default_verifier.object_id, app.message_verifier(:sensitive_value).object_id
assert_not_equal default_verifier.object_id, text_verifier.object_id
end
test "secrets.secret_key_base is used when config/secrets.yml is present" do
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base: 3b7cd727ee24e8444053437c36cc66c3
YAML
app 'development'
assert_equal '3b7cd727ee24e8444053437c36cc66c3', app.secrets.secret_key_base
end
test "secret_key_base is copied from config to secrets when not set" do
remove_file "config/secrets.yml"
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_key_base = "3b7cd727ee24e8444053437c36cc66c3"
RUBY
app 'development'
assert_equal '3b7cd727ee24e8444053437c36cc66c3', app.secrets.secret_key_base
end
test "config.secret_token over-writes a blank secrets.secret_token" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_token = "b3c631c314c0bbca50c1b2843150fe33"
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base:
secret_token:
YAML
app 'development'
assert_equal 'b3c631c314c0bbca50c1b2843150fe33', app.secrets.secret_token
assert_equal 'b3c631c314c0bbca50c1b2843150fe33', app.config.secret_token
end
test "custom secrets saved in config/secrets.yml are loaded in app secrets" do
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base: 3b7cd727ee24e8444053437c36cc66c3
aws_access_key_id: myamazonaccesskeyid
aws_secret_access_key: myamazonsecretaccesskey
YAML
app 'development'
assert_equal 'myamazonaccesskeyid', app.secrets.aws_access_key_id
assert_equal 'myamazonsecretaccesskey', app.secrets.aws_secret_access_key
end
test "blank config/secrets.yml does not crash the loading process" do
app_file 'config/secrets.yml', <<-YAML
YAML
app 'development'
assert_nil app.secrets.not_defined
end
test "config.secret_key_base over-writes a blank secrets.secret_key_base" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_key_base = "iaminallyoursecretkeybase"
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base:
YAML
app 'development'
assert_equal "iaminallyoursecretkeybase", app.secrets.secret_key_base
end
test "uses ActiveSupport::LegacyKeyGenerator as app.key_generator when secrets.secret_key_base is blank" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_token = "b3c631c314c0bbca50c1b2843150fe33"
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base:
YAML
app 'development'
assert_equal 'b3c631c314c0bbca50c1b2843150fe33', app.config.secret_token
assert_equal nil, app.secrets.secret_key_base
assert_equal app.key_generator.class, ActiveSupport::LegacyKeyGenerator
end
test "uses ActiveSupport::LegacyKeyGenerator with config.secret_token as app.key_generator when secrets.secret_key_base is blank" do
app_file 'config/initializers/secret_token.rb', <<-RUBY
Rails.application.config.secret_token = ""
RUBY
app_file 'config/secrets.yml', <<-YAML
development:
secret_key_base:
YAML
app 'development'
assert_equal '', app.config.secret_token
assert_equal nil, app.secrets.secret_key_base
assert_raise ArgumentError, /\AA secret is required/ do
app.key_generator
end
end
test "protect from forgery is the default in a new app" do
make_basic_app
class ::OmgController < ActionController::Base
def index
render inline: "<%= csrf_meta_tags %>"
end
end
get "/"
assert last_response.body =~ /csrf\-param/
end
test "default form builder specified as a string" do
app_file 'config/initializers/form_builder.rb', <<-RUBY
class CustomFormBuilder < ActionView::Helpers::FormBuilder
def text_field(attribute, *args)
label(attribute) + super(attribute, *args)
end
end
Rails.configuration.action_view.default_form_builder = "CustomFormBuilder"
RUBY
app_file 'app/models/post.rb', <<-RUBY
class Post
include ActiveModel::Model
attr_accessor :name
end
RUBY
app_file 'app/controllers/posts_controller.rb', <<-RUBY
class PostsController < ApplicationController
def index
render inline: "<%= begin; form_for(Post.new) {|f| f.text_field(:name)}; rescue => e; e.to_s; end %>"
end
end
RUBY
add_to_config <<-RUBY
routes.prepend do
resources :posts
end
RUBY
app 'development'
get "/posts"
assert_match(/label/, last_response.body)
end
test "default method for update can be changed" do
app_file 'app/models/post.rb', <<-RUBY
class Post
include ActiveModel::Model
def to_key; [1]; end
def persisted?; true; end
end
RUBY
token = "cf50faa3fe97702ca1ae"
app_file 'app/controllers/posts_controller.rb', <<-RUBY
class PostsController < ApplicationController
def show
render inline: "<%= begin; form_for(Post.new) {}; rescue => e; e.to_s; end %>"
end
def update
render text: "update"
end
private
def form_authenticity_token(*args); token; end # stub the authenticy token
end
RUBY
add_to_config <<-RUBY
routes.prepend do
resources :posts
end
RUBY
app 'development'
params = { authenticity_token: token }
get "/posts/1"
assert_match(/patch/, last_response.body)
patch "/posts/1", params
assert_match(/update/, last_response.body)
patch "/posts/1", params
assert_equal 200, last_response.status
put "/posts/1", params
assert_match(/update/, last_response.body)
put "/posts/1", params
assert_equal 200, last_response.status
end
test "request forgery token param can be changed" do
make_basic_app do |application|
application.config.action_controller.request_forgery_protection_token = '_xsrf_token_here'
end
class ::OmgController < ActionController::Base
def index
render inline: "<%= csrf_meta_tags %>"
end
end
get "/"
assert_match "_xsrf_token_here", last_response.body
end
test "sets ActionDispatch.test_app" do
make_basic_app
assert_equal Rails.application, ActionDispatch.test_app
end
test "sets ActionDispatch::Response.default_charset" do
make_basic_app do |application|
application.config.action_dispatch.default_charset = "utf-16"
end
assert_equal "utf-16", ActionDispatch::Response.default_charset
end
test "registers interceptors with ActionMailer" do
add_to_config <<-RUBY
config.action_mailer.interceptors = MyMailInterceptor
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [::MyMailInterceptor], ::Mail.send(:class_variable_get, "@@delivery_interceptors")
end
test "registers multiple interceptors with ActionMailer" do
add_to_config <<-RUBY
config.action_mailer.interceptors = [MyMailInterceptor, "MyOtherMailInterceptor"]
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [::MyMailInterceptor, ::MyOtherMailInterceptor], ::Mail.send(:class_variable_get, "@@delivery_interceptors")
end
test "registers preview interceptors with ActionMailer" do
add_to_config <<-RUBY
config.action_mailer.preview_interceptors = MyPreviewMailInterceptor
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [ActionMailer::InlinePreviewInterceptor, ::MyPreviewMailInterceptor], ActionMailer::Base.preview_interceptors
end
test "registers multiple preview interceptors with ActionMailer" do
add_to_config <<-RUBY
config.action_mailer.preview_interceptors = [MyPreviewMailInterceptor, "MyOtherPreviewMailInterceptor"]
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [ActionMailer::InlinePreviewInterceptor, MyPreviewMailInterceptor, MyOtherPreviewMailInterceptor], ActionMailer::Base.preview_interceptors
end
test "default preview interceptor can be removed" do
app_file 'config/initializers/preview_interceptors.rb', <<-RUBY
ActionMailer::Base.preview_interceptors.delete(ActionMailer::InlinePreviewInterceptor)
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [], ActionMailer::Base.preview_interceptors
end
test "registers observers with ActionMailer" do
add_to_config <<-RUBY
config.action_mailer.observers = MyMailObserver
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [::MyMailObserver], ::Mail.send(:class_variable_get, "@@delivery_notification_observers")
end
test "registers multiple observers with ActionMailer" do
add_to_config <<-RUBY
config.action_mailer.observers = [MyMailObserver, "MyOtherMailObserver"]
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal [::MyMailObserver, ::MyOtherMailObserver], ::Mail.send(:class_variable_get, "@@delivery_notification_observers")
end
test "allows setting the queue name for the ActionMailer::DeliveryJob" do
add_to_config <<-RUBY
config.action_mailer.deliver_later_queue_name = 'test_default'
RUBY
app 'development'
require "mail"
_ = ActionMailer::Base
assert_equal 'test_default', ActionMailer::Base.send(:class_variable_get, "@@deliver_later_queue_name")
end
test "valid timezone is setup correctly" do
add_to_config <<-RUBY
config.root = "#{app_path}"
config.time_zone = "Wellington"
RUBY
app 'development'
assert_equal "Wellington", Rails.application.config.time_zone
end
test "raises when an invalid timezone is defined in the config" do
add_to_config <<-RUBY
config.root = "#{app_path}"
config.time_zone = "That big hill over yonder hill"
RUBY
assert_raise(ArgumentError) do
app 'development'
end
end
test "valid beginning of week is setup correctly" do
add_to_config <<-RUBY
config.root = "#{app_path}"
config.beginning_of_week = :wednesday
RUBY
app 'development'
assert_equal :wednesday, Rails.application.config.beginning_of_week
end
test "raises when an invalid beginning of week is defined in the config" do
add_to_config <<-RUBY
config.root = "#{app_path}"
config.beginning_of_week = :invalid
RUBY
assert_raise(ArgumentError) do
app 'development'
end
end
test "config.action_view.cache_template_loading with cache_classes default" do
add_to_config "config.cache_classes = true"
app 'development'
require 'action_view/base'
assert_equal true, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading without cache_classes default" do
add_to_config "config.cache_classes = false"
app 'development'
require 'action_view/base'
assert_equal false, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading = false" do
add_to_config <<-RUBY
config.cache_classes = true
config.action_view.cache_template_loading = false
RUBY
app 'development'
require 'action_view/base'
assert_equal false, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading = true" do
add_to_config <<-RUBY
config.cache_classes = false
config.action_view.cache_template_loading = true
RUBY
app 'development'
require 'action_view/base'
assert_equal true, ActionView::Resolver.caching?
end
test "config.action_view.cache_template_loading with cache_classes in an environment" do
build_app(initializers: true)
add_to_env_config "development", "config.cache_classes = false"
# These requires are to emulate an engine loading Action View before the application
require 'action_view'
require 'action_view/railtie'
require 'action_view/base'
app 'development'
assert_equal false, ActionView::Resolver.caching?
end
test "config.action_dispatch.show_exceptions is sent in env" do
make_basic_app do |application|
application.config.action_dispatch.show_exceptions = true
end
class ::OmgController < ActionController::Base
def index
render text: env["action_dispatch.show_exceptions"]
end
end
get "/"
assert_equal 'true', last_response.body
end
test "config.action_controller.wrap_parameters is set in ActionController::Base" do
app_file 'config/initializers/wrap_parameters.rb', <<-RUBY
ActionController::Base.wrap_parameters format: [:json]
RUBY
app_file 'app/models/post.rb', <<-RUBY
class Post
def self.attribute_names
%w(title)
end
end
RUBY
app_file 'app/controllers/application_controller.rb', <<-RUBY
class ApplicationController < ActionController::Base
protect_from_forgery with: :reset_session # as we are testing API here
end
RUBY
app_file 'app/controllers/posts_controller.rb', <<-RUBY
class PostsController < ApplicationController
def create
render text: params[:post].inspect
end
end
RUBY
add_to_config <<-RUBY
routes.prepend do
resources :posts
end
RUBY
app 'development'
post "/posts.json", '{ "title": "foo", "name": "bar" }', "CONTENT_TYPE" => "application/json"
assert_equal '{"title"=>"foo"}', last_response.body
end
test "config.action_controller.permit_all_parameters = true" do
app_file 'app/controllers/posts_controller.rb', <<-RUBY
class PostsController < ActionController::Base
def create
render text: params[:post].permitted? ? "permitted" : "forbidden"
end
end
RUBY
add_to_config <<-RUBY
routes.prepend do
resources :posts
end
config.action_controller.permit_all_parameters = true
RUBY
app 'development'
post "/posts", {post: {"title" =>"zomg"}}
assert_equal 'permitted', last_response.body
end
test "config.action_controller.action_on_unpermitted_parameters = :raise" do
app_file 'app/controllers/posts_controller.rb', <<-RUBY
class PostsController < ActionController::Base
def create
render text: params.require(:post).permit(:name)
end
end
RUBY
add_to_config <<-RUBY
routes.prepend do
resources :posts
end
config.action_controller.action_on_unpermitted_parameters = :raise
RUBY
app 'development'
assert_equal :raise, ActionController::Parameters.action_on_unpermitted_parameters
post "/posts", {post: {"title" =>"zomg"}}
assert_match "We're sorry, but something went wrong", last_response.body
end
test "config.action_controller.always_permitted_parameters are: controller, action by default" do
app 'development'
assert_equal %w(controller action), ActionController::Parameters.always_permitted_parameters
end
test "config.action_controller.always_permitted_parameters = ['controller', 'action', 'format']" do
add_to_config <<-RUBY
config.action_controller.always_permitted_parameters = %w( controller action format )
RUBY
app 'development'
assert_equal %w( controller action format ), ActionController::Parameters.always_permitted_parameters
end
test "config.action_controller.always_permitted_parameters = ['controller','action','format'] does not raise exeception" do
app_file 'app/controllers/posts_controller.rb', <<-RUBY
class PostsController < ActionController::Base
def create
render text: params.permit(post: [:title])
end
end
RUBY
add_to_config <<-RUBY
routes.prepend do
resources :posts
end
config.action_controller.always_permitted_parameters = %w( controller action format )
config.action_controller.action_on_unpermitted_parameters = :raise
RUBY
app 'development'
assert_equal :raise, ActionController::Parameters.action_on_unpermitted_parameters
post "/posts", {post: {"title" =>"zomg"}, format: "json"}
assert_equal 200, last_response.status
end
test "config.action_controller.action_on_unpermitted_parameters is :log by default on development" do
app 'development'
assert_equal :log, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_controller.action_on_unpermitted_parameters is :log by default on test" do
app 'test'
assert_equal :log, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_controller.action_on_unpermitted_parameters is false by default on production" do
app 'production'
assert_equal false, ActionController::Parameters.action_on_unpermitted_parameters
end
test "config.action_dispatch.ignore_accept_header" do
make_basic_app do |application|
application.config.action_dispatch.ignore_accept_header = true
end
class ::OmgController < ActionController::Base
def index
respond_to do |format|
format.html { render text: "HTML" }
format.xml { render text: "XML" }
end
end
end
get "/", {}, "HTTP_ACCEPT" => "application/xml"
assert_equal 'HTML', last_response.body
get "/", { format: :xml }, "HTTP_ACCEPT" => "application/xml"
assert_equal 'XML', last_response.body
end
test "Rails.application#env_config exists and include some existing parameters" do
make_basic_app
assert_respond_to app, :env_config
assert_equal app.env_config['action_dispatch.parameter_filter'], app.config.filter_parameters
assert_equal app.env_config['action_dispatch.show_exceptions'], app.config.action_dispatch.show_exceptions
assert_equal app.env_config['action_dispatch.logger'], Rails.logger
assert_equal app.env_config['action_dispatch.backtrace_cleaner'], Rails.backtrace_cleaner
assert_equal app.env_config['action_dispatch.key_generator'], Rails.application.key_generator
end
test "config.colorize_logging default is true" do
make_basic_app
assert app.config.colorize_logging
end
test "config.session_store with :active_record_store with activerecord-session_store gem" do
begin
make_basic_app do |application|
ActionDispatch::Session::ActiveRecordStore = Class.new(ActionDispatch::Session::CookieStore)
application.config.session_store :active_record_store
end
ensure
ActionDispatch::Session.send :remove_const, :ActiveRecordStore
end
end
test "config.session_store with :active_record_store without activerecord-session_store gem" do
assert_raise RuntimeError, /activerecord-session_store/ do
make_basic_app do |application|
application.config.session_store :active_record_store
end
end
end
test "config.log_level with custom logger" do
make_basic_app do |application|
application.config.logger = Logger.new(STDOUT)
application.config.log_level = :info
end
assert_equal Logger::INFO, Rails.logger.level
end
test "respond_to? accepts include_private" do
make_basic_app
assert_not Rails.configuration.respond_to?(:method_missing)
assert Rails.configuration.respond_to?(:method_missing, true)
end
test "config.active_record.dump_schema_after_migration is false on production" do
build_app
app 'production'
assert_not ActiveRecord::Base.dump_schema_after_migration
end
test "config.active_record.dump_schema_after_migration is true by default on development" do
app 'development'
assert ActiveRecord::Base.dump_schema_after_migration
end
test "config.annotations wrapping SourceAnnotationExtractor::Annotation class" do
make_basic_app do |application|
application.config.annotations.register_extensions("coffee") do |tag|
/#\s*(#{tag}):?\s*(.*)$/
end
end
assert_not_nil SourceAnnotationExtractor::Annotation.extensions[/\.(coffee)$/]
end
test "rake_tasks block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
rake_tasks do
config.ran_block = true
end
end
RUBY
app 'development'
assert_not Rails.configuration.ran_block
require 'rake'
require 'rake/testtask'
require 'rdoc/task'
Rails.application.load_tasks
assert Rails.configuration.ran_block
end
test "generators block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
generators do
config.ran_block = true
end
end
RUBY
app 'development'
assert_not Rails.configuration.ran_block
Rails.application.load_generators
assert Rails.configuration.ran_block
end
test "console block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
console do
config.ran_block = true
end
end
RUBY
app 'development'
assert_not Rails.configuration.ran_block
Rails.application.load_console
assert Rails.configuration.ran_block
end
test "runner block works at instance level" do
app_file "config/environments/development.rb", <<-RUBY
Rails.application.configure do
config.ran_block = false
runner do
config.ran_block = true
end
end
RUBY
app 'development'
assert_not Rails.configuration.ran_block
Rails.application.load_runner
assert Rails.configuration.ran_block
end
test "loading the first existing database configuration available" do
app_file 'config/environments/development.rb', <<-RUBY
Rails.application.configure do
config.paths.add 'config/database', with: 'config/nonexistent.yml'
config.paths['config/database'] << 'config/database.yml'
end
RUBY
app 'development'
assert_kind_of Hash, Rails.application.config.database_configuration
end
test 'raises with proper error message if no database configuration found' do
FileUtils.rm("#{app_path}/config/database.yml")
app 'development'
err = assert_raises RuntimeError do
Rails.application.config.database_configuration
end
assert_match 'config/database', err.message
end
test 'config.action_mailer.show_previews defaults to true in development' do
app 'development'
assert Rails.application.config.action_mailer.show_previews
end
test 'config.action_mailer.show_previews defaults to false in production' do
app 'production'
assert_equal false, Rails.application.config.action_mailer.show_previews
end
test 'config.action_mailer.show_previews can be set in the configuration file' do
add_to_config <<-RUBY
config.action_mailer.show_previews = true
RUBY
app 'production'
assert_equal true, Rails.application.config.action_mailer.show_previews
end
test "config_for loads custom configuration from yaml files" do
app_file 'config/custom.yml', <<-RUBY
development:
key: 'custom key'
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
app 'development'
assert_equal 'custom key', Rails.application.config.my_custom_config['key']
end
test "config_for use the Pathname object if it is provided" do
app_file 'config/custom.yml', <<-RUBY
development:
key: 'custom key'
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for(Pathname.new(Rails.root.join("config/custom.yml")))
RUBY
app 'development'
assert_equal 'custom key', Rails.application.config.my_custom_config['key']
end
test "config_for raises an exception if the file does not exist" do
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
exception = assert_raises(RuntimeError) do
app 'development'
end
assert_equal "Could not load configuration. No such file - #{app_path}/config/custom.yml", exception.message
end
test "config_for without the environment configured returns an empty hash" do
app_file 'config/custom.yml', <<-RUBY
test:
key: 'custom key'
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
app 'development'
assert_equal({}, Rails.application.config.my_custom_config)
end
test "config_for with empty file returns an empty hash" do
app_file 'config/custom.yml', <<-RUBY
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
app 'development'
assert_equal({}, Rails.application.config.my_custom_config)
end
test "config_for containing ERB tags should evaluate" do
app_file 'config/custom.yml', <<-RUBY
development:
key: <%= 'custom key' %>
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
app 'development'
assert_equal 'custom key', Rails.application.config.my_custom_config['key']
end
test "config_for with syntax error show a more descriptive exception" do
app_file 'config/custom.yml', <<-RUBY
development:
key: foo:
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for('custom')
RUBY
exception = assert_raises(RuntimeError) do
app 'development'
end
assert_match 'YAML syntax error occurred while parsing', exception.message
end
test "config_for allows overriding the environment" do
app_file 'config/custom.yml', <<-RUBY
test:
key: 'walrus'
production:
key: 'unicorn'
RUBY
add_to_config <<-RUBY
config.my_custom_config = config_for('custom', env: 'production')
RUBY
require "#{app_path}/config/environment"
assert_equal 'unicorn', Rails.application.config.my_custom_config['key']
end
test "api_only is false by default" do
app 'development'
refute Rails.application.config.api_only
end
test "api_only generator config is set when api_only is set" do
add_to_config <<-RUBY
config.api_only = true
RUBY
app 'development'
Rails.application.load_generators
assert Rails.configuration.api_only
end
test "debug_exception_response_format is :api by default if api_only is enabled" do
add_to_config <<-RUBY
config.api_only = true
RUBY
app 'development'
assert_equal :api, Rails.configuration.debug_exception_response_format
end
test "debug_exception_response_format can be override" do
add_to_config <<-RUBY
config.api_only = true
RUBY
app_file 'config/environments/development.rb', <<-RUBY
Rails.application.configure do
config.debug_exception_response_format = :default
end
RUBY
app 'development'
assert_equal :default, Rails.configuration.debug_exception_response_format
end
end
end
| 29.551256 | 157 | 0.679179 |
269ccab982a765f18ccc475e47f994998ba96895 | 1,134 | module VCAP::CloudController
class BuildUpdate
class InvalidBuild < StandardError
end
def initialize
@logger = Steno.logger('cc.action.build_update')
end
def update(build, message)
build.db.transaction do
build.lock!
MetadataUpdate.update(build, message)
if message.state == VCAP::CloudController::BuildModel::FAILED_STATE
build.fail_to_stage!('StagerError', message.error)
elsif message.state == VCAP::CloudController::BuildModel::STAGED_STATE
droplet = build.droplet
droplet.lock!
droplet.docker_receipt_image = message.lifecycle.dig(:data, :image)
droplet.process_types = message.lifecycle.dig(:data, :processTypes)
droplet.mark_as_staged
droplet.save_changes
build.mark_as_staged
build.save_changes
app = build.app
app.update(droplet: droplet)
end
end
@logger.info("Finished updating metadata on build #{build.guid}")
build
rescue Sequel::ValidationFailed => e
raise InvalidBuild.new(e.message)
end
end
end
| 29.076923 | 78 | 0.652557 |
6a439f6cc6356bc84a25a8587cf6c084e801f2cd | 885 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint vlc_flutter.podspec' to validate before publishing.
#
Pod::Spec.new do |s|
s.name = 'vlc_flutter'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.platform = :ios, '8.0'
# Flutter.framework does not contain a i386 slice.
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' }
end
| 36.875 | 105 | 0.588701 |
03d8c2d9390b4468d40c456d317f224af315116e | 4,300 | #
# Copyright 2012-2018 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "openssl" unless defined?(OpenSSL)
require "pathname" unless defined?(Pathname)
require "omnibus/logging"
module Omnibus
module Digestable
def self.included(other)
other.send(:include, Logging)
end
#
# Calculate the digest of the file at the given path. Files are read in
# binary chunks to prevent Ruby from exploding.
#
# @param [String] path
# the path of the file to digest
# @param [Symbol] type
# the type of digest to use
#
# @return [String]
# the hexdigest of the file at the path
#
def digest(path, type = :md5)
digest = digest_from_type(type)
update_with_file_contents(digest, path)
digest.hexdigest
end
#
# Calculate the digest of a directory at the given path. Each file in the
# directory is read in binary chunks to prevent excess memory usage.
# Filesystem entries of all types are included in the digest, including
# directories, links, and sockets. The contents of non-file entries are
# represented as:
#
# $type $path
#
# while the contents of regular files are represented as:
#
# file $path
#
# and then appended by the binary contents of the file/
#
# @param [String] path
# the path of the directory to digest
# @param [Symbol] type
# the type of digest to use
# @param [Hash] options
# options to pass through to the FileSyncer when scanning for files
#
# @return [String]
# the hexdigest of the directory
#
def digest_directory(path, type = :md5, options = {})
digest = digest_from_type(type)
log.info(log_key) { "Digesting #{path} with #{type}" }
FileSyncer.all_files_under(path, options).each do |filename|
# Calculate the filename relative to the given path. Since directories
# are SHAed according to their filepath, two difference directories on
# disk would have different SHAs even if they had the same content.
relative = Pathname.new(filename).relative_path_from(Pathname.new(path))
case ftype = File.ftype(filename)
when "file"
update_with_string(digest, "#{ftype} #{relative}")
update_with_file_contents(digest, filename)
else
update_with_string(digest, "#{ftype} #{relative}")
end
end
digest.hexdigest
end
private
#
# Create a new instance of the digest class that corresponds to the given
# type.
#
# @param [#to_s] type
#   the type of digest to use (e.g. +:md5+, +:sha256+); upcased and
#   looked up as a constant under OpenSSL::Digest
#
# @return [OpenSSL::Digest]
#   a fresh digest instance
#
def digest_from_type(type)
  id = type.to_s.upcase
  # Return the new digest directly; the previous assignment to an unused
  # local variable (`instance`) has been removed.
  OpenSSL::Digest.const_get(id).new
end
#
# Update the digest with the given contents of the file, reading in small
# chunks to reduce memory. This method will update the given +digest+
# parameter, but returns nothing.
#
# @param [Digest] digest
#   the digest to update
# @param [String] filename
#   the path to the file on disk to read
#
# @return [void]
#
def update_with_file_contents(digest, filename)
  # Open in binary mode ("rb") so the digest is computed over the raw
  # bytes. Without this, platforms that perform newline translation in
  # text mode (Windows) would produce a different digest for the same
  # file, contradicting the documented "binary chunks" behavior.
  File.open(filename, "rb") do |io|
    while (chunk = io.read(1024 * 8))
      digest.update(chunk)
    end
  end
end
#
# Fold a single string into the digest. This mutates the given +digest+;
# the return value is not meant to be used by callers.
#
# @param [Digest] digest
#   the digest to update
# @param [String] string
#   the bytes to fold into the digest
#
# @return [void]
#
def update_with_string(digest, string)
  # Digest#<< is an alias of #update.
  digest << string
end
end
end
| 29.452055 | 80 | 0.64907 |
1da9823812402b1ae83a989e06844453391253a3 | 660 | # frozen_string_literal: true
module Kafka
  module Protocol
    # Wire-format request for the Kafka DescribeConfigs API.
    #
    # Each resource is a (type, name, config names) triple; encoding
    # writes the triple list in the Kafka array format.
    class DescribeConfigsRequest
      # @param resources [Array<Array>] triples of resource type id,
      #   resource name, and the list of config entry names to describe
      def initialize(resources:)
        @resources = resources
      end

      # @return [Integer] the protocol API key for DescribeConfigs
      def api_key
        DESCRIBE_CONFIGS_API
      end

      # @return [Integer] the API version this request encodes (v0)
      def api_version
        0
      end

      # @return [Class] decoder class for the broker's response
      def response_class
        Protocol::DescribeConfigsResponse
      end

      # Serialize this request onto the given encoder.
      #
      # @param encoder [#write_array, #write_int8, #write_string]
      # @return [void]
      def encode(encoder)
        encoder.write_array(@resources) do |resource_type, resource_name, config_names|
          encoder.write_int8(resource_type)
          encoder.write_string(resource_name)
          encoder.write_array(config_names) { |config_name| encoder.write_string(config_name) }
        end
      end
    end
  end
end
| 18.333333 | 64 | 0.609091 |
213308cd1dc084686e80f849f1992895f707a125 | 2,615 | # Copyright © 2011-2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
require 'rails_helper'
RSpec.describe CatalogManager::ClinicalProvidersController, type: :controller do
before :each do
@identity = create(:identity, catalog_overlord: true)
@organization_id = create(:provider).id
log_in_catalog_manager_identity(obj: @identity)
end
describe '#create' do
it 'should create a Clinical Provider' do
old_count = ClinicalProvider.count
post :create,
params: { clinical_provider: { identity_id: @identity.id, organization_id: @organization_id } },
xhr: true
expect(ClinicalProvider.count).to eq(old_count + 1)
end
end
describe '#destroy' do
it 'should delete an existing Clinical Provider' do
cm = create(:clinical_provider, identity_id: @identity.id, organization_id: @organization_id)
old_count = ClinicalProvider.count
delete :destroy,
params: { clinical_provider: { identity_id: @identity.id, organization_id: @organization_id } },
xhr: true
expect(ClinicalProvider.count).to eq(old_count - 1)
end
end
end
| 47.545455 | 146 | 0.759082 |
ffdeff23fde490583406e5a1f97d46ff0eef5cd2 | 4,456 | require 'spec_helper'
describe PostReceive do
let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
let(:project) { create(:project) }
let(:key) { create(:key, user: project.owner) }
let(:key_id) { key.shell_id }
context "as a resque worker" do
it "reponds to #perform" do
expect(PostReceive.new).to respond_to(:perform)
end
end
describe "#process_project_changes" do
before do
allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(project.owner)
end
context "branches" do
let(:changes) { "123456 789012 refs/heads/tést" }
it "calls GitTagPushService" do
expect_any_instance_of(GitPushService).to receive(:execute).and_return(true)
expect_any_instance_of(GitTagPushService).not_to receive(:execute)
PostReceive.new.perform(pwd(project), key_id, base64_changes)
end
end
context "tags" do
let(:changes) { "123456 789012 refs/tags/tag" }
it "calls GitTagPushService" do
expect_any_instance_of(GitPushService).not_to receive(:execute)
expect_any_instance_of(GitTagPushService).to receive(:execute).and_return(true)
PostReceive.new.perform(pwd(project), key_id, base64_changes)
end
end
context "merge-requests" do
let(:changes) { "123456 789012 refs/merge-requests/123" }
it "does not call any of the services" do
expect_any_instance_of(GitPushService).not_to receive(:execute)
expect_any_instance_of(GitTagPushService).not_to receive(:execute)
PostReceive.new.perform(pwd(project), key_id, base64_changes)
end
end
context "gitlab-ci.yml" do
subject { PostReceive.new.perform(pwd(project), key_id, base64_changes) }
context "creates a Ci::Pipeline for every change" do
before do
allow_any_instance_of(Ci::CreatePipelineService).to receive(:commit) do
OpenStruct.new(id: '123456')
end
allow_any_instance_of(Ci::CreatePipelineService).to receive(:branch?).and_return(true)
stub_ci_pipeline_to_return_yaml_file
end
it { expect{ subject }.to change{ Ci::Pipeline.count }.by(2) }
end
context "does not create a Ci::Pipeline" do
before { stub_ci_pipeline_yaml_file(nil) }
it { expect{ subject }.not_to change{ Ci::Pipeline.count } }
end
end
end
context "webhook" do
it "fetches the correct project" do
expect(Project).to receive(:find_with_namespace).with(project.path_with_namespace).and_return(project)
PostReceive.new.perform(pwd(project), key_id, base64_changes)
end
it "triggers wiki index update" do
expect(Project).to receive(:find_with_namespace).with("#{project.path_with_namespace}.wiki").and_return(nil)
expect(Project).to receive(:find_with_namespace).with(project.path_with_namespace).and_return(project)
stub_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
expect_any_instance_of(ProjectWiki).to receive(:index_blobs)
repo_path = "#{pwd(project)}.wiki"
PostReceive.new.perform(repo_path, key_id, base64_changes)
end
it "does not run if the author is not in the project" do
allow_any_instance_of(Gitlab::GitPostReceive).
to receive(:identify_using_ssh_key).
and_return(nil)
expect(project).not_to receive(:execute_hooks)
expect(PostReceive.new.perform(pwd(project), key_id, base64_changes)).to be_falsey
end
it "asks the project to trigger all hooks" do
allow(Project).to receive(:find_with_namespace).and_return(project)
expect(project).to receive(:execute_hooks).twice
expect(project).to receive(:execute_services).twice
PostReceive.new.perform(pwd(project), key_id, base64_changes)
end
it "enqueues a UpdateMergeRequestsWorker job" do
allow(Project).to receive(:find_with_namespace).and_return(project)
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(project.id, project.owner.id, any_args)
PostReceive.new.perform(pwd(project), key_id, base64_changes)
end
end
def pwd(project)
File.join(Gitlab.config.repositories.storages.default, project.path_with_namespace)
end
end
| 36.52459 | 114 | 0.710278 |
18c4746bf2bb6eac745b7b5e32c4e3fe677ffc02 | 326 | class CreateExperiences < ActiveRecord::Migration[5.2]
# Defines the +experiences+ table: one work-history entry per row,
# owned by a user (adds a user_id foreign-key column with an index).
def change
  create_table :experiences do |t|
    t.belongs_to :user, index: true
    t.string :title
    t.string :company_name
    t.date :start_date
    t.date :end_date
    t.string :location
    t.string :description
    # created_at / updated_at
    t.timestamps
  end
end
end
| 21.733333 | 54 | 0.662577 |
b9f8699430b7480dd465c5775678aaeb3f53bbc1 | 1,029 | require 'digest/md5'
module Delayed
  module Backend
    module ActiveRecord
      # Reopens delayed_job's ActiveRecord backend to support "loner"
      # jobs: jobs that refuse to enqueue while an equivalent job is
      # already queued.
      class Job < ::ActiveRecord::Base
        # When truthy, at most one copy of this job may be queued.
        attr_accessor :loner
        # Payload attribute name(s) that define uniqueness; when only
        # +loner+ is set, uniqueness falls back to :id.
        attr_accessor :unique_on
        # Optional payload attribute to read off the *conflicting* job,
        # so callers can learn which existing job blocked this one.
        attr_accessor :store_conflict_id_from
        validate :check_uniqueness
        # Validation hook: computes this job's uniqueness hash and adds
        # an error when another queued job already carries the same hash.
        # NOTE(review): assumes the jobs table has +loner_hash+ and
        # +loner_conflict+ columns added by a migration elsewhere — confirm.
        def check_uniqueness
          if loner || unique_on
            self.loner_hash = generate_loner_hash
            conflict = self.class.where(loner_hash: self.loner_hash).first
            unless conflict.nil?
              self.errors.add(:base, "Job already exists")
              if store_conflict_id_from
                self.loner_conflict = conflict.payload_object.send(store_conflict_id_from)
              end
            end
          else
            true
          end
        end
        # Builds a base64 MD5 fingerprint from the job name plus the
        # payload attributes named by +unique_on+ (default :id),
        # e.g. "MyJob::id:42".
        def generate_loner_hash
          attrs = Array(unique_on || :id)
          hashval = "#{name}::" + attrs.map {|attr| "#{attr}:#{payload_object.send(attr)}"}.join('::')
          Digest::MD5.base64digest(hashval)
        end
      end
    end
  end
end
| 27.810811 | 103 | 0.584062 |
03af659e0944b805a603be784315a8b37793202b | 1,925 | require 'sinatra'
require 'haml'
require 'kramdown'
require_relative 'beef_config'
get '/' do
@beef = get_beef()
@subtitle = "#{@beef.number_of_links} hot links"
haml :links
end
get '/posts' do
@beef = get_beef()
haml :posts
end
get '/~*.md' do
filepath = "pages/" + params[:splat][0].to_s + ".md"
if File.exists?(filepath) == false
@beef = get_beef()
@msg = "No file exists at this address."
@subtitle = "404"
haml :'404'
else
send_file filepath, :type => :txt
end
end
get '/~:page' do
@beef = get_beef()
if File.exists?("pages/#{params[:page]}.md") == false
@msg = "There is no page at this address."
@subtitle = "404"
haml :'404'
else
@page = @beef.parse_page("#{params[:page]}.md")
@page[:html] = Kramdown::Document.new(@page[:text]).to_html
@subtitle = @page[:title]
haml :page
end
end
get '/*.md' do
filepath = "posts/" + params[:splat][0].to_s + ".md"
if File.exists?(filepath) == false
@beef = get_beef()
@msg = "No file exists at this address."
@subtitle = "404"
haml :'404'
else
send_file filepath, :type => :txt
end
end
get '/feed' do
@beef = get_beef()
content_type 'application/rss+xml'
haml :feed, :layout => false
end
get '/:post' do
@beef = get_beef()
if File.exists?("posts/#{params[:post]}.md") == false
@msg = "There is no post at this address."
@subtitle = "404"
haml :'404'
else
@post = @beef.parse_post("#{params[:post]}.md")
@post[:html] = Kramdown::Document.new(@post[:text]).to_html
@subtitle = @post[:title]
haml :post
end
end
get '/tag/:tag' do
@tag = params[:tag]
@beef = get_beef()
@note = @beef.tag_note(@tag)
@subtitle = "#{@tag} tag"
haml :tag
end
get '/category/:category' do
@category = params[:category]
@beef = get_beef()
@note = @beef.category_note(@category)
@subtitle = "#{@category} category"
haml :category
end
| 21.388889 | 63 | 0.605714 |
1c79a184736340dcddb9c4b845ef2e9568de8655 | 4,918 | module Awspec::Type
class SecurityGroup < ResourceBase
aws_resource Aws::EC2::SecurityGroup
tags_allowed
def resource_via_client
@resource_via_client ||= find_security_group(@display_name)
end
def id
@id ||= resource_via_client.group_id if resource_via_client
end
def opened?(port = nil, protocol = nil, cidr = nil)
return inbound_opened?(port, protocol, cidr) if @inbound
outbound_opened?(port, protocol, cidr)
end
def opened_only?(port = nil, protocol = nil, cidr = nil)
return inbound_opened_only?(port, protocol, cidr) if @inbound
outbound_opened_only?(port, protocol, cidr)
end
def inbound_opened?(port = nil, protocol = nil, cidr = nil)
resource_via_client.ip_permissions.find do |permission|
cidr_opened?(permission, cidr) && protocol_opened?(permission, protocol) && port_opened?(permission, port)
end
end
def inbound_opened_only?(port = nil, protocol = nil, cidr = nil)
permissions = resource_via_client.ip_permissions.select do |permission|
protocol_opened?(permission, protocol) && port_opened?(permission, port)
end
cidrs = []
permissions.each do |permission|
permission.ip_ranges.select { |ip_range| cidrs.push(ip_range.cidr_ip) }
end
cidrs == Array(cidr)
end
def outbound_opened?(port = nil, protocol = nil, cidr = nil)
resource_via_client.ip_permissions_egress.find do |permission|
cidr_opened?(permission, cidr) && protocol_opened?(permission, protocol) && port_opened?(permission, port)
end
end
def outbound_opened_only?(port = nil, protocol = nil, cidr = nil)
permissions = resource_via_client.ip_permissions_egress.select do |permission|
protocol_opened?(permission, protocol) && port_opened?(permission, port)
end
cidrs = []
permissions.each do |permission|
permission.ip_ranges.select { |ip_range| cidrs.push(ip_range.cidr_ip) }
end
cidrs == Array(cidr)
end
def inbound
@inbound = true
self
end
def outbound
@inbound = false
self
end
def ip_permissions_count
resource_via_client.ip_permissions.count
end
alias_method :inbound_permissions_count, :ip_permissions_count
def ip_permissions_egress_count
resource_via_client.ip_permissions_egress.count
end
alias_method :outbound_permissions_count, :ip_permissions_egress_count
def inbound_rule_count
resource_via_client.ip_permissions.reduce(0) do |sum, permission|
sum += permission.ip_ranges.count + permission.user_id_group_pairs.count
end
end
def outbound_rule_count
resource_via_client.ip_permissions_egress.reduce(0) do |sum, permission|
sum += permission.ip_ranges.count + permission.user_id_group_pairs.count
end
end
private
def cidr_opened?(permission, cidr)
return true unless cidr
ret = permission.prefix_list_ids.select do |prefix_list_id|
prefix_list_id.prefix_list_id == cidr
end
return true if ret.count > 0
ret = permission.ip_ranges.select do |ip_range|
# if the cidr is an IP address then do a true CIDR match
if cidr =~ /^\d+\.\d+\.\d+\.\d+/
net = IPAddress::IPv4.new(ip_range.cidr_ip)
net.include?(IPAddress::IPv4.new(cidr))
else
ip_range.cidr_ip == cidr
end
end
return true if ret.count > 0
ret = permission.user_id_group_pairs.select do |sg|
# Compare the sg group_name if the remote group is in another account.
# find_security_group call doesn't return info on a remote security group.
if !sg.user_id.nil? && (sg.user_id != resource_via_client.owner_id)
next (sg.group_name == cidr) || (sg.group_id == cidr)
end
next true if sg.group_id == cidr
sg2 = find_security_group(sg.group_id)
next false if sg2.nil?
next true if sg2.group_name == cidr
sg2.tags.find do |tag|
tag.key == 'Name' && tag.value == cidr
end
end
ret.count > 0
end
def protocol_opened?(permission, protocol)
return true unless protocol
return false if protocol == 'all' && permission.ip_protocol != '-1'
return true if permission.ip_protocol == '-1'
permission.ip_protocol == protocol
end
def port_opened?(permission, port)
return true unless port
return true unless permission.from_port
return true unless permission.to_port
port_between?(port, permission.from_port, permission.to_port)
end
def port_between?(port, from_port, to_port)
if port.is_a?(String) && port.include?('-')
f, t = port.split('-')
from_port == f.to_i && to_port == t.to_i
else
port.between?(from_port, to_port)
end
end
end
end
| 33.006711 | 114 | 0.663888 |
1d6e035005258aa6f17527248888bbf5b386bf1b | 963 | require './lib/move.rb'
class Player
attr_reader :name
def initialize(name, board)
@name = name
@board = board
end
# Prompt for a move until a valid one is entered, then record it on the
# board for this player. Retries recursively on invalid input.
def choose_move
  move = Move.new(@board)
  if move.valid?
    @board.save(move, self)
  else
    print "\nInvalid movement, try again!\n"
    choose_move
  end
end
# True when this player owns a full row (1-3), a full column (a-c), or
# either diagonal of the board grid. Cell keys are "<row><column>"
# strings such as "1a".
def won?
  mine = @board.grid.select { |cell, data| data[:player]&.name == @name }
  row_win = (1..3).any? { |row| mine.count { |cell, _| cell =~ /#{row}/ } == 3 }
  column_win = ('a'..'c').any? { |column| mine.count { |cell, _| cell =~ /#{column}/ } == 3 }
  diagonal_win = %w[1a 2b 3c].all? { |cell| mine.key?(cell) } ||
                 %w[3a 2b 1c].all? { |cell| mine.key?(cell) }
  row_win || column_win || diagonal_win
end
end | 20.934783 | 76 | 0.559709 |
38650eea0d0f46c3ee274f0a2a265853b6ec6d25 | 464 | # encoding: UTF-8
require 'spec_helper'
# Integration specs for the categories resource (HTTP recorded via VCR).
describe BreweryDB::Resources::Categories, :resource do
  context '#all', :vcr do
    let(:response) { described_class.new(config).all }
    # Description typo fixed: "cagtegories" -> "categories".
    it 'fetches all of the categories at once' do
      response.length.should eq 12
    end
  end
  context '#find', :vcr do
    let(:response) { described_class.new(config).find(1) }
    it 'fetches only the category asked for' do
      response.id.should == 1
    end
  end
end
| 21.090909 | 58 | 0.670259 |
26861bda61f313cd247138d65e332872559f7234 | 912 | require "spec_helper"
RSpec.describe Snowglobe::RSpecProject, project: true do
describe ".create" do
it "creates a directory for the project" do
expect(project_directory.exist?).to be(true)
end
it "adds a Gemfile with RSpec in it" do
expect("Gemfile").to have_line_starting_with('gem "rspec"')
end
it "sets up the project for testing with RSpec" do
expect(project_directory.join(".rspec").exist?).to be(true)
expect(project_directory.join("spec/spec_helper.rb").exist?).to be(true)
end
it "creates a project where an RSpec test can be run" do
project.write_file("spec/foo_spec.rb", <<~TEST)
require "spec_helper"
describe 'Some test' do
it 'works' do
expect(true).to be(true)
end
end
TEST
expect(project.run_rspec_test_suite)
.to have_run_successfully
end
end
end
| 26.823529 | 78 | 0.649123 |
1876e81896e77bcc3da3611859b4134bc08be9b7 | 176 | require_relative '../test_helper'
# Placeholder smoke test for the execute method.
# NOTE(review): the description says "Should return nil" but the body
# only asserts true — no return value is actually checked; confirm
# whether a real assertion is still pending.
class ArkrbTest < TestHelper
  describe 'Running the execute method' do
    it 'Should return nil' do
      assert true
    end
  end
end
| 13.538462 | 42 | 0.693182 |
abc4c76261df7b17160f981641c19267f5c4b2b8 | 932 | require "rails_helper"
RSpec.describe Api::Users::Comments::Hot::WeekController, context: :as_signed_in_user do
  describe ".index" do
    it "returns paginated weekly comments sorted by hot score" do
      user = context.user
      # Created outside the current week; must be excluded from results.
      _unrelated_comment = create(:created_last_week_comment, created_by: user)
      first_comment = create(:created_this_week_comment, created_by: user, hot_score: 3)
      second_comment = create(:created_this_week_comment, created_by: user, hot_score: 2)
      third_comment = create(:created_this_week_comment, created_by: user, hot_score: 1)
      # Paginate after the highest-scored comment: only the remaining two
      # should come back, still in descending hot_score order.
      get "/api/users/#{user.to_param}/comments/hot/week.json?after=#{first_comment.to_param}"
      expect(response).to have_http_status(200)
      expect(response).to match_json_schema("controllers/api/users/comments/hot/week_controller/index/200")
      expect(response).to have_sorted_json_collection(second_comment, third_comment)
    end
  end
end
| 46.6 | 107 | 0.758584 |
f7d15165dd4093db04b5cdca9b6c5a4137428be0 | 176 | puts IO.read(File.join(File.dirname(__FILE__), 'README'))
puts "\n--\n"
puts "To continue installation, please install a shipping, payment, and fulfillment processor module"
| 29.333333 | 101 | 0.755682 |
d566c48bf7e542a2ecad051f99cb9239b48e74d5 | 1,161 | # frozen_string_literal: true
require_relative "lib/anycable/version"
Gem::Specification.new do |spec|
spec.name = "anycable"
spec.version = AnyCable::VERSION
spec.authors = ["palkan"]
spec.email = ["[email protected]"]
spec.summary = "AnyCable is a polyglot replacement for ActionCable-compatible servers"
spec.description = "AnyCable is a polyglot replacement for ActionCable-compatible servers"
spec.homepage = "http://github.com/anycable/anycable"
spec.license = "MIT"
spec.metadata = {
"bug_tracker_uri" => "http://github.com/anycable/anycable/issues",
"changelog_uri" => "https://github.com/anycable/anycable/blob/master/CHANGELOG.md",
"documentation_uri" => "https://docs.anycable.io/",
"homepage_uri" => "https://anycable.io/",
"source_code_uri" => "http://github.com/anycable/anycable",
"funding_uri" => "https://github.com/sponsors/anycable"
}
spec.executables = []
spec.files = %w[README.md MIT-LICENSE CHANGELOG.md]
spec.require_paths = ["lib"]
spec.required_ruby_version = ">= 2.7.0"
spec.add_dependency "anycable-core", AnyCable::VERSION
spec.add_dependency "grpc", "~> 1.37"
end
| 35.181818 | 92 | 0.708872 |
2171e77e76668c6c4406ebf700d8ab921925b1bb | 2,102 | require 'i18n/tasks/data/file_system'
module I18n::Tasks
module Data
DATA_DEFAULTS = {
adapter: 'I18n::Tasks::Data::FileSystem'
}
# I18n data provider
# @see I18n::Tasks::Data::FileSystem
def data
@data ||= begin
data_config = (config[:data] || {}).deep_symbolize_keys
data_config.merge!(base_locale: base_locale, locales: config[:locales])
adapter_class = data_config[:adapter].presence || data_config[:class].presence || DATA_DEFAULTS[:adapter]
adapter_class = adapter_class.to_s
adapter_class = 'I18n::Tasks::Data::FileSystem' if adapter_class == 'file_system'
data_config.except!(:adapter, :class)
ActiveSupport::Inflector.constantize(adapter_class).new data_config
end
end
def empty_forest
::I18n::Tasks::Data::Tree::Siblings.new
end
def data_forest(locales = self.locales)
locales.inject(empty_forest) do |tree, locale|
tree.merge! data[locale]
end
end
def t(key, locale = base_locale)
data.t(key, locale)
end
def tree(sel)
data[split_key(sel, 2).first][sel].try(:children)
end
def node(key, locale = base_locale)
data[locale]["#{locale}.#{key}"]
end
def build_tree(hash)
I18n::Tasks::Data::Tree::Siblings.from_nested_hash(hash)
end
def t_proc(locale = base_locale)
@t_proc ||= {}
@t_proc[locale] ||= proc { |key| t(key, locale) }
end
# whether the value for key exists in locale (defaults: base_locale)
def key_value?(key, locale = base_locale)
!t(key, locale).nil?
end
# write to store, normalizing all data
def normalize_store!(from = nil, pattern_router = false)
from = self.locales unless from
router = pattern_router ? ::I18n::Tasks::Data::Router::PatternRouter.new(data, data.config) : data.router
data.with_router(router) do
Array(from).each do |target_locale|
# store handles normalization
data[target_locale] = data[target_locale]
end
end
end
end
end
| 29.194444 | 113 | 0.636537 |
e9558075efb7dfcc342a5131ae3cdc17d3ab4534 | 3,738 | class Rubygem < ActiveRecord::Base
include Pacecar
has_many :owners, :through => :ownerships, :source => :user
has_many :ownerships, :dependent => :destroy
has_many :subscribers, :through => :subscriptions, :source => :user
has_many :subscriptions
has_many :versions, :dependent => :destroy do
def latest
# try to find a ruby platform in the latest version
find_by_position_and_platform(0, 'ruby') || first
end
end
has_one :linkset, :dependent => :destroy
validates_presence_of :name
validates_uniqueness_of :name
named_scope :with_versions, :conditions => ["versions_count > 0"]
named_scope :with_one_version, :conditions => ["versions_count = 1"]
named_scope :search, lambda { |query| {
:conditions => ["name ilike :query or versions.description ilike :query",
{:query => "%#{query}%"}],
:include => [:versions],
:order => "name asc" }
}
# Legacy-Rails custom validation for gem names: rejects names that are
# all digits, and names containing characters outside [0-9A-Za-z_.-].
# NOTE(review): errors.add is called with a single argument, so the
# message string is used as the *attribute* name under the old
# errors.add(attr, msg) signature — likely intended as
# errors.add(:name, "..."); confirm against the Rails version in use.
# NOTE(review): /^...$/ anchors per line; \A...\z would be stricter
# for multi-line input — confirm whether that matters here.
def validate
  if name =~ /^[\d]+$/
    errors.add "Name must include at least one letter."
  elsif name =~ /[^\d\w_\-\.]/
    errors.add "Name can only include letters, numbers, dashes, and underscores."
  end
end
def self.total_count
with_versions.count
end
def self.latest(limit=5)
with_one_version.by_created_at(:desc).limited(limit)
end
def self.downloaded(limit=5)
with_versions.by_downloads(:desc).limited(limit)
end
def hosted?
!versions.count.zero?
end
def rubyforge_project
versions.find(:first, :conditions => "rubyforge_project is not null").try(:rubyforge_project)
end
def unowned?
ownerships.find_by_approved(true).blank?
end
def owned_by?(user)
ownerships.find_by_user_id(user.id).try(:approved) if user
end
def to_s
versions.latest.try(:to_title) || name
end
def to_json
{:name => name,
:downloads => downloads,
:version => versions.latest.number,
:authors => versions.latest.authors,
:info => versions.latest.info,
:rubyforge_project => rubyforge_project}.to_json
end
def to_param
name
end
def with_downloads
"#{name} (#{downloads})"
end
def pushable?
new_record? || versions_count.zero?
end
def create_ownership(user)
if unowned? && !user.try(:rubyforge_importer?)
ownerships.create(:user => user, :approved => true)
end
end
def update_versions!(version, spec)
version.update_attributes_from_gem_specification!(spec)
end
def update_dependencies!(version, spec)
version.dependencies.delete_all
spec.dependencies.each do |dependency|
version.dependencies.create_from_gem_dependency!(dependency)
end
end
def update_linkset!(spec)
self.linkset ||= Linkset.new
self.linkset.update_attributes_from_gem_specification!(spec)
self.linkset.save!
end
def update_attributes_from_gem_specification!(version, spec)
self.save!
update_versions! version, spec
update_dependencies! version, spec
update_linkset! spec
end
def reorder_versions
numbers = self.reload.versions.sort.reverse.map(&:number).uniq
Version.without_callbacks(:reorder_versions) do
self.versions.each do |version|
version.update_attribute(:position, numbers.index(version.number))
end
self.versions.update_all(:latest => false)
versions.release.platforms.each do |platform|
versions.release.find_by_platform(platform).update_attributes(:latest => true)
end
end
end
def find_or_initialize_version_from_spec(spec)
version = self.versions.find_or_initialize_by_number_and_platform(spec.version.to_s, spec.original_platform.to_s)
version.rubygem = self
version
end
end
| 26.13986 | 117 | 0.685126 |
ab81bd98b443856d73e2a58cfceac89d2c80e877 | 308 | require File.expand_path('../../../../../../../spec_helper', __FILE__)
# Pending specs: an `it` without a block is reported by the runner as
# "not yet implemented" rather than silently passing.
describe "Gem::RequestSet::Lockfile::Tokenizer::Token#type" do
  it "needs to be reviewed for spec completeness"
end
describe "Gem::RequestSet::Lockfile::Tokenizer::Token#type=" do
  it "needs to be reviewed for spec completeness"
end
| 30.8 | 70 | 0.717532 |
795fce12ebe7a9bdba24fa69a9ef61d34d13486b | 372 | # frozen_string_literal: true
require 'rails_helper'
describe Projects::CreateSlackChannelJob do
  describe '#perform' do
    let(:project) { create(:project) }
    # Description corrected: the job delegates to
    # Ops::Projects::GenerateProjectSlackChannel, not a "Kickoff"
    # operation as the old description claimed.
    it 'calls Projects::GenerateProjectSlackChannel operation' do
      expect(Ops::Projects::GenerateProjectSlackChannel).to receive(:call).with(project: project)
      described_class.perform_now(project.id)
    end
  end
end
| 24.8 | 97 | 0.739247 |
e202789e74c093e8fed586830a915c0ae022bd9c | 1,158 | class LogsController < ApplicationController
# GET /users/:user_id/logs
#
# Renders the user's ten most recent logs as JSON, with each log's
# emotions (name + intensity) nested inline.
def index
  user = User.find_by(id: params[:user_id])
  # find_by returns nil for an unknown id; previously this crashed with
  # NoMethodError (HTTP 500). Respond with 404 instead.
  return head(:not_found) if user.nil?
  logs = user.logs.last(10)
  render json: logs.to_json(:include => {
    :emotions => {:only => [:name, :intensity]}
  }, :except => [:user_id, :updated_at])
end
# POST /users/:user_id/logs
#
# Creates a log for the user plus one Emotion row per tracked feeling,
# reading each intensity from params["log"][<emotion name>].
def create
  log = Log.create(user_id: params[:user_id])
  # One row per tracked emotion; order matches the original hand-written
  # sequence of Emotion.create calls.
  %w[Fear Anger Sadness Anxiety Happiness Peacefulness Gratitude].each do |emotion|
    Emotion.create(log_id: log.id, name: emotion, intensity: params["log"][emotion])
  end
  render json: "Success!"
end
end
| 42.888889 | 102 | 0.598446 |
91fc1f852b6b3a56c3d33e12362fe19393ddde8d | 234 | # frozen_string_literal: true
require 'active_support'
module HexletCode
  # Namespace for form input field builders. Each input type lives in
  # its own file and is lazily loaded via ActiveSupport::Autoload
  # (constants resolve to inputs/input.rb, inputs/text.rb,
  # inputs/select.rb by convention).
  module Inputs
    extend ActiveSupport::Autoload
    autoload :Input
    autoload :Text
    autoload :Select
  end
end
| 16.714286 | 37 | 0.74359 |
28700c6e0836b0e5c3e77655e71914aa20a698e0 | 5,801 | require File.join(File.expand_path(File.dirname(__FILE__)), '../..', 'test_helper.rb')
require 'rbbt/workflow'
require 'rbbt/workflow/task'
require 'rbbt/workflow/step'
require 'rbbt/tsv'
require 'rbbt'
require 'rbbt-util'
class TestStep < Test::Unit::TestCase
def test_step
task = Task.setup do "TEST" end
task2 = Task.setup do raise "Persistence ignored" end
TmpFile.with_file do |tmp|
step = Step.new tmp, task
assert_equal "TEST", step.run
assert File.exist? tmp
step = Step.new tmp, task2
assert_equal "TEST", step.run
end
end
def test_dependency
str = "TEST"
str2 = "TEST2"
TmpFile.with_file do |tmpfile|
task1 = Task.setup :result_type => :string do
Open.write(tmpfile, str);
"done"
end
step1 = Step.new tmpfile + 'step1', task1
task2 = Task.setup :result_type => :string do
Open.read(tmpfile)
end
step2 = Step.new tmpfile + 'step2', task2, [], [step1]
step2.run
assert_equal "TEST", Open.read(tmpfile + 'step2')
task2 = Task.setup :result_type => :string do
str2
end
step2 = Step.new tmpfile + 'step2', task2, [], step1
step1.clean
step2.clean.run
assert_equal "TEST2", Open.read(tmpfile + 'step2')
end
end
def __test_dependency_log_relay
str = "TEST"
TmpFile.with_file do |tmpfile|
task1 = Task.setup :result_type => :string, :name => :task1 do
log(:starting_task1, "Starting Task1")
Open.write(tmpfile, str);
"done"
end
step1 = Step.new tmpfile + 'step1', task1
task2 = Task.setup :result_type => :string, :name => :task1 do
Open.read(tmpfile)
end
step2 = Step.new tmpfile + 'step2', task2, [], [step1]
step2.run
assert step2.messages.include? "Starting Task1"
end
end
def test_log_relay_step
str = "TEST"
TmpFile.with_file do |tmpfile|
task1 = Task.setup :result_type => :string, :name => :task1 do
log(:starting_task1, "Starting Task1")
Open.write(tmpfile, str);
"done"
end
step1 = Step.new tmpfile + 'step1', task1
task2 = Task.setup :result_type => :string, :name => :task1 do
Open.read(tmpfile)
end
step2 = Step.new tmpfile + 'step2', task2, [], [step1]
Step.log_relay_step = step2
step2.run
assert step2.messages.include? "Starting Task1"
end
end
def test_exec
TmpFile.with_file do |lock|
task = Task.setup do "TEST" end
TmpFile.with_file do |tmp|
step = Step.new tmp, task
assert_equal "TEST", step.exec
end
end
end
def __test_fork
TmpFile.with_file do |lock|
task = Task.setup do while not File.exist?(lock) do sleep 1; end; "TEST" end
TmpFile.with_file do |tmp|
step = Step.new tmp, task
job = step.fork
assert !job.done?
assert_raise RuntimeError do step.fork end
sleep 1
Open.write(lock, "open")
assert_equal "TEST", job.join.load
assert job.done?
end
end
end
def __test_abort
TmpFile.with_file do |lock|
task = Task.setup do while not File.exist?(lock) do sleep 1; end; "TEST" end
TmpFile.with_file do |tmp|
step = Step.new tmp, task
job = step.fork
assert !job.done?
step.clean.fork
job.abort
assert_equal :aborted, job.status
Open.write(lock, "open")
job.clean.fork
job.join
assert job.done?
end
end
end
def test_files
TmpFile.with_file do |lock|
task = Task.setup do
Open.write(file("test"),"TEST")
end
TmpFile.with_file do |tmp|
step = Step.new tmp, task
#job = step.fork
#while not job.done? do sleep 1 end
step.run
assert_equal "TEST", Open.read(step.file("test"))
end
end
end
def test_messages
TmpFile.with_file do |lock|
task = Task.setup do
message "WRITE"
Open.write(file("test"),"TEST")
Open.write(path,"done")
nil
end
TmpFile.with_file do |tmp|
step = Step.new tmp, task
job = step
step.run
while not job.done? do sleep 1 end
assert_equal "TEST", Open.read(job.file("test"))
assert job.messages.include? "WRITE"
end
end
end
def test_subdir
TmpFile.with_file do |lock|
task = Task.setup do
message "WRITE"
Open.write(file("test"),"TEST")
end
TmpFile.with_file do |tmp|
step = Step.new File.join(tmp, 'subdir1', 'subdir2'), task
#job = step.fork
#while not job.done? do sleep 1 end
step.run
assert_equal "TEST", Open.read(step.file("test"))
assert step.messages.include? "WRITE"
end
end
end
def test_semaphore
TmpFile.with_file do |semaphore|
begin
semaphore = "/" << semaphore.gsub('/','_')
RbbtSemaphore.create_semaphore(semaphore, 2)
task = Task.setup do
5.times do
puts "Process: #{Process.pid}"
sleep rand
end
end
jobs = []
10.times do
TmpFile.with_file do |tmp|
step = Step.new tmp, task
jobs << step.fork(semaphore)
end
end
Step.wait_for_jobs(jobs)
ensure
RbbtSemaphore.delete_semaphore(semaphore)
end
end
end
def __test_load_return_description
require 'rbbt/workflow'
Workflow.require_workflow "Study"
study = Study.setup("LICA-FR")
job = study.recurrent_mutations(:job)
iii job.load.organism
iii study.recurrent_mutations.organism
end
end
| 25.00431 | 86 | 0.585761 |
aca1d0b02be320a479eec77c9dbe05409645feda | 574 | # This is a helper type to encapsulate iControl
# enumerations in a way which easily gives us
# access to the member as well as the value,
# since the SOAP API and the Savon serializers
# seem to use the string version of the member
# name rather than the value.
#
# Additional iControl enumerations can be generated
# using nimbletest.com/live with '\t' as a column
# separator and the following substitution pattern:
#
# # $2
# $0 = EnumItem.new('$0', '$1')
#
# Lightweight (member name, numeric value) pair for iControl enumerations.
# Stringifies to the member name and converts to the numeric value, so it
# works with both the SOAP serializers (string) and numeric comparisons.
EnumItem = Struct.new(:member, :value) do
  # The serializers use the member's string name rather than its value.
  def to_s
    member
  end

  # The numeric value defined by the iControl enumeration.
  def to_i
    value
  end
end
| 23.916667 | 51 | 0.71777 |
ed9b43f5ff572aeb48119db6d511dfd311112737 | 1,106 | # We're not including this in clerk-rails/app/helpers because it is injected
# into ActionController::Base via initializes/add_application_helpers and cannot be in the autoload path
# https://stackoverflow.com/questions/29636334/a-copy-of-xxx-has-been-removed-from-the-module-tree-but-is-still-active
module ClerkRails
  module Helpers
    # View-only helpers (currently none).
    module View
    end
    # Helpers intended for controllers.
    module Controller
      # Before-action style guard: unauthenticated visitors are redirected
      # to sign-in; signed-in accounts without a verified email address are
      # redirected to the verification page; otherwise falls through.
      def authenticate_account!
        if account_signed_in?
          if !current_account.verified_email_address
            redirect_to verify_email_address_url and return
          end
        else
          redirect_to sign_in_url and return
        end
      end
    end
    # Helpers shared by views and controllers.
    module ViewAndController
      # True when the Clerk session cookie resolved to an account.
      def account_signed_in?
        !current_account.nil?
      end
      # Account looked up from the Clerk session cookie, memoized per
      # request. Reads :clerk_session first, falling back to :__session.
      # NOTE(review): a nil lookup result is re-attempted on every call
      # because `||=` does not cache nil — confirm this is intentional.
      def current_account
        @clerk_current_account ||= begin
          if cookies[:clerk_session] || cookies[:__session]
            Clerk::SessionToken.find_account(
              cookie: cookies[:clerk_session] || cookies[:__session]
            )
          else
            nil
          end
        end
      end
    end
  end
end
| 27.65 | 118 | 0.648282 |
5d7e5f89834de62a068dbfbb3c54083757731a98 | 1,812 | require 'minitest/autorun'
require 'minitest/pride'
require 'uri'
require 'json'
require 'fakeweb'
require 'sailthru'
FakeWeb.allow_net_connect = false
class Minitest::Test
  include Sailthru::Helpers
  # Clear FakeWeb's registry so stubbed URIs never leak between tests.
  def setup
    FakeWeb.clean_registry
  end
  # Contents of the named fixture file; '' when the name is empty.
  def fixture_file(filename)
    return '' if filename == ''
    File.read(fixture_file_path(filename))
  end
  # Absolute path of a file under test/fixtures.
  def fixture_file_path(filename)
    File.expand_path(File.dirname(__FILE__) + '/fixtures/' + filename)
  end
  # Identity hook, kept so call sites can decorate the base URL in one place.
  def sailthru_api_base_url(url)
    url
  end
  # Joins url and action with exactly one slash between them.
  def sailthru_api_call_url(url, action)
    url += '/' if !url.end_with?('/')
    sailthru_api_base_url(url + action)
  end
  # Stub a GET on +url+ to return the fixture body as JSON.
  def stub_get(url, filename)
    options = { :body => fixture_file(filename), :content_type => 'application/json' }
    FakeWeb.register_uri(:get, URI.parse(url), options)
  end
  # Stub a DELETE on +url+ to return the fixture body as JSON.
  def stub_delete(url, filename)
    options = { :body => fixture_file(filename), :content_type => 'application/json' }
    FakeWeb.register_uri(:delete, URI.parse(url), options)
  end
  # Stub a POST on +url+ to return the fixture body as JSON.
  def stub_post(url, filename)
    FakeWeb.register_uri(:post, URI.parse(url), :body => fixture_file(filename), :content_type => 'application/json')
  end
  # Stub any verb on +url+ to raise StandardError.
  # NOTE(review): `filename` is unused here; kept for call-site symmetry
  # with the other stub_* helpers — confirm before removing.
  def stub_exception(url, filename)
    FakeWeb.register_uri(:any, URI.parse(url), :exception => StandardError)
  end
  # Signed, URL-encoded query string (adds the 'sig' signature param).
  def create_query_string(secret, params)
    params['sig'] = get_signature_hash(params, secret)
    params.map{ |key, value| "#{CGI::escape(key.to_s)}=#{CGI::escape(value.to_s)}" }.join("&")
  end
  # Signed JSON payload (api_key/format/json/sig) as a query string.
  def create_json_payload(api_key, secret, params)
    data = {}
    data['api_key'] = api_key
    data['format'] = 'json'
    data['json'] = params.to_json
    data['sig'] = get_signature_hash(data, secret)
    data.map{ |key, value| "#{CGI::escape(key.to_s)}=#{CGI::escape(value.to_s)}" }.join("&")
  end
end
| 26.26087 | 117 | 0.681567 |
e226581f4a4203009f15e033a3906d4bba266d96 | 1,177 | require_dependency 'core/application_record'
require_dependency 'core/person/staff'
module Core
  module Sefaz
    # Batch ("allotment") of SEFAZ exemption requests, backed by the
    # extranet.sefaz_allotments table.
    class Allotment < ApplicationRecord
      self.table_name = 'extranet.sefaz_allotments'
      # NOTE(review): `class_name` is passed a constant; Rails conventionally
      # expects a String ('Core::Person::Staff') — confirm this resolves on
      # the app's Rails version. `required: false` is the legacy spelling of
      # `optional: true`.
      belongs_to :staff, required: false, class_name: ::Core::Person::Staff
      belongs_to :send_status, required: false, class_name: ::Core::Sefaz::SendStatus
      belongs_to :send_staff, required: false, class_name: ::Core::Person::Staff
      has_many :exemptions
      # Stored as integers: 1 => itbi, 2 => itcd.
      enum exemption_type: {itbi: 1, itcd: 2}
      # Array form: stored as positional integers 0..2.
      enum send_type: ["cancelamento parcial", "pedido de isenção","cancelamento total"]
      # Filter scopes used to narrow allotment listings.
      scope :protocolo, -> (protocolo) {where(protocol_return: protocolo)}
      # Compares only the date part; expects 'dd/MM/YYYY' input (PostgreSQL to_date).
      scope :date_create, -> (date_create) {where("created_at::date = to_date(?, 'dd/MM/YYYY')", date_create)}
      scope :notifiers, -> (notifiers) {where(notifiers: notifiers)}
      scope :send_status, -> (send_status) {where(send_status_id: send_status)}
      scope :send_type, -> (send_type) {where(send_type: send_type)}
      # Matches exemptions by CPF after stripping '.' and '-' punctuation.
      scope :cpf, -> (cpf) {joins(:exemptions).where('sefaz_exemptions.cpf = ?', cpf.gsub('-','').gsub('.',''))}
    end
  end
end
| 42.035714 | 112 | 0.671198 |
1de6c74d9d4adf40f1ac344b55d1eada17951b13 | 226 | FactoryBot.define do
factory :admin_contact, class: Schools::OnBoarding::AdminContact do
email { '[email protected]' }
email_secondary { '[email protected]' }
phone { '+441234567890' }
end
end
| 28.25 | 69 | 0.716814 |
f8fcde4d0db56f3c6eff5cec46639083dbeacb20 | 3,928 | # Processes boundwith records from SCSB dump files coming from Alma and updates the
# DumpFile for submission to the S3 Bucket.
class RecapBoundwithsProcessingJob < RecapDumpFileProcessingJob
  attr_reader :dump

  # Extracts boundwith records from every file of +dump+, resolves their
  # related host/constituent records, writes them to a temp file, and
  # uploads that file to the S3 bucket.
  #
  # @param dump [Dump] dump whose files are scanned for boundwiths
  # @return [String, nil] path of the uploaded temp file, or nil when the
  #   dump contains no boundwith records
  # @raise [StandardError] when the S3 transfer fails
  def perform(dump)
    @dump = dump
    # Extract boundwiths from all dump files, process, and save in temp file.
    process_boundwiths
    return unless boundwith_records.present?
    # Transfer it to S3.
    return tempfile.path if RecapTransferService.transfer(file_path: tempfile.path)
    raise(StandardError, "Error uploading file to S3: #{tempfile.path}")
  end

  private

  # Caches the boundwith records extracted from the dump files and writes
  # them, together with any related records, into the output file.
  def process_boundwiths
    # Cache boundwith records from dump files
    boundwith_records.each(&:cache)
    # Save processed boundwith records in the dump file
    write_records("boundwiths" => find_related)
  end

  # Extract boundwith records from dump files.
  # @return [Array<AlmaAdapter::ScsbDumpRecord>]
  def boundwith_records
    @boundwith_records ||= dump.dump_files.map do |dump_file|
      extract_records(dump_file.path) do |scsb_record|
        # Skip record if it is not a boundwith
        next unless scsb_record.boundwith?
        scsb_record
      end.values
    end.flatten.compact
  end

  # Find related host and constituent records and add them to the set of
  # boundwith records.
  # @return [Array<AlmaAdapter::ScsbDumpRecord>]
  # @raise [AlmaAdapter::ScsbDumpRecord::CacheMiss] listing every mmsid
  #   that could not be resolved from Alma or the cache
  def find_related
    cache_miss_ids = []
    # Group marc records by host record id
    grouped_records = boundwith_records.group_by do |r|
      r.constituent? ? r.host_id : r.id
    end
    # Iterate through each key in grouped_records
    grouped_records.each do |host_id, records|
      host_record = records.find(&:host?)
      constituent_records = records.find_all(&:constituent?)
      unless host_record
        begin
          # Get host record id from a constituent[773w] and
          # retrieve from Alma or cache
          host_record = constituent_records.first.host_record
          # Add missing host record to group
          grouped_records[host_id] << host_record
        rescue AlmaAdapter::ScsbDumpRecord::CacheMiss => e
          # Store mmsids of records missing from the cache
          cache_miss_ids << e.message
          next
        end
      end
      # Fetch constituent record ids from host[774w] and retrieve from Alma or
      # cache; skipping any constituents already in the dump file
      begin
        skip_ids = constituent_records.map { |r| r.marc_record["001"].value }
        missing_constituents = host_record.constituent_records(skip_ids: skip_ids)
        # Add missing constituent records to group (flattened below, so
        # pushing the whole array is fine)
        grouped_records[host_id] << missing_constituents
      rescue AlmaAdapter::ScsbDumpRecord::CacheMiss => e
        cache_miss_ids << e.message
      end
    end
    # If the cache is missing records, raise an exception that lists all the ids
    raise(AlmaAdapter::ScsbDumpRecord::CacheMiss, cache_error_message(cache_miss_ids)) unless cache_miss_ids.empty?
    grouped_records.values.flatten.compact
  end

  # Error message listing every mmsid that was missing from the cache.
  # @param ids [Array<String>]
  # @return [String]
  def cache_error_message(ids)
    # Fix: the original lacked a space before "Missing mmsids", producing
    # the run-together text "...rake task.Missing mmsids:".
    "Records not found in the cache. Create a set of the missing " \
    "records in Alma, publish using the DRDS ReCAP Records publishing " \
    "profile, and load into the cache using the `cache_file` rake task. " \
    "Missing mmsids: #{ids.join(',')}"
  end

  # Generate the boundwith dump file name using the existing dump file
  # name as a template.
  # @return [String]
  def boundwiths_file_name
    basename = File.basename(dump.dump_files.first.path)
    basename.gsub(/new_[0-9]*/, 'boundwiths')
  end

  # Lazily built Tempfile that preserves the (possibly compound, e.g.
  # ".xml.tar.gz") extension of the source dump file name.
  def tempfile
    @tempfile ||= begin
      basename = File.basename(boundwiths_file_name).split(".")
      extensions = "." + basename[1..-1].join(".")
      Tempfile.new([basename[0], extensions])
    end
  end
end
| 37.056604 | 117 | 0.672607 |
ed97560a68abf1e2dd1c3a0f7dda8f0f5597508d | 281 | class Object
# An object is blank if it's false, empty, or a whitespace string. For example, '', ' ', nil, [], and {} are all blank.
def blank?
respond_to?(:empty?) ? !!empty? : !self
end
# An object is present if it's not blank.
def present?
!blank?
end
end
| 21.615385 | 121 | 0.615658 |
2625ed38336e552edd94e80537220777153a4bfb | 738 | require './config/environment'
class ApplicationController < Sinatra::Base
  configure do
    set :public_folder, 'public'
    set :views, 'app/views'
    enable :sessions
    # NOTE(review): hard-coded session secret — sessions can be forged if
    # this ships to production; move the value into an environment variable.
    set :session_secret, "secret"
  end
  # Landing page.
  get '/' do
    erb :welcome
  end
  helpers do
    # Shopper for the current session, memoized; nil when not signed in.
    def current_shopper
      @current_shopper ||= Shopper.find_by(id: session[:shopper_id]) if session[:shopper_id]
    end
    # Client for the current session, memoized; nil when not signed in.
    def current_client
      @current_client ||= Client.find_by(id: session[:client_id]) if session[:client_id]
    end
    # True when either a shopper or a client is signed in.
    def logged_in?
      !!current_shopper || !!current_client
    end
    # Redirects to the landing page unless someone is signed in.
    def authenticate_user
      redirect to '/' if !logged_in?
    end
    # Formats one errors entry shaped like [attribute, [messages]] as
    # "attribute first_message".
    def error_parser(hash)
      "#{hash[0].to_s} #{hash[1][0]}"
    end
  end
end
| 18.923077 | 92 | 0.649051 |
0346b4c6293223975f7ddf542d82ae9191be17d1 | 8,270 | # coding: utf-8
# frozen_string_literal: false
# Copyright Ayumu Nojima (野島 歩) and Martin J. Dürst ([email protected])
require 'test/unit'
require 'unicode_normalize/normalize'
class TestUnicodeNormalize < Test::Unit::TestCase
  # Unicode version the running Ruby was built against.
  UNICODE_VERSION = RbConfig::CONFIG['UNICODE_VERSION']
  path = File.expand_path("../enc/unicode/data/#{UNICODE_VERSION}", __dir__)
  # Newer UCD snapshots nest their data files under an extra "ucd" directory.
  UNICODE_DATA_PATH = File.directory?("#{path}/ucd") ? "#{path}/ucd" : path
  # Absolute path of a UCD data file, e.g. expand_filename('NormalizationTest').
  def self.expand_filename(basename)
    File.expand_path("#{basename}.txt", UNICODE_DATA_PATH)
  end
end
# The generated tests below are only defined when the UCD NormalizationTest
# data file is actually present on disk.
%w[NormalizationTest].all? {|f|
  File.exist?(TestUnicodeNormalize.expand_filename(f))
} and
class TestUnicodeNormalize
  # One parsed row of NormalizationTest.txt plus its 1-based line number.
  NormTest = Struct.new :source, :NFC, :NFD, :NFKC, :NFKD, :line

  # Parses NormalizationTest.txt into NormTest rows (skipping '#'/'@'
  # lines) and defines a test asserting the file's version header.
  def self.read_tests
    lines = IO.readlines(expand_filename('NormalizationTest'), encoding: 'utf-8')
    firstline = lines.shift
    define_method "test_0_normalizationtest_firstline" do
      assert_include(firstline, "NormalizationTest-#{UNICODE_VERSION}.txt")
    end
    lines
    .collect.with_index { |linedata, linenumber| [linedata, linenumber]}
    .reject { |line| line[0] =~ /^[\#@]/ }
    .collect do |line|
      # Each of the first five ';'-separated fields is a list of hex
      # codepoints, packed into a UTF-8 string.
      NormTest.new(*(line[0].split(';').take(5).collect do |code_string|
        code_string.split(/\s/).collect { |cp| cp.to_i(16) }.pack('U*')
      end + [line[1]+1]))
    end
  end

  # Uppercase 4+-digit hex codepoints, for readable failure messages.
  def to_codepoints(string)
    string.codepoints.collect { |cp| cp.to_s(16).upcase.rjust(4, '0') }
  end

  begin
    @@tests ||= read_tests
  rescue Errno::ENOENT => e
    # Data file vanished between the existence check and the read; run with
    # an empty test table rather than failing to load.
    @@tests ||= []
  end

  # Defines one test asserting test[source].unicode_normalize(normalization)
  # equals test[target] for every row; when `prechecked` is given, only rows
  # where test[prechecked] == test[source] are exercised.
  def self.generate_test_normalize(target, normalization, source, prechecked)
    define_method "test_normalize_to_#{target}_from_#{source}_with_#{normalization}" do
      expected = actual = test = nil
      mesg = proc {"#{to_codepoints(expected)} expected but was #{to_codepoints(actual)} on line #{test[:line]} (#{normalization})"}
      @@tests.each do |t|
        test = t
        if prechecked.nil? or test[prechecked]==test[source]
          expected = test[target]
          actual = test[source].unicode_normalize(normalization)
          assert_equal expected, actual, mesg
        end
      end
    end
  end

  # source; NFC; NFD; NFKC; NFKD

  # NFC
  # :NFC == toNFC(:source) == toNFC(:NFC) == toNFC(:NFD)
  generate_test_normalize :NFC, :nfc, :source, nil
  generate_test_normalize :NFC, :nfc, :NFC, :source
  generate_test_normalize :NFC, :nfc, :NFD, :source
  # :NFKC == toNFC(:NFKC) == toNFC(:NFKD)
  generate_test_normalize :NFKC, :nfc, :NFKC, nil
  generate_test_normalize :NFKC, :nfc, :NFKD, :NFKC
  #
  # NFD
  # :NFD == toNFD(:source) == toNFD(:NFC) == toNFD(:NFD)
  generate_test_normalize :NFD, :nfd, :source, nil
  generate_test_normalize :NFD, :nfd, :NFC, :source
  generate_test_normalize :NFD, :nfd, :NFD, :source
  # :NFKD == toNFD(:NFKC) == toNFD(:NFKD)
  generate_test_normalize :NFKD, :nfd, :NFKC, nil
  generate_test_normalize :NFKD, :nfd, :NFKD, :NFKC
  #
  # NFKC
  # :NFKC == toNFKC(:source) == toNFKC(:NFC) == toNFKC(:NFD) == toNFKC(:NFKC) == toNFKC(:NFKD)
  generate_test_normalize :NFKC, :nfkc, :source, nil
  generate_test_normalize :NFKC, :nfkc, :NFC, :source
  generate_test_normalize :NFKC, :nfkc, :NFD, :source
  generate_test_normalize :NFKC, :nfkc, :NFKC, :NFC
  generate_test_normalize :NFKC, :nfkc, :NFKD, :NFD
  #
  # NFKD
  # :NFKD == toNFKD(:source) == toNFKD(:NFC) == toNFKD(:NFD) == toNFKD(:NFKC) == toNFKD(:NFKD)
  generate_test_normalize :NFKD, :nfkd, :source, nil
  generate_test_normalize :NFKD, :nfkd, :NFC, :source
  generate_test_normalize :NFKD, :nfkd, :NFD, :source
  generate_test_normalize :NFKD, :nfkd, :NFKC, :NFC
  generate_test_normalize :NFKD, :nfkd, :NFKD, :NFD

  # Defines one test asserting every test[source] reports true from
  # unicode_normalized?(normalization).
  def self.generate_test_check_true(source, normalization)
    define_method "test_check_true_#{source}_as_#{normalization}" do
      test = nil
      mesg = proc {"#{to_codepoints(test[source])} should check as #{normalization} but does not on line #{test[:line]}"}
      @@tests.each do |t|
        test = t
        actual = test[source].unicode_normalized?(normalization)
        assert_equal true, actual, mesg
      end
    end
  end

  # Defines one test asserting test[source] reports false from
  # unicode_normalized?(normalization) whenever it differs from test[compare]
  # (identical rows are skipped because they would legitimately check true).
  def self.generate_test_check_false(source, compare, normalization)
    define_method "test_check_false_#{source}_as_#{normalization}" do
      test = nil
      mesg = proc {"#{to_codepoints(test[source])} should not check as #{normalization} but does on line #{test[:line]}"}
      @@tests.each do |t|
        test = t
        if test[source] != test[compare]
          actual = test[source].unicode_normalized?(normalization)
          assert_equal false, actual, mesg
        end
      end
    end
  end

  generate_test_check_true :NFC, :nfc
  generate_test_check_true :NFD, :nfd
  generate_test_check_true :NFKC, :nfc
  generate_test_check_true :NFKC, :nfkc
  generate_test_check_true :NFKD, :nfd
  generate_test_check_true :NFKD, :nfkd

  generate_test_check_false :source, :NFD, :nfd
  generate_test_check_false :NFC, :NFD, :nfd
  generate_test_check_false :NFKC, :NFKD, :nfd
  generate_test_check_false :source, :NFC, :nfc
  generate_test_check_false :NFD, :NFC, :nfc
  generate_test_check_false :NFKD, :NFKC, :nfc
  generate_test_check_false :source, :NFKD, :nfkd
  generate_test_check_false :NFC, :NFKD, :nfkd
  generate_test_check_false :NFD, :NFKD, :nfkd
  generate_test_check_false :NFKC, :NFKD, :nfkd
  generate_test_check_false :source, :NFKC, :nfkc
  generate_test_check_false :NFC, :NFKC, :nfkc
  generate_test_check_false :NFD, :NFKC, :nfkc
  generate_test_check_false :NFKD, :NFKC, :nfkc
end
class TestUnicodeNormalize
  # Normalization works on non-UTF-8 Unicode encodings too.
  def test_non_UTF_8
    assert_equal "\u1E0A".encode('UTF-16BE'), "D\u0307".encode('UTF-16BE').unicode_normalize(:nfc)
    assert_equal true, "\u1E0A".encode('UTF-16BE').unicode_normalized?(:nfc)
    assert_equal false, "D\u0307".encode('UTF-16BE').unicode_normalized?(:nfc)
  end

  # A singleton (KELVIN SIGN) followed by a combining mark composes.
  def test_singleton_with_accents
    assert_equal "\u0136", "\u212A\u0327".unicode_normalize(:nfc)
  end

  # Precomposed Hangul syllable plus trailing jamo composes further.
  def test_partial_jamo_compose
    assert_equal "\uAC01", "\uAC00\u11A8".unicode_normalize(:nfc)
  end

  # Partial Hangul composition decomposes fully to jamo under NFD.
  def test_partial_jamo_decompose
    assert_equal "\u1100\u1161\u11A8", "\uAC00\u11A8".unicode_normalize(:nfd)
  end

  # preventive tests for (non-)bug #14934
  def test_no_trailing_jamo
    assert_equal "\u1100\u1176\u11a8", "\u1100\u1176\u11a8".unicode_normalize(:nfc)
    assert_equal "\uae30\u11a7", "\u1100\u1175\u11a7".unicode_normalize(:nfc)
    assert_equal "\uae30\u11c3", "\u1100\u1175\u11c3".unicode_normalize(:nfc)
  end

  # Combining marks after Hangul are reordered canonically.
  def test_hangul_plus_accents
    assert_equal "\uAC00\u0323\u0300", "\uAC00\u0300\u0323".unicode_normalize(:nfc)
    assert_equal "\uAC00\u0323\u0300", "\u1100\u1161\u0300\u0323".unicode_normalize(:nfc)
    assert_equal "\u1100\u1161\u0323\u0300", "\uAC00\u0300\u0323".unicode_normalize(:nfd)
    assert_equal "\u1100\u1161\u0323\u0300", "\u1100\u1161\u0300\u0323".unicode_normalize(:nfd)
  end

  # Non-Unicode encodings are rejected for all three entry points.
  def test_raise_exception_for_non_unicode_encoding
    assert_raise(Encoding::CompatibilityError) { "abc".force_encoding('ISO-8859-1').unicode_normalize }
    assert_raise(Encoding::CompatibilityError) { "abc".force_encoding('ISO-8859-1').unicode_normalize! }
    assert_raise(Encoding::CompatibilityError) { "abc".force_encoding('ISO-8859-1').unicode_normalized? }
  end

  # SQUARE ERA NAME REIWA decomposes under compatibility normalization.
  def test_reiwa
    assert_equal "\u4EE4\u548C", "\u32FF".unicode_normalize(:nfkc)
  end

  # US-ASCII strings pass through all forms unchanged and check normalized.
  def test_us_ascii
    ascii_string = 'abc'.encode('US-ASCII')
    assert_equal ascii_string, ascii_string.unicode_normalize
    assert_equal ascii_string, ascii_string.unicode_normalize(:nfd)
    assert_equal ascii_string, ascii_string.unicode_normalize(:nfkc)
    assert_equal ascii_string, ascii_string.unicode_normalize(:nfkd)
    assert_equal ascii_string, ascii_string.dup.unicode_normalize!
    assert_equal ascii_string, ascii_string.dup.unicode_normalize!(:nfd)
    assert_equal ascii_string, ascii_string.dup.unicode_normalize!(:nfkc)
    assert_equal ascii_string, ascii_string.dup.unicode_normalize!(:nfkd)
    assert_equal true, ascii_string.unicode_normalized?
    assert_equal true, ascii_string.unicode_normalized?(:nfd)
    assert_equal true, ascii_string.unicode_normalized?(:nfkc)
    assert_equal true, ascii_string.unicode_normalized?(:nfkd)
  end
end
| 38.826291 | 132 | 0.708222 |
ac90e62fabf086afefed1669d05a8e8d015aebc4 | 33,342 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_12_01
#
# ServiceEndpointPolicyDefinitions
#
class ServiceEndpointPolicyDefinitions
include MsRestAzure
#
# Creates and initializes a new instance of the ServiceEndpointPolicyDefinitions class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [NetworkManagementClient] reference to the NetworkManagementClient
attr_reader :client
#
# Deletes the specified ServiceEndpoint policy definitions.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the Service Endpoint
# Policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
response = delete_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:custom_headers).value!
nil
end
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the Service Endpoint
# Policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
# Send request
promise = begin_delete_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Get the specified service endpoint policy definitions from service endpoint
# policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceEndpointPolicyDefinition] operation results.
#
def get(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
response = get_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Get the specified service endpoint policy definitions from service endpoint
# policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
get_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:custom_headers).value!
end
#
# Get the specified service endpoint policy definitions from service endpoint
# policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_endpoint_policy_name is nil' if service_endpoint_policy_name.nil?
fail ArgumentError, 'service_endpoint_policy_definition_name is nil' if service_endpoint_policy_definition_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceEndpointPolicyName' => service_endpoint_policy_name,'serviceEndpointPolicyDefinitionName' => service_endpoint_policy_definition_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinition.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates or updates a service endpoint policy definition in the specified
# service endpoint policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param service_endpoint_policy_definitions [ServiceEndpointPolicyDefinition]
# Parameters supplied to the create or update service endpoint policy
# operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceEndpointPolicyDefinition] operation results.
#
def create_or_update(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:nil)
response = create_or_update_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param service_endpoint_policy_definitions [ServiceEndpointPolicyDefinition]
# Parameters supplied to the create or update service endpoint policy
# operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_or_update_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:nil)
# Send request
promise = begin_create_or_update_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinition.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<ServiceEndpointPolicyDefinition>] operation results.
#
def list_by_resource_group(resource_group_name, service_endpoint_policy_name, custom_headers:nil)
first_page = list_by_resource_group_as_lazy(resource_group_name, service_endpoint_policy_name, custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_with_http_info(resource_group_name, service_endpoint_policy_name, custom_headers:nil)
list_by_resource_group_async(resource_group_name, service_endpoint_policy_name, custom_headers:custom_headers).value!
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_async(resource_group_name, service_endpoint_policy_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_endpoint_policy_name is nil' if service_endpoint_policy_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceEndpointPolicyName' => service_endpoint_policy_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinitionListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified ServiceEndpoint policy definitions.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the Service Endpoint
# Policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
response = begin_delete_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:custom_headers).value!
nil
end
#
# Deletes the specified ServiceEndpoint policy definitions.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the Service Endpoint
# Policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
begin_delete_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:custom_headers).value!
end
#
# Deletes the specified ServiceEndpoint policy definitions.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the Service Endpoint
# Policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_endpoint_policy_name is nil' if service_endpoint_policy_name.nil?
fail ArgumentError, 'service_endpoint_policy_definition_name is nil' if service_endpoint_policy_definition_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceEndpointPolicyName' => service_endpoint_policy_name,'serviceEndpointPolicyDefinitionName' => service_endpoint_policy_definition_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 204 || status_code == 202 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Creates or updates a service endpoint policy definition in the specified
# service endpoint policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param service_endpoint_policy_definitions [ServiceEndpointPolicyDefinition]
# Parameters supplied to the create or update service endpoint policy
# operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceEndpointPolicyDefinition] operation results.
#
def begin_create_or_update(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:nil)
response = begin_create_or_update_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Creates or updates a service endpoint policy definition in the specified
# service endpoint policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param service_endpoint_policy_definitions [ServiceEndpointPolicyDefinition]
# Parameters supplied to the create or update service endpoint policy
# operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_or_update_with_http_info(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:nil)
begin_create_or_update_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:custom_headers).value!
end
#
# Creates or updates a service endpoint policy definition in the specified
# service endpoint policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy.
# @param service_endpoint_policy_definition_name [String] The name of the
# service endpoint policy definition name.
# @param service_endpoint_policy_definitions [ServiceEndpointPolicyDefinition]
# Parameters supplied to the create or update service endpoint policy
# operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_or_update_async(resource_group_name, service_endpoint_policy_name, service_endpoint_policy_definition_name, service_endpoint_policy_definitions, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_endpoint_policy_name is nil' if service_endpoint_policy_name.nil?
fail ArgumentError, 'service_endpoint_policy_definition_name is nil' if service_endpoint_policy_definition_name.nil?
fail ArgumentError, 'service_endpoint_policy_definitions is nil' if service_endpoint_policy_definitions.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinition.mapper()
request_content = @client.serialize(request_mapper, service_endpoint_policy_definitions)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceEndpointPolicyName' => service_endpoint_policy_name,'serviceEndpointPolicyDefinitionName' => service_endpoint_policy_definition_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 201
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinition.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinition.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceEndpointPolicyDefinitionListResult] operation results.
#
def list_by_resource_group_next(next_page_link, custom_headers:nil)
response = list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_next_with_http_info(next_page_link, custom_headers:nil)
list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_12_01::Models::ServiceEndpointPolicyDefinitionListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all service endpoint policy definitions in a service end point policy.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_endpoint_policy_name [String] The name of the service endpoint
# policy name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceEndpointPolicyDefinitionListResult] which provide lazy access
# to pages of the response.
#
def list_by_resource_group_as_lazy(resource_group_name, service_endpoint_policy_name, custom_headers:nil)
response = list_by_resource_group_async(resource_group_name, service_endpoint_policy_name, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 49.838565 | 253 | 0.736578 |
1ac6e912d662aa6320dcf853f04f6dffb5a85c9c | 577 | class MakeTargetRepresentativeWellConnectedAndImportanceGlobalStats < ActiveRecord::Migration[5.0]
def change
rename_column :aichi11_targets, :representative_terrestrial, :representative_global
remove_column :aichi11_targets, :representative_marine, :float
rename_column :aichi11_targets, :well_connected_terrestrial, :well_connected_global
remove_column :aichi11_targets, :well_connected_marine, :float
rename_column :aichi11_targets, :importance_terrestrial, :importance_global
remove_column :aichi11_targets, :importance_marine, :float
end
end
| 44.384615 | 98 | 0.830156 |
e24e5ff8805fd4ab3b500246d31f00d82ab8dfc8 | 100 | class DropPurchases < ActiveRecord::Migration[4.2]
def change
drop_table :purchases
end
end
| 16.666667 | 50 | 0.75 |
4ae972522dfaf4ca0d66b5ca3553df2e9c7dacf7 | 1,941 | require 'spec_helper'
module Librato
describe Metrics do
describe "#authorize" do
context "when given two arguments" do
it "should store them on simple" do
Metrics.authenticate '[email protected]', 'api_key'
Metrics.client.email.should == '[email protected]'
Metrics.client.api_key.should == 'api_key'
end
end
end
describe "#faraday_adapter" do
it "should return current default adapter" do
Metrics.faraday_adapter.should_not be nil
end
end
describe "#faraday_adapter=" do
before(:all) { @current_adapter = Metrics.faraday_adapter }
after(:all) { Metrics.faraday_adapter = @current_adapter }
it "should allow setting of faraday adapter" do
Metrics.faraday_adapter = :excon
Metrics.faraday_adapter.should == :excon
Metrics.faraday_adapter = :patron
Metrics.faraday_adapter.should == :patron
end
end
describe "#persistence" do
it "should allow configuration of persistence method" do
Metrics.persistence = :test
Metrics.persistence.should == :test
end
end
describe "#submit" do
before(:all) do
Librato::Metrics.persistence = :test
Librato::Metrics.authenticate '[email protected]', 'foo'
end
after(:all) { Librato::Metrics.client.flush_authentication }
it "should persist metrics immediately" do
Metrics.persistence = :test
Metrics.submit(:foo => 123).should eql true
Metrics.persister.persisted.should == {:gauges => [{:name => 'foo', :value => 123}]}
end
it "should tolerate multiple metrics" do
lambda{ Librato::Metrics.submit :foo => 123, :bar => 456 }.should_not raise_error
expected = {:gauges => [{:name => 'foo', :value => 123}, {:name => 'bar', :value => 456}]}
Librato::Metrics.persister.persisted.should equal_unordered(expected)
end
end
end
end | 30.328125 | 97 | 0.64915 |
1d3ade8650a6596db6051e9264357e4345747252 | 912 | describe Finder do
let(:finder) { Finder.new }
describe "given a valid file" do
it "performs a search" do
result = finder.search('spec/test_files/langs.yml')
expect(result.count).to eql(3)
end
it "performs a search for languages with symbols in their names" do
result = finder.search('spec/test_files/langs_sym.yml')
expect(result.count).to eql(3)
end
it "fails for a non-existent language" do
expect{
finder.search('spec/test_files/nonexistent_langs.yml')
}.to raise_error(SystemExit)
end
end
describe "given an invalid file" do
it "fails for a non-existent file" do
expect{
finder.search('spec/test_files/foo.yml')
}.to raise_error(SystemExit)
end
it "fails for an invalid file" do
expect{
finder.search('spec/test_files/foo.txt')
}.to raise_error(SystemExit)
end
end
end | 26.057143 | 71 | 0.653509 |
7abd2de76b4c220fbc2bb6d02efaeca1363e0300 | 13,157 | =begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: UNLICENSED
https://help.mypurecloud.com/articles/terms-and-conditions/
Terms of Service: https://help.mypurecloud.com/articles/terms-and-conditions/
=end
require 'date'
module PureCloud
# Defines the phone numbers, operating hours, and the Architect flows to execute for an IVR.
class IVR
# The globally unique identifier for the object.
attr_accessor :id
# The name of the entity.
attr_accessor :name
# The resource's description.
attr_accessor :description
# The current version of the resource.
attr_accessor :version
# The date the resource was created. Date time is represented as an ISO-8601 string. For example: yyyy-MM-ddTHH:mm:ss.SSSZ
attr_accessor :date_created
# The date of the last modification to the resource. Date time is represented as an ISO-8601 string. For example: yyyy-MM-ddTHH:mm:ss.SSSZ
attr_accessor :date_modified
# The ID of the user that last modified the resource.
attr_accessor :modified_by
# The ID of the user that created the resource.
attr_accessor :created_by
# Indicates if the resource is active, inactive, or deleted.
attr_accessor :state
# The application that last modified the resource.
attr_accessor :modified_by_app
# The application that created the resource.
attr_accessor :created_by_app
# The phone number(s) to contact the IVR by. Each phone number must be unique and not in use by another resource. For example, a user and an iVR cannot have the same phone number.
attr_accessor :dnis
# The Architect flow to execute during the hours an organization is open.
attr_accessor :open_hours_flow
# The Architect flow to execute during the hours an organization is closed.
attr_accessor :closed_hours_flow
# The Architect flow to execute during an organization's holiday hours.
attr_accessor :holiday_hours_flow
# The schedule group defining the open and closed hours for an organization. If this is provided, an open flow and a closed flow must be specified as well.
attr_accessor :schedule_group
# The URI for this object
attr_accessor :self_uri
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id',
:'name' => :'name',
:'description' => :'description',
:'version' => :'version',
:'date_created' => :'dateCreated',
:'date_modified' => :'dateModified',
:'modified_by' => :'modifiedBy',
:'created_by' => :'createdBy',
:'state' => :'state',
:'modified_by_app' => :'modifiedByApp',
:'created_by_app' => :'createdByApp',
:'dnis' => :'dnis',
:'open_hours_flow' => :'openHoursFlow',
:'closed_hours_flow' => :'closedHoursFlow',
:'holiday_hours_flow' => :'holidayHoursFlow',
:'schedule_group' => :'scheduleGroup',
:'self_uri' => :'selfUri'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'id' => :'String',
:'name' => :'String',
:'description' => :'String',
:'version' => :'Integer',
:'date_created' => :'DateTime',
:'date_modified' => :'DateTime',
:'modified_by' => :'String',
:'created_by' => :'String',
:'state' => :'String',
:'modified_by_app' => :'String',
:'created_by_app' => :'String',
:'dnis' => :'Array<String>',
:'open_hours_flow' => :'UriReference',
:'closed_hours_flow' => :'UriReference',
:'holiday_hours_flow' => :'UriReference',
:'schedule_group' => :'UriReference',
:'self_uri' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'name')
self.name = attributes[:'name']
end
if attributes.has_key?(:'description')
self.description = attributes[:'description']
end
if attributes.has_key?(:'version')
self.version = attributes[:'version']
end
if attributes.has_key?(:'dateCreated')
self.date_created = attributes[:'dateCreated']
end
if attributes.has_key?(:'dateModified')
self.date_modified = attributes[:'dateModified']
end
if attributes.has_key?(:'modifiedBy')
self.modified_by = attributes[:'modifiedBy']
end
if attributes.has_key?(:'createdBy')
self.created_by = attributes[:'createdBy']
end
if attributes.has_key?(:'state')
self.state = attributes[:'state']
end
if attributes.has_key?(:'modifiedByApp')
self.modified_by_app = attributes[:'modifiedByApp']
end
if attributes.has_key?(:'createdByApp')
self.created_by_app = attributes[:'createdByApp']
end
if attributes.has_key?(:'dnis')
if (value = attributes[:'dnis']).is_a?(Array)
self.dnis = value
end
end
if attributes.has_key?(:'openHoursFlow')
self.open_hours_flow = attributes[:'openHoursFlow']
end
if attributes.has_key?(:'closedHoursFlow')
self.closed_hours_flow = attributes[:'closedHoursFlow']
end
if attributes.has_key?(:'holidayHoursFlow')
self.holiday_hours_flow = attributes[:'holidayHoursFlow']
end
if attributes.has_key?(:'scheduleGroup')
self.schedule_group = attributes[:'scheduleGroup']
end
if attributes.has_key?(:'selfUri')
self.self_uri = attributes[:'selfUri']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properies with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
if @name.nil?
return false
end
allowed_values = ["active", "inactive", "deleted"]
if @state && !allowed_values.include?(@state)
return false
end
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] state Object to be assigned
def state=(state)
allowed_values = ["active", "inactive", "deleted"]
if state && !allowed_values.include?(state)
fail ArgumentError, "invalid value for 'state', must be one of #{allowed_values}."
end
@state = state
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
name == o.name &&
description == o.description &&
version == o.version &&
date_created == o.date_created &&
date_modified == o.date_modified &&
modified_by == o.modified_by &&
created_by == o.created_by &&
state == o.state &&
modified_by_app == o.modified_by_app &&
created_by_app == o.created_by_app &&
dnis == o.dnis &&
open_hours_flow == o.open_hours_flow &&
closed_hours_flow == o.closed_hours_flow &&
holiday_hours_flow == o.holiday_hours_flow &&
schedule_group == o.schedule_group &&
self_uri == o.self_uri
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[id, name, description, version, date_created, date_modified, modified_by, created_by, state, modified_by_app, created_by_app, dnis, open_hours_flow, closed_hours_flow, holiday_hours_flow, schedule_group, self_uri].hash
end
# build the object from hash
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
else
#TODO show warning in debug mode
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
else
# data not found in attributes(hash), not an issue as the data can be optional
end
end
self
end
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /^(true|t|yes|y|1)$/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
_model = Object.const_get("PureCloud").const_get(type).new
_model.build_from_hash(value)
end
end
def to_s
to_hash.to_s
end
# to_body is an alias to to_body (backward compatibility))
def to_body
to_hash
end
# return the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Method to output non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 20.525741 | 225 | 0.529604 |
878aed61fefd84a727074bb199fa9199ab804194 | 1,604 | # See the Pagy documentation: https://ddnexus.github.io/pagy/extras/overflow
# frozen_string_literal: true
class Pagy
VARS[:overflow] = :last_page
def overflow?; @overflow end
module Overflow
def initialize(vars)
super
rescue OverflowError
@overflow = true # add the overflow flag
case @vars[:overflow]
when :exception
raise # same as without the extra
when :last_page
initial_page = @vars[:page] # save the very initial page (even after re-run)
super(vars.merge!(page: @last)) # re-run with the last page
@vars[:page] = initial_page # restore the inital page
when :empty_page
@offset = @items = @from = @to = 0 # vars relative to the actual page
@prev = @last # prev relative to the actual page
extend(Series) # special series for :empty_page
else
raise ArgumentError, "expected :overflow variable in [:last_page, :empty_page, :exception]; got #{@vars[:overflow].inspect}"
end
end
end
module Series
def series(size=@vars[:size])
@page = @last # series for last page
super(size).tap do |s| # call original series
s[s.index(@page.to_s)] = @page # string to integer (i.e. no current page)
@page = @vars[:page] # restore the actual page
end
end
end
prepend Overflow
end
| 32.734694 | 132 | 0.540524 |
4a14e78b0223be89b03b3e95b9d4ce210a5c6b8c | 3,919 | class MapnikAT2 < Formula
desc "Toolkit for developing mapping applications"
homepage "http://www.mapnik.org/"
url "https://s3.amazonaws.com/mapnik/dist/v2.2.0/mapnik-v2.2.0.tar.bz2"
sha256 "9b30de4e58adc6d5aa8478779d0a47fdabe6bf8b166b67a383b35f5aa5d6c1b0"
revision 4
bottle do
sha256 "07181d3a7481f2127ce851527918cb23eab6ee3058bab4d3ce59dc3aea63b12f" => :sierra
sha256 "2eeeb0066e3fcf317e5eada889297b659e513a5142bad69b5e44cd809d873cc1" => :el_capitan
sha256 "c96f4f7d7bbbf5bda62b501877b0bac1b285ed84074e67e09f84a75530ec6390" => :yosemite
end
keg_only :versioned_formula
# compile error in bindings/python/mapnik_text_placement.cpp
# https://github.com/mapnik/mapnik/issues/1973
patch :DATA
# boost 1.56 compatibility
# concatenated from https://github.com/mapnik/mapnik/issues/2428
patch do
url "https://gist.githubusercontent.com/tdsmith/22aeb0bfb9691de91463/raw/3064c193466a041d82e011dc5601312ccadc9e15/mapnik-boost-megadiff.diff"
sha256 "40e83052ae892aa0b134c09d8610ebd891619895bb5f3e5d937d0c48ed42d1a6"
end
depends_on "pkg-config" => :build
depends_on "freetype"
depends_on "libpng"
depends_on "libtiff"
depends_on "proj"
depends_on "icu4c"
depends_on "jpeg"
depends_on "[email protected]"
depends_on "[email protected]"
depends_on "gdal" => :optional
depends_on "postgresql" => :optional
depends_on "cairo" => :optional
depends_on "py2cairo" if build.with? "cairo"
def install
icu = Formula["icu4c"].opt_prefix
boost = Formula["boost159"].opt_prefix
proj = Formula["proj"].opt_prefix
jpeg = Formula["jpeg"].opt_prefix
libpng = Formula["libpng"].opt_prefix
libtiff = Formula["libtiff"].opt_prefix
freetype = Formula["freetype"].opt_prefix
# mapnik compiles can take ~1.5 GB per job for some .cpp files
# so lets be cautious by limiting to CPUS/2
jobs = ENV.make_jobs.to_i
jobs /= 2 if jobs > 2
args = ["CC=\"#{ENV.cc}\"",
"CXX=\"#{ENV.cxx}\"",
"JOBS=#{jobs}",
"PREFIX=#{prefix}",
"ICU_INCLUDES=#{icu}/include",
"ICU_LIBS=#{icu}/lib",
"PYTHON_PREFIX=#{prefix}", # Install to Homebrew's site-packages
"JPEG_INCLUDES=#{jpeg}/include",
"JPEG_LIBS=#{jpeg}/lib",
"PNG_INCLUDES=#{libpng}/include",
"PNG_LIBS=#{libpng}/lib",
"TIFF_INCLUDES=#{libtiff}/include",
"TIFF_LIBS=#{libtiff}/lib",
"BOOST_INCLUDES=#{boost}/include",
"BOOST_LIBS=#{boost}/lib",
"PROJ_INCLUDES=#{proj}/include",
"PROJ_LIBS=#{proj}/lib",
"FREETYPE_CONFIG=#{freetype}/bin/freetype-config"]
if build.with? "cairo"
args << "CAIRO=True" # cairo paths will come from pkg-config
else
args << "CAIRO=False"
end
args << "GDAL_CONFIG=#{Formula["gdal"].opt_bin}/gdal-config" if build.with? "gdal"
args << "PG_CONFIG=#{Formula["postgresql"].opt_bin}/pg_config" if build.with? "postgresql"
system "python", "scons/scons.py", "configure", *args
system "python", "scons/scons.py", "install"
end
test do
system bin/"mapnik-config", "-v"
end
end
__END__
diff --git a/bindings/python/mapnik_text_placement.cpp b/bindings/python/mapnik_text_placement.cpp
index 0520132..4897c28 100644
--- a/bindings/python/mapnik_text_placement.cpp
+++ b/bindings/python/mapnik_text_placement.cpp
@@ -194,7 +194,11 @@ struct ListNodeWrap: formatting::list_node, wrapper<formatting::list_node>
ListNodeWrap(object l) : formatting::list_node(), wrapper<formatting::list_node>()
{
stl_input_iterator<formatting::node_ptr> begin(l), end;
- children_.insert(children_.end(), begin, end);
+ while (begin != end)
+ {
+ children_.push_back(*begin);
+ ++begin;
+ }
}
/* TODO: Add constructor taking variable number of arguments.
| 35.954128 | 145 | 0.673386 |
5dc9940a079792a82f86eec195763014b7b150ed | 59 | # encoding: utf-8
# Minimal, body-less controller used as a target by form_for-related specs.
# NOTE(review): assumes SpecController is provided by the spec harness — confirm.
class FormForSpecs < SpecController
end
| 11.8 | 35 | 0.779661 |
79359a95372ba660f24f3ea31f472471e2a1b296 | 8,627 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.17
=end
require 'date'
module NSXTPolicy
# Label that will be displayed for a UI element.
class Label
# Text to be displayed at the label.
attr_accessor :text
# If true, displays the label only on hover
attr_accessor :hover
# Hyperlink of the specified UI page that provides details.
attr_accessor :navigation
# If the condition is met then the label will be applied. Examples of expression syntax are provided under example_request section of CreateWidgetConfiguration API.
attr_accessor :condition
# Icons to be applied at dashboard for the label
attr_accessor :icons
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'text' => :'text',
:'hover' => :'hover',
:'navigation' => :'navigation',
:'condition' => :'condition',
:'icons' => :'icons'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'text' => :'String',
:'hover' => :'BOOLEAN',
:'navigation' => :'String',
:'condition' => :'String',
:'icons' => :'Array<Icon>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'text')
self.text = attributes[:'text']
end
if attributes.has_key?(:'hover')
self.hover = attributes[:'hover']
else
self.hover = false
end
if attributes.has_key?(:'navigation')
self.navigation = attributes[:'navigation']
end
if attributes.has_key?(:'condition')
self.condition = attributes[:'condition']
end
if attributes.has_key?(:'icons')
if (value = attributes[:'icons']).is_a?(Array)
self.icons = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @text.nil?
invalid_properties.push('invalid value for "text", text cannot be nil.')
end
if @text.to_s.length > 255
invalid_properties.push('invalid value for "text", the character length must be smaller than or equal to 255.')
end
if [email protected]? && @navigation.to_s.length > 1024
invalid_properties.push('invalid value for "navigation", the character length must be smaller than or equal to 1024.')
end
if [email protected]? && @condition.to_s.length > 1024
invalid_properties.push('invalid value for "condition", the character length must be smaller than or equal to 1024.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @text.nil?
return false if @text.to_s.length > 255
return false if [email protected]? && @navigation.to_s.length > 1024
return false if [email protected]? && @condition.to_s.length > 1024
true
end
# Custom attribute writer method with validation
# @param [Object] text Value to be assigned
def text=(text)
if text.nil?
fail ArgumentError, 'text cannot be nil'
end
if text.to_s.length > 255
fail ArgumentError, 'invalid value for "text", the character length must be smaller than or equal to 255.'
end
@text = text
end
# Custom attribute writer method with validation
# @param [Object] navigation Value to be assigned
def navigation=(navigation)
if !navigation.nil? && navigation.to_s.length > 1024
fail ArgumentError, 'invalid value for "navigation", the character length must be smaller than or equal to 1024.'
end
@navigation = navigation
end
# Custom attribute writer method with validation
# @param [Object] condition Value to be assigned
def condition=(condition)
if !condition.nil? && condition.to_s.length > 1024
fail ArgumentError, 'invalid value for "condition", the character length must be smaller than or equal to 1024.'
end
@condition = condition
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
text == o.text &&
hover == o.hover &&
navigation == o.navigation &&
condition == o.condition &&
icons == o.icons
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[text, hover, navigation, condition, icons].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXTPolicy.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 30.270175 | 168 | 0.622812 |
6aad59b7b42d1cb4767a766e79be2736e96cc830 | 2,776 |
#
# Country-specific PayPal URLs and translation keys.
#
# Each lookup table carries a default, so callers always get a usable
# value even for countries without a dedicated page or translation.
# Feel free to add country-specific entries whenever a better page than
# the default is available.
#
module PaypalCountryHelper
  FEE_URL = Hash.new(
    "https://www.paypal.com/cgi-bin/marketingweb?cmd=_display-xborder-fees-outside"
  ).merge(
    "us" => "https://www.paypal.com/us/webapps/mpp/paypal-fees",
    "de" => "https://www.paypal.com/de/webapps/mpp/paypal-fees",
    "br" => "https://www.paypal.com/br/webapps/mpp/paypal-fees",
    "fr" => "https://www.paypal.com/fr/webapps/mpp/paypal-fees",
    "au" => "https://www.paypal.com/au/webapps/mpp/paypal-seller-fees",
    "no" => "https://www.paypal.com/no/webapps/mpp/paypal-fees",
    "nz" => "https://www.paypal.com/nz/webapps/mpp/paypal-fees"
  )

  POPUP_URL = Hash.new("https://www.paypal.com/webapps/mpp/paypal-popup").merge(
    "us" => "https://www.paypal.com/us/webapps/mpp/paypal-popup",
    "de" => "https://www.paypal.com/de/webapps/mpp/paypal-popup",
    "fr" => "https://www.paypal.com/fr/webapps/mpp/paypal-popup",
    "au" => "https://www.paypal.com/au/webapps/mpp/paypal-popup",
    # These countries use the home page: no localized popup exists and the
    # default English popup is not a good fit.
    "br" => "https://www.paypal.com/br/webapps/mpp/home",
    "no" => "https://www.paypal.com/no/webapps/mpp/home",
    "nz" => "https://www.paypal.com/nz/webapps/mpp/home"
  )

  CREATE_ACCOUNT_URL = Hash.new("https://www.paypal.com/%{country_code}/webapps/mpp/home").merge(
    "au" => "https://www.paypal.com/au/webapps/mpp/account-selection"
  )

  RECEIVE_FUNDS_INFO_LABEL_TR_KEY = Hash.new("paypal_accounts.paypal_receive_funds_info_label").merge(
    "au" => "paypal_accounts.paypal_receive_funds_info_label_australia_only"
  )

  RECEIVE_FUNDS_INFO_TR_KEY = Hash.new("paypal_accounts.paypal_receive_funds_info").merge(
    "au" => "paypal_accounts.paypal_receive_funds_info_australia_only"
  )

  module_function

  # Normalized hash key for a country code ("US" / :us -> "us").
  def lookup_key(country_code)
    country_code.to_s.downcase
  end

  # Fee schedule page for the given country (generic page as fallback).
  def fee_link(country_code)
    FEE_URL[lookup_key(country_code)]
  end

  # "What is PayPal" popup (or home page) for the given country.
  def popup_link(country_code)
    POPUP_URL[lookup_key(country_code)]
  end

  # Account-creation page; the default URL interpolates the country code
  # exactly as it was passed in.
  def create_paypal_account_url(country_code)
    CREATE_ACCOUNT_URL[lookup_key(country_code)] % { country_code: country_code }
  end

  # Translation key for the "receive funds" info label.
  def receive_funds_info_label_tr_key(country_code)
    RECEIVE_FUNDS_INFO_LABEL_TR_KEY[lookup_key(country_code)]
  end

  # Translation key for the "receive funds" info body text.
  def receive_funds_info_tr_key(country_code)
    RECEIVE_FUNDS_INFO_TR_KEY[lookup_key(country_code)]
  end
end
| 33.445783 | 99 | 0.723703 |
33923c4b66fedca7449e33f343a514e8a9811f8a | 5,896 | # frozen_string_literal: true
require "active_support"
require "active_support/testing/autorun"
require "active_support/testing/method_call_assertions"
require "active_support/testing/stream"
require "active_record/fixtures"
require "cases/validations_repair_helper"
module ActiveRecord
# = Active Record Test Case
#
# Defines some test assertions to test against SQL queries.
class TestCase < ActiveSupport::TestCase # :nodoc:
include ActiveSupport::Testing::MethodCallAssertions
include ActiveSupport::Testing::Stream
include ActiveRecord::TestFixtures
include ActiveRecord::ValidationsRepairHelper
self.fixture_path = FIXTURES_ROOT
self.use_instantiated_fixtures = false
self.use_transactional_tests = true
def create_fixtures(*fixture_set_names, &block)
ActiveRecord::FixtureSet.create_fixtures(ActiveRecord::TestCase.fixture_path, fixture_set_names, fixture_class_names, &block)
end
def teardown
SQLCounter.clear_log
end
def capture_sql
ActiveRecord::Base.connection.materialize_transactions
SQLCounter.clear_log
yield
SQLCounter.log.dup
end
def assert_sql(*patterns_to_match, &block)
capture_sql(&block)
ensure
failed_patterns = []
patterns_to_match.each do |pattern|
failed_patterns << pattern unless SQLCounter.log_all.any? { |sql| pattern === sql }
end
assert failed_patterns.empty?, "Query pattern(s) #{failed_patterns.map(&:inspect).join(', ')} not found.#{SQLCounter.log.size == 0 ? '' : "\nQueries:\n#{SQLCounter.log.join("\n")}"}"
end
def assert_queries(num = 1, options = {})
ignore_none = options.fetch(:ignore_none) { num == :any }
ActiveRecord::Base.connection.materialize_transactions
SQLCounter.clear_log
x = yield
the_log = ignore_none ? SQLCounter.log_all : SQLCounter.log
if num == :any
assert_operator the_log.size, :>=, 1, "1 or more queries expected, but none were executed."
else
mesg = "#{the_log.size} instead of #{num} queries were executed.#{the_log.size == 0 ? '' : "\nQueries:\n#{the_log.join("\n")}"}"
assert_equal num, the_log.size, mesg
end
x
end
def assert_no_queries(options = {}, &block)
options.reverse_merge! ignore_none: true
assert_queries(0, options, &block)
end
def assert_column(model, column_name, msg = nil)
assert has_column?(model, column_name), msg
end
def assert_no_column(model, column_name, msg = nil)
assert_not has_column?(model, column_name), msg
end
def has_column?(model, column_name)
model.reset_column_information
model.column_names.include?(column_name.to_s)
end
def with_has_many_inversing(model = ActiveRecord::Base)
old = model.has_many_inversing
model.has_many_inversing = true
yield
ensure
model.has_many_inversing = old
if model != ActiveRecord::Base && !old
model.singleton_class.remove_method(:has_many_inversing) # reset the class_attribute
end
end
def with_automatic_scope_inversing(*reflections)
old = reflections.map { |reflection| reflection.klass.automatic_scope_inversing }
reflections.each do |reflection|
reflection.klass.automatic_scope_inversing = true
reflection.remove_instance_variable(:@inverse_name) if reflection.instance_variable_defined?(:@inverse_name)
reflection.remove_instance_variable(:@inverse_of) if reflection.instance_variable_defined?(:@inverse_of)
end
yield
ensure
reflections.each_with_index do |reflection, i|
reflection.klass.automatic_scope_inversing = old[i]
reflection.remove_instance_variable(:@inverse_name) if reflection.instance_variable_defined?(:@inverse_name)
reflection.remove_instance_variable(:@inverse_of) if reflection.instance_variable_defined?(:@inverse_of)
end
end
def reset_callbacks(klass, kind)
old_callbacks = {}
old_callbacks[klass] = klass.send("_#{kind}_callbacks").dup
klass.subclasses.each do |subclass|
old_callbacks[subclass] = subclass.send("_#{kind}_callbacks").dup
end
yield
ensure
klass.send("_#{kind}_callbacks=", old_callbacks[klass])
klass.subclasses.each do |subclass|
subclass.send("_#{kind}_callbacks=", old_callbacks[subclass])
end
end
def with_postgresql_datetime_type(type)
adapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
adapter.remove_instance_variable(:@native_database_types) if adapter.instance_variable_defined?(:@native_database_types)
datetime_type_was = adapter.datetime_type
adapter.datetime_type = type
yield
ensure
adapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
adapter.datetime_type = datetime_type_was
adapter.remove_instance_variable(:@native_database_types) if adapter.instance_variable_defined?(:@native_database_types)
end
end
class PostgreSQLTestCase < TestCase
def self.run(*args)
super if current_adapter?(:PostgreSQLAdapter)
end
end
class Mysql2TestCase < TestCase
def self.run(*args)
super if current_adapter?(:Mysql2Adapter)
end
end
class SQLite3TestCase < TestCase
def self.run(*args)
super if current_adapter?(:SQLite3Adapter)
end
end
class SQLCounter
class << self
attr_accessor :ignored_sql, :log, :log_all
def clear_log; self.log = []; self.log_all = []; end
end
clear_log
def call(name, start, finish, message_id, values)
return if values[:cached]
sql = values[:sql]
self.class.log_all << sql
self.class.log << sql unless ["SCHEMA", "TRANSACTION"].include? values[:name]
end
end
ActiveSupport::Notifications.subscribe("sql.active_record", SQLCounter.new)
end
| 33.5 | 188 | 0.709125 |
7ae5118cea8a0fbe3da986138de471e78e3482e5 | 1,676 | class Libtextcat < Formula
  desc "N-gram-based text categorization library"
  homepage "https://software.wise-guys.nl/libtextcat/"
  url "https://software.wise-guys.nl/download/libtextcat-2.2.tar.gz"
  mirror "https://src.fedoraproject.org/repo/pkgs/libtextcat/libtextcat-2.2.tar.gz/128cfc86ed5953e57fe0f5ae98b62c2e/libtextcat-2.2.tar.gz"
  sha256 "5677badffc48a8d332e345ea4fe225e3577f53fc95deeec8306000b256829655"
  license "BSD-3-Clause"

  # Pre-built binary packages per platform (rebuild 1 of the bottles).
  bottle do
    cellar :any
    rebuild 1
    sha256 "9e178bd2a1479fb8d7be57c03b0bad722fbb94221d50b4b807bd6c89126492f2" => :catalina
    sha256 "02d7f744996abfda8bd85b4580c5a92a8bd89ad6cc06e2848caa9b3b0e858144" => :mojave
    sha256 "7997ea512b672f165e1e53e941147e9a520a9ab5d71b8b22e4a71622690e7cdb" => :high_sierra
    sha256 "24fe8791549204d8ef6e596fc327fbd3a645c729b440ba31ef47cf545f6f5b30" => :sierra
    sha256 "afa51f83d0a3c96ffc6f6c35011c864347f31d2c3aea987102c59f0257177072" => :el_capitan
    sha256 "1a63f24b16949843f6a3f6c17d9467208a471cfa6bf1b193738fa94c2d320f02" => :yosemite
    sha256 "e7880fa731747f117a943fd99bd41da25bae3e6440316d782c4798cf3f55e0b7" => :mavericks
    sha256 "90b38de4353d0e026a9b6259b698e34d0091c94d8d54579bef4f07de7fb97869" => :x86_64_linux
  end

  # Standard autotools build. Headers and the bundled language models are
  # not installed by `make install`, so they are copied in manually below.
  def install
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make", "install"
    # Ship the public headers and the language-model data files.
    (include/"libtextcat/").install Dir["src/*.h"]
    share.install "langclass/LM", "langclass/ShortTexts", "langclass/conf.txt"
  end

  # Fingerprints the installed README as a quick functional check.
  test do
    system "#{bin}/createfp < #{prefix}/README"
  end
end
| 46.555556 | 138 | 0.74821 |
21f60fc239d15bb6b9be7fd315fb1ecbaa043b27 | 1,895 | class CommentsController < ApplicationController
  # Loads the requested comment before each member action runs.
  before_action :set_comment, only: [:show, :edit, :update, :destroy]

  # GET /comments
  # GET /comments.json
  def index
    @comments = Comment.all
  end

  # GET /comments/1
  # GET /comments/1.json
  def show
  end

  # GET /comments/new
  def new
    @comment = Comment.new
  end

  # GET /comments/1/edit
  def edit
  end

  # POST /comments
  # POST /comments.json
  def create
    @comment = Comment.new(comment_params)

    respond_to do |format|
      if @comment.save
        format.html { redirect_to @comment, notice: 'Comment was successfully created.' }
        format.json { render :show, status: :created, location: @comment }
      else
        format.html { render :new }
        format.json { render json: @comment.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /comments/1
  # PATCH/PUT /comments/1.json
  def update
    respond_to do |format|
      if @comment.update(comment_params)
        format.html { redirect_to @comment, notice: 'Comment was successfully updated.' }
        format.json { render :show, status: :ok, location: @comment }
      else
        format.html { render :edit }
        format.json { render json: @comment.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /comments/1
  # DELETE /comments/1.json
  def destroy
    @comment.destroy
    respond_to do |format|
      format.html { redirect_to comments_url, notice: 'Comment was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private
    # Use callbacks to share common setup or constraints between actions.
    def set_comment
      @comment = Comment.find(params[:id])
    end

    # Never trust parameters from the scary internet, only allow the white list through.
    # Only :content is writable via mass assignment.
    def comment_params
      params.require(:comment).permit(:content)
    end
end
| 25.266667 | 93 | 0.660686 |
abfcd779b5ff3377aeb29bf68da2aa7fb47bc700 | 400 | class Revision < ActiveRecord::Base
belongs_to :page
belongs_to :user
validates_presence_of :content
# Returns the revision number as a human-readable number
# between 1 and parent's total revision count
def hid
i = page.revisions.length - 1
until i == 0 || page.revisions[i] == self
i -= 1
end
i+1
end
def first?
page.revisions.first == self
end
end
| 17.391304 | 58 | 0.6625 |
39adfd6f29649d837489abb3d2da955243421972 | 2,727 | require 'spec_helper'
describe BasketballTeam do
context "upon creation" do
it "orders by city" do
cavs = BasketballTeam.create!({:name => "Cavaliers",
:city => "Cleveland"});
hawks = BasketballTeam.create!({:name => "Hawks",
:city => "Atlanta"});
expect(BasketballTeam.ordered_by_city).to eq([hawks, cavs])
end
it "can be created successfully with appropriate params" do
cavs = BasketballTeam.create!({:name => "Cavaliers",
:city => "Cleveland"});
expect(BasketballTeam.find(cavs.id)).to eq(cavs)
end
it "validates uniqueness of name" do
cavs1 = BasketballTeam.create!({:name => "Cavaliers", :city => "Cleveland"})
cavs2 = BasketballTeam.new({:name => "Cavaliers", :city => "Dallas"})
expect(cavs2.valid?).not_to eq(true)
end
end
context "without name or city" do
let(:incomplete_team) {BasketballTeam.new}
it "validates presence of name" do
expect(incomplete_team).to have(1).error_on(:name)
end
it "validates presence of city" do
expect(incomplete_team).to have_at_least(1).error_on(:city)
end
it "fails validation with no name expecting a specific message" do
expect(incomplete_team.errors_on(:name)).to include("can't be blank")
end
end
it "should have many teams" do
association = BasketballTeam.reflect_on_association(:basketball_players)
expect(association.macro).to eq(:has_many)
end
context "with players" do
subject(:team) {BasketballTeam.create!({:name => "Cavaliers", :city => "Cleveland"})}
let!(:kyrie) {BasketballPlayer.create!({name: "Kyrie Irving", team_id: team.id})}
let!(:anderson) {BasketballPlayer.create!({name: "Anderson Varejao", team_id: team.id})}
it "sets up a has_many relationship correctly" do
expect(team.basketball_players).to eq([kyrie, anderson])
end
end
context "scopes" do
context "#playoffs" do
it "has the correct values hash" do
expect(BasketballTeam.playoff_teams.where_values_hash).to eq({"playoffs" => true})
end
context "returns correct data" do
before(:all) do
@cavs = BasketballTeam.create!({:name => "Cavaliers", :city => "Cleveland", :playoffs => true})
@suns = BasketballTeam.create!({:name => "Suns", :city => "Phoenix", :playoffs => false})
end
it "returns good teams" do
expect(BasketballTeam.playoff_teams).to include(@cavs)
end
it "does not return terrible teams" do
expect(BasketballTeam.playoff_teams).not_to include(@suns)
end
end
end
end
end
| 32.464286 | 105 | 0.628896 |
086cf4c21504c8ef40fdc6f77762afa0ca54fbd3 | 43 | class Ag::Resident < ApplicationRecord
end
| 14.333333 | 38 | 0.813953 |
1a1c0f9e9d3ac0e9a055d526adccd0320129bd1e | 501 | # Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'
# Add additional assets to the asset load path.
# Rails.application.config.assets.paths << Emoji.images_path
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in the app/assets
# folder are already added.
# Rails.application.config.assets.precompile += %w( admin.js admin.css ) | 41.75 | 76 | 0.766467 |
79456b64bab661b339630ae24606af4d30fbbd7f | 392 | # frozen_string_literal: true
require_relative '../../test_helper'
# Smoke tests for the Faker::TvShows::RickAndMorty generators: each one
# should return a string containing at least one word character.
class TestFakerTvShowsRickAndMorty < Test::Unit::TestCase
  def setup
    @tester = Faker::TvShows::RickAndMorty
  end

  def test_character
    assert @tester.character.match(/\w+/)
  end

  def test_location
    assert @tester.location.match(/\w+/)
  end

  def test_quote
    assert @tester.quote.match(/\w+/)
  end
end
| 17.818182 | 57 | 0.704082 |
2611bad014737cb28376d8976d370187b252f770 | 3,204 | module Hardware
class CPU
INTEL_32BIT_ARCHS = [:i386].freeze
INTEL_64BIT_ARCHS = [:x86_64].freeze
PPC_32BIT_ARCHS = [:ppc, :ppc32, :ppc7400, :ppc7450, :ppc970].freeze
PPC_64BIT_ARCHS = [:ppc64].freeze
class << self
OPTIMIZATION_FLAGS = {
native: "-march=native",
nehalem: "-march=nehalem",
core2: "-march=core2",
core: "-march=prescott",
armv6: "-march=armv6",
armv8: "-march=armv8-a",
}.freeze
def optimization_flags
OPTIMIZATION_FLAGS
end
def arch_32_bit
if arm?
:arm
elsif intel?
:i386
elsif ppc?
:ppc32
else
:dunno
end
end
def arch_64_bit
if arm?
:arm64
elsif intel?
:x86_64
elsif ppc?
:ppc64
else
:dunno
end
end
def arch
case bits
when 32
arch_32_bit
when 64
arch_64_bit
else
:dunno
end
end
def universal_archs
[arch].extend ArchitectureListExtension
end
def type
case RUBY_PLATFORM
when /x86_64/, /i\d86/ then :intel
when /arm/ then :arm
when /ppc\d+/ then :ppc
else :dunno
end
end
def family
:dunno
end
def cores
return @cores if @cores
@cores = Utils.popen_read("getconf", "_NPROCESSORS_ONLN").chomp.to_i
@cores = 1 unless $CHILD_STATUS.success?
@cores
end
def bits
@bits ||= case RUBY_PLATFORM
when /x86_64/, /ppc64/, /aarch64|arm64/ then 64
when /i\d86/, /ppc/, /arm/ then 32
end
end
def sse4?
RUBY_PLATFORM.to_s.include?("x86_64")
end
def is_32_bit?
bits == 32
end
def is_64_bit?
bits == 64
end
def intel?
type == :intel
end
def ppc?
type == :ppc
end
def arm?
type == :arm
end
def features
[]
end
def feature?(name)
features.include?(name)
end
def can_run?(arch)
if is_32_bit?
arch_32_bit == arch
elsif intel?
(INTEL_32BIT_ARCHS + INTEL_64BIT_ARCHS).include?(arch)
elsif ppc?
(PPC_32BIT_ARCHS + PPC_64BIT_ARCHS).include?(arch)
else
false
end
end
end
end
class << self
def cores_as_words
case Hardware::CPU.cores
when 1 then "single"
when 2 then "dual"
when 4 then "quad"
when 6 then "hexa"
when 8 then "octa"
when 12 then "dodeca"
else
Hardware::CPU.cores
end
end
def oldest_cpu
if Hardware::CPU.intel?
if Hardware::CPU.is_64_bit?
:core2
else
:core
end
elsif Hardware::CPU.arm?
if Hardware::CPU.is_64_bit?
:armv8
else
:armv6
end
else
Hardware::CPU.family
end
end
alias generic_oldest_cpu oldest_cpu
end
end
require "extend/os/hardware"
| 18.736842 | 76 | 0.503121 |
7ab1600ef86f741c1d18eba778c5132ec8fde14b | 473 | Rails.application.routes.draw do
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
  get '/welcome' => 'welcome#index'
  # Games own nested levels; each level exposes four movement actions and
  # a POST to regenerate the level.
  resources :games do
    resources :levels do
      post :regenerate
      post :north
      post :south
      post :west
      post :east
    end
  end
  root to: 'welcome#index'
  # Google OAuth callback and sign-out endpoints.
  get "/auth/google_login/callback" => "sessions#create"
  get "/signout" => "sessions#destroy", :as => :signout
end
| 22.52381 | 101 | 0.659619 |
d58b070b55907e49167eef7123063d396cc32737 | 27,697 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_11_01
#
# ExpressRouteGateways
#
class ExpressRouteGateways
include MsRestAzure
#
# Creates and initializes a new instance of the ExpressRouteGateways class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [NetworkManagementClient] reference to the NetworkManagementClient
attr_reader :client
#
# Lists ExpressRoute gateways under a given subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGatewayList] operation results.
#
def list_by_subscription(custom_headers:nil)
response = list_by_subscription_async(custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Lists ExpressRoute gateways under a given subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_subscription_with_http_info(custom_headers:nil)
list_by_subscription_async(custom_headers:custom_headers).value!
end
#
# Lists ExpressRoute gateways under a given subscription.
#
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_subscription_async(custom_headers:nil)
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteGateways'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGatewayList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists ExpressRoute gateways in a given resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGatewayList] operation results.
#
def list_by_resource_group(resource_group_name, custom_headers:nil)
response = list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Lists ExpressRoute gateways in a given resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_with_http_info(resource_group_name, custom_headers:nil)
list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value!
end
#
# Lists ExpressRoute gateways in a given resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_async(resource_group_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGatewayList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGateway] operation results.
#
def create_or_update(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
response = create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
# Send request
promise = begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGateway.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Fetches the details of a ExpressRoute gateway in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGateway] operation results.
#
def get(resource_group_name, express_route_gateway_name, custom_headers:nil)
response = get_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Fetches the details of a ExpressRoute gateway in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, express_route_gateway_name, custom_headers:nil)
get_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
end
#
# Fetches the details of a ExpressRoute gateway in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, express_route_gateway_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'express_route_gateway_name is nil' if express_route_gateway_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'expressRouteGatewayName' => express_route_gateway_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGateway.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(resource_group_name, express_route_gateway_name, custom_headers:nil)
response = delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
nil
end
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(resource_group_name, express_route_gateway_name, custom_headers:nil)
# Send request
promise = begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGateway] operation results.
#
def begin_create_or_update(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
response = begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_or_update_with_http_info(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers).value!
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'express_route_gateway_name is nil' if express_route_gateway_name.nil?
fail ArgumentError, 'put_express_route_gateway_parameters is nil' if put_express_route_gateway_parameters.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGateway.mapper()
request_content = @client.serialize(request_mapper, put_express_route_gateway_parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'expressRouteGatewayName' => express_route_gateway_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 201
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGateway.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::ExpressRouteGateway.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete(resource_group_name, express_route_gateway_name, custom_headers:nil)
response = begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
nil
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(resource_group_name, express_route_gateway_name, custom_headers:nil)
begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'express_route_gateway_name is nil' if express_route_gateway_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'expressRouteGatewayName' => express_route_gateway_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 202 || status_code == 200 || status_code == 204
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
end
end
| 46.008306 | 170 | 0.716937 |
1d9a47ad20bea8ec6bd86eeeea28af28b20ec54a | 841 | class RemoveIndexesNcbiNodes < ActiveRecord::Migration[5.2]
def up
remove_column :ncbi_nodes, :kingdom_r, :string
remove_column :ncbi_nodes, :phylum_r, :string
remove_column :ncbi_nodes, :class_r, :string
remove_column :ncbi_nodes, :order_r, :string
remove_index :ncbi_nodes, name: :ncbi_nodes_expr_idx
remove_index :ncbi_nodes, name: :ncbi_nodes_expr_idx1
remove_index :ncbi_nodes, name: :ncbi_nodes_expr_idx2
add_index :ncbi_nodes, :hierarchy_names, using: :gin
add_index :ncbi_nodes, :ids, using: :gin
end
def down
add_column :ncbi_nodes, :kingdom_r, :string
add_column :ncbi_nodes, :phylum_r, :string
add_column :ncbi_nodes, :class_r, :string
add_column :ncbi_nodes, :order_r, :string
remove_index :ncbi_nodes, :hierarchy_names
remove_index :ncbi_nodes, :ids
end
end
| 36.565217 | 59 | 0.741974 |
1831c2de153b8b2c849a88ac46aa3782863c2045 | 1,155 | class Libcue < Formula
desc "Cue sheet parser library for C"
homepage "https://github.com/lipnitsk/libcue"
url "https://github.com/lipnitsk/libcue/archive/v2.2.1.tar.gz"
sha256 "f27bc3ebb2e892cd9d32a7bee6d84576a60f955f29f748b9b487b173712f1200"
bottle do
cellar :any
sha256 "209e548399503830e0f786c6faef21836aa350d67db644b9ad291703ebe2e9c5" => :high_sierra
sha256 "14a6edb39d2887ad6beeb34dad944501d01f70480a529cb7e50d838833404f4f" => :sierra
sha256 "27f8ab5419958ea5817e5e44b68f24ea2a0c27d12a664556b12f6789866d0da5" => :el_capitan
sha256 "c06e978e72a9220195abe2ee63b3b5caca3b7d23fa90565b6277b0c24dc41775" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "bison" => :build unless OS.mac?
depends_on "flex" => :build unless OS.mac?
def install
system "cmake", ".", "-DBUILD_SHARED_LIBS=ON", *std_cmake_args
system "make", "install"
(pkgshare/"tests").install Dir["t/*"]
end
test do
cp_r (pkgshare/"tests").children, testpath
Dir["*.c"].each do |f|
system ENV.cc, f, "-o", "test", "-L#{lib}", "-lcue", "-I#{include}"
system "./test"
rm "test"
end
end
end
| 33.970588 | 94 | 0.716017 |
39a1d4dde86c1402093fd99b9fa3fd1766934226 | 4,341 | # frozen_string_literal: true
module Reality::Describers::Wikidata::Impl
module Modules
# Returns pages having coordinates that are located in a certain area.
#
# The "submodule" (MediaWiki API term) is included in action after setting some param, providing
# additional tweaking for this param. Example (for {Reality::Describers::Wikidata::Impl::Actions::Query} and
# its submodules):
#
# ```ruby
# api.query # returns Actions::Query
# .prop(:revisions) # adds prop=revisions to action URL, and includes Modules::Revisions into action
# .limit(10) # method of Modules::Revisions, adds rvlimit=10 to URL
# ```
#
# All submodule's parameters are documented as its public methods, see below.
#
module Geosearch
# Coordinate around which to search.
#
# @param value [String]
# @return [self]
def coord(value)
merge(gscoord: value.to_s)
end
# Title of page around which to search.
#
# @param value [String]
# @return [self]
def page(value)
merge(gspage: value.to_s)
end
# Bounding box to search in: pipe (|) separated coordinates of top left and bottom right corners.
#
# @param value [String]
# @return [self]
def bbox(value)
merge(gsbbox: value.to_s)
end
# Search radius in meters.
#
# @param value [Integer]
# @return [self]
def radius(value)
merge(gsradius: value.to_s)
end
# Restrict search to objects no larger than this, in meters.
#
# @param value [Integer]
# @return [self]
def maxdim(value)
merge(gsmaxdim: value.to_s)
end
# Maximum number of pages to return.
#
# @param value [Integer, "max"]
# @return [self]
def limit(value)
merge(gslimit: value.to_s)
end
# Globe to search on (by default "earth").
#
# @param value [String] One of "earth".
# @return [self]
def globe(value)
_globe(value) or fail ArgumentError, "Unknown value for globe: #{value}"
end
# @private
def _globe(value)
defined?(super) && super || ["earth"].include?(value.to_s) && merge(gsglobe: value.to_s)
end
# Namespaces to search.
#
# @param values [Array<String>] Allowed values: "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "120", "121", "122", "123", "828", "829", "1198", "1199", "2300", "2301", "2302", "2303", "2600".
# @return [self]
def namespace(*values)
values.inject(self) { |res, val| res._namespace(val) or fail ArgumentError, "Unknown value for namespace: #{val}" }
end
# @private
def _namespace(value)
defined?(super) && super || ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "120", "121", "122", "123", "828", "829", "1198", "1199", "2300", "2301", "2302", "2303", "2600"].include?(value.to_s) && merge(gsnamespace: value.to_s, replace: false)
end
# Which additional coordinate properties to return.
#
# @param values [Array<String>] Allowed values: "type", "name", "dim", "country", "region", "globe".
# @return [self]
def prop(*values)
values.inject(self) { |res, val| res._prop(val) or fail ArgumentError, "Unknown value for prop: #{val}" }
end
# @private
def _prop(value)
defined?(super) && super || ["type", "name", "dim", "country", "region", "globe"].include?(value.to_s) && merge(gsprop: value.to_s, replace: false)
end
# Whether to return only primary coordinates ("primary"), secondary ("secondary") or both ("all").
#
# @param value [String] One of "primary", "secondary", "all".
# @return [self]
def primary(value)
_primary(value) or fail ArgumentError, "Unknown value for primary: #{value}"
end
# @private
def _primary(value)
defined?(super) && super || ["primary", "secondary", "all"].include?(value.to_s) && merge(gsprimary: value.to_s)
end
# Whether debug information should be returned.
#
# @return [self]
def debug()
merge(gsdebug: 'true')
end
end
end
end
| 33.392308 | 291 | 0.571297 |
182c3f83e465bdc0a251415a48ef76e68a3529be | 1,049 | require 'rspec'
require_relative '../lib/dynamic_key'
describe 'AgoraDynamicKey::RTMTokenBuilder' do
  # Fixed inputs so the generated token is fully deterministic.
  let(:rtm_token_params) do
    {
      app_id: "970CA35de60c44645bbae8a215061b33",
      app_certificate: "5CFd2fd1755d40ecb72977518be15d3b",
      account: "test_user",
      salt: 1,
      role: AgoraDynamicKey::RTMTokenBuilder::Role::RTM_USER,
      privilege_expired_ts: 1446455471
    }
  end
  # Expected token string for the fixed inputs above.
  let(:valid_token) do
    "006970CA35de60c44645bbae8a215061b33IAAsR0qgiCxv0vrpRcpkz5BrbfEWCBZ6kvR6t7qG/wJIQob86ogAAAAAEAABAAAAR/QQAAEA6AOvKDdW"
  end
  context 'build_token' do
    it 'should equal valid token' do
      # NOTE(review): the builder entry point is bypassed below — the spec
      # constructs an AccessToken directly. Consider restoring this call once
      # it produces the same deterministic output.
      # token = AgoraDynamicKey::RTMTokenBuilder.build_token rtm_token_params
      token = AgoraDynamicKey::AccessToken.new rtm_token_params.merge(:channel_name => rtm_token_params[:account])
      # Pin salt and expiry so the output matches the reference token exactly.
      token.salt = 1
      token.expired_ts = 1111111
      token.grant AgoraDynamicKey::Privilege::RTM_LOGIN, rtm_token_params[:privilege_expired_ts]
      result = token.build!
      expect(result).to eq(valid_token)
    end
  end
end
7990046786835e691567f2b29e29c16fbe2e6415 | 4,524 | require "rest-client"
require "webmock/rspec"
require_relative "../lib/user"
RSpec.describe User do
  # Pre-authenticated REST resource pointing at a fake UAA host; all HTTP
  # traffic is intercepted by WebMock.
  let(:fake_uaa_client) do
    @fake_uaa_client = RestClient::Resource.new("http://fake-uaa.internal", headers: {
      "Authorization" => "fake-token",
      "Content-Type" => "application/json",
    })
  end
  it "checks if a role for a user exists in an environment" do
    u = User.new(
      "email" => "[email protected]",
      "username" => "000000000000000000000",
      "roles" => {
        "dev" => [{ "role" => "some_role" }],
        "prod" => [{ "role" => "some_other_role" }],
      },
    )
    expect(u.has_role_for_env?("dev", "some_role")).to be(true)
    expect(u.has_role_for_env?("dev", "some_other_role")).to be(false)
    expect(u.has_role_for_env?("prod", "some_other_role")).to be(true)
    expect(u.has_role_for_env?("prod", "some_role")).to be(false)
    expect(u.has_role_for_env?("some_env_that_does_not_exist", "some_role_that_does_not_exist")).to be(false)
  end
  it "does not give any roles to users without roles" do
    u = User.new(
      "email" => "[email protected]",
      "username" => "000000000000000000000",
      "roles" => {},
    )
    expect(u.has_role_for_env?("dev", "some_role")).to be(false)
    expect(u.has_role_for_env?("dev", "some_other_role")).to be(false)
    expect(u.has_role_for_env?("prod", "some_other_role")).to be(false)
    expect(u.has_role_for_env?("prod", "some_role")).to be(false)
    expect(u.has_role_for_env?("some_env_that_does_not_exist", "some_role_that_does_not_exist")).to be(false)
  end
  it "checks if exists in UAA" do
    # Known user: UAA answers the filter query with one matching resource.
    stub_request(:get, "http://fake-uaa.internal/Users?filter=origin%20eq%20%22google%22%20and%20userName%20eq%20%22000000000000000000000%22")
      .to_return(body: JSON.generate(
        resources: [{
          id: "00000000-0000-0000-0000-000000000000-user",
        }],
        totalResults: 1,
      ))
    u = User.new(
      "email" => "[email protected]",
      "username" => "000000000000000000000",
      "roles" => { "dev" => [{ "role" => "some_role" }] },
    )
    expect(u.exists?(fake_uaa_client)).to be true
    assert_requested(:get, "http://fake-uaa.internal/Users?filter=origin%20eq%20%22google%22%20and%20userName%20eq%20%22000000000000000000000%22")
    # Unknown user: a 404 from UAA maps to exists? == false.
    stub_request(:get, "http://fake-uaa.internal/Users?filter=origin%20eq%20%22google%22%20and%20userName%20eq%20%22999999999999999999999%22")
      .to_return(status: 404, body: JSON.generate({}))
    u2 = User.new(
      "email" => "[email protected]",
      "username" => "999999999999999999999",
    )
    expect(u2.exists?(fake_uaa_client)).to be false
  end
  it "creates the entity" do
    stub_request(:post, "http://fake-uaa.internal/Users").to_return(status: 201)
    u = User.new(
      "email" => "[email protected]",
      "username" => "000000000000000000000",
      "roles" => { "dev" => [{ "role" => "some_role" }] },
    )
    expect(u.create(fake_uaa_client)).to be true
    # The POST body must carry the username being provisioned.
    assert_requested(:post, "http://fake-uaa.internal/Users", times: 1) do |req|
      JSON.parse(req.body)["userName"] == "000000000000000000000"
    end
    # A 4xx from UAA is surfaced as an exception.
    stub_request(:post, "http://fake-uaa.internal/Users")
      .to_return(status: 400, body: JSON.generate({}))
    u2 = User.new(
      "email" => "rich.richardson",
      "username" => "999999999999999999999",
    )
    expect { u2.create(fake_uaa_client) }.to raise_error(Exception, /Bad Request/)
    # A 2xx other than 201 is treated as "not created".
    stub_request(:post, "http://fake-uaa.internal/Users")
      .to_return(status: 206, body: JSON.generate({}))
    u3 = User.new(
      "email" => "[email protected]",
      "username" => "000000000000000000000",
    )
    expect(u3.create(fake_uaa_client)).to be false
  end
  it "throw errors when UAA API returns >=400 response" do
    stub_request(:post, "http://fake-uaa.internal/Users")
      .to_return(status: 201, body: JSON.generate(
        resources: [{
          id: "00000000-0000-0000-0000-000000000000-user",
        }],
        totalResults: 1,
      ))
    stub_request(:get, "http://fake-uaa.internal/Users?filter=origin%20eq%20%22google%22%20and%20userName%20eq%20%22000000000000000000000%22")
      .to_return(status: 404, body: JSON.generate({}))
    u = User.new(
      "email" => "[email protected]",
      "username" => "000000000000000000000",
      "roles" => { "dev" => [{ "role" => "some_role" }] },
    )
    # NOTE(review): expecting bare Exception is very broad; asserting a
    # specific error class would make this test stronger.
    expect { u.get_user(fake_uaa_client) }.to raise_error(Exception)
  end
end
| 35.34375 | 146 | 0.640805 |
87aca9eafea6e26a9f836d85d0d9a4fcbf6206e4 | 3,852 | module Admitad
module AffiliatePrograms
class Success < Admitad::Success
attribute :message, String
attribute :success, String
end
class Action < Admitad::Success
attribute :hold_time, Integer
attribute :payment_size, String
attribute :type, String
attribute :name, String
attribute :id, Integer
end
class Traffic < Admitad::Success
attribute :name, String
attribute :enabled, Boolean
end
class Category < Admitad::Success
attribute :id, Integer
attribute :parent, Category
attribute :name, String
attribute :language, String
end
class AffiliateProgram < Admitad::Success
attribute :goto_cookie_lifetime, Integer
attribute :rating, String
attribute :exclusive, Boolean
attribute :image, String
attribute :actions, Array[Action]
attribute :avg_money_transfer_time, Integer
attribute :currency, String
attribute :activation_date, String
attribute :cr, Float
attribute :max_hold_time, String
attribute :id, Integer
attribute :avg_hold_time, Integer
attribute :ecpc, Float
attribute :connection_status, String
attribute :gotolink, String
attribute :site_url, String
attribute :regions, Array
attribute :actions_detail, Array
attribute :epc_trend, String
attribute :geotargeting, Boolean
attribute :products_xml_link, String
attribute :status, String
attribute :coupon_iframe_denied, Boolean
attribute :traffics, Array[Traffic]
attribute :description, String
attribute :cr_trend, String
attribute :raw_description, String
attribute :modified_date, String
attribute :denynewwms, Boolean
attribute :moderation, Boolean
attribute :categories, Array[Category]
attribute :name, String
attribute :retag, Boolean
attribute :products_csv_link, String
attribute :feeds_info, Array
attribute :landing_code, String
attribute :ecpc_trend, String
attribute :landing_title, String
attribute :epc, Float
attribute :allow_deeplink, Boolean
attribute :show_products_links, Boolean
class << self
def find(id)
create(Wrapper.affiliate_programs_where(id: id))
end
def where(**params)
params[:w_id] = params.delete(:ad_space_id)
params[:c_id] = params.delete(:affiliate_program_id)
attributes = if params[:w_id]
Wrapper.affiliate_programs_for_ad_space(params[:w_id], params)
else
Wrapper.affiliate_programs_where(params)
end
Response.create(attributes)
end
def attach(affiliate_program, ad_space)
AffiliatePrograms::Success.create(Wrapper.connect_affiliate_program(ad_space, affiliate_program))
end
def detach(affiliate_program, ad_space)
AffiliatePrograms::Success.create(Wrapper.disconnect_affiliate_program(ad_space, affiliate_program))
end
end
def attach(ad_space)
self.class.attach(self, ad_space)
end
def detach(ad_space)
self.class.detach(self, ad_space)
end
end
class Response < Admitad::Success
attribute :results, Array[AffiliateProgram]
attribute :_meta, Hash
def self.create(attributes)
if Constants::ERRORS.any? { |error| attributes.key?(error) }
Error.new(attributes)
elsif attributes['results']
new(attributes)
elsif attributes['success']
Success.new(attributes)
else
AffiliateProgram.new(attributes)
end
end
alias affiliate_programs results
alias metadata _meta
end
end
end
| 30.330709 | 110 | 0.656282 |
5daf6d4fca94d85eb59aae7828222ebf04763b7b | 77 | class User < ActiveRecord::Base
has_many :topics
has_secure_password
end
| 15.4 | 31 | 0.792208 |
ed3cfd6b02323d8ad030f0ea2d1a094320f08c15 | 1,132 | module Banzai
module Pipeline
    # Banzai pipeline that renders GitLab Flavored Markdown to HTML.
    class GfmPipeline < BasePipeline
      # Ordered HTML::Pipeline filter chain, memoized once per process.
      # Order is significant — e.g. sanitization runs before the reference
      # filters insert their own markup.
      def self.filters
        @filters ||= FilterArray[
          Filter::SyntaxHighlightFilter,
          Filter::SanitizationFilter,
          Filter::UploadLinkFilter,
          Filter::ImageLinkFilter,
          Filter::EmojiFilter,
          Filter::TableOfContentsFilter,
          Filter::AutolinkFilter,
          Filter::ExternalLinkFilter,
          Filter::UserReferenceFilter,
          Filter::IssueReferenceFilter,
          Filter::ExternalIssueReferenceFilter,
          Filter::MergeRequestReferenceFilter,
          Filter::SnippetReferenceFilter,
          Filter::CommitRangeReferenceFilter,
          Filter::CommitReferenceFilter,
          Filter::LabelReferenceFilter,
          Filter::MilestoneReferenceFilter,
          Filter::TaskListFilter
        ]
      end
      # Merges pipeline-wide defaults into the caller-supplied +context+.
      def self.transform_context(context)
        context.merge(
          only_path: true,
          # Asset locations consumed by EmojiFilter to build emoji URLs.
          asset_host: Gitlab::Application.config.asset_host,
          asset_root: Gitlab.config.gitlab.base_url
        )
      end
    end
end
| 26.952381 | 60 | 0.627208 |
4a8191c2b0afb5040b0c0ad355e5607ec8c86197 | 443 | Pod::Spec.new do |s|
  # Pod identity.
  s.name = "ZMBLESDK"
  s.version = "1.3.0"
  s.summary = "Bluetooth"
  s.homepage = "https://github.com/humoroutlaw/ZMBLESDK"
  s.license = { :type => 'MIT' }
  s.author = "humoroutlaw"
  # Deployment target and source location (tag tracks the pod version).
  s.platform = :ios, "9.0"
  s.source = {:git => "https://github.com/humoroutlaw/ZMBLESDK.git", :tag => s.version}
  # Ships a prebuilt binary framework rather than source files.
  s.vendored_frameworks = 'ZMBLESDK/ZMBLESDK.framework'
  s.requires_arc = true
| 34.076923 | 88 | 0.598194 |
bfe1daa6d053d87ba4947ed01077412b69e86241 | 8,005 | # frozen_string_literal: true
require "spec_helper"
describe "Backup::Packager" do
let(:packager) { Backup::Packager }
it "should include Utilities::Helpers" do
expect(packager.instance_eval("class << self; self; end")
.include?(Backup::Utilities::Helpers)).to eq(true)
end
describe "#package!" do
let(:model) { double }
let(:package) { double }
let(:encryptor) { double }
let(:splitter) { double }
let(:pipeline) { double }
let(:procedure) { double }
let(:s) { sequence "" }
context "when pipeline command is successful" do
it "should setup variables and perform packaging procedures" do
expect(model).to receive(:package).ordered.and_return(package)
expect(model).to receive(:encryptor).ordered.and_return(encryptor)
expect(model).to receive(:splitter).ordered.and_return(splitter)
expect(Backup::Pipeline).to receive(:new).ordered.and_return(pipeline)
expect(Backup::Logger).to receive(:info).ordered.with(
"Packaging the backup files..."
)
expect(packager).to receive(:procedure).ordered.and_return(procedure)
expect(procedure).to receive(:call).ordered
expect(pipeline).to receive(:success?).ordered.and_return(true)
expect(Backup::Logger).to receive(:info).ordered.with(
"Packaging Complete!"
)
packager.package!(model)
expect(packager.instance_variable_get(:@package)).to be(package)
expect(packager.instance_variable_get(:@encryptor)).to be(encryptor)
expect(packager.instance_variable_get(:@splitter)).to be(splitter)
expect(packager.instance_variable_get(:@pipeline)).to be(pipeline)
end
end # context 'when pipeline command is successful'
context "when pipeline command is not successful" do
it "should raise an error" do
expect(model).to receive(:package).ordered.and_return(package)
expect(model).to receive(:encryptor).ordered.and_return(encryptor)
expect(model).to receive(:splitter).ordered.and_return(splitter)
expect(Backup::Pipeline).to receive(:new).ordered.and_return(pipeline)
expect(Backup::Logger).to receive(:info).ordered.with(
"Packaging the backup files..."
)
expect(packager).to receive(:procedure).ordered.and_return(procedure)
expect(procedure).to receive(:call).ordered
expect(pipeline).to receive(:success?).ordered.and_return(false)
expect(pipeline).to receive(:error_messages).ordered.and_return("pipeline_errors")
expect do
packager.package!(model)
end.to raise_error(
Backup::Packager::Error,
"Packager::Error: Failed to Create Backup Package\n" \
" pipeline_errors"
)
expect(packager.instance_variable_get(:@package)).to be(package)
expect(packager.instance_variable_get(:@encryptor)).to be(encryptor)
expect(packager.instance_variable_get(:@splitter)).to be(splitter)
expect(packager.instance_variable_get(:@pipeline)).to be(pipeline)
end
end # context 'when pipeline command is successful'
end # describe '#package!'
describe "#procedure" do
module Fake
def self.stack_trace
@stack_trace ||= []
end
class Encryptor
def encrypt_with
Fake.stack_trace << :encryptor_before
yield "encryption_command", ".enc"
Fake.stack_trace << :encryptor_after
end
end
class Splitter
def split_with
Fake.stack_trace << :splitter_before
yield "splitter_command"
Fake.stack_trace << :splitter_after
end
end
class Package
attr_accessor :trigger, :extension
def basename
"base_filename." + extension
end
end
end
let(:package) { Fake::Package.new }
let(:encryptor) { Fake::Encryptor.new }
let(:splitter) { Fake::Splitter.new }
let(:pipeline) { double }
let(:s) { sequence "" }
before do
Fake.stack_trace.clear
expect(packager).to receive(:utility).with(:tar).and_return("tar")
packager.instance_variable_set(:@package, package)
packager.instance_variable_set(:@pipeline, pipeline)
package.trigger = "model_trigger".dup
package.extension = "tar".dup
end
context "when no encryptor or splitter are defined" do
it "should package the backup without encryption into a single file" do
expect(packager).to receive(:utility).with(:cat).and_return("cat")
packager.instance_variable_set(:@encryptor, nil)
packager.instance_variable_set(:@splitter, nil)
expect(pipeline).to receive(:add).ordered.with(
"tar -cf - -C '#{Backup::Config.tmp_path}' 'model_trigger'", [0, 1]
)
expect(pipeline).to receive(:<<).ordered.with(
"cat > #{File.join(Backup::Config.tmp_path, "base_filename.tar")}"
)
expect(pipeline).to receive(:run).ordered
packager.send(:procedure).call
end
end
context "when only an encryptor is configured" do
it "should package the backup with encryption" do
expect(packager).to receive(:utility).with(:cat).and_return("cat")
packager.instance_variable_set(:@encryptor, encryptor)
packager.instance_variable_set(:@splitter, nil)
expect(pipeline).to receive(:add).ordered.with(
"tar -cf - -C '#{Backup::Config.tmp_path}' 'model_trigger'", [0, 1]
)
expect(pipeline).to receive(:<<).ordered.with("encryption_command")
expect(pipeline).to receive(:<<).ordered.with(
"cat > #{File.join(Backup::Config.tmp_path, "base_filename.tar.enc")}"
)
expect(pipeline).to receive(:run).ordered do
Fake.stack_trace << :command_executed
true
end
packager.send(:procedure).call
expect(Fake.stack_trace).to eq([
:encryptor_before, :command_executed, :encryptor_after
])
end
end
context "when only a splitter is configured" do
it "should package the backup without encryption through the splitter" do
expect(packager).to receive(:utility).with(:cat).never
packager.instance_variable_set(:@encryptor, nil)
packager.instance_variable_set(:@splitter, splitter)
expect(pipeline).to receive(:add).ordered.with(
"tar -cf - -C '#{Backup::Config.tmp_path}' 'model_trigger'", [0, 1]
)
expect(pipeline).to receive(:<<).ordered.with("splitter_command")
expect(pipeline).to receive(:run).ordered do
Fake.stack_trace << :command_executed
true
end
packager.send(:procedure).call
expect(Fake.stack_trace).to eq([
:splitter_before, :command_executed, :splitter_after
])
end
end
context "when both an encryptor and a splitter are configured" do
it "should package the backup with encryption through the splitter" do
expect(packager).to receive(:utility).with(:cat).never
packager.instance_variable_set(:@encryptor, encryptor)
packager.instance_variable_set(:@splitter, splitter)
expect(pipeline).to receive(:add).ordered.with(
"tar -cf - -C '#{Backup::Config.tmp_path}' 'model_trigger'", [0, 1]
)
expect(pipeline).to receive(:<<).ordered.with("encryption_command")
expect(pipeline).to receive(:<<).ordered.with("splitter_command")
expect(pipeline).to receive(:run).ordered do
Fake.stack_trace << :command_executed
true
end
packager.send(:procedure).call
expect(Fake.stack_trace).to eq([
:encryptor_before, :splitter_before,
:command_executed,
:splitter_after, :encryptor_after
])
expect(package.extension).to eq("tar.enc")
end
end
end # describe '#procedure'
end
| 36.386364 | 90 | 0.642349 |
abcf7446d8cc8b08aa6782c651b1955047ad94e9 | 1,100 | require 'spec_helper'
set :os, :family => 'base'
describe user('root') do
it { should exist }
end
describe user('root') do
it { should belong_to_group 'root' }
end
describe user('root') do
it { should belong_to_primary_group 'root' }
end
describe user('root') do
it { should have_uid 0 }
end
describe user('root') do
it { should have_login_shell '/bin/bash' }
end
describe user('root') do
it { should have_home_directory '/root' }
end
describe user('root') do
it { should have_authorized_key 'ssh-rsa ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGH [email protected]' }
end
describe user('root') do
its(:minimum_days_between_password_change) { should == 0 }
end
describe user('root') do
its(:maximum_days_between_password_change) { should == 0 }
end
| 27.5 | 432 | 0.803636 |
6a9a9913a71297f8fd21f70417d2051ddc94e97d | 368 | module PasswordBlacklist
class Checker
def initialize
file_path = File.expand_path('../../../data/Top95Thousand-probable.txt', __FILE__)
@data = File.read(file_path)
end
def blacklisted?(password)
[email protected](Regexp.escape(password.downcase))
end
def inspect
"#<#{self.class}:0x#{__id__.to_s(16)}>"
end
end
end
| 20.444444 | 88 | 0.641304 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.