hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
1cdf24917255b79e1f61bee864f188a272afa5c4 | 247 | # frozen_string_literal: true
module API
  module Entities
    module Nuget
      # Grape entity serializing a NuGet metadata index page: a count plus
      # its item entries.
      class PackagesMetadata < Grape::Entity
        # NOTE(review): presumably the number of entries in :items — confirm
        # against the object passed in by the presenter.
        expose :count
        # Each item is rendered through the item entity defined elsewhere.
        expose :items, using: ::API::Entities::Nuget::PackagesMetadataItem
      end
    end
  end
end
| 19 | 74 | 0.672065 |
# Strips a fixed amount of leading indentation from every line of +trace+,
# trims surrounding whitespace, and guarantees a single trailing newline.
#
# @param trace   [String] multi-line text, typically a heredoc-indented trace
# @param options [Hash]   :indent — number of leading spaces to remove per
#   line (defaults to 6; note the default is written into the caller's hash,
#   matching the original behavior)
# @return [String] the dedented trace ending in exactly one "\n"
def clean_trace(trace, options = {})
  options[:indent] ||= 6
  indent_pattern = /^ {#{options[:indent]}}/
  "#{trace.gsub(indent_pattern, '').strip}\n"
end
| 24.6 | 56 | 0.577236 |
1d8789e172c67968940a30f7b483f879849b698d | 4,123 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::NetApp::Mgmt::V2019_07_01
  module Models
    #
    # Volume patch resource
    #
    # AutoRest-generated model: the writable subset of a NetApp volume used
    # in PATCH (update) requests. Do not hand-edit — regeneration overwrites
    # changes (see file header).
    class VolumePatch

      include MsRestAzure

      # @return [String] Resource location
      attr_accessor :location

      # @return [String] Resource Id
      attr_accessor :id

      # @return [String] Resource name
      attr_accessor :name

      # @return [String] Resource type
      attr_accessor :type

      # @return Resource tags
      attr_accessor :tags

      # @return [ServiceLevel] serviceLevel. The service level of the file
      # system. Possible values include: 'Standard', 'Premium', 'Ultra'.
      # Default value: 'Premium' .
      attr_accessor :service_level

      # @return [Integer] usageThreshold. Maximum storage quota allowed for a
      # file system in bytes. This is a soft quota used for alerting only.
      # Minimum size is 100 GiB. Upper limit is 100TiB. Specified in bytes.
      # Default value: 107374182400 .
      attr_accessor :usage_threshold

      # @return [VolumePatchPropertiesExportPolicy] exportPolicy. Set of export
      # policy rules
      attr_accessor :export_policy


      #
      # Mapper for VolumePatch class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # @return [Hash] metadata consumed by the MsRest (de)serializer.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'volumePatch',
          type: {
            name: 'Composite',
            class_name: 'VolumePatch',
            model_properties: {
              location: {
                client_side_validation: true,
                required: false,
                serialized_name: 'location',
                type: {
                  name: 'String'
                }
              },
              # id, name and type are read_only: populated from API responses,
              # never serialized into a request body.
              id: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              tags: {
                client_side_validation: true,
                required: false,
                serialized_name: 'tags',
                type: {
                  name: 'Object'
                }
              },
              service_level: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.serviceLevel',
                default_value: 'Premium',
                type: {
                  name: 'String'
                }
              },
              # usageThreshold bounds: 100 GiB minimum, 100 TiB maximum,
              # expressed in bytes.
              usage_threshold: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.usageThreshold',
                default_value: 107374182400,
                constraints: {
                  InclusiveMaximum: 109951162777600,
                  InclusiveMinimum: 107374182400
                },
                type: {
                  name: 'Number'
                }
              },
              export_policy: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.exportPolicy',
                type: {
                  name: 'Composite',
                  class_name: 'VolumePatchPropertiesExportPolicy'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 29.45 | 79 | 0.486296 |
b950dcdd2da488c59e04fee479654fb6838aa5cf | 3,603 | # encoding: utf-8
# Migration: creates the call_center_violation_types table and seeds it with
# Russian-language election-violation type names, grouped under existing
# violation categories.
#
# NOTE(review): seeding reference data inside a migration couples schema and
# data; the seed strings are user-facing and must not be altered here.
class CreateCallCenterViolationTypes < ActiveRecord::Migration
  def up
    create_table :call_center_violation_types do |t|
      t.string :name
      t.references :violation_category
      t.timestamps
    end
    # Placeholder "choose a type" entry with no category.
    CallCenter::ViolationType.create name: 'ВЫБЕРИТЕ ТИП НАРУШЕНИЯ. ЖЕЛАТЕЛЬНО НЕ "ПРОЧИЕ".'
    # Seed the four categories' type lists atomically; find_by_name assumes
    # the categories were created by an earlier migration — a missing
    # category would raise on vc.id and roll the transaction back.
    ActiveRecord::Base.transaction do
      vc = CallCenter::ViolationCategory.find_by_name "0. Серьезные/часто встречающиеся нарушения"
      [ "0.1. Наблюдателя/члена комиссии выгоняют с участка (2.14 в типологии «Голоса»)",
        "0.2. Наблюдателю угрожают",
        "0.3. Вброс бюллетеней (2.8)",
        "0.4. Карусель (2.8)",
        "0.5. Массовое голосование вне помещения",
        "0.6. Ограничение на перемещение наблюдателей по участку (2.11)",
        "0.7. Запрет на фото/видеосъемку (2.12)",
        "0.8. Присутствие на участке посторонних лиц",
        "0.9. Члены УИК подписывали незаполненный протокол (3.21)",
        "0.10. При подсчете данные не заносились сразу в увеличенную форму после каждого этапа подсчета (3.12)",
        "0.11. Отказ в принятии жалобы (3.19)",
        "0.12. Не выдавали копию протокола (3.23)",
        "0.13. Прочие нарушения",
        "0.14 Голосование по допспискам"].each do |violation_name|
        CallCenter::ViolationType.create(name: violation_name , violation_category_id: vc.id)
      end
      vc = CallCenter::ViolationCategory.find_by_name "1. Нарушения при открытии участка"
      [ "1.1. Трудности при доступе на участок",
        "1.2. Председатель УИК не предъявил пустые ящики для голосования",
        "1.3. Наблюдателям и членам с ПСГ не дали ознакомится со списком избирателей",
        "1.4. Членам УИК бюллетени выдаются не под роспись",
        "1.5. Трудности в получении информации" ].each do |violation_name|
        CallCenter::ViolationType.create(name: violation_name, violation_category_id: vc.id)
      end
      vc = CallCenter::ViolationCategory.find_by_name "2. Нарушения при голосовании"
      [ "2.2. Есть агитационные материалы на участке",
        "2.3. Нет сводного плаката",
        "2.4. У наблюдателей нет возможности видеть места выдачи бюллетеней, избирательные ящики, кабинки для голосования",
        "2.5. Нарушена процедура выдачи бюллетеней",
        "2.6. Подвоз избирателей",
        "2.7. Списки избирателей не прошиты",
        "2.9. Групповое голосование по открепительным",
        "2.10. Давление на избирателей",
        "2.13. Переносные ящики не в поле зрения наблюдателей",
        "2.15. Для выездного голосования используются списки, составленные организациями",
        "2.16. Не дают ознакомиться с реестром на голосование вне помещения",
        "2.17. Не дают присутствовать при голосовании вне помещения"].each do |violation_name|
        CallCenter::ViolationType.create(name: violation_name, violation_category_id: vc.id)
      end
      vc = CallCenter::ViolationCategory.find_by_name "3. Нарушения при подсчете"
      [ "3.6. Не оглашались данные подсчета по каждой книге избирателей",
        "3.7. Наблюдателю отказали в возможности удостоверится в правильности подсчетов по спискам",
        "3.8. Не объявлялось количество заявлений на голосование вне помещения перед вскрытием каждого переносного ящика",
        "3.9, 3.10, 3.12, 3.13. Прочие нарушения при подсчете",
        "3.11. Наблюдатели не могли видеть отметки в бюллетенях при подсчете голосов"].each do |violation_name|
        CallCenter::ViolationType.create(name: violation_name, violation_category_id: vc.id)
      end
    end
  end

  # Rolling back drops the table; the seeded rows go with it.
  def down
    drop_table :call_center_violation_types
  end
end
| 49.356164 | 123 | 0.698862 |
# Homebrew formula for libsamplerate (Secret Rabbit Code), an audio
# sample-rate conversion library. Checksums and bottle hashes are
# release-specific and must not be edited by hand.
class Libsamplerate < Formula
  desc "Library for sample rate conversion of audio data"
  homepage "http://www.mega-nerd.com/SRC"
  url "http://www.mega-nerd.com/SRC/libsamplerate-0.1.9.tar.gz"
  sha256 "0a7eb168e2f21353fb6d84da152e4512126f7dc48ccb0be80578c565413444c1"

  bottle do
    cellar :any
    sha256 "9889af1465dd3eccbed2f532a94ce85543ec0e79984f879bdc60ad9e89478fc2" => :mojave
    sha256 "b7e0343483287deebebd335de03c5c3a4597334440cc01dc1dbd5d14cc6505d8" => :high_sierra
    sha256 "69443b5047dc7e71b74ec29359b1d05e3e6c659751b73a3c2e8e0ad4dd63a6f1" => :sierra
    sha256 "97e0ba8a07df0684580bfec1a7fc5760d1f90e9102330ced19cdb7c37c4ae0ca" => :el_capitan
    sha256 "5f3623588a4fb9b2d886547719d0a3b68df725882d329152ee1de7c4841404ed" => :yosemite
  end

  depends_on "pkg-config" => :build
  depends_on "fftw" => :optional
  depends_on "libsndfile" => :optional

  # configure adds `/Developer/Headers/FlatCarbon` to the include, but this is
  # very deprecated. Correct the use of Carbon.h to the non-flat location.
  # See: https://github.com/Homebrew/homebrew/pull/10875
  # The patch body lives after __END__ below.
  patch :DATA

  def install
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"

    # https://github.com/Homebrew/homebrew/issues/47133
    # Unless formula is built with libsndfile, the example program is broken.
    rm_f "#{bin}/sndfile-resample" if build.without? "libsndfile"
  end

  # Post-install message shown to the user; non-empty only when the broken
  # example binary was removed above.
  def caveats
    s = ""
    if build.without? "libsndfile"
      s += <<~EOS
        Unless this formula is built with libsndfile, the example program,
        "sndfile-resample", is broken and hence, removed from installation.
      EOS
    end
    s
  end
end
__END__
--- a/examples/audio_out.c 2011-07-12 16:57:31.000000000 -0700
+++ b/examples/audio_out.c 2012-03-11 20:48:57.000000000 -0700
@@ -168,7 +168,7 @@
#if (defined (__MACH__) && defined (__APPLE__)) /* MacOSX */
-#include <Carbon.h>
+#include <Carbon/Carbon.h>
#include <CoreAudio/AudioHardware.h>
#define MACOSX_MAGIC MAKE_MAGIC ('M', 'a', 'c', ' ', 'O', 'S', ' ', 'X')
| 35.610169 | 93 | 0.71347 |
# Beaker host mixin: shell-level helpers (reboot, filesystem ops, ssh
# environment-variable management) executed on a remote Unix host. The
# exact shell/sed command strings are behavior — do not reword them.
module Unix::Exec
  include Beaker::CommandFactory

  # Reboots the host, expecting the ssh connection to drop. Solaris uses
  # `reboot`; everything else uses `shutdown -r now`.
  def reboot
    if self['platform'] =~ /solaris/
      exec(Beaker::Command.new("reboot"), :expect_connection_failure => true)
    else
      exec(Beaker::Command.new("/sbin/shutdown -r now"), :expect_connection_failure => true)
    end
  end

  # Builds (but does not run) an echo command string.
  # @param msg [String] text to echo
  # @param abs [Boolean] use the absolute /bin/echo path when true
  def echo(msg, abs=true)
    (abs ? '/bin/echo' : 'echo') + " #{msg}"
  end

  # Builds (but does not run) a touch command string.
  # @param file [String] path to touch
  # @param abs [Boolean] use the absolute /bin/touch path when true
  def touch(file, abs=true)
    (abs ? '/bin/touch' : 'touch') + " #{file}"
  end

  # Default PATH used on the remote host.
  def path
    '/bin:/usr/bin'
  end

  # Returns the host's first non-loopback IP address as a String.
  # Solaris/OSX use ifconfig output; Linux parses `ip a`.
  def get_ip
    if self['platform'].include?('solaris') || self['platform'].include?('osx')
      execute("ifconfig -a inet| awk '/broadcast/ {print $2}' | cut -d/ -f1 | head -1").strip
    else
      execute("ip a|awk '/global/{print$2}' | cut -d/ -f1 | head -1").strip
    end
  end

  # Create the provided directory structure on the host
  # @param [String] dir The directory structure to create on the host
  # @return [Boolean] True, if directory construction succeeded, otherwise False
  def mkdir_p dir
    cmd = "mkdir -p #{dir}"
    result = exec(Beaker::Command.new(cmd), :acceptable_exit_codes => [0, 1])
    result.exit_code == 0
  end

  # Recursively remove the path provided
  # @param [String] path The path to remove
  def rm_rf path
    execute("rm -rf #{path}")
  end

  # Move the origin to destination. The destination is removed prior to moving.
  # @param [String] orig The origin path
  # @param [String] dest the destination path
  # @param [Boolean] rm Remove the destination prior to move
  def mv orig, dest, rm=true
    # `unless !rm` == `if rm`: destination is deleted first by default.
    rm_rf dest unless !rm
    execute("mv #{orig} #{dest}")
  end

  # Converts the provided environment file to a new shell script in /etc/profile.d, then sources that file.
  # This is for sles based hosts.
  # @param [String] env_file The ssh environment file to read from
  def mirror_env_to_profile_d env_file
    if self[:platform] =~ /sles-/
      @logger.debug("mirroring environment to /etc/profile.d on sles platform host")
      cur_env = exec(Beaker::Command.new("cat #{env_file}")).stdout
      shell_env = ''
      cur_env.each_line do |env_line|
        shell_env << "export #{env_line}"
      end
      #here doc it over
      exec(Beaker::Command.new("cat << EOF > #{self[:profile_d_env_file]}\n#{shell_env}EOF"))
      #set permissions
      exec(Beaker::Command.new("chmod +x #{self[:profile_d_env_file]}"))
      #keep it current
      exec(Beaker::Command.new("source #{self[:profile_d_env_file]}"))
    else
      #noop
      @logger.debug("will not mirror environment to /etc/profile.d on non-sles platform host")
    end
  end

  #Add the provided key/val to the current ssh environment
  #@param [String] key The key to add the value to
  #@param [String] val The value for the key
  #@example
  #  host.add_env_var('PATH', '/usr/bin:PATH')
  def add_env_var key, val
    key = key.to_s.upcase
    env_file = self[:ssh_env_file]
    # Escape regex metacharacters, plus '/' and ';' for use inside sed.
    escaped_val = Regexp.escape(val).gsub('/', '\/').gsub(';', '\;')
    #see if the key/value pair already exists
    if exec(Beaker::Command.new("grep #{key}=.*#{escaped_val} #{env_file}"), :accept_all_exit_codes => true ).exit_code == 0
      return #nothing to do here, key value pair already exists
    #see if the key already exists
    elsif exec(Beaker::Command.new("grep #{key} #{env_file}"), :accept_all_exit_codes => true ).exit_code == 0
      # Key exists: prepend the new value to its list.
      exec(Beaker::SedCommand.new(self['platform'], "s/#{key}=/#{key}=#{escaped_val}:/", env_file))
    else
      exec(Beaker::Command.new("echo \"#{key}=#{val}\" >> #{env_file}"))
    end
    #update the profile.d to current state
    #match it to the contents of ssh_env_file
    mirror_env_to_profile_d(env_file)
  end

  #Delete the provided key/val from the current ssh environment
  #@param [String] key The key to delete the value from
  #@param [String] val The value to delete for the key
  #@example
  #  host.delete_env_var('PATH', '/usr/bin:PATH')
  def delete_env_var key, val
    key = key.to_s.upcase
    env_file = self[:ssh_env_file]
    val = Regexp.escape(val).gsub('/', '\/').gsub(';', '\;')
    # Three sed passes cover value-only line, value mid-list, value at start.
    #if the key only has that single value remove the entire line
    exec(Beaker::SedCommand.new(self['platform'], "/#{key}=#{val}$/d", env_file))
    #value in middle of list
    exec(Beaker::SedCommand.new(self['platform'], "s/#{key}=\\(.*\\)[;:]#{val}/#{key}=\\1/", env_file))
    #value in start of list
    exec(Beaker::SedCommand.new(self['platform'], "s/#{key}=#{val}[;:]/#{key}=/", env_file))
    #update the profile.d to current state
    #match it to the contents of ssh_env_file
    mirror_env_to_profile_d(env_file)
  end

  #Return the value of a specific env var
  #@param [String] key The key to look for
  #@example
  #  host.get_env_var('path')
  def get_env_var key
    key = key.to_s.upcase
    # NOTE(review): grep matches the key anywhere in `env` output, so a key
    # that is a substring of another (e.g. PATH vs CLASSPATH) may return the
    # wrong line — confirm intended behavior.
    exec(Beaker::Command.new("env | grep #{key}"), :accept_all_exit_codes => true).stdout.chomp
  end

  #Delete the environment variable from the current ssh environment
  #@param [String] key The key to delete
  #@example
  #  host.clear_env_var('PATH')
  def clear_env_var key
    key = key.to_s.upcase
    env_file = self[:ssh_env_file]
    #remove entire line
    exec(Beaker::SedCommand.new(self['platform'], "/#{key}=.*$/d", env_file))
    #update the profile.d to current state
    #match it to the contents of ssh_env_file
    mirror_env_to_profile_d(env_file)
  end
end
| 36.657534 | 124 | 0.657885 |
module Spree
  module Api
    module V2
      module Storefront
        # Storefront products endpoint. All collaborators (finder, sorter,
        # serializers) are resolved through Spree::Api::Dependencies so host
        # applications can swap implementations.
        class ProductsController < ::Spree::Api::V2::ResourceController
          private

          # Applies the configured sorter to the found collection.
          def sorted_collection
            collection_sorter.new(collection, current_currency, params, allowed_sort_attributes).call
          end

          # Memoized product collection for the current request params.
          def collection
            @collection ||= collection_finder.new(scope: scope, params: params, current_currency: current_currency).execute
          end

          # Looks a product up by slug first, falling back to primary key;
          # the final `find` raises RecordNotFound when neither matches.
          def resource
            @resource ||= scope.find_by(slug: params[:id]) || scope.find(params[:id])
          end

          # Dependency-injected collaborator classes (constantized lazily).
          def collection_sorter
            Spree::Api::Dependencies.storefront_products_sorter.constantize
          end

          def collection_finder
            Spree::Api::Dependencies.storefront_products_finder.constantize
          end

          def collection_serializer
            Spree::Api::Dependencies.storefront_product_serializer.constantize
          end

          def resource_serializer
            Spree::Api::Dependencies.storefront_product_serializer.constantize
          end

          def model_class
            Spree::Product
          end

          # Eager-loading map used by the base controller to avoid N+1 queries.
          def scope_includes
            {
              master: :default_price,
              variants: [],
              variant_images: [],
              taxons: [],
              product_properties: :property,
              option_types: :option_values,
              variants_including_master: %i[default_price option_values]
            }
          end
        end
      end
    end
  end
end
| 27.910714 | 123 | 0.588612 |
2655e09b754e3b7c2e995b5e92f2c711334362c7 | 1,190 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v3/errors/keyword_plan_ad_group_error.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
# Protoc-generated registration: builds the message/enum descriptors for
# keyword_plan_ad_group_error.proto in the shared descriptor pool.
# Generated code — do not edit by hand.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/ads/googleads/v3/errors/keyword_plan_ad_group_error.proto", :syntax => :proto3) do
    add_message "google.ads.googleads.v3.errors.KeywordPlanAdGroupErrorEnum" do
    end
    add_enum "google.ads.googleads.v3.errors.KeywordPlanAdGroupErrorEnum.KeywordPlanAdGroupError" do
      value :UNSPECIFIED, 0
      value :UNKNOWN, 1
      value :INVALID_NAME, 2
      value :DUPLICATE_NAME, 3
    end
  end
end

# Ruby constants bound to the descriptors registered above.
module Google
  module Ads
    module GoogleAds
      module V3
        module Errors
          KeywordPlanAdGroupErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.errors.KeywordPlanAdGroupErrorEnum").msgclass
          KeywordPlanAdGroupErrorEnum::KeywordPlanAdGroupError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.errors.KeywordPlanAdGroupErrorEnum.KeywordPlanAdGroupError").enummodule
        end
      end
    end
  end
end
| 37.1875 | 218 | 0.766387 |
91867a32403424cc84fc4c06f96ad8b7f1ee67d5 | 7,811 | # encoding: UTF-8
module TZInfo
  module Definitions
    module America
      # Generated TZInfo definition for America/Yellowknife, built from the
      # IANA tz database. Data file — do not edit by hand; offsets are
      # UTC offset / DST offset in seconds, transitions are either epoch
      # seconds or rational (numerator, denominator-days) timestamps.
      module Yellowknife
        include TimezoneDefinition

        timezone 'America/Yellowknife' do |tz|
          tz.offset :o0, 0, 0, :'-00'
          tz.offset :o1, -25200, 0, :MST
          tz.offset :o2, -25200, 3600, :MWT
          tz.offset :o3, -25200, 3600, :MPT
          tz.offset :o4, -25200, 7200, :MDDT
          tz.offset :o5, -25200, 3600, :MDT

          tz.transition 1935, 1, :o1, 4855607, 2
          tz.transition 1942, 2, :o2, 19443199, 8
          tz.transition 1945, 8, :o3, 58360379, 24
          tz.transition 1945, 9, :o1, 14590373, 6
          tz.transition 1965, 4, :o4, 58533019, 24
          tz.transition 1965, 10, :o1, 58537555, 24
          tz.transition 1980, 4, :o5, 325674000
          tz.transition 1980, 10, :o1, 341395200
          tz.transition 1981, 4, :o5, 357123600
          tz.transition 1981, 10, :o1, 372844800
          tz.transition 1982, 4, :o5, 388573200
          tz.transition 1982, 10, :o1, 404899200
          tz.transition 1983, 4, :o5, 420022800
          tz.transition 1983, 10, :o1, 436348800
          tz.transition 1984, 4, :o5, 452077200
          tz.transition 1984, 10, :o1, 467798400
          tz.transition 1985, 4, :o5, 483526800
          tz.transition 1985, 10, :o1, 499248000
          tz.transition 1986, 4, :o5, 514976400
          tz.transition 1986, 10, :o1, 530697600
          tz.transition 1987, 4, :o5, 544611600
          tz.transition 1987, 10, :o1, 562147200
          tz.transition 1988, 4, :o5, 576061200
          tz.transition 1988, 10, :o1, 594201600
          tz.transition 1989, 4, :o5, 607510800
          tz.transition 1989, 10, :o1, 625651200
          tz.transition 1990, 4, :o5, 638960400
          tz.transition 1990, 10, :o1, 657100800
          tz.transition 1991, 4, :o5, 671014800
          tz.transition 1991, 10, :o1, 688550400
          tz.transition 1992, 4, :o5, 702464400
          tz.transition 1992, 10, :o1, 720000000
          tz.transition 1993, 4, :o5, 733914000
          tz.transition 1993, 10, :o1, 752054400
          tz.transition 1994, 4, :o5, 765363600
          tz.transition 1994, 10, :o1, 783504000
          tz.transition 1995, 4, :o5, 796813200
          tz.transition 1995, 10, :o1, 814953600
          tz.transition 1996, 4, :o5, 828867600
          tz.transition 1996, 10, :o1, 846403200
          tz.transition 1997, 4, :o5, 860317200
          tz.transition 1997, 10, :o1, 877852800
          tz.transition 1998, 4, :o5, 891766800
          tz.transition 1998, 10, :o1, 909302400
          tz.transition 1999, 4, :o5, 923216400
          tz.transition 1999, 10, :o1, 941356800
          tz.transition 2000, 4, :o5, 954666000
          tz.transition 2000, 10, :o1, 972806400
          tz.transition 2001, 4, :o5, 986115600
          tz.transition 2001, 10, :o1, 1004256000
          tz.transition 2002, 4, :o5, 1018170000
          tz.transition 2002, 10, :o1, 1035705600
          tz.transition 2003, 4, :o5, 1049619600
          tz.transition 2003, 10, :o1, 1067155200
          tz.transition 2004, 4, :o5, 1081069200
          tz.transition 2004, 10, :o1, 1099209600
          tz.transition 2005, 4, :o5, 1112518800
          tz.transition 2005, 10, :o1, 1130659200
          tz.transition 2006, 4, :o5, 1143968400
          tz.transition 2006, 10, :o1, 1162108800
          tz.transition 2007, 3, :o5, 1173603600
          tz.transition 2007, 11, :o1, 1194163200
          tz.transition 2008, 3, :o5, 1205053200
          tz.transition 2008, 11, :o1, 1225612800
          tz.transition 2009, 3, :o5, 1236502800
          tz.transition 2009, 11, :o1, 1257062400
          tz.transition 2010, 3, :o5, 1268557200
          tz.transition 2010, 11, :o1, 1289116800
          tz.transition 2011, 3, :o5, 1300006800
          tz.transition 2011, 11, :o1, 1320566400
          tz.transition 2012, 3, :o5, 1331456400
          tz.transition 2012, 11, :o1, 1352016000
          tz.transition 2013, 3, :o5, 1362906000
          tz.transition 2013, 11, :o1, 1383465600
          tz.transition 2014, 3, :o5, 1394355600
          tz.transition 2014, 11, :o1, 1414915200
          tz.transition 2015, 3, :o5, 1425805200
          tz.transition 2015, 11, :o1, 1446364800
          tz.transition 2016, 3, :o5, 1457859600
          tz.transition 2016, 11, :o1, 1478419200
          tz.transition 2017, 3, :o5, 1489309200
          tz.transition 2017, 11, :o1, 1509868800
          tz.transition 2018, 3, :o5, 1520758800
          tz.transition 2018, 11, :o1, 1541318400
          tz.transition 2019, 3, :o5, 1552208400
          tz.transition 2019, 11, :o1, 1572768000
          tz.transition 2020, 3, :o5, 1583658000
          tz.transition 2020, 11, :o1, 1604217600
          tz.transition 2021, 3, :o5, 1615712400
          tz.transition 2021, 11, :o1, 1636272000
          tz.transition 2022, 3, :o5, 1647162000
          tz.transition 2022, 11, :o1, 1667721600
          tz.transition 2023, 3, :o5, 1678611600
          tz.transition 2023, 11, :o1, 1699171200
          tz.transition 2024, 3, :o5, 1710061200
          tz.transition 2024, 11, :o1, 1730620800
          tz.transition 2025, 3, :o5, 1741510800
          tz.transition 2025, 11, :o1, 1762070400
          tz.transition 2026, 3, :o5, 1772960400
          tz.transition 2026, 11, :o1, 1793520000
          tz.transition 2027, 3, :o5, 1805014800
          tz.transition 2027, 11, :o1, 1825574400
          tz.transition 2028, 3, :o5, 1836464400
          tz.transition 2028, 11, :o1, 1857024000
          tz.transition 2029, 3, :o5, 1867914000
          tz.transition 2029, 11, :o1, 1888473600
          tz.transition 2030, 3, :o5, 1899363600
          tz.transition 2030, 11, :o1, 1919923200
          tz.transition 2031, 3, :o5, 1930813200
          tz.transition 2031, 11, :o1, 1951372800
          tz.transition 2032, 3, :o5, 1962867600
          tz.transition 2032, 11, :o1, 1983427200
          tz.transition 2033, 3, :o5, 1994317200
          tz.transition 2033, 11, :o1, 2014876800
          tz.transition 2034, 3, :o5, 2025766800
          tz.transition 2034, 11, :o1, 2046326400
          tz.transition 2035, 3, :o5, 2057216400
          tz.transition 2035, 11, :o1, 2077776000
          tz.transition 2036, 3, :o5, 2088666000
          tz.transition 2036, 11, :o1, 2109225600
          tz.transition 2037, 3, :o5, 2120115600
          tz.transition 2037, 11, :o1, 2140675200
          # Beyond 2038 the timestamps revert to rational form (num, den)
          # to stay exact past the 32-bit epoch range.
          tz.transition 2038, 3, :o5, 19723975, 8
          tz.transition 2038, 11, :o1, 14794409, 6
          tz.transition 2039, 3, :o5, 19726887, 8
          tz.transition 2039, 11, :o1, 14796593, 6
          tz.transition 2040, 3, :o5, 19729799, 8
          tz.transition 2040, 11, :o1, 14798777, 6
          tz.transition 2041, 3, :o5, 19732711, 8
          tz.transition 2041, 11, :o1, 14800961, 6
          tz.transition 2042, 3, :o5, 19735623, 8
          tz.transition 2042, 11, :o1, 14803145, 6
          tz.transition 2043, 3, :o5, 19738535, 8
          tz.transition 2043, 11, :o1, 14805329, 6
          tz.transition 2044, 3, :o5, 19741503, 8
          tz.transition 2044, 11, :o1, 14807555, 6
          tz.transition 2045, 3, :o5, 19744415, 8
          tz.transition 2045, 11, :o1, 14809739, 6
          tz.transition 2046, 3, :o5, 19747327, 8
          tz.transition 2046, 11, :o1, 14811923, 6
          tz.transition 2047, 3, :o5, 19750239, 8
          tz.transition 2047, 11, :o1, 14814107, 6
          tz.transition 2048, 3, :o5, 19753151, 8
          tz.transition 2048, 11, :o1, 14816291, 6
          tz.transition 2049, 3, :o5, 19756119, 8
          tz.transition 2049, 11, :o1, 14818517, 6
          tz.transition 2050, 3, :o5, 19759031, 8
          tz.transition 2050, 11, :o1, 14820701, 6
        end
      end
    end
  end
end
| 45.947059 | 51 | 0.583024 |
39dab1cc71aa3be015665cd540188efebe18849c | 531 | # frozen_string_literal: true
require "active_support/core_ext/hash/deep_merge"
require "active_support/core_ext/hash/except"
require "active_support/core_ext/hash/slice"
# i18n is a hard dependency here: fail loudly with installation guidance
# rather than a bare LoadError.
begin
  require "i18n"
rescue LoadError => e
  $stderr.puts "The i18n gem is not available. Please add it to your Gemfile and run bundle install"
  raise e
end
require "active_support/lazy_load_hooks"

# Fire any deferred :i18n configuration hooks registered elsewhere.
ActiveSupport.run_load_hooks(:i18n)
# Register the bundled default locale files (YAML and Ruby forms).
I18n.load_path << File.expand_path("locale/en.yml", __dir__)
I18n.load_path << File.expand_path("locale/en.rb", __dir__)
| 31.235294 | 100 | 0.792844 |
870932dbd0700d02684abf6cd5d82a9652c9d4f0 | 5,753 | require 'simplecov'
SimpleCov.start
require 'config'
require 'active_support/testing/autorun'
require 'stringio'
require 'active_record'
require 'cases/test_case'
require 'active_support/dependencies'
require 'active_support/logger'
require 'active_support/core_ext/string/strip'
require 'support/config'
require 'support/connection'
# TODO: Move all these random hacks into the ARTest namespace and into the support/ dir

# A failing thread should crash the whole test run instead of dying silently.
Thread.abort_on_exception = true

# Show backtraces for deprecated behavior for quicker cleanup.
ActiveSupport::Deprecation.debug = true

# Disable available locale checks to avoid warnings running the test suite.
I18n.enforce_available_locales = false

# Enable raise errors in after_commit and after_rollback.
ActiveRecord::Base.raise_in_transactional_callbacks = true

# Connect to the database
ARTest.connect

# Quote "type" if it's a reserved word for the current connection.
QUOTED_TYPE = ActiveRecord::Base.connection.quote_column_name('type')
# True when the active connection is an instance of any of the named
# adapter classes (e.g. :SQLite3Adapter, :Mysql2Adapter). Guarded by
# const_defined? so unloaded adapters don't raise.
def current_adapter?(*types)
  types.any? do |type|
    ActiveRecord::ConnectionAdapters.const_defined?(type) &&
      ActiveRecord::Base.connection.is_a?(ActiveRecord::ConnectionAdapters.const_get(type))
  end
end
# True when running against SQLite3 with an in-memory database.
def in_memory_db?
  current_adapter?(:SQLite3Adapter) &&
    ActiveRecord::Base.connection_pool.spec.config[:database] == ":memory:"
end
# True when the connected adapter is mysql2 and the server is at least
# version 5.6.0.
#
# The comparison uses Gem::Version for numeric semantics: the previous
# string comparison (`version.join(".") >= "5.6.0"`) was lexicographic and
# would misclassify double-digit components such as "5.10.0" < "5.6.0".
def mysql_56?
  return false unless current_adapter?(:Mysql2Adapter)

  server_version = ActiveRecord::Base.connection.send(:version).join(".")
  Gem::Version.new(server_version) >= Gem::Version.new("5.6.0")
end
# True on MySQL adapters when the server variable enforce_gtid_consistency
# is ON (some tests must skip statements GTID mode forbids).
def mysql_enforcing_gtid_consistency?
  current_adapter?(:MysqlAdapter, :Mysql2Adapter) && 'ON' == ActiveRecord::Base.connection.show_variable('enforce_gtid_consistency')
end
# Delegates to the connection: does the current database support savepoints?
def supports_savepoints?
  ActiveRecord::Base.connection.supports_savepoints?
end
# Temporarily sets the TZ environment variable for the duration of the
# block, restoring the previous state afterwards — including removing TZ
# entirely when it was originally unset.
#
# @param new_tz [String] timezone name to export (default 'US/Eastern')
def with_env_tz(new_tz = 'US/Eastern')
  previous_tz = ENV['TZ']
  ENV['TZ'] = new_tz
  yield
ensure
  previous_tz ? ENV['TZ'] = previous_tz : ENV.delete('TZ')
end
# Runs the block with temporary timezone-related global settings, restoring
# all three afterwards. Only the keys present in +cfg+ are overridden:
#   :default          -> ActiveRecord::Base.default_timezone
#   :aware_attributes -> ActiveRecord::Base.time_zone_aware_attributes
#   :zone             -> Time.zone
# Also asserts (via verify_default_timezone_config) that no previous test
# leaked timezone state.
def with_timezone_config(cfg)
  verify_default_timezone_config

  old_default_zone = ActiveRecord::Base.default_timezone
  old_awareness = ActiveRecord::Base.time_zone_aware_attributes
  old_zone = Time.zone

  if cfg.has_key?(:default)
    ActiveRecord::Base.default_timezone = cfg[:default]
  end
  if cfg.has_key?(:aware_attributes)
    ActiveRecord::Base.time_zone_aware_attributes = cfg[:aware_attributes]
  end
  if cfg.has_key?(:zone)
    Time.zone = cfg[:zone]
  end
  yield
ensure
  ActiveRecord::Base.default_timezone = old_default_zone
  ActiveRecord::Base.time_zone_aware_attributes = old_awareness
  Time.zone = old_zone
end
# This method makes sure that tests don't leak global state related to time zones.
# Expected baseline values for the whole suite:
EXPECTED_ZONE = nil
EXPECTED_DEFAULT_TIMEZONE = :utc
EXPECTED_TIME_ZONE_AWARE_ATTRIBUTES = false

# Prints a diagnostic (does not fail the test) whenever any of the three
# timezone globals differs from its expected baseline, naming the leaker.
def verify_default_timezone_config
  if Time.zone != EXPECTED_ZONE
    $stderr.puts <<-MSG
\n#{self}
    Global state `Time.zone` was leaked.
      Expected: #{EXPECTED_ZONE}
      Got: #{Time.zone}
    MSG
  end
  if ActiveRecord::Base.default_timezone != EXPECTED_DEFAULT_TIMEZONE
    $stderr.puts <<-MSG
\n#{self}
    Global state `ActiveRecord::Base.default_timezone` was leaked.
      Expected: #{EXPECTED_DEFAULT_TIMEZONE}
      Got: #{ActiveRecord::Base.default_timezone}
    MSG
  end
  if ActiveRecord::Base.time_zone_aware_attributes != EXPECTED_TIME_ZONE_AWARE_ATTRIBUTES
    $stderr.puts <<-MSG
\n#{self}
    Global state `ActiveRecord::Base.time_zone_aware_attributes` was leaked.
      Expected: #{EXPECTED_TIME_ZONE_AWARE_ATTRIBUTES}
      Got: #{ActiveRecord::Base.time_zone_aware_attributes}
    MSG
  end
end
# Enables a database extension (PostgreSQL), committing and reconnecting so
# the change is visible outside any open test transaction. Returns false
# when the adapter doesn't support extensions; reconnects early if the
# extension is already enabled.
def enable_extension!(extension, connection)
  return false unless connection.supports_extensions?
  return connection.reconnect! if connection.extension_enabled?(extension)

  connection.enable_extension extension
  connection.commit_db_transaction
  connection.reconnect!
end
# Disables a database extension, then reconnects. No-op (true) when the
# extension is not currently enabled.
def disable_extension!(extension, connection)
  return false unless connection.supports_extensions?
  return true unless connection.extension_enabled?(extension)

  connection.disable_extension extension
  connection.reconnect!
end
# Suite-wide fixture configuration: transactional fixtures from
# FIXTURES_ROOT, instantiated lazily.
class ActiveSupport::TestCase
  include ActiveRecord::TestFixtures
  self.fixture_path = FIXTURES_ROOT
  self.use_instantiated_fixtures = false
  self.use_transactional_fixtures = true

  # Loads the named fixture sets for this test, honoring any configured
  # fixture class-name mapping.
  def create_fixtures(*fixture_set_names, &block)
    ActiveRecord::FixtureSet.create_fixtures(ActiveSupport::TestCase.fixture_path, fixture_set_names, fixture_class_names, &block)
  end
end
# Loads the test schema (plus any adapter-specific additions), silencing
# the verbose schema output by swapping $stdout for a StringIO.
def load_schema
  # silence verbose schema loading
  original_stdout = $stdout
  $stdout = StringIO.new

  adapter_name = ActiveRecord::Base.connection.adapter_name.downcase
  adapter_specific_schema_file = SCHEMA_ROOT + "/#{adapter_name}_specific_schema.rb"

  load SCHEMA_ROOT + "/schema.rb"

  if File.exist?(adapter_specific_schema_file)
    load adapter_specific_schema_file
  end
ensure
  $stdout = original_stdout
end

# Build the schema immediately at require time.
load_schema
# Notification subscriber that records every instrumented SQL event, for
# assertions in tests.
class SQLSubscriber
  # Recorded events as [squished sql, name, binds] triples, in order.
  attr_reader :logged
  # Raw notification payload hashes, in arrival order.
  attr_reader :payloads

  def initialize
    @logged = []
    @payloads = []
  end

  # Start-of-event hook: captures the payload and a normalized summary row.
  def start(name, id, payload)
    @payloads.push(payload)
    @logged.push([payload[:sql].squish, payload[:name], payload[:binds]])
  end

  # End-of-event hook: intentionally a no-op.
  def finish(name, id, payload)
  end
end
# Test mixin: run a block under a given Time.zone with time-zone-aware
# attributes toggled to match (aware when a zone is given, unaware for nil),
# restoring both globals afterwards.
module InTimeZone
  private

  # @param zone [String, nil] time zone name, or nil to clear Time.zone
  def in_time_zone(zone)
    old_zone = Time.zone
    old_tz = ActiveRecord::Base.time_zone_aware_attributes

    Time.zone = zone ? ActiveSupport::TimeZone[zone] : nil
    ActiveRecord::Base.time_zone_aware_attributes = !zone.nil?
    yield
  ensure
    Time.zone = old_zone
    ActiveRecord::Base.time_zone_aware_attributes = old_tz
  end
end
require 'mocha/setup' # FIXME: stop using mocha

# FIXME: we have tests that depend on run order, we should fix that and
# remove this method call.
require 'active_support/test_case'
# Sorted (not random) order until inter-test dependencies are untangled.
ActiveSupport::TestCase.test_order = :sorted
# Compatibility shim across ActiveRecord versions for migration base class
# lookup.
module SchemaPlus::Compatibility
  module ActiveRecord
    module Migration
      module ClassMethods
        # Returns ::ActiveRecord::Migration::Current (Rails >= 5); on older
        # ActiveRecord where that constant doesn't exist the lookup raises
        # and we fall back to the receiver itself. The bare rescue
        # (StandardError) is deliberately broad to cover NameError.
        def latest_version
          begin
            ::ActiveRecord::Migration::Current
          rescue
            self
          end
        end
      end
    end
  end
end
6a72e78c234e604f332345a5baa12d602d78f5bf | 946 | ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!
# Shared helpers for all tests (Rails Tutorial sample app).
class ActiveSupport::TestCase
  fixtures :all

  # Returns true if a test user is logged in.
  def is_logged_in?
    !session[:user_id].nil?
  end

  # Log in as a particular user.
  # Writes directly to the session (bypasses the login form/controller).
  def log_in_as(user)
    session[:user_id] = user.id
  end

  # Builds the full page title: "<page> | <base>" or just the base title
  # when no page title is given.
  def full_title(page_title = '')
    base_title = "Ruby on Rails Tutorial Sample App"
    if page_title.empty?
      base_title
    else
      page_title + " | " + base_title
    end
  end
end
class ActionDispatch::IntegrationTest
  # Log in as a particular user.
  # Integration variant: exercises the real login endpoint via POST, with
  # overridable password and remember-me flag.
  def log_in_as(user, password: 'password', remember_me: '1')
    post login_path, params: { session: { email: user.email,
                                          password: password,
                                          remember_me: remember_me } }
  end
end
# Renders the "new web page" form; requires an authenticated session.
get '/site_files/new_page' do
  require_login
  erb :'site_files/new_page'
end
# Redirect from original path (kept for old bookmarks/links).
get '/site_files/new' do
  require_login
  redirect '/site_files/new_page'
end
# Creates a new (empty or templated) file for the current site. Validates
# the name, rejects duplicates and non-text-editable extensions, then either
# installs an HTML template or touches an empty file plus its SiteFile row.
post '/site_files/create' do
  require_login
  @errors = []

  filename = params[:pagefilename] || params[:filename]
  # Whitelist-sanitize the name in place (alphanumerics, _, -, .).
  filename.gsub!(/[^a-zA-Z0-9_\-.]/, '')

  # Errors bounce back to the dashboard, preserving the current directory.
  redirect_uri = '/dashboard'
  redirect_uri += "?dir=#{Rack::Utils.escape params[:dir]}" if params[:dir]

  if filename.nil? || filename.strip.empty?
    flash[:error] = 'You must provide a file name.'
    redirect redirect_uri
  end

  # NOTE(review): this literal looks like mangled extraction — the original
  # presumably interpolated +filename+ here (e.g. "#{filename}"). Confirm
  # against the upstream source before relying on this line.
  name = "#(unknown)"

  name = "#{params[:dir]}/#{name}" if params[:dir]

  name = current_site.scrubbed_path name

  if current_site.file_exists?(name)
    flash[:error] = %{Web page "#{Rack::Utils.escape_html name}" already exists! Choose another name.}
    redirect redirect_uri
  end

  extname = File.extname name

  unless extname.match /^\.#{Site::EDITABLE_FILE_EXT}/i
    flash[:error] = "Must be an text editable file type (#{Site::VALID_EDITABLE_EXTENSIONS.join(', ')})."
    redirect redirect_uri
  end

  site_file = current_site.site_files_dataset.where(path: name).first

  if site_file
    flash[:error] = 'File already exists, cannot create.'
    redirect redirect_uri
  end

  if extname.match(/^\.html|^\.htm/i)
    # HTML files get the site's starter template.
    current_site.install_new_html_file name
  else
    # Other editable types: create an empty file on disk and its DB record.
    file_path = current_site.files_path(name)
    FileUtils.touch file_path
    File.chmod 0640, file_path

    site_file ||= SiteFile.new site_id: current_site.id, path: name
    site_file.size = 0
    site_file.set size: 0
    site_file.set sha1_hash: Digest::SHA1.hexdigest('')
    site_file.set updated_at: Time.now
    site_file.save
  end

  escaped_name = Rack::Utils.escape_html name

  flash[:success] = %{#{escaped_name} was created! <a style="color: #FFFFFF; text-decoration: underline" href="/site_files/text_editor/#{escaped_name}">Click here to edit it</a>.}

  redirect redirect_uri
end
# Unified upload response: browser form submissions (from_button) get a
# flash + redirect back to the dashboard; API/AJAX callers get a bare
# 406 (error) or 200 (success). Passing +error+ halts the request.
def file_upload_response(error=nil)
  flash[:error] = error if error

  if params[:from_button]
    query_string = params[:dir] ? "?"+Rack::Utils.build_query(dir: params[:dir]) : ''
    redirect "/dashboard#{query_string}"
  else
    halt 406, error if error
    halt 200, 'File(s) successfully uploaded.'
  end
end
# AJAX-friendly auth guard for uploads: responds through
# file_upload_response (halting) instead of redirecting to a login page.
def require_login_file_upload_ajax
  file_upload_response 'You are not signed in!' unless signed_in?
  file_upload_response 'Please contact support.' if banned?
end
# Handles file uploads, via either a raw-body single file (params[:filename]
# set; body becomes a tempfile) or a multipart form (params[:files]).
# Validates ownership, per-file size/type, total quota and file-count limits
# before storing.
post '/site_files/upload' do
  if params[:filename]
    # Raw-body upload path (API): spool the request body to a tempfile so it
    # can be handled like a normal multipart upload below.
    require_login_file_upload_ajax

    tempfile = Tempfile.new 'neocities_saving_file'
    input = request.body.read
    tempfile.set_encoding input.encoding
    tempfile.write input
    tempfile.close

    params[:files] = [{filename: params[:filename], tempfile: tempfile}]
  else
    require_login
  end

  @errors = []

  if params[:files].nil?
    file_upload_response "Uploaded files were not seen by the server, cancelled. We don't know what's causing this yet. Please contact us so we can help fix it. Thanks!"
  end

  # For migration from original design.. some pages out there won't have the site_id param yet for a while.
  site = params[:site_id].nil? ? current_site : Site[params[:site_id]]

  unless site.owned_by?(current_site)
    file_upload_response 'You do not have permission to save this file. Did you sign in as a different user?'
  end

  # Per-file validation: resolve the relative path (directory uploads send
  # file_paths), then check size and allowed content type.
  params[:files].each_with_index do |file,i|
    dir_name = ''
    dir_name = params[:dir] if params[:dir]

    unless params[:file_paths].nil? || params[:file_paths].empty? || params[:file_paths].length == 0
      file_path = params[:file_paths].select {|file_path|
        file[:filename] == Pathname(file_path).basename.to_s
      }.first
      unless file_path.nil?
        dir_name += '/' + Pathname(file_path).dirname.to_s
      end
    end

    file[:filename] = "#{dir_name}/#{file[:filename]}"

    if current_site.file_size_too_large? file[:tempfile].size
      file_upload_response "#{file[:filename]} is too large, upload cancelled."
    end
    if !site.okay_to_upload? file
      file_upload_response %{#{Rack::Utils.escape_html file[:filename]}: file type (or content in file) is only supported by <a href="/supporter">supporter accounts</a>. <a href="/site_files/allowed_types">Why We Do This</a>}
    end
  end

  # Aggregate checks: total bytes against remaining quota, then file count.
  uploaded_size = params[:files].collect {|f| f[:tempfile].size}.inject{|sum,x| sum + x }

  if site.file_size_too_large? uploaded_size
    file_upload_response "File(s) do not fit in your available free space, upload cancelled."
  end

  if site.too_many_files? params[:files].length
    file_upload_response "Your site has exceeded the maximum number of files, please delete some files first."
  end

  results = site.store_files params[:files]
  file_upload_response
end
# Deletes a file from the current site, then returns the user to the
# directory the file lived in.
#
# params[:filename] arrives HTML-entity-encoded; it is decoded before
# being handed to Site#delete_file.
post '/site_files/delete' do
  require_login
  path = HTMLEntities.new.decode params[:filename]
  current_site.delete_file path
  # Escape the user-supplied filename before interpolating it into the
  # flash message — flash messages are rendered as HTML elsewhere in this
  # file (see the success message in the create route), so interpolating
  # it raw is an XSS vector. Matches the Rack::Utils.escape_html usage in
  # the create and upload routes.
  flash[:success] = "Deleted #{Rack::Utils.escape_html params[:filename]}. Please note it can take up to 30 minutes for deleted files to stop being viewable on your site."
  dirname = Pathname(path).dirname
  dir_query = dirname.nil? || dirname.to_s == '.' ? '' : "?dir=#{Rack::Utils.escape dirname}"
  redirect "/dashboard#{dir_query}"
end
# Downloads the whole site as a zip archive.
# NOTE(review): the :username path segment is only cosmetic — the archive
# is always built for current_site, regardless of the URL. Confirm that
# is intended.
get '/site_files/:username.zip' do |username|
  require_login
  zipfile_path = current_site.files_zip
  content_type 'application/octet-stream'
  attachment "neocities-#{current_site.username}.zip"
  send_file zipfile_path
end
# Downloads a single file from the current site. The regex capture holds
# the full (possibly nested) file path.
get %r{\/site_files\/download\/(.+)} do
  require_login
  dont_browser_cache
  not_found if params[:captures].nil? || params[:captures].length != 1
  filename = params[:captures].first
  # Force a download rather than inline rendering.
  attachment filename
  send_file current_site.current_files_path(filename)
end
# Opens a site file in the in-browser (Ace) text editor.
get %r{\/site_files\/text_editor\/(.+)} do
  require_login
  dont_browser_cache
  @filename = params[:captures].first
  extname = File.extname @filename
  # Choose the Ace syntax-highlighting mode from the file extension.
  # NOTE(review): these patterns are unanchored and case-sensitive, so
  # ".json" matches /js/ (javascript mode) and ".HTML" matches nothing —
  # confirm whether that is intended.
  @ace_mode = case extname
    when /htm|html/ then 'html'
    when /js/ then 'javascript'
    when /md/ then 'markdown'
    when /css/ then 'css'
    else
      nil
  end
  file_path = current_site.current_files_path @filename
  if File.directory? file_path
    flash[:error] = 'Cannot edit a directory.'
    redirect '/dashboard'
  end
  if !File.exist?(file_path)
    flash[:error] = 'We could not find the requested file.'
    redirect '/dashboard'
  end
  @title = "Editing #{@filename}"
  erb :'site_files/text_editor'
end
# Static informational pages about file handling policies.
get '/site_files/allowed_types' do
  erb :'site_files/allowed_types'
end
get '/site_files/hotlinking' do
  erb :'site_files/hotlinking'
end
get '/site_files/mount_info' do
  erb :'site_files/mount_info'
end
| 28.645022 | 225 | 0.708327 |
ab39faf8beaaa777aba1f7b17c5cada39386e329 | 38 | module PoliticalPollEntriesHelper
end
| 12.666667 | 33 | 0.921053 |
b92283986803e0a6b53e4dfa9d86635a44250df1 | 241 |
# Chef attributes controlling rubygems sources.
# *_disable_default: when true, the default gem source is removed before
# the configured sources are added.
# gem_* applies to gems installed for the managed system; chef_gem_*
# applies to gems installed into Chef's own Ruby.
default["rubygems"]["gem_disable_default"] = false
default["rubygems"]["gem_sources"] = [ "https://rubygems.org" ]
default["rubygems"]["chef_gem_disable_default"] = false
default["rubygems"]["chef_gem_sources"] = [ "https://rubygems.org" ]
| 40.166667 | 68 | 0.713693 |
01c73b1da90fbb1a267859859df204851d8d4f9f | 1,928 | require 'spec_helper'
# Specs for AaGlobalNotifications::PushNotification: delivery hooks,
# state machine transitions (pending/failed/retry), and message-length
# validations. Uses FactoryGirl/FactoryBot build/create helpers and the
# old RSpec `should` syntax.
describe AaGlobalNotifications::PushNotification do
  it "has valid factory" do
    push_notification = build(:push_notification)
    push_notification.should be_valid
  end
  describe "scheduling" do
    before :each do
    end
    it "creating a push notification hits send_notification method and deliver" do
      AaGlobalNotifications::PushNotification
        .any_instance.should_receive(:deliver)
        .and_call_original
      AaGlobalNotifications::PushNotification.should_receive(:send_notification)
        .and_call_original
      push_notification = create(:push_notification)
    end
    # NOTE(review): the next two examples stub :response but assert
    # nothing about the resulting state — confirm they still test what
    # their descriptions claim.
    it "failing notification sets state to failed" do
      AaGlobalNotifications::PushNotification.stub(:response).and_return(false)
      push_notification = create(:push_notification)
    end
    it "retrying a failed notification sets its status to pending" do
      AaGlobalNotifications::PushNotification.stub(:response).and_return(false)
      push_notification = create(:push_notification)
      push_notification.mark_as_failed!
      push_notification.failed?.should eq true
      push_notification.retry!
      push_notification.pending?.should eq true
    end
  end
  describe "states" do
    it "creating a push_notification sets it to pending" do
      push_notification = create(:push_notification)
      push_notification.pending?.should eq true
    end
  end
  describe "validations" do
    it "message between 1 and 50 characters is valid" do
      push_notification = build(:push_notification, message: "this is a valid message")
      push_notification.should be_valid
    end
    it "message greater than 50 characters is invalid" do
      # The extra `str =` assignment is redundant; only long_string is used.
      long_string = str = "this" * 50
      push_notification = build(:push_notification, message: long_string)
      push_notification.should_not be_valid
    end
    it "message less than 1 character is invalid" do
      push_notification = build(:push_notification, message: "")
      push_notification.should_not be_valid
    end
  end
end | 29.212121 | 84 | 0.776452 |
39f84eb2b8a87356c1094951633234668115bc86 | 1,000 | # encoding: UTF-8
require 'test_helper'
# Specs for the client "ready" stream state: valid stanzas are converted
# and processed; unknown stanza types raise an unsupported-stanza-type
# stream error. to_stanza is stubbed per-instance so processed stanzas
# can be collected in STANZAS for verification.
describe Vines::Stream::Client::Ready do
  STANZAS = []
  before do
    @stream = MiniTest::Mock.new
    @state = Vines::Stream::Client::Ready.new(@stream, nil)
    # Singleton stub: return nil for <bogus/> nodes, otherwise a mock
    # stanza that records itself in STANZAS.
    def @state.to_stanza(node)
      if node.name == 'bogus'
        nil
      else
        stanza = MiniTest::Mock.new
        stanza.expect(:process, nil)
        stanza.expect(:validate_to, nil)
        stanza.expect(:validate_from, nil)
        STANZAS << stanza
        stanza
      end
    end
  end
  after do
    STANZAS.clear
  end
  it 'processes a valid node' do
    node = node('<message/>')
    @state.node(node)
    assert_equal 1, STANZAS.size
    assert STANZAS.map {|s| s.verify }.all?
  end
  it 'raises an unsupported-stanza-type stream error for invalid node' do
    node = node('<bogus/>')
    assert_raises(Vines::StreamErrors::UnsupportedStanzaType) { @state.node(node) }
    assert STANZAS.empty?
  end
  private
  # Parses an XML fragment and returns its root element.
  def node(xml)
    Nokogiri::XML(xml).root
  end
end
| 20.833333 | 83 | 0.629 |
1a8a507dfe807d57b2f6dc96b7a95763e20fd90a | 7,685 | require 'listener_socket_context'
require 'extension/ionian_interface'
require 'ionian/extension/io'
require 'socket'
require 'timeout'
# Specs for Ionian::Extension::IO: timeout/expression accessors, regex
# matching over socket data (read_match / run_match / on_match), raw
# reads (read_all), error listeners, and deprecated method forwarding.
# The shared context provides `subject` (the extended IO) and `client`
# (the socket's peer used to feed it data).
describe Ionian::Extension::IO do
  include_context "ionian subject", Ionian::Extension::IO
  include_examples "ionian interface"
  it "can get and set the IO timeout" do
    value = 5
    subject.ionian_timeout = value
    subject.ionian_timeout.should eq value
  end
  it "can get and set the regex match expression" do
    value = /test/
    subject.expression = value
    subject.expression.should eq value
  end
  specify "read_match returns an empty array if timeout expires" do
    subject.read_match(timeout: 0).should eq []
  end
  it "can purge Ionian's read buffer" do
    client.puts 'test data'
    subject.purge
    subject.read_match(timeout: 0).should eq []
  end
  it "can read matched data" do
    client.write "CS 1234 1\nCS 4567 0\n"
    match = subject.read_match
    match[0].cmd.should eq 'CS'
    match[0].param.should eq '1234'
    match[0].value.should eq '1'
    match[1].cmd.should eq 'CS'
    match[1].param.should eq '4567'
    match[1].value.should eq '0'
  end
  it "attaches named captures as methods inside the block" do
    client.write "CS 1234 1\n"
    received_matches = false
    subject.read_match do |match|
      received_matches = true
      match.cmd.should eq 'CS'
      match.param.should eq '1234'
      match.value.should eq '1'
    end
    received_matches.should eq true
  end
  it "can receive matched data on a listener" do
    block_run = false
    subject.on_match do |match, socket|
      match.cmd.should eq 'CS'
      match.param.should eq '7890'
      match.value.should eq '1'
      block_run = true
    end
    thread = subject.run_match
    client.write "CS 7890 1\n"
    Timeout.timeout(1) {
      Thread.pass until block_run
      thread.kill
    }
    block_run.should eq true
  end
  it "calls a listener block once for each match" do
    block_run = false
    match_count = 0
    subject.on_match do |match, socket|
      case match_count
      when 0
        match.cmd.should eq 'CS'
        match.param.should eq '7890'
        match.value.should eq '1'
      when 1
        match.cmd.should eq 'CS'
        match.param.should eq '2345'
        match.value.should eq '0'
        block_run = true
      end
      match_count += 1
    end
    thread = subject.run_match
    client.write "CS 7890 1\nCS 2345 0\n"
    Timeout.timeout(1) {
      Thread.pass until block_run
      thread.kill
    }
    block_run.should eq true
  end
  it "can set the match expression in a #read_match kwarg" do
    expression = /(?<param1>\w+)\s+(?<param2>\w+)\s*[\r\n]+/
    data = "hello world\n"
    subject
    client.write data
    match = subject.read_match expression: expression
    match = match.first
    match.param1.should eq 'hello'
    match.param2.should eq 'world'
  end
  it "notifies listeners on #read_match" do
    data = "CS 1234 65535\n"
    match_triggered = false
    subject.on_match { match_triggered = true }
    client.write data
    match = subject.read_match
    match_triggered.should eq true
  end
  it "does not notify #read_match listeners if notify:false is set" do
    data = "CS 1234 65535\n"
    match_triggered = false
    subject.on_match { match_triggered = true }
    client.write data
    match = subject.read_match notify: false
    match.should_not be nil
    match_triggered.should eq false
  end
  it "can read all of the data in the buffer" do
    repeat = 8192 # 8192 * 16 bytes = 128 KiB.
    terminator = '0'
    data = ''
    repeat.times { data += '1111111111111111' }
    data << terminator
    subject
    client.write data
    result = ''
    Timeout.timeout(1) do
      result += subject.read_all until result.end_with? terminator
    end
    result.size.should eq data.size
    result.should eq data
  end
  it "can match large data in the buffer" do
    repeat = 8192 # 8192 * 16 bytes = 128 KiB.
    terminator = '0'
    data = ''
    repeat.times { data += '1111111111111111' }
    data << terminator
    subject.expression = /(?<data>1+)(?<term>0)/
    client.write data
    match = []
    Timeout.timeout(5) { match = subject.read_match }
    match.empty?.should eq false
    match.first.data.should eq data.chop
    match.first.term.should eq terminator
  end
  it "can match data that arrives in fragments" do
    repeat = 3
    terminator = '0'
    data = '11111111'
    subject.expression = /(?<data>1+)(?<term>0)/
    result = nil
    found_match = false
    # Match handler.
    subject.on_match do |match|
      result = match
      found_match = true
    end
    # Start looking for matches.
    thread = subject.run_match
    begin
      Timeout.timeout 10 do
        client.no_delay = true
        # Feed data into the socket.
        repeat.times do
          client.write data
          client.flush
        end
        client.write terminator
        client.flush
        Thread.pass until found_match
      end
    ensure
      thread.kill # Make sure the run_match thread dies.
    end
    found_match.should eq true
    # Replicate the data that should have been received.
    expected_data = ''
    repeat.times { expected_data += data }
    result.data.should eq expected_data
    result.term.should eq terminator
  end
  describe "read_all" do
    let(:data) { "CS 1234 65535\n" }
    it "has blocking mode" do
      result = nil
      # Block if no data.
      begin
        Timeout.timeout(1) { result = subject.read_all }
      rescue Timeout::Error
      end
      result.should eq nil
      client.write data
      # Receive available data.
      Timeout.timeout(1) { result = subject.read_all }
      result.should eq data
    end
    it "has nonblocking mode" do
      result = 'error' # Junk data. Nonblocking should return nil if no data.
      # Nonblocking read should return nil when no data is available.
      begin
        Timeout.timeout(1) { result = subject.read_all nonblocking: true }
      rescue Timeout::Error
      end
      result.should eq nil
      client.write data
      # Receive available data.
      Timeout.timeout(1) { result = subject.read_all nonblocking: true }
      result.should eq data
    end
  end
  describe "on_error" do
    it "exception is accessible in block" do
      block_run = false
      subject.on_error do |error, socket|
        error.should be_a EOFError
        block_run = true
      end
      thread = subject.run_match
      client.close
      Timeout.timeout(1) {
        Thread.pass until block_run
        thread.kill
      }
      block_run.should eq true
    end
  end
  describe "deprecated method" do
    # Each deprecated method must print a deprecation notice and forward
    # to its renamed replacement.
    shared_examples "is deprecated" do
      specify do
        STDOUT.should_receive(:puts) { |str| str.downcase.should include "deprecated" }
        subject.should_receive(forwarded_method)
        subject.__send__ deprecated_method
      end
    end
    describe "register_observer" do
      let(:deprecated_method) { :register_observer }
      let(:forwarded_method) { :register_match_handler }
      include_examples "is deprecated"
    end
    describe "unregister_observer" do
      let(:deprecated_method) { :unregister_observer }
      let(:forwarded_method) { :unregister_match_handler }
      include_examples "is deprecated"
    end
  end
end | 22.804154 | 87 | 0.613273 |
018e1fc6c4e75b762d1ca7a751ad574dd25e51ae | 880 | Pod::Spec.new do |s|
  # CocoaPods spec for JIMEnvironments: per-build-configuration settings
  # loaded from a Plist, keyed by the active Build Configuration name.
  s.name = "JIMEnvironments"
  s.version = "1.0.1"
  s.summary = "A simple solution to handle environment specific settings."
  s.description = <<-DESC
                   A simple solution to handle environment specific settings.
                   It uses the current Build Configuration (Debug, Release...) name to retrieve settings from a Plist file.
                   DESC
  s.homepage = "https://github.com/nebiros/JIMEnvironments"
  s.license = {:type => "BSD", :file => "LICENSE"}
  s.author = {"Juan Felipe Alvarez Saldarriaga" => "[email protected]"}
  s.social_media_url = "http://twitter.com/nebiros"
  s.platform = :ios
  s.source = {:git => "https://github.com/nebiros/JIMEnvironments.git", :tag => s.version.to_s}
  s.source_files = "JIMEnvironments/*.{h,m}"
  s.requires_arc = true
end
| 44 | 124 | 0.6125 |
873b8344599f7f62fb16775a558f9e886f104999 | 2,359 | remote_file "start_defense_server" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/start_defense_server"
  path "/usr/bin/start_defense_server"
  mode "0765"
  # Guard used throughout this recipe: /tmp/test-file marks that the
  # defending environment has already been provisioned (created by the
  # script resource at the bottom), so every resource is skipped on
  # subsequent runs.
  not_if "test -e /tmp/test-file"
end
# Each remote_file below installs one challenge helper script into
# /usr/bin with mode 0765.
remote_file "equal" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/equal.py"
  path "/usr/bin/equal"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
remote_file "doubles_s_and_r" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/doubles_s_and_r"
  path "/usr/bin/doubles_s_and_r"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
remote_file "fuzzing_rules" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/cat_motd"
  path "/usr/bin/fuzzing_rules"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
remote_file "defending_server_reboot" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/nc_server_reboot"
  path "/usr/bin/defending_server_reboot"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
remote_file "blacklist_replace" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/blacklist_replace.py"
  path "/usr/bin/blacklist_replace"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
remote_file "update_calc" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/update_calc"
  path "/usr/bin/update_calc"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
# Message-of-the-day shown to the defender on login.
remote_file "defender_motd" do
  source "http://ada.evergreen.edu/~weidav02/defender_motd.txt"
  path "/etc/motd.tail"
  not_if "test -e /tmp/test-file"
end
remote_file "get_attacker_input" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/get_attacker_input"
  path "/usr/bin/get_attacker_input"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
remote_file "submit_calc" do
  source "https://raw.githubusercontent.com/clampz/fuzzy_challenge/master/src/submit_calc"
  path "/usr/bin/submit_calc"
  mode "0765"
  not_if "test -e /tmp/test-file"
end
# One-time environment setup; creates /tmp/test-file last so reruns are
# no-ops.
script "start_defending_env" do
  interpreter "bash"
  cwd "/etc/update-motd.d"
  code <<-EOH
  echo "start_defense_server" >> /usr/bin/defending_server_reboot
  echo "" > /etc/legal
  rm 10* 50* 51* 90* 91* 98*
  touch /tmp/test-file
  EOH
  not_if "test -e /tmp/test-file"
end | 29.4875 | 99 | 0.752437 |
08422904d50413b7d141ac6bab587d6dbef1254a | 475 | $LOAD_PATH << File.dirname(__FILE__) + "/../lib"
# JRuby-only demo: builds a Swing window via the Magic DSL with a button
# that relabels itself when pressed.
raise "jruby required" unless defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby'
require 'magic'
require 'java'
import 'javax.swing.JFrame'
import 'javax.swing.JButton'
frame = Magic.build do
  JFrame do
    title 'Hello!'
    size 400,500
    JButton('Press me') do |b|
      # Magic wires this block up as the button's ActionListener.
      b.addActionListener do
        b.setText 'Pressed!'
      end
    end
  end
end
frame.set_default_close_operation(JFrame::EXIT_ON_CLOSE)
frame.show
| 19.791667 | 77 | 0.686316 |
619d68f564bd66877197db7ca76011f4bf30fabf | 953 | class Andi < Formula
  desc "Estimate evolutionary distance between similar genomes"
  homepage "https://github.com/EvolBioInf/andi"
  # tag "bioinformatics"
  # doi "10.1093/bioinformatics/btu815"
  url "https://github.com/EvolBioInf/andi/releases/download/v0.11/andi-0.11.tar.gz"
  sha256 "dba29ced86bb8160b6755eb69e30f6faff7b0e6588b8239faeb67437264d5451"
  # Pre-built binary bottles for supported macOS releases.
  bottle do
    cellar :any
    sha256 "c425a345402fa748a7b0db68ee7e64b2d29a5ab3eb9d255efeefb0bc19a4ece3" => :sierra
    sha256 "1948482416f7f91ece5e1e67a99ae2a278340a659d7068f7985a0817b5372634" => :el_capitan
    sha256 "146f7bd5d895fced30bf03c0ea3bd7674d2fcb5ac9aff11e0b0a2bd55965f65d" => :yosemite
  end
  depends_on "gsl"
  # Standard autotools build; the bundled libdivsufsort is disabled.
  def install
    system "./configure",
           "--disable-dependency-tracking",
           "--disable-silent-rules",
           "--prefix=#{prefix}",
           "--without-libdivsufsort"
    system "make", "install"
  end
  # Smoke test: the installed binary can report its version.
  test do
    system "#{bin}/andi", "--version"
  end
end
| 29.78125 | 92 | 0.737671 |
e9df8ed13b981af6de3baf2863bd79b8ed08326d | 777 | #--
# Ruby Whois
#
# An intelligent pure Ruby WHOIS client and parser.
#
# Copyright (c) 2009-2018 Simone Carletti <[email protected]>
#++
require_relative 'base_afilias'
module Whois
  class Parsers
    # Parser for the whois.aero server.
    #
    # @see Whois::Parsers::Example
    # The Example parser for the list of all available methods.
    #
    class WhoisAero < BaseAfilias
      # Configure the scanner as [scanner class, options]: domains that
      # SITA has reserved are recognized by the pattern below.
      self.scanner = Scanners::BaseAfilias, {
        pattern_reserved: /^This domain name has been reserved by SITA/,
      }
      # Reserved domains report :reserved; otherwise the status list from
      # the "Domain Status" node (wrapped in an Array, empty when absent).
      property_supported :status do
        if reserved?
          :reserved
        else
          Array(node("Domain Status"))
        end
      end
      # NEWPROPERTY
      # Whether the scanner flagged the record as a SITA-reserved domain.
      def reserved?
        !!node("status:reserved")
      end
    end
  end
end
| 17.659091 | 72 | 0.6139 |
33ab11bc6f81e6dd4ad2d6b2f26a61ac050458e1 | 46 | module RandomLocation
  # Gem version, following Semantic Versioning (MAJOR.MINOR.PATCH).
  VERSION = "0.1.0"
end
| 11.5 | 21 | 0.717391 |
6a2e419f9b759c315e284a5589e22db26e9f573b | 121 | class AddTaglineToSpaces < ActiveRecord::Migration[6.0]
def change
add_column :spaces, :tagline, :string
end
end
| 20.166667 | 55 | 0.743802 |
335fc45d85a04ec78baa964f1a7ad26145195429 | 301 | namespace :knifeswitch do
  desc 'Generate the migrations necessary to use Knifeswitch'
  # Shells out to the Rails migration generator to create the
  # knifeswitch_counters table (unique name, counter, closetime).
  task :create_migrations do
    sh 'rails g migration CreateKnifeswitchCounters ' \
      'name:string:uniq counter:integer closetime:datetime'
    puts "Done. Don't forget to run `rake db:migrate`."
  end
end
| 30.1 | 61 | 0.744186 |
1c71a0a432ad740a68f5f32c76b8d7fc5c6aa808 | 425 | # frozen_string_literal: true
module Clusters
  module Agents
    # Join model authorizing a cluster agent to act on a project, with a
    # JSON configuration payload validated against a schema.
    class ProjectAuthorization < ApplicationRecord
      self.table_name = 'agent_project_authorizations'
      belongs_to :agent, class_name: 'Clusters::Agent', optional: false
      belongs_to :project, class_name: '::Project', optional: false
      # config must conform to the named JSON schema file.
      validates :config, json_schema: { filename: 'cluster_agent_authorization_configuration' }
    end
  end
end
| 28.333333 | 95 | 0.745882 |
4a9b755cb3b0a62bc5dd5116e1682824efbe249d | 1,678 | require 'fileutils'
require 'yaml'
module Puppet::Parser::Functions
  newfunction(:cve20113872_store_progress, :doc => <<-'ENDHEREDOC') do |args|
    This function is used to write state information to persistent storage for
    an individual node working its way through the remediation process.
    This function expects the node cert name as the first argument and the step
    it has reached as the second argument, and finally a message to write to
    the state file which will be shown in the detailed progress report.
      cve20113872_store_progress($agent_certname, "step2", "OK")
  ENDHEREDOC
    if args.length < 3 then
      raise Puppet::ParseError, ("cve20113872_store_progress(): wrong number of arguments (#{args.length}; must >= 3)")
    end
    (agent_certname, step, message) = args
    # Write the state information to Puppet[:yamldir]/cve20113872
    folder = File.join(Puppet[:yamldir], "cve20113872")
    FileUtils.mkdir_p folder unless File.directory? folder
    # One YAML file per agent, keyed by its (lower-cased) cert name.
    progress_file = File.join(folder, "progress_#{agent_certname.downcase}.yaml")
    # Try and grab the issuer of the agent certificate from facter...
    agent_issuer = lookupvar('agent_cert_on_disk_issuer') || 'unknown'
    state = {
      agent_certname => {
        'agent_certname' => agent_certname,
        'step' => step.to_i,
        'message' => message,
        'timestamp' => Time.now,
        'issuer' => agent_issuer,
      }
    }
    # Write the file.
    File.open(progress_file, "w+", 0644) do |io|
      io.puts state.to_yaml
    end
    # Return the state hash if this function is ever converted to an rvalue
    state
  end
end
| 34.244898 | 119 | 0.675209 |
21e0e54c0d25c7b5ef1aa897df51a233c82f062e | 169 | class CreateSpaceAgencies < ActiveRecord::Migration[7.0]
def change
create_table :space_agencies do |t|
t.string :name
t.timestamps
end
end
end
| 16.9 | 56 | 0.686391 |
f7bc025bf923c1412b60a455f6dfbc046e456bc6 | 3,427 | Rails.application.routes.draw do
  # resources :annotation_files
  # --- Items (annotation items within a user's project repo) ---
  post 'project/:user_name/:repo_name/items', to: 'items#create', as: 'create_item'
  get 'project/:user_name/:repo_name/items/new', to: 'items#new', as: 'new_item'
  get 'project/:user_name/:repo_name/items/:slug/edit', to: 'items#edit', as: 'edit_item'
  get 'project/:user_name/:repo_name/items/:slug', to: 'items#show', as: 'item'
  patch 'project/:user_name/:repo_name/items/:slug', to: 'items#update', as: 'update_item'
  delete 'project/:user_name/:repo_name/items/:slug', to: 'items#destroy', as: 'destroy_item'
  # Annotation file management on an item. The :layer/:file segments may
  # contain dots, hence the "anything but a slash" constraints.
  post 'project/:user_name/:repo_name/items/:slug/files', to: 'items#add_annotation_file', as: 'add_annotation_file'
  post 'project/:user_name/:repo_name/items/:slug/process', to: 'items#process_annotation_file', as: 'process_annotation_file'
  get 'project/:user_name/:repo_name/items/:slug/files/:layer/destroy', to: 'items#delete_annotation_layer', as: 'delete_annotation_layer', constraints: { layer: /[^\/]+/ }
  get 'project/:user_name/:repo_name/items/:slug/files/:file/download', to: 'items#download_annotation_file', as: 'download_annotation_file', constraints: { file: /[^\/]+/ }
  get 'project/:user_name/:repo_name/items/:slug/files/:file/configure', to: 'items#configure_annotation_file', as: 'configure_annotation_file', constraints: { file: /[^\/]+/ }
  get 'project/:user_name/:repo_name/items/:slug/files/:file/delete', to: 'items#delete_annotation_file', as: 'delete_annotation_file', constraints: { file: /[^\/]+/ }
  # IIIF manifest import; :at_id is a full URL, so it may match anything.
  get 'project/:user_name/:repo_name/items/import/new', to: 'items#new_import', as: 'new_import_item'
  post 'project/:user_name/:repo_name/items/import', to: 'items#import_manifest', as: 'import_manifest'
  get 'project/:user_name/:repo_name/items/:slug/external/review/:at_id', to: 'items#review_external_annotations', as: 'review_external_annotations', :constraints => { :at_id => /.*/ }
  post 'project/:user_name/:repo_name/items/:slug/external/import/:at_id', to: 'items#import_external_annotations', as: 'import_external_annotations', :constraints => { :at_id => /.*/ }
  # --- Pages (static pages within a project, orderable) ---
  post 'project/:user_name/:repo_name/pages', to: 'pages#create', as: 'create_page'
  get 'project/:user_name/:repo_name/pages/new', to: 'pages#new', as: 'new_page'
  get 'project/:user_name/:repo_name/pages/:slug/edit', to: 'pages#edit', as: 'edit_page'
  get 'project/:user_name/:repo_name/pages/:slug', to: 'pages#show', as: 'page'
  patch 'project/:user_name/:repo_name/pages/:slug', to: 'pages#update', as: 'update_page'
  delete 'project/:user_name/:repo_name/pages/:slug', to: 'pages#destroy', as: 'destroy_page'
  get 'project/:user_name/:repo_name/pages/:slug/move_up', to: 'pages#move_up', as: 'move_up_page'
  get 'project/:user_name/:repo_name/pages/:slug/move_down', to: 'pages#move_down', as: 'move_down_page'
  # --- Projects ---
  root to: 'project#all'
  get 'project/all', as: 'all_projects'
  get 'project/mine', as: 'my_projects'
  get 'project/:user_name/:repo_name', to: 'project#show', as: 'project'
  get 'project/new', to: 'project#new', as: 'new_project'
  post 'project', to: 'project#create', as: 'create_project'
  get 'project/:user_name/:repo_name/status', to: 'project#build_status', as: 'project_build_status'
  # --- Authentication (GitHub OAuth) ---
  get 'user/login'
  get 'user/logout'
  # For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
  get '/auth/github/callback', to: 'user#login'
end
| 72.914894 | 186 | 0.720163 |
e86bd2f6b84678ced6e881cb92dbffd875e3e5bc | 675 | module Spree
  # Spree payment gateway for Conekta card payments.
  class BillingIntegration::ConektaGateway::Card < Gateway
    # Gateway credentials and source configuration, editable from the
    # Spree admin.
    preference :auth_token, :string
    preference :public_auth_token, :string
    preference :source_method, :string, default: 'card'
    # attr_accessible only exists before Rails 4's strong parameters.
    # NOTE(review): this guard also runs on Rails 5+ — confirm it still
    # behaves as intended on modern Rails.
    unless Rails::VERSION::MAJOR == 4
      attr_accessible :preferred_auth_token, :preferred_public_auth_token, :preferred_source_method, :gateway_response
    end
    def provider_class
      Spree::Openpay::Provider
    end
    def payment_source_class
      CreditCard
    end
    # This gateway charges cards.
    def card?
      true
    end
    # Payments are captured immediately on authorization.
    def auto_capture?
      true
    end
    # Installment plans are not offered for card payments.
    def with_installments?
      false
    end
    def method_type
      'conekta_card'
    end
  end
end
| 18.75 | 118 | 0.682963 |
1a935e625864e0b3d65f827053d1c56adf689cca | 2,625 | # Space Form model, used for space creation.
# Form object backing space creation: wraps the parameters for a new
# Space, validates the lead/sponsor users per space type, and delegates
# persistence to SpaceService::Create.
class SpaceForm
  include ActiveModel::Model

  attr_accessor(
    :name,
    :description,
    :host_lead_dxuser,
    :guest_lead_dxuser,
    :space_type,
    :cts,
    :sponsor_org_handle,
    :sponsor_lead_dxuser,
    :source_space_id,
    :restrict_to_template,
  )

  # Supported space types.
  TYPE_GROUPS = "groups".freeze
  TYPE_REVIEW = "review".freeze
  TYPE_VERIFICATION = "verification".freeze

  validates :name, :description, :space_type, presence: true
  validate :validate_host_lead_dxuser
  validate :validate_leads_orgs, if: -> { space_type == TYPE_REVIEW }
  validate :validate_guest_lead_dxuser, if: -> { space_type.in?([TYPE_GROUPS, TYPE_VERIFICATION]) }
  validate :validate_sponsor_lead_dxuser, if: -> { space_type == TYPE_REVIEW }

  class << self
    # Reuse Space's model name so form builders and routes treat this
    # form object like a Space.
    def model_name
      Space.model_name
    end
  end

  # Creates the space from this form's attributes.
  # @param api the platform API client passed through to the service.
  # @param user [User] the user creating the space.
  def persist!(api, user)
    SpaceService::Create.call(self, api: api, user: user)
  end

  # @return [User, nil] the sponsor lead user, looked up by dxuser.
  def space_sponsor
    User.find_by(dxuser: sponsor_lead_dxuser)
  end

  private

  # A host lead user validation
  def validate_host_lead_dxuser
    errors.add(:host_lead_dxuser, "'#{host_lead_dxuser}' not found") unless host_admin
  end

  # A guest lead user validation
  def validate_guest_lead_dxuser
    if guest_lead_dxuser == host_lead_dxuser
      errors.add(:guest_lead_dxuser, "can't be the same as Host lead")
    end

    return unless guest_lead_in_groups || guest_lead_in_verification

    errors.add(:guest_lead_dxuser, "'#{guest_lead_dxuser}' not found")
  end

  # Check guest lead in space of "groups" type: lead is required and must
  # resolve to an existing user.
  # Uses the TYPE_GROUPS constant for consistency with the validation
  # declarations above (previously a bare string literal).
  def guest_lead_in_groups
    space_type == TYPE_GROUPS && !(guest_lead_dxuser.present? && guest_admin)
  end

  # Check guest lead in space of "verification" type: only flags a lead
  # that was provided but does not resolve to a user.
  def guest_lead_in_verification
    space_type == TYPE_VERIFICATION && guest_lead_dxuser.present? && guest_admin.nil?
  end

  # A sponsor lead user validation
  def validate_sponsor_lead_dxuser
    if sponsor_lead_dxuser == host_lead_dxuser
      errors.add(:sponsor_lead_dxuser, "can't be the same as Reviewer lead")
    end
    errors.add(:sponsor_lead_dxuser, "'#{sponsor_lead_dxuser}' not found") unless space_sponsor
  end

  # Validation of host admin and space sponsor orgs:
  # both admins should not be in the same Org.
  def validate_leads_orgs
    return unless space_sponsor && host_admin
    return unless space_sponsor.org_id == host_admin.org_id

    errors.add(:sponsor_lead_dxuser, "can't belong to the same Org as Reviewer lead")
  end

  # @return [User, nil] the host lead user, looked up by dxuser.
  def host_admin
    User.find_by(dxuser: host_lead_dxuser)
  end

  # @return [User, nil] the guest lead user, looked up by dxuser.
  def guest_admin
    User.find_by(dxuser: guest_lead_dxuser)
  end
end
| 27.061856 | 99 | 0.729524 |
081f5663870cc155083b731d75550e83c8368665 | 306 | module SlackRubyBot
  module Commands
    # Catch-all command: matches any message addressed to the bot and
    # replies that the command was not understood.
    class Unknown < Base
      # Captures the bot mention and the rest of the message.
      match(/^(?<bot>\S*)[\s]*(?<expression>.*)$/)
      def self.call(client, data, _match)
        client.say(channel: data.channel, text: "Sorry <@#{data.user}>, I don't understand that command!", gif: 'idiot')
      end
    end
  end
end
| 25.5 | 120 | 0.604575 |
b9492aae6d3498940f80789581aa8280d54b77e5 | 593 | Pod::Spec.new do |s|
s.name = 'BrightFutures'
s.version = '1.0.0-beta.5'
s.license = 'MIT'
s.summary = 'A simple Futures & Promises library for iOS and OS X written in Swift'
s.homepage = 'https://github.com/Thomvis/BrightFutures'
s.social_media_url = 'https://twitter.com/thomvis88'
s.authors = { 'Thomas Visser' => '[email protected]' }
s.source = { :git => 'https://github.com/Thomvis/BrightFutures.git', :tag => s.version }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.10'
s.source_files = 'BrightFutures/*.swift'
s.requires_arc = true
end | 34.882353 | 90 | 0.677909 |
267a830d03e7ea43ab35cd7f3b77722a9f9bc3de | 664 | # frozen_string_literal: true
require 'active_support/core_ext/object/blank'
require 'active_support/core_ext/hash/reverse_merge'
require 'active_support/core_ext/hash/slice'
require 'active_support/core_ext/numeric/time'
require 'active_support/core_ext/string/inflections'
require 'active_support/core_ext/string/strip'
require 'active_support/core_ext/hash/keys'
require 'active_support/core_ext/array/conversions'
require 'colorized_string'
require 'krane/version'
require 'krane/errors'
require 'krane/formatted_logger'
require 'krane/statsd'
require 'krane/task_config'
require 'krane/task_config_validator'
module Krane
  # Minimum Kubernetes server version this tool supports.
  MIN_KUBE_VERSION = '1.15.0'
end
| 28.869565 | 52 | 0.832831 |
620b9659694ae6902b378e804431fd31b031f560 | 430 | cask "trilium-notes" do
  version "0.45.5"
  sha256 "14122579ac299a2ff696fcb879d3d1c007559308ce2934f631f4ebaf8940af58"
  # Download URL interpolates the version so bumping `version` is enough.
  url "https://github.com/zadam/trilium/releases/download/v#{version}/trilium-mac-x64-#{version}.zip"
  appcast "https://github.com/zadam/trilium/releases.atom"
  name "Trilium Notes"
  desc "Personal knowledge base"
  homepage "https://github.com/zadam/trilium"
  app "trilium-mac-x64/Trilium Notes.app"
end
| 33.076923 | 101 | 0.765116 |
614788c832f51e0f2a20aa8642656b34ec508328 | 837 | # coding: utf-8
# Gemspec for the jekyll-whiteglass theme.
Gem::Specification.new do |spec|
  spec.name          = "jekyll-whiteglass"
  spec.version       = "1.9.1"
  spec.authors       = ["Chayoung You"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Minimal, responsive Jekyll theme for hackers.}
  spec.homepage      = "https://github.com/yous/whiteglass"
  spec.license       = "MIT"
  # Package only theme assets/layouts and top-level docs from the git tree.
  spec.files         = `git ls-files -z`.split("\x0").select do |f|
    f.match(%r{^(assets|_layouts|_includes|_sass|LICENSE|README|CHANGELOG)}i)
  end
  spec.add_runtime_dependency "jekyll", ">= 3.3"
  spec.add_runtime_dependency "jekyll-archives", "~> 2.1"
  spec.add_runtime_dependency "jekyll-paginate", "~> 1.1"
  spec.add_runtime_dependency "jekyll-sitemap", "~> 1.0"
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
end
| 33.48 | 77 | 0.659498 |
e2c64100451acb54a8f6e39033d5925672f8177e | 2,277 | FactoryBot.define do
  # Factories for Member::Node::* CMS nodes. Each maps to one node route;
  # all share the :cms_node trait for common node attributes.
  factory :member_node_login, class: Member::Node::Login, traits: [:cms_node] do
    cur_site { cms_site }
    route "member/login"
    # Login node lives at the configured OAuth prefix path ("auth" default).
    filename { SS.config.oauth.prefix_path.sub(/^\//, '') || "auth" }
    twitter_oauth "enabled"
    twitter_client_id { unique_id }
    twitter_client_secret { unique_id }
    facebook_oauth "enabled"
    facebook_client_id { unique_id }
    facebook_client_secret { unique_id }
  end
  factory :member_node_mypage, class: Member::Node::Mypage, traits: [:cms_node] do
    route "member/mypage"
  end
  factory :member_node_my_profile, class: Member::Node::MyProfile, traits: [:cms_node] do
    route "member/my_profile"
  end
  factory :member_node_my_blog, class: Member::Node::MyBlog, traits: [:cms_node] do
    route "member/my_blog"
  end
  factory :member_node_my_photo, class: Member::Node::MyPhoto, traits: [:cms_node] do
    route "member/my_photo"
  end
  factory :member_node_blog, class: Member::Node::Blog, traits: [:cms_node] do
    route "member/blog"
  end
  factory :member_node_blog_page, class: Member::Node::BlogPage, traits: [:cms_node] do
    route "member/blog_page"
  end
  factory :member_node_photo, class: Member::Node::Photo, traits: [:cms_node] do
    route "member/photo"
  end
  factory :member_node_photo_search, class: Member::Node::PhotoSearch, traits: [:cms_node] do
    route "member/photo_search"
  end
  factory :member_node_photo_spot, class: Member::Node::PhotoSpot, traits: [:cms_node] do
    route "member/photo_spot"
  end
  factory :member_node_photo_category, class: Member::Node::PhotoCategory, traits: [:cms_node] do
    route "member/photo_category"
  end
  factory :member_node_photo_location, class: Member::Node::PhotoLocation, traits: [:cms_node] do
    route "member/photo_location"
  end
  factory :member_node_registration, class: Member::Node::Registration, traits: [:cms_node] do
    route "member/registration"
  end
  factory :member_node_my_anpi_post, class: Member::Node::MyAnpiPost, traits: [:cms_node] do
    route "member/my_anpi_post"
  end
  factory :member_node_my_group, class: Member::Node::MyGroup, traits: [:cms_node] do
    route "member/my_group"
    sender_name { unique_id }
    sender_email { "#{sender_name}@example.jp" }
  end
end
| 31.625 | 97 | 0.720246 |
2164abbca3fffc09ceab0a95a276c46d680eda40 | 7,762 | # encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"
require "socket"
# This output allows you to pull metrics from your logs and ship them to
# Graphite. Graphite is an open source tool for storing and graphing metrics.
#
# An example use case: Some applications emit aggregated stats in the logs
# every 10 seconds. Using the grok filter and this output, it is possible to
# capture the metric values from the logs and emit them to Graphite.
class LogStash::Outputs::Graphite < LogStash::Outputs::Base
  config_name "graphite"

  # Event fields that are never shipped as metrics in fields_are_metrics mode.
  EXCLUDE_ALWAYS = [ "@timestamp", "@version" ]

  # "*" is both the default format (metric name used verbatim) and the
  # placeholder character substituted inside a custom metrics_format.
  DEFAULT_METRICS_FORMAT = "*"
  METRIC_PLACEHOLDER = "*"

  # The hostname or IP address of the Graphite server.
  config :host, :validate => :string, :default => "localhost"

  # The port to connect to on the Graphite server.
  config :port, :validate => :number, :default => 2003

  # Interval between reconnect attempts to Carbon.
  config :reconnect_interval, :validate => :number, :default => 2

  # Should metrics be resent on failure?
  config :resend_on_failure, :validate => :boolean, :default => false

  # The metric(s) to use. This supports dynamic strings like %{host}
  # for metric names and also for values. This is a hash field with key
  # being the metric name, value being the metric value. Example:
  # [source,ruby]
  #     metrics => { "%{host}/uptime" => "%{uptime_1m}" }
  #
  # The value will be coerced to a floating point value. Values which cannot be
  # coerced will be set to zero (0). You may use either `metrics` or `fields_are_metrics`,
  # but not both.
  config :metrics, :validate => :hash, :default => {}

  # An array indicating that these event fields should be treated as metrics
  # and will be sent verbatim to Graphite. You may use either `fields_are_metrics`
  # or `metrics`, but not both.
  config :fields_are_metrics, :validate => :boolean, :default => false

  # Include only regex matched metric names.
  config :include_metrics, :validate => :array, :default => [ ".*" ]

  # Exclude regex matched metric names, by default exclude unresolved %{field} strings.
  config :exclude_metrics, :validate => :array, :default => [ "%\{[^}]+\}" ]

  # Use this field for the timestamp instead of '@timestamp' which is the
  # default. Useful when backfilling or just getting more accurate data into
  # graphite since you probably have a cache layer infront of Logstash.
  config :timestamp_field, :validate => :string, :default => '@timestamp'

  # Defines the format of the metric string. The placeholder '*' will be
  # replaced with the name of the actual metric.
  # [source,ruby]
  #     metrics_format => "foo.bar.*.sum"
  #
  # NOTE: If no metrics_format is defined, the name of the metric will be used as fallback.
  config :metrics_format, :validate => :string, :default => DEFAULT_METRICS_FORMAT

  # When hashes are passed in as values they are broken out into a dotted notation
  # For instance if you configure this plugin with
  # # [source,ruby]
  #     metrics => "mymetrics"
  #
  # and "mymetrics" is a nested hash of '{a => 1, b => { c => 2 }}'
  # this plugin will generate two metrics: a => 1, and b.c => 2 .
  # If you've specified a 'metrics_format' it will respect that,
  # but you still may want control over the separator within these nested key names.
  # This config setting changes the separator from the '.' default.
  config :nested_object_separator, :validate => :string, :default => "."

  # Plugin lifecycle hook: compile include/exclude patterns once, validate
  # metrics_format, and open the initial connection to Carbon.
  def register
    @include_metrics.collect!{|regexp| Regexp.new(regexp)}
    @exclude_metrics.collect!{|regexp| Regexp.new(regexp)}
    if @metrics_format && !@metrics_format.include?(METRIC_PLACEHOLDER)
      @logger.warn("metrics_format does not include placeholder #{METRIC_PLACEHOLDER} .. falling back to default format: #{DEFAULT_METRICS_FORMAT.inspect}")
      @metrics_format = DEFAULT_METRICS_FORMAT
    end
    connect
  end

  # Open a TCP connection to Carbon, retrying forever while the server
  # refuses connections. Other connection errors still propagate.
  # TODO(sissel): Test error cases. Catch exceptions. Find fortune and glory. Retire to yak farm.
  def connect
    begin
      @socket = TCPSocket.new(@host, @port)
    rescue Errno::ECONNREFUSED => e
      @logger.warn("Connection refused to graphite server, sleeping...", :host => @host, :port => @port)
      sleep(@reconnect_interval)
      retry
    end
  end

  # Apply metrics_format (after event sprintf expansion) to a metric name.
  # With the default "*" format this returns the metric name unchanged.
  def construct_metric_name(event, metric)
    if @metrics_format
      sprinted = event.sprintf(@metrics_format)
      return sprinted.gsub(METRIC_PLACEHOLDER, metric)
    end
    metric
  end

  def receive(event)
    # Graphite message format: metric value timestamp\n
    # compact to remove nil messages which produces useless \n
    messages = (
      @fields_are_metrics \
        ? messages_from_event_fields(event, @include_metrics, @exclude_metrics)
        : messages_from_event_metrics(event, @metrics)
    ).compact
    if messages.empty?
      @logger.debug? && @logger.debug("Message is empty, not sending anything to Graphite", :messages => messages, :host => @host, :port => @port)
    else
      message = messages.join("\n")
      @logger.debug? && @logger.debug("Sending carbon messages", :messages => messages, :host => @host, :port => @port)
      # Catch exceptions like ECONNRESET and friends, reconnect on failure.
      # TODO(sissel): Test error cases. Catch exceptions. Find fortune and glory.
      begin
        @socket.puts(message)
      rescue Errno::EPIPE, Errno::ECONNRESET, IOError => e
        @logger.warn("Connection to graphite server died", :exception => e, :host => @host, :port => @port)
        sleep(@reconnect_interval)
        connect
        retry if @resend_on_failure
      end
    end
  end

  private

  # Build "metric value timestamp" lines from the event's own fields.
  # `next` inside flat_map yields nil entries; the caller compacts them.
  def messages_from_event_fields(event, include_metrics, exclude_metrics)
    @logger.debug? && @logger.debug("got metrics event", :metrics => event.to_hash)
    timestamp = event_timestamp(event)
    event.to_hash.flat_map do |metric,value|
      next if EXCLUDE_ALWAYS.include?(metric)
      next unless include_metrics.empty? || include_metrics.any? { |regexp| metric.match(regexp) }
      next if exclude_metrics.any? {|regexp| metric.match(regexp)}
      metrics_lines_for_event(event, metric, value, timestamp)
    end
  end

  # Build "metric value timestamp" lines from the configured `metrics` hash.
  def messages_from_event_metrics(event, metrics)
    timestamp = event_timestamp(event)
    metrics.flat_map do |metric, value|
      @logger.debug? && @logger.debug("processing", :metric => metric, :value => value)
      metric = event.sprintf(metric)
      # CONSISTENCY FIX: treat an empty include list as "include everything",
      # matching messages_from_event_fields above. With the default
      # include_metrics of [".*"] behavior is unchanged.
      next unless @include_metrics.empty? || @include_metrics.any? {|regexp| metric.match(regexp)}
      next if @exclude_metrics.any? {|regexp| metric.match(regexp)}
      metrics_lines_for_event(event, metric, value, timestamp)
    end
  end

  # Integer epoch seconds taken from the configured timestamp field.
  def event_timestamp(event)
    event.get(@timestamp_field).to_i
  end

  # Hash-valued metrics are flattened into dotted names via dotify; all other
  # values are coerced to a float (non-numeric strings become 0.0).
  def metrics_lines_for_event(event, metric, value, timestamp)
    if event.get(metric).is_a?(Hash)
      dotify(event.get(metric), metric).map do |k, v|
        metrics_line(event, k, v, timestamp)
      end
    else
      metrics_line(event, event.sprintf(metric), event.sprintf(value).to_f, timestamp)
    end
  end

  # Single line of Graphite's plaintext protocol.
  def metrics_line(event, name, value, timestamp)
    "#{construct_metric_name(event, name)} #{value} #{timestamp}"
  end

  # Take a nested ruby hash of the form {:a => {:b => 2}, c: => 3} and
  # turn it into a hash of the form
  # { "a.b" => 2, "c" => 3}
  # Array values have no representation in Graphite's line protocol and are
  # dropped with a warning.
  def dotify(hash, prefix = nil)
    hash.reduce({}) do |acc, kv|
      k, v = kv
      pk = prefix ? "#{prefix}#{@nested_object_separator}#{k}" : k.to_s
      if v.is_a?(Hash)
        acc.merge!(dotify(v, pk))
      elsif v.is_a?(Array)
        # There's no right answer here, so we do nothing
        @logger.warn("Array values not supported for graphite metrics! Ignoring #{hash} @ #{prefix}")
      else
        acc[pk] = v
      end
      acc
    end
  end
end
| 37.679612 | 156 | 0.682814 |
386c352366ae2fe8f72b2d209f48829bc761f9a5 | 1,274 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/pubsub_v1/service.rb'
require 'google/apis/pubsub_v1/classes.rb'
require 'google/apis/pubsub_v1/representations.rb'
module Google
  module Apis
    # Google Cloud Pub/Sub API
    #
    # Provides reliable, many-to-many, asynchronous messaging between applications.
    #
    # @see https://cloud.google.com/pubsub/docs
    module PubsubV1
      # API version identifier used in request paths.
      VERSION = 'V1'
      # Revision date (YYYYMMDD) of the generated client code.
      REVISION = '20180319'
      # View and manage your data across Google Cloud Platform services
      AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'
      # View and manage Pub/Sub topics and subscriptions
      AUTH_PUBSUB = 'https://www.googleapis.com/auth/pubsub'
    end
  end
end
| 33.526316 | 83 | 0.732339 |
1c275f35d472dfde45137afd3bf453a8ef0aee3a | 1,090 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module PowerOfPeople
  # Application-wide Rails configuration for PowerOfPeople.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.
    # Don't generate system test files.
    config.generators.system_tests = nil
  end
end
| 32.058824 | 82 | 0.780734 |
18eb02634d0a77a8805e2cbaab3456f9de38143d | 6,144 | #
# Be sure to run `pod spec lint DictUtils.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |spec|

  # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  spec.name         = "DictUtils"
  spec.version      = "0.0.1"
  # Typo fix: "Dicitonaries" -> "Dictionaries" in the user-facing summary.
  spec.summary      = "Adds bunch of extensions and operators that would make work with Dictionaries easier."

  spec.description  = <<-DESC
  DictUtils
  ====
  Adds bunch of extensions and operators that would make work with Dictionaries easier. If you are working with JSON and don't want to use dependencies like SwiftyJSON and keep everything native, you can peacefully grab that little helper.
  Requirements
  ----
  * Xcode 10+
  * Swift 4.0+
  * iOS 8.0+
  * tvOS 9.0+
  * macOS 10.10+
  * watchOS 2.0+
  Install
  ----
  #### Manually
  * Copy the `DictUtils.swift` file into your codebase.
  #### Cocoapods
  ```
  pod 'DictUtils'
  ```
  Extensions
  -----
  * `+` and `+=` operators
  * Merges two dictionaries without hassle.
  * Typed, chainable subscripts
  * Get `int`, `string`, `bool`, `double`, `[String: Any]` and their array types.
  ``` swift
  let index = dataSource[int: "index"]
  let name = dataSource[string: "name"]
  let zipCode = dataSource[dict: "users"]?[0][dict: "address"]?[string: "zip"]
  ```
  * Initialize a dictionary from a JSON string.
  ```
  let dict = [String: Any](json: response.string)
  ```
  * Get pretty printed string with `dataSource.prettify()`.
  DESC

  spec.homepage     = "https://github.com/cemolcay/DictUtils"

  # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  spec.license      = "MIT"

  # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  spec.author       = { "Cem Olcay" => "[email protected]" }

  # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  spec.ios.deployment_target     = "8.0"
  spec.osx.deployment_target     = "10.10"
  spec.watchos.deployment_target = "2.0"
  spec.tvos.deployment_target    = "9.0"

  # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  spec.source       = { :git => "https://github.com/cemolcay/DictUtils.git", :tag => "#{spec.version}" }

  # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  # Single-file library; including this file makes its public extensions available.
  spec.source_files = "DictUtils/Source/DictUtils.swift"

  # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #

  spec.requires_arc = true
end
| 32.855615 | 237 | 0.618164 |
d51cbfb023ba38a8937e755f8da7dc45fa32683b | 2,243 | require_relative '../test_helper'
# Unit Test for SemanticLogger::Appender::Tcp
module Appender
  # Unit tests for SemanticLogger::Appender::Tcp.
  #
  # No real socket is opened: the TCP client's connect is stubbed out, and each
  # test captures the bytes the appender would have written, then inspects the
  # JSON payload.
  class TcpTest < Minitest::Test
    describe SemanticLogger::Appender::Tcp do
      before do
        # Prevent a real TCP connection from being established.
        Net::TCPClient.stub_any_instance(:connect, true) do
          @appender = SemanticLogger::Appender::Tcp.new(server: 'localhost:8088')
        end
        # Make the retry wrapper a straight pass-through for these tests.
        @appender.tcp_client.instance_eval do
          def retry_on_connection_failure
            yield
          end
        end
        @message = 'AppenderTcpTest log message'
      end

      SemanticLogger::LEVELS.each do |level|
        it "send #{level}" do
          data = nil
          @appender.tcp_client.stub(:write, ->(d) { data = d }) do
            @appender.send(level, @message)
          end
          hash = JSON.parse(data)
          assert_equal @message, hash['message']
          assert_equal level.to_s, hash['level']
          refute hash['stack_trace']
        end

        it "send #{level} exceptions" do
          exc = nil
          begin
            # Deliberately trigger a NameError: "Uh oh" parses as Uh(oh),
            # and `oh` is undefined.
            Uh oh
          rescue Exception => e
            exc = e
          end
          data = nil
          @appender.tcp_client.stub(:write, ->(d) { data = d }) do
            @appender.send(level, 'Reading File', exc)
          end
          hash = JSON.parse(data)
          # BUG FIX: these three previously used `assert 'literal', actual`,
          # which always passes (a non-empty string is truthy and the second
          # argument is just the failure message). Use real assertions.
          assert_equal 'Reading File', hash['message']
          assert_equal 'NameError', hash['exception']['name']
          assert_includes hash['exception']['message'], 'undefined local variable or method'
          assert_equal level.to_s, hash['level'], 'Should be error level (3)'
          assert hash['exception']['stack_trace'].first.include?(__FILE__), hash['exception']
        end

        it "send #{level} custom attributes" do
          data = nil
          @appender.tcp_client.stub(:write, ->(d) { data = d }) do
            @appender.send(level, @message, key1: 1, key2: 'a')
          end
          hash = JSON.parse(data)
          assert_equal @message, hash['message']
          assert_equal level.to_s, hash['level']
          refute hash['stack_trace']
          assert payload = hash['payload'], hash
          assert_equal 1, payload['key1'], payload
          assert_equal 'a', payload['key2'], payload
        end
      end
    end
  end
end
| 33.477612 | 93 | 0.563085 |
4a6d15898dc7d6e8dfdde0b95ce153e7e4fbb8d1 | 1,141 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'grape/order/version'
Gem::Specification.new do |spec|
  # Gem identity and metadata.
  spec.name          = "grape-order"
  spec.version       = Grape::Order::VERSION
  spec.authors       = ["Grzegorz Brzezinka"]
  spec.email         = ["[email protected]"]
  spec.description   = "collection ordering by params for grape API framework"
  spec.summary       = "collection ordering for grape"
  spec.homepage      = ""
  spec.license       = "MIT"

  # Package exactly what git tracks; executables live under bin/.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Runtime dependencies.
  spec.add_runtime_dependency "grape"
  spec.add_runtime_dependency "activerecord", "> 4.0"

  # Development-only dependencies.
  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "simplecov"
  spec.add_development_dependency "rack-test"
  spec.add_development_dependency "rspec", "~> 2.9"
end
79a01c94fdbda1fe7d2b554a196b007906f2ce94 | 4,928 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details.
# https://newrelic.atlassian.net/wiki/display/eng/Agent+Thread+Profiling
# https://newrelic.atlassian.net/browse/RUBY-917
if RUBY_VERSION >= '1.9'
  require 'thread'
  require 'multiverse_helpers'

  # End-to-end test of the agent's thread profiler against a stubbed
  # collector ($collector). See:
  #   https://newrelic.atlassian.net/wiki/display/eng/Agent+Thread+Profiling
  #   https://newrelic.atlassian.net/browse/RUBY-917
  class ThreadProfilingTest < Minitest::Test
    include MultiverseHelpers

    setup_and_teardown_agent(:'thread_profiler.enabled' => true, :force_send => true) do |collector|
      collector.stub('connect', {"agent_run_id" => 666 })
      collector.stub('get_agent_commands', [])
      collector.stub('agent_command_results', [])
    end

    def after_setup
      super # Minitest lifecycle hooks must chain to super.
      agent.service.request_timeout = 0.5
      agent.service.agent_id = 666
      @thread_profiler_session = agent.agent_command_router.thread_profiler_session
      @threads = []
    end

    def after_teardown
      @threads.each { |t| t.kill }
      @threads = nil
      super # Minitest lifecycle hooks must chain to super.
    end

    START_COMMAND = [[666,{
      "name" => "start_profiler",
      "arguments" => {
        "profile_id" => -1,
        "sample_period" => 0.01,
        "duration" => 0.75,
        "only_runnable_threads" => false,
        "only_request_threads" => false,
        "profile_agent_code" => true
      }
    }]]

    STOP_COMMAND = [[666,{
      "name" => "stop_profiler",
      "arguments" => {
        "profile_id" => -1,
        "report_data" => true
      }
    }]]

    # These are potentially fragile for being timing based
    # START_COMMAND with 0.01 sampling and 0.5 duration expects to get
    # roughly 50 polling cycles in. We check signficiantly less than that.
    # STOP_COMMAND when immediately issued after a START_COMMAND is expected
    # go only let a few cycles through, so we check less than 10
    def test_thread_profiling
      run_transaction_in_thread(:type => :controller, :request => stub)
      run_transaction_in_thread(:type => :task)
      issue_command(START_COMMAND)
      let_it_finish
      assert_full_profile_harvested
    end

    def test_thread_profiling_with_pruby_marshaller
      with_config(:marshaller => 'pruby') do
        run_transaction_in_thread(:type => :controller, :request => stub)
        run_transaction_in_thread(:type => :task)
        issue_command(START_COMMAND)
        let_it_finish
      end
      assert_full_profile_harvested
    end

    def test_thread_profiling_can_stop
      issue_command(START_COMMAND)
      issue_command(STOP_COMMAND)
      # No wait needed, should be immediately ready to harvest
      assert @thread_profiler_session.ready_to_harvest?
      harvest
      profile_data = $collector.calls_for('profile_data')[0]
      assert_equal('666', profile_data.run_id, "Missing run_id, profile_data was #{profile_data.inspect}")
      assert(profile_data.sample_count < 50, "Expected sample_count < 50, but was #{profile_data.sample_count}")
    end

    # Shared assertions for the two full-profile tests above (previously
    # duplicated verbatim in both).
    def assert_full_profile_harvested
      profile_data = $collector.calls_for('profile_data')[0]
      assert_equal('666', profile_data.run_id, "Missing run_id, profile_data was #{profile_data.inspect}")
      assert(profile_data.sample_count >= 2, "Expected sample_count >= 2, but was #{profile_data.sample_count}")
      assert_saw_traces(profile_data, "OTHER")
      assert_saw_traces(profile_data, "AGENT")
      assert_saw_traces(profile_data, "REQUEST")
      assert_saw_traces(profile_data, "BACKGROUND")
    end

    # Deliver a fake agent command, let the agent process it, then clear it.
    def issue_command(cmd)
      $collector.stub('get_agent_commands', cmd)
      agent.send(:check_for_and_handle_agent_commands)
      $collector.stub('get_agent_commands', [])
    end

    # Runs a thread we expect to span entire test and be killed at the end
    def run_transaction_in_thread(opts)
      q = Queue.new
      @threads ||= []
      @threads << Thread.new do
        in_transaction(opts) do
          q.push('.')
          sleep # sleep until explicitly woken in join_background_threads
        end
      end
      q.pop # block until the thread has had a chance to start up
    end

    # Give the profiler time to sample, then harvest and wind down the
    # background transaction threads.
    def let_it_finish
      wait_for_backtrace_service_poll(:timeout => 10.0, :iterations => 10)
      harvest
      join_background_threads
    end

    # Wake each sleeping transaction thread and wait for it to complete.
    def join_background_threads
      if @threads
        @threads.each do |thread|
          thread.run
          thread.join
        end
      end
    end

    # Shutting the agent down forces a final harvest/send to $collector.
    def harvest
      agent.shutdown
    end

    def assert_saw_traces(profile_data, type)
      assert_kind_of Hash, profile_data.traces
      traces_for_type = profile_data.traces[type]
      assert traces_for_type, "Missing key for type #{type} in profile_data"
      assert_kind_of Array, traces_for_type
      assert !profile_data.traces[type].empty?, "Zero #{type} traces seen"
    end
  end
end
| 30.419753 | 110 | 0.705966 |
0160bbdf17baf53670b344808fc1933c1a92d6bc | 573 | require 'test_helper'
# Verifies that Line::Bot::Headers builds the complete set of LINE BOT API
# request headers from the shared $client fixture.
class Line::Bot::HeadersTest < Minitest::Test
  def test_request_headers
    headers = Line::Bot::Headers.new($client, :get, "https://aisaac.in").request_headers

    expected = {
      'User-Agent' => "LineBotGem/#{Line::Bot::VERSION}",
      'Content-Type' => 'application/json; charset=UTF-8',
      'X-Line-ChannelID' => $client.channel_id,
      'X-Line-ChannelSecret' => $client.channel_secret,
      'X-Line-Trusted-User-With-ACL' => $client.channel_mid
    }

    assert_equal(expected, headers)
  end
end
| 33.705882 | 80 | 0.568935 |
f7a479097565d6aea992e35a8da00699eec2b869 | 2,811 | module Relevance
module Tarantula
class FormSubmission
include Relevance::Tarantula
attr_accessor :meth, :action, :data, :attack, :form
class << self
def attacks
# normalize from hash input to Attack
@attacks = @attacks.map do |val|
Hash === val ? Relevance::Tarantula::Attack.new(val) : val
end
@attacks
end
def attacks=(atts)
# normalize from hash input to Attack
@attacks = atts.map do |val|
Hash === val ? Relevance::Tarantula::Attack.new(val) : val
end
end
end
@attacks = [Relevance::Tarantula::BasicAttack.new]
def initialize(form, attack = Relevance::Tarantula::BasicAttack.new)
@form = form
@meth = form.meth
@action = form.action
@attack = attack
@data = mutate_selects(form).merge(mutate_text_areas(form)).merge(mutate_inputs(form))
end
def crawl
begin
response = form.crawler.submit(meth, action, data)
log "Response #{response.code} for #{self}"
rescue ActiveRecord::RecordNotFound => e
log "Skipping #{action}, presumed ok that record is missing"
response = Relevance::Tarantula::Response.new(:code => "404", :body => e.message, :content_type => "text/plain")
end
form.crawler.handle_form_results(self, response)
response
end
def self.mutate(form)
attacks.map{|attack| new(form, attack)} if attacks
end
def url
action
end
def to_s
"#{action} #{meth} #{data.inspect} #{attack.inspect}"
end
# a form's signature is what makes it unique (e.g. action + fields)
# used to keep track of which forms we have submitted already
def signature
[action, data.keys.sort, attack.name]
end
def create_random_data_for(form, tag_selector)
form.search(tag_selector).inject({}) do |form_args, input|
# TODO: test
form_args[input['name']] = random_data(input) if input['name']
form_args
end
end
def mutate_inputs(form)
create_random_data_for(form, 'input')
end
def mutate_text_areas(form)
create_random_data_for(form, 'textarea')
end
def mutate_selects(form)
form.search('select').inject({}) do |form_args, select|
options = select.search('option')
option = options.sample
form_args[select['name']] = option && option['value']
form_args
end
end
def random_data(input)
case input['name']
when /^_method$/ then input['value']
else
attack.input(input)
end
end
end
end
end
| 28.393939 | 122 | 0.579153 |
39ba5eb7530f21fa44aa3570eb0e9c91a8b08d5f | 1,933 | require 'simplecov'
require 'coveralls'
# Report coverage both locally (HTML) and to Coveralls.
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
  SimpleCov::Formatter::HTMLFormatter,
  Coveralls::SimpleCov::Formatter
]
# Use the Rails coverage profile; exclude generated/dev-only paths.
SimpleCov.start 'rails' do
  add_filter "/admin/backdoors_controller.rb"
  add_filter "/errors_controller"
  add_filter "/spec/"
  add_filter "/vendor/"
end
# This file is copied to spec/ when you run 'rails generate rspec:install'
require 'saml'
require 'rspec/core'
require 'rspec/collection_matchers'
require 'factories/all'
# Silence the I18n locale-availability check (no enforcement in specs).
I18n.enforce_available_locales = false
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.join(File.dirname(__FILE__), "support/**/*.rb")].each {|f| require f}
RSpec.configure do |config|
  # ## Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  # Accept both the legacy `should` and the newer `expect` syntaxes.
  config.expect_with :rspec do |c|
    c.syntax = [:should, :expect]
  end
  config.mock_with :rspec do |c|
    c.yield_receiver_to_any_instance_implementation_blocks = false
    c.syntax = [:should, :expect]
  end
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  # --seed 1234
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.order = "random"
  config.include FactoryGirl::Syntax::Methods
  config.raise_errors_for_deprecations!
end
| 30.68254 | 79 | 0.744956 |
8746725f843742fac6a5a75aa082e507d81644bd | 342 | cask "continuity-activation-tool" do
version :latest
sha256 :no_check
url "https://github.com/dokterdok/Continuity-Activation-Tool/archive/master.zip"
name "Continuity Activation Tool"
homepage "https://github.com/dokterdok/Continuity-Activation-Tool/"
app "Continuity-Activation-Tool-master/Continuity Activation Tool.app"
end
| 31.090909 | 82 | 0.78655 |
e94f317bb2e79569a0f0dbc26116cc089094073e | 1,346 | class Pelikan < Formula
desc "Production-ready cache services"
homepage "https://twitter.github.io/pelikan"
url "https://github.com/twitter/pelikan/archive/0.1.2.tar.gz"
sha256 "c105fdab8306f10c1dfa660b4e958ff6f381a5099eabcb15013ba42e4635f824"
license "Apache-2.0"
head "https://github.com/twitter/pelikan.git"
bottle do
cellar :any_skip_relocation
sha256 "98b69e12d5ba1d3e8824e87f3fa5773a3bf7ba63dc2c32c73f07839b2c9d0e81" => :big_sur
sha256 "61441ad2aeeb6d14ab8fa6183944c1f4ab0733776e3f810ad17b80faf2f25faf" => :catalina
sha256 "a313660eb003974995537cef07e391d3051218f7c65f3326c270b68f0855a59f" => :mojave
sha256 "a80ae1b508d4eae75d03fc5ad07477039a50a37419681b2472af4f9dc5f240ea" => :high_sierra
sha256 "37a675674b7ef33f07099029042f56c054f09b5d22400010d583fbfa41c0ce50" => :sierra
sha256 "e314ce6288bf76e271bf69ce844e2e846b16cad68ce635faf1e5130c3c6911d0" => :el_capitan
sha256 "ab04b8488e6272d0000c8e67842c4b286eb23459a6de9e9a392f14aa87c9978e" => :yosemite
sha256 "80459134cbab7aa94ab55d38488b2058696f7408869306f75e80cfa0350ed40d" => :mavericks
end
depends_on "cmake" => :build
def install
mkdir "_build" do
system "cmake", "..", *std_cmake_args
system "make"
system "make", "install"
end
end
test do
system "#{bin}/pelikan_twemcache", "-c"
end
end
| 38.457143 | 93 | 0.782318 |
1c46b0346456c93346a71a64547b23cca3c53d96 | 253 | # encoding: UTF-8
# Legacy-DSL Homebrew cask for QQ Music (Tencent), fetched as a versioned DMG.
class Qqmusic < Cask
  version '1.3.0'
  sha256 '2f1198f9b3e1407822a771fcdfdd643b65f35f6b51cc0af8c6b11fa11fc30a0d'
  url 'http://dldir1.qq.com/music/clntupate/QQMusicForMacV1.3.0.dmg'
  homepage 'http://y.qq.com'
  # App bundle name inside the DMG is localized Chinese.
  link 'QQ音乐.app'
end
| 28.111111 | 75 | 0.758893 |
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# Fixed accidental doubled slash in the board_index_page path.
require_relative './../support/board_index_page'
require_relative './../support/board_page'
# Feature specs for the parent-child ("Subtasks") action board: each list is
# backed by a query filtered on a parent work package and its cards are the
# children. Covers the manage_subtasks permission gate, full list/card
# management, and the guard against moving a work package into its own column.
describe 'Subtasks action board', type: :feature, js: true do
  let(:type) { FactoryBot.create(:type_standard) }
  let(:project) { FactoryBot.create(:project, types: [type], enabled_module_names: %i[work_package_tracking board_view]) }
  # `permissions` is deliberately defined per-context below.
  let(:role) { FactoryBot.create(:role, permissions: permissions) }
  let(:user) do
    FactoryBot.create(:user,
                      member_in_project: project,
                      member_through_role: role)
  end
  let(:board_index) { Pages::BoardIndex.new(project) }
  # Created eagerly (let!) so defaults exist before any board interaction.
  let!(:priority) { FactoryBot.create :default_priority }
  let!(:open_status) { FactoryBot.create :default_status, name: 'Open' }
  let!(:parent) { FactoryBot.create :work_package, project: project, subject: 'Parent WP', status: open_status }
  let!(:child) { FactoryBot.create :work_package, project: project, subject: 'Child WP', parent: parent, status: open_status }
  before do
    # Boards are an Enterprise feature; grant the token for these examples.
    with_enterprise_token :board_view
    login_as(user)
  end
  context 'without the manage_subtasks permission' do
    let(:permissions) {
      %i[show_board_views manage_board_views add_work_packages
         edit_work_packages view_work_packages manage_public_queries]
    }
    it 'does not allow to move work packages' do
      board_index.visit!
      # Create new board
      board_page = board_index.create_board action: :Parent_child, expect_empty: true
      # Expect we can add a work package column
      board_page.add_list option: 'Parent WP'
      board_page.expect_list 'Parent WP'
      # Expect one work package there
      board_page.expect_card 'Parent WP', 'Child'
      # Without manage_subtasks the card must not be draggable.
      board_page.expect_movable 'Parent WP', 'Child', movable: false
    end
  end
  context 'with all permissions' do
    let!(:other_wp) { FactoryBot.create :work_package, project: project, subject: 'Other WP', status: open_status }
    let(:permissions) {
      %i[show_board_views manage_board_views add_work_packages
         edit_work_packages view_work_packages manage_public_queries manage_subtasks]
    }
    it 'allows management of subtasks work packages' do
      board_index.visit!
      # Create new board
      board_page = board_index.create_board action: :Parent_child, expect_empty: true
      # Expect we can add a child 1
      board_page.add_list option: 'Parent WP'
      board_page.expect_list 'Parent WP'
      # Expect one work package there
      board_page.expect_card 'Parent WP', 'Child'
      # Expect move permission to be granted
      board_page.expect_movable 'Parent WP', 'Child', movable: true
      board_page.board(reload: true) do |board|
        expect(board.name).to eq 'Action board (parent-child)'
        queries = board.contained_queries
        expect(queries.count).to eq(1)
        query = queries.first
        expect(query.name).to eq 'Parent WP'
        # Each list is persisted as a query filtered on the parent WP id.
        expect(query.filters.first.name).to eq :parent
        expect(query.filters.first.values).to eq [parent.id.to_s]
      end
      # Create new list
      board_page.add_list option: 'Other WP'
      board_page.expect_list 'Other WP'
      board_page.expect_cards_in_order 'Other WP'
      # Add item
      board_page.add_card 'Parent WP', 'Second child'
      # NOTE(review): fixed sleep waits for frontend persistence; consider an
      # explicit wait to reduce flakiness.
      sleep 2
      # Expect added to query
      queries = board_page.board(reload: true).contained_queries
      expect(queries.count).to eq 2
      first = queries.find_by(name: 'Parent WP')
      second = queries.find_by(name: 'Other WP')
      expect(first.ordered_work_packages.count).to eq(1)
      expect(second.ordered_work_packages).to be_empty
      # Expect work package to be saved in query first
      wp = WorkPackage.where(id: first.ordered_work_packages.pluck(:work_package_id)).first
      expect(wp.parent_id).to eq parent.id
      # Move item to Child 2 list
      board_page.move_card(0, from: 'Parent WP', to: 'Other WP')
      board_page.expect_card('Parent WP', 'Second child', present: false)
      board_page.expect_card('Other WP', 'Second child', present: true)
      # Expect work package to be saved in query second
      sleep 2
      retry_block do
        expect(first.reload.ordered_work_packages).to be_empty
        expect(second.reload.ordered_work_packages.count).to eq(1)
      end
      wp = WorkPackage.where(id: second.ordered_work_packages.pluck(:work_package_id)).first
      expect(wp.parent_id).to eq other_wp.id
      # Reference back
      board_page.reference('Parent WP', wp)
      board_page.expect_card('Parent WP', 'Second child', present: true)
      board_page.expect_card('Other WP', 'Second child', present: false)
    end
    it 'prevents adding a work package to its own column' do
      board_index.visit!
      board_page = board_index.create_board action: :Parent_child, expect_empty: true
      board_page.add_list option: 'Parent WP'
      board_page.expect_list 'Parent WP'
      board_page.expect_card 'Parent WP', 'Child'
      board_page.add_list option: 'Child WP'
      board_page.expect_list 'Child WP'
      # Try to move child to itself
      board_page.move_card(0, from: 'Parent WP', to: 'Child WP')
      board_page.expect_and_dismiss_notification type: :error,
                                                 message: I18n.t('js.boards.error_cannot_move_into_self')
      # The rejected move must not have changed the persisted hierarchy.
      child.reload
      expect(child.parent).to eq parent
    end
  end
end
| 36.738889 | 126 | 0.701799 |
require "rails_helper"
# Request specs for POST /api/customer/cart/checkout: submitting the
# customer's current order. Covers billing validation (saved billing address
# vs. "consumidor final"), per-provider delivery grouping (shipping, pickup,
# scheduled deliveries), place scoping of cart items, and the push
# notification sent to the provider on submission.
RSpec.describe Api::Customer::Cart::CheckoutsController,
               type: :request do
  include TimeZoneHelpers
  let(:place) { create :place, nombre: "loh" }
  let(:user) { create :user, :customer, current_place: place }
  before { login_as user }
  describe "submits my order" do
    let(:current_order) {
      create :customer_order,
             customer_profile: user.customer_profile
    }
    let(:order_item) {
      create :customer_order_item,
             customer_order: current_order
    }
    let(:provider_office) {
      create :provider_office,
             :enabled,
             provider_profile: order_item.provider_item.provider_profile
    }
    let(:shipping_fare) {
      create :shipping_fare,
             place: provider_office.place
    }
    let(:customer_address) {
      create :customer_address,
             customer_profile: user.customer_profile
    }
    let(:customer_billing_address) {
      create :customer_billing_address,
             customer_profile: user.customer_profile
    }
    let(:response_order) {
      JSON.parse(response.body).fetch("customer_order")
    }
    # Baseline valid payload: cash payment, saved billing address, one
    # shipping delivery to a saved customer address.
    let(:submission_attributes) {
      {
        forma_de_pago: "efectivo",
        observaciones: "something",
        customer_billing_address_id: customer_billing_address.id,
        deliveries_attributes: [ {
          id: current_order.deliveries.first.id,
          provider_profile_id: order_item.provider_item.provider_profile.id,
          delivery_method: "shipping",
          customer_address_id: customer_address.id,
        } ]
      }
    }
    before do
      # Reference the lazy lets so the cart is populated before each example.
      current_order
      provider_office
      shipping_fare
      order_item
    end
    describe "invalid - without address" do
      # Shipping delivery without a customer_address_id must be rejected.
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          customer_billing_address_id: customer_billing_address.id,
          deliveries_attributes: [ {
            id: current_order.deliveries.first.id,
            provider_profile_id: order_item.provider_item.provider_profile.id,
            delivery_method: "shipping",
          } ]
        }
      }
      before do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it {
        errors = JSON.parse(response.body).fetch("errors")
        expect(errors).to have_key("order_items")
      }
    end
    describe "successful submission" do
      before do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it "order is persisted" do
        expect(
          response_order["status"]
        ).to eq("submitted")
        expect(
          response_order["observaciones"]
        ).to eq(submission_attributes[:observaciones])
      end
      it "shipping fare gets cached" do
        # The fare price is denormalized onto the delivery at checkout time.
        delivery = current_order.deliveries.first.reload
        expect(
          delivery.read_attribute(:shipping_fare_price_cents)
        ).to be_present
      end
    end
    describe "successful - without address (pickup)" do
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          customer_billing_address_id: customer_billing_address.id,
          deliveries_attributes: [ {
            id: current_order.deliveries.first.id,
            provider_profile_id: order_item.provider_item.provider_profile.id,
            delivery_method: "pickup"
          } ]
        }
      }
      before do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it {
        expect(
          response_order["status"]
        ).to eq("submitted")
        response_provider = response_order["provider_profiles"].first
        expect(
          response_provider["customer_order_delivery"]["delivery_method"]
        ).to eq("pickup")
      }
    end
    describe "deliver later" do
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          observaciones: "something",
          customer_billing_address_id: customer_billing_address.id,
          deliveries_attributes: [ {
            id: current_order.deliveries.first.id,
            provider_profile_id: order_item.provider_item.provider_profile.id,
            delivery_method: "shipping",
            deliver_at: (Time.now + 2.hours).strftime("%Y-%m-%d %H:%M %z"),
            customer_address_id: customer_address.id
          } ]
        }
      }
      before do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it {
        response_provider = response_order["provider_profiles"].first
        expect(
          response_provider["customer_order_delivery"]["deliver_at"]
        ).to eq(
          formatted_time(
            submission_attributes[:deliveries_attributes].first[:deliver_at]
          )
        )
      }
    end
    describe "grouped by provider profile, can deliver to several addresses & pickup" do
      let(:future_shipping) {
        (Time.now + 2.hours).strftime("%Y-%m-%d %H:%M %z")
      }
      # One delivery per provider: immediate shipping, scheduled shipping to
      # a second address, and a scheduled pickup.
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          customer_billing_address_id: customer_billing_address.id,
          deliveries_attributes: [
            {
              id: current_order.delivery_for_provider(provider_one).id,
              provider_profile_id: provider_one.id,
              delivery_method: "shipping",
              customer_address_id: customer_address.id
            },
            {
              id: current_order.delivery_for_provider(provider_two).id,
              provider_profile_id: provider_two.id,
              delivery_method: "shipping",
              customer_address_id: second_customer_address.id,
              deliver_at: future_shipping
            },
            {
              id: current_order.delivery_for_provider(provider_three).id,
              provider_profile_id: provider_three.id,
              delivery_method: "pickup",
              deliver_at: (Time.now + 3.hours).strftime("%Y-%m-%d %H:%M %z")
            }
          ]
        }
      }
      let(:second_order_item) {
        create :customer_order_item,
               :ready_for_checkout,
               customer_order: current_order
      }
      let(:third_order_item) {
        create :customer_order_item,
               customer_order: current_order
      }
      let(:second_customer_address) {
        create :customer_address,
               customer_profile: user.customer_profile
      }
      let(:provider_one) {
        order_item.provider_item.provider_profile
      }
      let(:provider_two) {
        second_order_item.provider_item.provider_profile
      }
      let(:provider_three) {
        third_order_item.provider_item.provider_profile
      }
      before do
        second_order_item
        third_order_item
        second_customer_address
      end
      it "creates three customer order deliveries" do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
        one = response_order["provider_profiles"].detect do |profile|
          profile["id"] == provider_one.id
        end
        expect(
          one["customer_order_delivery"]["customer_address_id"]
        ).to eq(customer_address.id)
        two = response_order["provider_profiles"].detect do |profile|
          profile["id"] == provider_two.id
        end
        expect(
          two["customer_order_delivery"]["deliver_at"]
        ).to eq(
          formatted_time(future_shipping)
        )
        three = response_order["provider_profiles"].detect do |profile|
          profile["id"] == provider_three.id
        end
        expect(
          three["customer_order_delivery"]["delivery_method"]
        ).to eq("pickup")
      end
    end
    describe "with both consumidor final & address" do
      # Anonymous billing and a billing address are mutually exclusive.
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          anon_billing_address: true,
          customer_billing_address_id: customer_billing_address.id,
          deliveries_attributes: [ {
            id: current_order.deliveries.first.id,
            provider_profile_id: order_item.provider_item.provider_profile.id,
            delivery_method: "pickup"
          } ]
        }
      }
      before do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it {
        errors = JSON.parse(response.body).fetch("errors")
        expect(
          errors
        ).to have_key("anon_billing_address")
        expect(
          errors
        ).to have_key("customer_billing_address_id")
      }
    end
    describe "consumidor final" do
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          anon_billing_address: true,
          deliveries_attributes: [ {
            id: current_order.deliveries.first.id,
            provider_profile_id: order_item.provider_item.provider_profile.id,
            delivery_method: "pickup"
          } ]
        }
      }
      before do
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it {
        expect(
          response_order["status"]
        ).to eq("submitted")
        # NOTE(review): response_provider below is assigned but never used.
        response_provider = response_order["provider_profiles"].first
        expect(
          response_order["anon_billing_address"]
        ).to eq(true)
      }
    end
    describe "checkout local items" do
      # Items belonging to an order for a different place must not be
      # submitted with the current place's checkout.
      let(:non_local_place) {
        create(:place, nombre: "foreign")
      }
      let(:non_local_user) {
        create :user,
               current_place: non_local_place
      }
      let(:non_local_provider_profile) {
        create :provider_profile,
               :with_office,
               user: non_local_user
      }
      let(:non_local_provider_item) {
        create :provider_item,
               provider_profile: non_local_provider_profile
      }
      let(:non_local_order_item) {
        create :customer_order_item,
               customer_order: non_local_order,
               provider_item: non_local_provider_item
      }
      let(:non_local_order) {
        create :customer_order,
               customer_profile: user.customer_profile,
               place: non_local_place
      }
      let(:submission_attributes) {
        {
          forma_de_pago: "efectivo",
          anon_billing_address: true,
          deliveries_attributes: [ {
            id: current_order.deliveries.first.id,
            provider_profile_id: order_item.provider_item.provider_profile.id,
            delivery_method: "shipping",
            customer_address_id: customer_address.id
          } ]
        }
      }
      before do
        non_local_order_item
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      end
      it {
        response_providers = response_order["provider_profiles"]
        expect(
          response_providers.find do |response_provider|
            response_provider["id"] == non_local_provider_profile.id
          end
        ).to_not be_present
        # but is in cart for other place
        user.update!(current_place: non_local_place)
        expect(
          user.customer_profile.current_order.provider_profiles
        ).to include(non_local_provider_profile)
      }
    end
    describe "notifies provider" do
      let(:provider_device) {
        create :user_device,
               platform: :android,
               user: order_item.provider_item.provider_profile.user
      }
      before do
        provider_device
        # Checkout must push a notification to the provider's device.
        expect_any_instance_of(
          PushService::AndroidNotifier
        ).to receive(:notify!)
      end
      it {
        post_with_headers(
          "/api/customer/cart/checkout",
          submission_attributes
        )
      }
    end
    describe "discounts" do
      pending
    end
  end
end
| 28.504717 | 88 | 0.59118 |
# Homebrew formula for AVCE00, exposing Arc/Info binary vector coverages as E00.
class Avce00 < Formula
  desc "Make Arc/Info (binary) Vector Coverages appear as E00"
  homepage "http://avce00.maptools.org/avce00/index.html"
  url "http://avce00.maptools.org/dl/avce00-2.0.0.tar.gz"
  sha256 "c0851f86b4cd414d6150a04820491024fb6248b52ca5c7bd1ca3d2a0f9946a40"

  # Machine-generated bottle block — leave checksums untouched.
  bottle do
    cellar :any_skip_relocation
    sha256 "40b26638adfaf290bc07ae792da49106b493ea3109a97c1fac775723a0463ac4" => :high_sierra
    sha256 "576b5ea62376b42733d56e7bd862522588c16160ac1abb5f382c1c12055248e1" => :sierra
    sha256 "45f18e289431af4de0d1e96c1fadd6a056e80907a1650654f8ee0dd1dafab401" => :el_capitan
    sha256 "56e15b29411b2947d9a842d91ae713e16566aa59e297e06f7d4de4b301847e66" => :yosemite
    sha256 "55990b93f7fe4639c6fdf29c4cc6c5791c6178c8661e22ef9e0dd64606532f56" => :mavericks
    sha256 "4f114d3d8872cbf9e2df2c2ed2d4962b65b39efc568faf78eb5b2f47552a39da" => :mountain_lion
    sha256 "00d468662055c9c9ab55d2860e8b812505e0e5aafac1a94aec57b6e9e8f8287e" => :x86_64_linux # glibc 2.19
  end

  # Both this package and gdal install a cpl_conv.h header.
  conflicts_with "gdal", :because => "both install a cpl_conv.h header"

  def install
    system "make", "CC=#{ENV.cc}"
    bin.install "avcimport", "avcexport", "avcdelete", "avctest"
    lib.install "avc.a"
    include.install Dir["*.h"]
  end

  # Smoke test: avctest must run against an (empty) file and exit cleanly.
  test do
    touch testpath/"test"
    system "#{bin}/avctest", "-b", "test"
  end
end
| 41.53125 | 107 | 0.775019 |
# require "pry"
# require "pry-rescue"
require "json"
Puppet::Type.type(:azure_route_table).provide(:arm) do
mk_resource_methods
def initialize(value = {})
super(value)
@property_flush = {}
@is_create = false
@is_delete = false
end
def etag=(value)
Puppet.info("etag setter called to change to #{value}")
@property_flush[:etag] = value
end
def id=(value)
Puppet.info("id setter called to change to #{value}")
@property_flush[:id] = value
end
def location=(value)
Puppet.info("location setter called to change to #{value}")
@property_flush[:location] = value
end
def name=(value)
Puppet.info("name setter called to change to #{value}")
@property_flush[:name] = value
end
def properties=(value)
Puppet.info("properties setter called to change to #{value}")
@property_flush[:properties] = value
end
def tags=(value)
Puppet.info("tags setter called to change to #{value}")
@property_flush[:tags] = value
end
def type=(value)
Puppet.info("type setter called to change to #{value}")
@property_flush[:type] = value
end
def self.instances
fetch_all_as_hash
end
def self.prefetch(resources)
instances.each do |prov|
if (resource = (resources.find { |k, v| k.casecmp(prov.name).zero? } || [])[1])
resource.provider = prov
end
end
end
def self.fetch_all_as_hash
items = self.fetch_all
if items
items.collect do |item|
hash = {
api_version: item["api-version"],
etag: item["etag"],
id: item["id"],
location: item["location"],
name: item["name"],
parameters: item["parameters"],
properties: item["properties"],
resource_group_name: item["resourceGroupName"],
subscription_id: item["subscriptionId"],
tags: item["tags"],
type: item["type"],
ensure: :present,
}
self.deep_delete(hash, [:properties, "subnets"])
self.deep_delete(hash, [:type])
Puppet.debug("Adding to collection: #{item}")
new(hash) if hash
end.compact
else
[]
end
rescue Exception => ex
Puppet.alert("ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
def self.deep_delete(hash_item, tokens)
if tokens.size == 1
if hash_item.kind_of?(Array)
hash_item.map! { |item| deep_delete(item, tokens) }
else
hash_item.delete(tokens[0]) unless hash_item.nil? or hash_item[tokens[0]].nil?
end
else
if hash_item.kind_of?(Array)
hash_item.map! { |item| deep_delete(item, tokens[1..-1]) }
else
hash_item[tokens.first] = deep_delete(hash_item[tokens.first], tokens[1..-1]) unless hash_item.nil? or hash_item[tokens[0]].nil?
end
end
return hash_item
end
def self.fetch_all
response = invoke_list_all
if response.kind_of? Net::HTTPSuccess
body = JSON.parse(response.body)
if body.is_a? Hash and body.key? "value"
return body["value"]
end
end
end
def self.instance_to_hash(instance)
{
ensure: :present,
api_version: instance.api_version.respond_to?(:to_hash) ? instance.api_version.to_hash : instance.api_version,
etag: instance.etag.respond_to?(:to_hash) ? instance.etag.to_hash : instance.etag,
id: instance.id.respond_to?(:to_hash) ? instance.id.to_hash : instance.id,
location: instance.location.respond_to?(:to_hash) ? instance.location.to_hash : instance.location,
name: instance.name.respond_to?(:to_hash) ? instance.name.to_hash : instance.name,
parameters: instance.parameters.respond_to?(:to_hash) ? instance.parameters.to_hash : instance.parameters,
properties: instance.properties.respond_to?(:to_hash) ? instance.properties.to_hash : instance.properties,
resource_group_name: instance.resource_group_name.respond_to?(:to_hash) ? instance.resource_group_name.to_hash : instance.resource_group_name,
subscription_id: instance.subscription_id.respond_to?(:to_hash) ? instance.subscription_id.to_hash : instance.subscription_id,
tags: instance.tags.respond_to?(:to_hash) ? instance.tags.to_hash : instance.tags,
type: instance.type.respond_to?(:to_hash) ? instance.type.to_hash : instance.type,
object: instance,
}
end
def create
@is_create = true
Puppet.info("Entered create for resource #{name} of type RouteTable")
hash = build_hash
response = self.class.invoke_create(resource, hash)
if response.is_a? Net::HTTPSuccess
@property_hash[:ensure] = :present
Puppet.info("Added :ensure to property hash")
else
raise Puppet::Error, "Create failed. Response is #{response} and body is #{response.body}"
end
rescue Exception => ex
Puppet.alert("Exception during create. The state of the resource is unknown. ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
def flush
Puppet.info("Entered flush for resource #{name} of type RouteTable - creating ? #{@is_create}, deleting ? #{@is_delete}")
if @is_create || @is_delete
return # we've already done the create or delete
end
hash = build_hash
response = self.class.invoke_update(resource, hash)
if response.is_a? Net::HTTPSuccess
@property_hash[:ensure] = :present
Puppet.info("Added :ensure to property hash")
else
raise Puppet::Error, "Flush failed. The state of the resource is unknown. Response is #{response} and body is #{response.body}"
end
rescue Exception => ex
Puppet.alert("Exception during flush. ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
def build_hash
route_table = {}
route_table["etag"] = resource[:etag] unless resource[:etag].nil?
route_table["id"] = resource[:id] unless resource[:id].nil?
route_table["location"] = resource[:location] unless resource[:location].nil?
route_table["name"] = resource[:name] unless resource[:name].nil?
route_table["properties"] = resource[:properties] unless resource[:properties].nil?
route_table["tags"] = resource[:tags] unless resource[:tags].nil?
route_table["type"] = resource[:type] unless resource[:type].nil?
return route_table
end
def self.build_key_values
key_values = {}
key_values["api-version"] = "2018-11-01"
key_values
end
def destroy
delete(resource)
end
def delete(hash)
Puppet.info("Entered delete for resource #{hash[:name]} of type <no value>")
@is_delete = true
response = self.class.invoke_delete(hash)
if response.is_a? Net::HTTPSuccess
@property_hash[:ensure] = :present
Puppet.info "Added :absent to property_hash"
else
raise Puppet::Error, "Delete failed. The state of the resource is unknown. Response is #{response} and body is #{response.body}"
end
rescue Exception => ex
Puppet.alert("Exception during destroy. ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
def self.invoke_list_all(resource = nil, body_params = nil)
key_values = self.build_key_values
Puppet.info("Calling operation RouteTables_ListAll")
path_params = {}
query_params = {}
header_params = {}
header_params["User-Agent"] = "puppetlabs-azure_arm/0.2.1"
op_params = [
self.op_param("api-version", "query", "api_version", "api_version"),
self.op_param("etag", "body", "etag", "etag"),
self.op_param("id", "body", "id", "id"),
self.op_param("location", "body", "location", "location"),
self.op_param("name", "body", "name", "name"),
self.op_param("properties", "body", "properties", "properties"),
self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
self.op_param("tags", "body", "tags", "tags"),
self.op_param("type", "body", "type", "type"),
]
op_params.each do |i|
inquery = i[:inquery]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if inquery == "query"
query_params[name] = key_values[name] unless key_values[name].nil?
query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
else
path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
end
end
self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/providers/Microsoft.Network/routeTables", "Get", "[application/json]")
end
def self.invoke_create(resource = nil, body_params = nil)
key_values = self.build_key_values
Puppet.info("Calling operation RouteTables_CreateOrUpdate")
path_params = {}
query_params = {}
header_params = {}
header_params["User-Agent"] = "puppetlabs-azure_arm/0.2.1"
op_params = [
self.op_param("api-version", "query", "api_version", "api_version"),
self.op_param("etag", "body", "etag", "etag"),
self.op_param("id", "body", "id", "id"),
self.op_param("location", "body", "location", "location"),
self.op_param("name", "body", "name", "name"),
self.op_param("parameters", "body", "parameters", "parameters"),
self.op_param("properties", "body", "properties", "properties"),
self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
self.op_param("routeTableName", "path", "name", "route_table_name"),
self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
self.op_param("tags", "body", "tags", "tags"),
self.op_param("type", "body", "type", "type"),
]
op_params.each do |i|
inquery = i[:inquery]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if inquery == "query"
query_params[name] = key_values[name] unless key_values[name].nil?
query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
else
path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
end
end
self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.Network/routeTables/%{route_table_name}", "Put", "[application/json]")
end
def self.invoke_update(resource = nil, body_params = nil)
key_values = self.build_key_values
Puppet.info("Calling operation RouteTables_CreateOrUpdate")
path_params = {}
query_params = {}
header_params = {}
header_params["User-Agent"] = "puppetlabs-azure_arm/0.2.1"
op_params = [
self.op_param("api-version", "query", "api_version", "api_version"),
self.op_param("etag", "body", "etag", "etag"),
self.op_param("id", "body", "id", "id"),
self.op_param("location", "body", "location", "location"),
self.op_param("name", "body", "name", "name"),
self.op_param("parameters", "body", "parameters", "parameters"),
self.op_param("properties", "body", "properties", "properties"),
self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
self.op_param("routeTableName", "path", "name", "route_table_name"),
self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
self.op_param("tags", "body", "tags", "tags"),
self.op_param("type", "body", "type", "type"),
]
op_params.each do |i|
inquery = i[:inquery]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if inquery == "query"
query_params[name] = key_values[name] unless key_values[name].nil?
query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
else
path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
end
end
self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.Network/routeTables/%{route_table_name}", "Put", "[application/json]")
end
def self.invoke_delete(resource = nil, body_params = nil)
key_values = self.build_key_values
Puppet.info("Calling operation RouteTables_Delete")
path_params = {}
query_params = {}
header_params = {}
header_params["User-Agent"] = "puppetlabs-azure_arm/0.2.1"
op_params = [
self.op_param("api-version", "query", "api_version", "api_version"),
self.op_param("etag", "body", "etag", "etag"),
self.op_param("id", "body", "id", "id"),
self.op_param("location", "body", "location", "location"),
self.op_param("name", "body", "name", "name"),
self.op_param("properties", "body", "properties", "properties"),
self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
self.op_param("routeTableName", "path", "name", "route_table_name"),
self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
self.op_param("tags", "body", "tags", "tags"),
self.op_param("type", "body", "type", "type"),
]
op_params.each do |i|
inquery = i[:inquery]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if inquery == "query"
query_params[name] = key_values[name] unless key_values[name].nil?
query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
else
path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
end
end
self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.Network/routeTables/%{route_table_name}", "Delete", "[application/json]")
end
def self.invoke_list_with_params(resource = nil, body_params = nil)
key_values = self.build_key_values
Puppet.info("Calling operation RouteTables_ListAll")
path_params = {}
query_params = {}
header_params = {}
header_params["User-Agent"] = "puppetlabs-azure_arm/0.2.1"
op_params = [
self.op_param("api-version", "query", "api_version", "api_version"),
self.op_param("etag", "body", "etag", "etag"),
self.op_param("id", "body", "id", "id"),
self.op_param("location", "body", "location", "location"),
self.op_param("name", "body", "name", "name"),
self.op_param("properties", "body", "properties", "properties"),
self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
self.op_param("tags", "body", "tags", "tags"),
self.op_param("type", "body", "type", "type"),
]
op_params.each do |i|
inquery = i[:inquery]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if inquery == "query"
query_params[name] = key_values[name] unless key_values[name].nil?
query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
else
path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
end
end
self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/providers/Microsoft.Network/routeTables", "Get", "[application/json]")
end
def self.invoke_get_one(resource = nil, body_params = nil)
key_values = self.build_key_values
Puppet.info("Calling operation RouteTables_Get")
path_params = {}
query_params = {}
header_params = {}
header_params["User-Agent"] = "puppetlabs-azure_arm/0.2.1"
op_params = [
self.op_param("api-version", "query", "api_version", "api_version"),
self.op_param("etag", "body", "etag", "etag"),
self.op_param("id", "body", "id", "id"),
self.op_param("location", "body", "location", "location"),
self.op_param("name", "body", "name", "name"),
self.op_param("properties", "body", "properties", "properties"),
self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
self.op_param("routeTableName", "path", "name", "route_table_name"),
self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
self.op_param("tags", "body", "tags", "tags"),
self.op_param("type", "body", "type", "type"),
]
op_params.each do |i|
inquery = i[:inquery]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if inquery == "query"
query_params[name] = key_values[name] unless key_values[name].nil?
query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
else
path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
end
end
self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.Network/routeTables/%{route_table_name}", "Get", "[application/json]")
end
def self.authenticate(path_params, query_params, header_params, body_params)
token = fetch_oauth2_token
if token
header_params["Authorization"] = "Bearer #{token}"
return true
else
return false
end
end
def self.fetch_oauth2_token
Puppet.info("Getting oauth2 token")
@client_id = ENV["azure_client_id"]
@client_secret = ENV["azure_client_secret"]
@tenant_id = ENV["azure_tenant_id"]
uri = URI("https://login.microsoftonline.com/#{@tenant_id}/oauth2/token")
response = Net::HTTP.post_form(uri,
"grant_type" => "client_credentials",
"client_id" => @client_id,
"client_secret" => @client_secret,
"resource" => "https://management.azure.com/")
Puppet.debug("get oauth2 token response code is #{response.code} and body is #{response.body}")
success = response.is_a? Net::HTTPSuccess
if success
return JSON[response.body]["access_token"]
else
raise Puppet::Error, "Unable to get oauth2 token - response is #{response} and body is #{response.body}"
end
end
def exists?
return_value = @property_hash[:ensure] && @property_hash[:ensure] != :absent
Puppet.info("Checking if resource #{name} of type <no value> exists, returning #{return_value}")
return_value
end
def self.add_keys_to_request(request, hash)
if hash
hash.each { |x, v| request[x] = v }
end
end
def self.to_query(hash)
if hash
return_value = hash.map { |x, v| "#{x}=#{v}" }.reduce { |x, v| "#{x}&#{v}" }
if !return_value.nil?
return return_value
end
end
return ""
end
def self.op_param(name, inquery, paramalias, namesnake)
operation_param = {:name => name, :inquery => inquery, :paramalias => paramalias, :namesnake => namesnake}
return operation_param
end
def self.call_op(path_params, query_params, header_params, body_params, parent_host, operation_path, operation_verb, parent_consumes)
uri_string = "https://#{parent_host}#{operation_path}" % path_params
uri_string = uri_string + "?" + to_query(query_params)
header_params["Content-Type"] = "application/json" # first of #{parent_consumes}
if authenticate(path_params, query_params, header_params, body_params)
Puppet.info("Authentication succeeded")
uri = URI(uri_string)
Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == "https") do |http|
if operation_verb == "Get"
req = Net::HTTP::Get.new(uri)
elsif operation_verb == "Put"
req = Net::HTTP::Put.new(uri)
elsif operation_verb == "Delete"
req = Net::HTTP::Delete.new(uri)
end
add_keys_to_request(req, header_params)
if body_params
req.body = body_params.to_json
end
Puppet.debug("URI is (#{operation_verb}) #{uri}, body is #{body_params}, query params are #{query_params}, headers are #{header_params}")
response = http.request req # Net::HTTPResponse object
Puppet.debug("response code is #{response.code} and body is #{response.body}")
success = response.is_a? Net::HTTPSuccess
Puppet.info("Called (#{operation_verb}) endpoint at #{uri}, success was #{success}")
return response
end
end
end
end
# this is the end of the ruby class
| 42.484171 | 262 | 0.660472 |
ac615f8d683a970426e53db22b115434dedb047f | 427 | module SitemapSearch::Model
def self.included(base)
base.instance_eval do
def site_map_search(options={})
@results = Page.search options[:query], :page => options[:page],
:per_page => options[:per_page]
end
end
end
end | 30.5 | 79 | 0.388759 |
b9c1009e359ee8bdeb52ad0140a5c531a255760f | 1,212 | require "language/node"
# Homebrew formula for the nativefier npm package.
class Nativefier < Formula
  desc "Wrap web apps natively"
  homepage "https://github.com/nativefier/nativefier"
  url "https://registry.npmjs.org/nativefier/-/nativefier-45.0.0.tgz"
  sha256 "dbe38a880655e48986cfab25f22f1f8c3a2a46eb9879f2bd77c827257227efd7"
  license "MIT"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "500cac9dd61632a58302936043e728abd78dbe673897a5285bae1be3e77bc110"
    sha256 cellar: :any_skip_relocation, big_sur: "62268dd74681e909e204435b539e14d9f673486a265db913cd57375ad308c93f"
    sha256 cellar: :any_skip_relocation, catalina: "62268dd74681e909e204435b539e14d9f673486a265db913cd57375ad308c93f"
    sha256 cellar: :any_skip_relocation, mojave: "62268dd74681e909e204435b539e14d9f673486a265db913cd57375ad308c93f"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "c386bee29ac40a667a50341fa49134492a1e8b75ae98ed5024dd1ac32a0c800c" # linuxbrew-core
  end

  depends_on "node"

  # Installs the package via npm into libexec and symlinks executables into bin.
  def install
    system "npm", "install", *Language::Node.std_npm_install_args(libexec)
    bin.install_symlink Dir["#{libexec}/bin/*"]
  end

  # Smoke test: the installed CLI reports its own version.
  test do
    assert_match version.to_s, shell_output("#{bin}/nativefier --version")
  end
end
| 41.793103 | 139 | 0.793729 |
abb394cb94fa4329395e47e659d7d4ca0ae5d3af | 377 | require 'acts_as_list'
require 'acts_as_markup'
require 'active_admin-acts_as_list'
require 'friendly_id'
require 'paperclip'
require 'ecm/pictures/engine'
require 'ecm/pictures/configuration'
require 'ecm/pictures/routing'
require 'ecm/pictures/version'
require 'ecm/pictures/active_admin/pictureable_helper'
# Namespace for the Ecm::Pictures engine; mixes in the gem-level
# configuration macros defined in ecm/pictures/configuration.
module Ecm
  module Pictures
    extend Configuration
  end
end
| 19.842105 | 54 | 0.811671 |
393065da0a8ea14243037a5e72e3d2001d852b42 | 3,317 | require 'spec_helper'
# rspec-puppet unit tests for the aodh::notifier class.
describe 'aodh::notifier' do
  let :pre_condition do
    "class { '::aodh': }"
  end

  # Platform-independent examples; platform_params is supplied by the
  # per-OS contexts at the bottom of this file.
  shared_examples_for 'aodh-notifier' do

    context 'with workers' do
      let :params do
        { :workers => 8 }
      end

      it 'configures workers' do
        is_expected.to contain_aodh_config('notifier/workers').with_value(8)
      end
    end

    context 'with batch parameters' do
      let :params do
        {
          :batch_size => 100,
          :batch_timeout => 60,
        }
      end

      it 'configures batch options' do
        is_expected.to contain_aodh_config('notifier/batch_size').with_value(100)
        is_expected.to contain_aodh_config('notifier/batch_timeout').with_value(60)
      end
    end

    context 'when enabled' do
      it { is_expected.to contain_class('aodh::params') }

      it 'installs aodh-notifier package' do
        is_expected.to contain_package('aodh-notifier').with(
          :ensure => 'present',
          :name   => platform_params[:notifier_package_name],
          :tag    => ['openstack', 'aodh-package']
        )
      end

      it 'configures aodh-notifier service' do
        is_expected.to contain_service('aodh-notifier').with(
          :ensure     => 'running',
          :name       => platform_params[:notifier_service_name],
          :enable     => true,
          :hasstatus  => true,
          :hasrestart => true,
          :tag        => 'aodh-service',
        )
      end

      it 'sets default values' do
        is_expected.to contain_aodh_config('notifier/workers').with_value(4)
        is_expected.to contain_aodh_config('notifier/batch_size').with_value('<SERVICE DEFAULT>')
        is_expected.to contain_aodh_config('notifier/batch_timeout').with_value('<SERVICE DEFAULT>')
      end
    end

    context 'when disabled' do
      let :params do
        { :enabled => false }
      end

      # Catalog compilation does not crash for lack of aodh::db
      it { is_expected.to compile }

      it 'configures aodh-notifier service' do
        is_expected.to contain_service('aodh-notifier').with(
          :ensure     => 'stopped',
          :name       => platform_params[:notifier_service_name],
          :enable     => false,
          :hasstatus  => true,
          :hasrestart => true,
          :tag        => 'aodh-service',
        )
      end
    end

    context 'when service management is disabled' do
      let :params do
        { :enabled        => false,
          :manage_service => false }
      end

      it 'should not configure aodh-notifier service' do
        is_expected.to_not contain_service('aodh-notifier')
      end
    end
  end

  # Run the shared examples on every supported OS, with OS-specific
  # package/service names.
  on_supported_os({
    :supported_os => OSDefaults.get_supported_os
  }).each do |os,facts|
    context "on #{os}" do
      let (:facts) do
        facts.merge!(OSDefaults.get_facts({ :os_workers => 4 }))
      end

      let(:platform_params) do
        case facts[:osfamily]
        when 'Debian'
          { :notifier_package_name => 'aodh-notifier',
            :notifier_service_name => 'aodh-notifier' }
        when 'RedHat'
          { :notifier_package_name => 'openstack-aodh-notifier',
            :notifier_service_name => 'openstack-aodh-notifier' }
        end
      end

      it_configures 'aodh-notifier'
    end
  end
end
| 28.110169 | 100 | 0.588484 |
38d5307476ed7c58295a6095d37f9faab9ae49b8 | 6,501 | # frozen_string_literal: true
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# Require this file early so that the version constant gets defined before
# requiring "google/cloud". This is because google-cloud-core will load the
# entrypoint (gem name) file, which in turn re-requires this file (hence
# causing a require cycle) unless the version constant is already defined.
require "google/cloud/certificate_manager/version"
require "googleauth"
gem "google-cloud-core"
require "google/cloud" unless defined? ::Google::Cloud.new
require "google/cloud/config"
# Register the gem-wide default configuration fields for the
# :certificate_manager namespace; clients read these unless overridden.
::Google::Cloud.configure.add_config! :certificate_manager do |config|
  config.add_field! :endpoint, "certificatemanager.googleapis.com", match: ::String
  config.add_field! :credentials, nil, match: [::String, ::Hash, ::Google::Auth::Credentials]
  config.add_field! :scope, nil, match: [::Array, ::String]
  config.add_field! :lib_name, nil, match: ::String
  config.add_field! :lib_version, nil, match: ::String
  config.add_field! :interceptors, nil, match: ::Array
  config.add_field! :timeout, nil, match: ::Numeric
  config.add_field! :metadata, nil, match: ::Hash
  config.add_field! :retry_policy, nil, match: [::Hash, ::Proc]
  config.add_field! :quota_project, nil, match: ::String
end
module Google
  module Cloud
    module CertificateManager
      ##
      # Create a new client object for CertificateManager.
      #
      # By default, this returns an instance of
      # [Google::Cloud::CertificateManager::V1::CertificateManager::Client](https://googleapis.dev/ruby/google-cloud-certificate_manager-v1/latest/Google/Cloud/CertificateManager/V1/CertificateManager/Client.html)
      # for version V1 of the API.
      # However, you can specify a different API version by passing it in the
      # `version` parameter. If the CertificateManager service is
      # supported by that API version, and the corresponding gem is available, the
      # appropriate versioned client will be returned.
      #
      # ## About CertificateManager
      #
      # API Overview
      #
      # Certificates Manager API allows customers to see and manage all their TLS
      # certificates.
      #
      # Certificates Manager API service provides methods to manage certificates,
      # group them into collections, and create serving configuration that can be
      # easily applied to other Cloud resources e.g. Target Proxies.
      #
      # Data Model
      #
      # The Certificates Manager service exposes the following resources:
      #
      # * `Certificate` which describes a single TLS certificate.
      # * `CertificateMap` which describes a collection of certificates that can be
      # attached to a target resource.
      # * `CertificateMapEntry` which describes a single configuration entry that
      # consists of a SNI and a group of certificates. It's a subresource of
      # CertificateMap.
      #
      # Certificate, CertificateMap and CertificateMapEntry IDs
      # have to match "^[a-z0-9-]\\{1,63}$" regexp, which means that
      # - only lower case letters, digits, and hyphen are allowed
      # - length of the resource ID has to be in [1,63] range.
      #
      # Provides methods to manage Cloud Certificate Manager entities.
      #
      # @param version [::String, ::Symbol] The API version to connect to. Optional.
      #   Defaults to `:v1`.
      # @return [CertificateManager::Client] A client object for the specified version.
      #
      def self.certificate_manager version: :v1, &block
        require "google/cloud/certificate_manager/#{version.to_s.downcase}"

        # Resolve the versioned sub-module whose name matches the requested
        # version (case-insensitive, underscores ignored).
        package_name = Google::Cloud::CertificateManager
                       .constants
                       .select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") }
                       .first
        package_module = Google::Cloud::CertificateManager.const_get package_name
        package_module.const_get(:CertificateManager).const_get(:Client).new(&block)
      end

      ##
      # Configure the google-cloud-certificate_manager library.
      #
      # The following configuration parameters are supported:
      #
      # * `credentials` (*type:* `String, Hash, Google::Auth::Credentials`) -
      #   The path to the keyfile as a String, the contents of the keyfile as a
      #   Hash, or a Google::Auth::Credentials object.
      # * `lib_name` (*type:* `String`) -
      #   The library name as recorded in instrumentation and logging.
      # * `lib_version` (*type:* `String`) -
      #   The library version as recorded in instrumentation and logging.
      # * `interceptors` (*type:* `Array<GRPC::ClientInterceptor>`) -
      #   An array of interceptors that are run before calls are executed.
      # * `timeout` (*type:* `Numeric`) -
      #   Default timeout in seconds.
      # * `metadata` (*type:* `Hash{Symbol=>String}`) -
      #   Additional gRPC headers to be sent with the call.
      # * `retry_policy` (*type:* `Hash`) -
      #   The retry policy. The value is a hash with the following keys:
      #     * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
      #     * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
      #     * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
      #     * `:retry_codes` (*type:* `Array<String>`) -
      #       The error codes that should trigger a retry.
      #
      # @return [::Google::Cloud::Config] The default configuration used by this library
      #
      def self.configure
        yield ::Google::Cloud.configure.certificate_manager if block_given?
        ::Google::Cloud.configure.certificate_manager
      end
    end
  end
end
helper_path = ::File.join __dir__, "certificate_manager", "helpers.rb"
require "google/cloud/certificate_manager/helpers" if ::File.file? helper_path
| 46.106383 | 213 | 0.676665 |
08526bd322846605967f0918f04efb9251782f1f | 3,482 | require 'spec_helper_acceptance'
# Acceptance tests: verify that the elasticsearch module pins the package
# version so that OS-level upgrades (apt/yum) do not replace it.
describe "Package pinning:" do

  if fact('osfamily') != 'Suse'

    describe "Pinning enabled" do

      describe "Setup" do
        it 'should run successful' do
          write_hiera_config('')
          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', version => '#{test_settings['install_package_version']}', java_install => true }
                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
               "
          # Run it twice and test for idempotency
          apply_manifest(pp, :catch_failures => true)
          expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
        end

        describe package(test_settings['package_name']) do
          it { should be_installed.with_version(test_settings['install_version']) }
        end
      end # end setup

      # Attempt an OS-level package upgrade; pinning should prevent it.
      describe "Run upgrade" do
        it 'should run fine' do
          case fact('osfamily')
          when 'Debian'
            shell('apt-get update && apt-get -y install elasticsearch')
          when 'RedHat'
            shell('yum -y update elasticsearch')
          end
        end
      end

      # The pinned version must still be installed after the upgrade attempt.
      describe "check installed package" do
        describe package(test_settings['package_name']) do
          it { should be_installed.with_version(test_settings['install_version']) }
        end
      end

      # Upgrading through the module (changing the pinned version) must work.
      describe "Upgrade" do
        it 'should run successful' do
          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', version => '#{test_settings['upgrade_package_version']}', java_install => true }
                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
               "
          # Run it twice and test for idempotency
          apply_manifest(pp, :catch_failures => true)
          expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
        end

        describe package(test_settings['package_name']) do
          it { should be_installed.with_version(test_settings['upgrade_version']) }
        end
      end # end setup

      describe "Run upgrade" do
        it 'should run fine' do
          case fact('osfamily')
          when 'Debian'
            shell('apt-get update && apt-get -y install elasticsearch')
          when 'RedHat'
            shell('yum -y update elasticsearch')
          end
        end
      end

      describe "check installed package" do
        describe package(test_settings['package_name']) do
          it { should be_installed.with_version(test_settings['upgrade_version']) }
        end
      end

    end

    # Remove everything and verify the instance directory and service are gone.
    describe "Cleanup" do
      it 'should run successfully' do
        pp = "class { 'elasticsearch': ensure => 'absent' }
              elasticsearch::instance{ 'es-01': ensure => 'absent' }
             "
        apply_manifest(pp, :catch_failures => true)
      end

      describe file('/etc/elasticsearch/es-01') do
        it { should_not be_directory }
      end

      describe service(test_settings['service_name_a']) do
        it { should_not be_enabled }
        it { should_not be_running }
      end
    end
  end
end
| 32.240741 | 261 | 0.598219 |
1d429f867fd2582119d50826aca70f49959e28ad | 1,099 | namespace :scihist do
# some tasks for managing config sets and collections through solr cloud, such as
# but not limited to SearchStax-hosted solr.
#
# Config for solr location and collection name is taken from ScihistDigicoll:Env solr_url
#
# We use config set names that have a fingerprint digest on the end, so they are unique
# for config content.
namespace :solr_cloud do
desc "Create collection using on-disk configuration files, bootstrap on empty solr"
task :create_collection => :environment do
updater = SolrConfigsetUpdater.configured
updater.upload_and_create_collection(configset_name: updater.configset_digest_name)
end
desc "upload configset and (re-)attach to collection, only if it is not already up to date"
task :sync_configset => :environment do
updater = SolrConfigsetUpdater.configured
updated = updater.replace_configset_digest
if updated
puts "sync_configset: updated config set to #{updated}"
else
puts "sync_configset: no update to config set needed"
end
end
end
end
| 34.34375 | 95 | 0.733394 |
6a72c240f14a53320b83a22d006e892d1dca56bd | 27,716 | require "ostruct"
require "ethon"
require "uri"
require "json"
require "time"
require 'active_support/core_ext/string/inflections'
require 'active_support/core_ext/object/blank'
module Sharepoint
class Client
FILENAME_INVALID_CHARS = '~"#%&*:<>?/\{|}'
# @return [OpenStruct] The current configuration.
attr_reader :config
# Initializes a new client with given options.
#
# @param [Hash] options The client options:
# - `:uri` The SharePoint server's root url
# - `:username` self-explanatory
# - `:password` self-explanatory
# @return [Sharepoint::Client] client object
def initialize(config = {})
@config = OpenStruct.new(config)
validate_config!
end
# Get all the documents from path
#
# @param path [String] the path to request the content
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return [Array] of OpenStructs with the info of the files in the path
def documents_for(path, site_path = '')
ethon = ethon_easy_json_requester
ethon.url = "#{computed_web_api_url(site_path)}GetFolderByServerRelativeUrl('#{uri_escape path}')/Files"
ethon.perform
check_and_raise_failure(ethon)
threads = []
rv = []
result = JSON.parse( ethon.response_body )
result['d']['results'].each do |file|
file_struct = OpenStruct.new(
title: file['Title'],
path: file['ServerRelativeUrl'],
name: file['Name'],
url: "#{base_url}#{file['ServerRelativeUrl']}",
created_at: Time.parse(file['TimeCreated']),
updated_at: Time.parse(file['TimeLastModified']),
record_type: nil,
date_of_issue: nil,
)
threads << Thread.new {
ethon2 = ethon_easy_json_requester
server_relative_url = "#{site_path}#{path}/#{file['Name']}"
ethon2.url = "#{computed_web_api_url(site_path)}GetFileByServerRelativeUrl('#{uri_escape server_relative_url}')/ListItemAllFields"
ethon2.perform
rs = JSON.parse(ethon2.response_body)['d']
file_struct.record_type = rs['Record_Type']
file_struct.date_of_issue = rs['Date_of_issue']
rv << file_struct
}
end
threads.each { |t| t.join }
rv
end
# Checks whether a document exists with the given path
#
# @param file_path [String] the file path, without the site path if any
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return `true` if document exists, false otherwise.
def document_exists?(file_path, site_path=nil)
file = split_path(file_path)
sanitized_filename = sanitize_filename(file[:name])
server_relative_url = "#{site_path}#{file[:path]}/#{sanitized_filename}"
url = computed_web_api_url(site_path)
ethon = ethon_easy_json_requester
ethon.url = uri_escape "#{url}GetFileByServerRelativeUrl('#{odata_escape_single_quote server_relative_url}')"
ethon.perform
exists = false
if ethon.response_code.eql? 200
json_response = JSON.parse(ethon.response_body)
if json_response['d'] &&
json_response['d']['ServerRelativeUrl'].eql?(server_relative_url)
exists = true
end
end
return exists
end
# Get a document's metadata
#
# @param file_path [String] the file path, without the site path if any
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
# @param custom_properties [Array] of String with names of custom properties to be returned
#
# @return [OpenStruct] with both default and custom metadata
def get_document(file_path, site_path=nil, custom_properties=[])
url = computed_web_api_url(site_path)
server_relative_url = odata_escape_single_quote "#{site_path}#{file_path}"
ethon = ethon_easy_json_requester
ethon.url = "#{url}GetFileByServerRelativeUrl('#{uri_escape server_relative_url}')/ListItemAllFields"
ethon.perform
check_and_raise_failure(ethon)
parse_get_document_response(ethon.response_body, custom_properties)
end
# Search for all documents modified in a given time range,
# boundaries included. Uses SharePoint Search API endpoint
#
# @param options [Hash] Supported options are:
# * start_at [Time] Range start time (mandatory)
# * end_at [Time] Range end time (optional). If null, documents modified
# after start_at will be returned
# * list_id [String] the GUID of the List you want returned documents
# to belong to (optional)
# * web_id [String] the GUID of the Site you want returned documents
# to belong to (optional)
# * properties [Array] of String with names of custom properties
# to be returned (optional)
# * max_results [Integer] the maximum number of results to be returned;
# defaults to 500 which is the default `MaxRowLimit` in SharePoint 2013.
# If you have increased that in your on-premise SP instance, then that's
# your limit for `max_results` param as well
# * start_result [Integer] the offset for results to be returned; defaults to 0.
# Useful when more than `max_results` documents have been modified in your
# time range, so you can iterate to fetch'em all.
#
# @return [Hash] with the following keys:
# * `:requested_url` [String] the URL requested to the SharePoint server
# * `:server_responded_at` [Time] the time when server returned its response
# * `:results` [Array] of OpenStructs with all properties of search results,
# sorted by last modified date (`write`)
def search_modified_documents(options={})
ethon = ethon_easy_json_requester
query = uri_escape build_search_kql_conditions(options)
properties = build_search_properties(options)
filters = build_search_fql_conditions(options)
sorting = "sortlist='write:ascending'"
paging = build_search_paging(options)
ethon.url = "#{base_api_url}search/query?querytext=#{query}&refinementfilters=#{filters}&#{properties}&#{sorting}&#{paging}&clienttype='Custom'"
ethon.perform
check_and_raise_failure(ethon)
server_responded_at = Time.now
{
requested_url: ethon.url,
server_responded_at: server_responded_at,
results: parse_search_response(ethon.response_body)
}
end
# Dumb wrapper of SharePoint Search API endpoint.
#
# @param options [Hash] All key => values in this hash will be passed to
# the `/search/query` endpoint as param=value in the querystring.
# Some very useful ones are:
# * `:querytext` [String] A valid KQL query. See:
# https://msdn.microsoft.com/en-us/library/office/ee558911.aspx
# * `:refinementfilters` [String] A valid query using OData syntax. See:
# https://msdn.microsoft.com/en-us/library/office/fp142385.aspx
# * `:selectProperties` [String] A comma-separated list of properties
# whose values you want returned for your results
# * `:rowlimit` [Number] The number of results to be returned (max 500)
# @return [Hash] with the following keys:
# * `:requested_url` [String] the URL requested to the SharePoint server
# * `:server_responded_at` [Time] the time when server returned its response
# * `:results` [Array] of OpenStructs with all properties of search results
def search(options={})
params = []
options.each do |key, value|
params << "#{key}=#{value}"
end
ethon = ethon_easy_json_requester
ethon.url = uri_escape("#{base_api_url}search/query?#{params.join('&')}")
ethon.perform
check_and_raise_failure(ethon)
server_responded_at = Time.now
{
requested_url: ethon.url,
server_responded_at: server_responded_at,
results: parse_search_response(ethon.response_body)
}
end
# Search in a List for all documents matching the passed conditions.
#
# @param list_name [String] The name of the SharePoint List you want to
# search into. Please note: a Document Library is a List as well.
# @param conditions [String] OData conditions that returned documents
# should verify, or nil if you want all documents. See:
# https://msdn.microsoft.com/en-us/library/office/fp142385.aspx
# @param site_path [String] if the SP instance contains sites, the site path,
# e.g. "/sites/my-site"
# @param properties [Array] of String with names of custom properties to be returned
#
# @return [Hash] with the following keys:
# * `:requested_url` [String] the URL requested to the SharePoint server
# * `:server_responded_at` [Time] the time when server returned its response
# * `:results` [Array] of OpenStructs with all properties of search results
def list_documents(list_name, conditions, site_path=nil, properties=[])
url = computed_web_api_url(site_path)
filter_param = "$filter=#{conditions}" if conditions.present?
expand_param = '$expand=Folder,File'
default_properties = %w( FileSystemObjectType UniqueId Title Created Modified File )
all_properties = default_properties + properties
select_param = "$select=#{all_properties.join(',')}"
url = "#{url}Lists/GetByTitle('#{odata_escape_single_quote(list_name)}')/Items?#{expand_param}&#{select_param}"
url += "&#{filter_param}"
records = []
page_url = uri_escape url
loop do
body = list_documents_page(page_url)
records += parse_list_response(body, all_properties)
page_url = body['d']['__next']
break if page_url.blank?
end
server_responded_at = Time.now
{
requested_url: url,
server_responded_at: server_responded_at,
results: records
}
end
def list_documents_page(url)
ethon = ethon_easy_json_requester
ethon.url = url
ethon.perform
check_and_raise_failure(ethon)
return JSON.parse(ethon.response_body)
end
# Get a document's file contents. If it's a link to another document, it's followed.
#
# @param file_path [String] the file path, without the site path if any
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
# @param link_credentials [Hash] credentials to access the link's destination repo.
# Accepted keys: `:username` and `:password`
#
# @return [Hash] with the following keys:
# - `:file_contents` [String] with the file contents
# - `:link_url` [String] if the requested file is a link, this returns the destination file url
def download(file_path: nil, site_path: nil, link_credentials: {})
meta = get_document(file_path, site_path)
if meta.url.nil?
url = computed_web_api_url(site_path)
server_relative_url = odata_escape_single_quote "#{site_path}#{file_path}"
download_url "#{url}GetFileByServerRelativeUrl('#{server_relative_url}')/$value"
else # requested file is a link
paths = extract_paths(meta.url)
link_config = { uri: paths[:root] }
if link_credentials.empty?
link_config = config.to_h.merge(link_config)
else
link_config.merge!(link_credentials)
end
link_client = self.class.new(link_config)
result = link_client.download_url meta.url
result[:link_url] = meta.url if result[:link_url].nil?
result
end
end
# Downloads a file provided its full URL. Follows redirects.
#
# @param url [String] the URL of the file to download
# @return [Hash] with the following keys:
# - `:file_contents` [String] with the file contents
# - `:link_url` [String] if some redirect is followed, returns the last `Location:` header value
def download_url(url)
ethon = ethon_easy_requester
ethon.url = uri_escape(url)
ethon.perform
check_and_raise_failure(ethon)
{
file_contents: ethon.response_body,
link_url: last_location_header(ethon)
}
end
# Creates a folder
#
# @param name [String] the name of the folder
# @param path [String] the path where to create the folder
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return [Fixnum] HTTP response code
def create_folder(name, path, site_path=nil)
return unless name
sanitized_name = sanitize_filename(name)
url = computed_web_api_url(site_path)
path = path[1..-1] if path[0].eql?('/')
url = uri_escape "#{url}GetFolderByServerRelativeUrl('#{path}')/Folders"
easy = ethon_easy_json_requester
easy.headers = { 'accept' => 'application/json;odata=verbose',
'content-type' => 'application/json;odata=verbose',
'X-RequestDigest' => xrequest_digest(site_path) }
payload = {
'__metadata' => {
'type' => 'SP.Folder'
},
'ServerRelativeUrl' => "#{path}/#{sanitized_name}"
}
easy.http_request(url, :post, body: payload.to_json)
easy.perform
check_and_raise_failure(easy)
easy.response_code
end
# Checks if a folder exists
#
# @param path [String] the folder path
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return [Fixnum] HTTP response code
def folder_exists?(path, site_path=nil)
url = computed_web_api_url(site_path)
path = [site_path, path].compact.join('/')
url = uri_escape "#{url}GetFolderByServerRelativeUrl('#{path}')"
easy = ethon_easy_json_requester
easy.http_request(url, :get)
easy.perform
easy.response_code == 200
end
# Upload a file
#
# @param filename [String] the name of the file uploaded
# @param content [String] the body of the file
# @param path [String] the path where to upload the file
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return [Fixnum] HTTP response code
def upload(filename, content, path, site_path=nil)
  sanitized_filename = sanitize_filename(filename)
  url = computed_web_api_url(site_path)
  path = path[1..-1] if path[0].eql?('/') # server-relative path, no leading slash
  # overwrite=true: an existing file with the same name is replaced silently.
  url = uri_escape "#{url}GetFolderByServerRelativeUrl('#{path}')/Files/Add(url='#{sanitized_filename}',overwrite=true)"
  easy = ethon_easy_json_requester
  easy.headers = { 'accept' => 'application/json;odata=verbose',
                   'X-RequestDigest' => xrequest_digest(site_path) }
  easy.http_request(url, :post, { body: content })
  easy.perform
  check_and_raise_failure(easy)
  easy.response_code
end
# Update metadata of a file
#
# Two round-trips: first GET ListItemAllFields to learn the item's edit
# URI and OData type, then POST with the X-Http-Method: PATCH override to
# merge the changes.
#
# @param filename [String] the name of the file
# @param metadata [Hash] the metadata to change
# @param path [String] the path where the file is stored
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return [Fixnum] HTTP response code
def update_metadata(filename, metadata, path, site_path=nil)
  sanitized_filename = sanitize_filename(filename)
  url = computed_web_api_url(site_path)
  server_relative_url = "#{site_path}#{path}/#{sanitized_filename}"
  easy = ethon_easy_json_requester
  easy.url = uri_escape "#{url}GetFileByServerRelativeUrl('#{server_relative_url}')/ListItemAllFields"
  easy.perform
  # NOTE(review): the GET above is not checked with check_and_raise_failure;
  # a failed lookup surfaces as a JSON parse error below — confirm intended.
  __metadata = JSON.parse(easy.response_body)['d']['__metadata']
  update_metadata_url = __metadata['uri']
  prepared_metadata = prepare_metadata(metadata, __metadata['type'])
  easy = ethon_easy_json_requester
  # If-Match: * — overwrite regardless of the item's current etag.
  easy.headers = { 'accept' => 'application/json;odata=verbose',
                   'content-type' => 'application/json;odata=verbose',
                   'X-RequestDigest' => xrequest_digest(site_path),
                   'X-Http-Method' => 'PATCH',
                   'If-Match' => "*" }
  easy.http_request(update_metadata_url,
                    :post,
                    { body: prepared_metadata })
  easy.perform
  check_and_raise_failure(easy)
  easy.response_code
end
# Search for all lists in the SP instance
#
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
# @param query [Hash] Hash with OData query operations, e.g. `{ select: 'Id,Title', filter: 'ItemCount gt 0 and Hidden eq false' }`.
#
# @return [Hash] with the following keys:
# * `:requested_url` [String] the URL requested to the SharePoint server
# * `:server_responded_at` [Time] the time when server returned its response
# * `:results` [Array] of OpenStructs with all lists returned by the query
def lists(site_path = '', query = {})
  url = "#{computed_web_api_url(site_path)}Lists".dup # dup: the literal may be frozen
  # query.present? comes from ActiveSupport.
  url << "?#{build_query_params(query)}" if query.present?
  ethon = ethon_easy_json_requester
  ethon.url = uri_escape(url)
  ethon.perform
  check_and_raise_failure(ethon)
  {
    requested_url: ethon.url,
    server_responded_at: Time.now,
    results: parse_lists_in_site_response(ethon.response_body)
  }
end
# Index a list field. Requires admin permissions
#
# @param list_name [String] the name of the list
# @param field_name [String] the name of the field to index
# @param site_path [String] if the SP instance contains sites, the site path, e.g. "/sites/my-site"
#
# @return [Fixnum] HTTP response code; 304 (synthesised locally, not an
#   actual server response) when the field is already indexed and no
#   update request is sent
def index_field(list_name, field_name, site_path = '')
  url = computed_web_api_url(site_path)
  easy = ethon_easy_json_requester
  # NOTE(review): list_name is OData-escaped but field_name is not —
  # confirm field titles containing quotes are not expected here.
  easy.url = uri_escape "#{url}Lists/GetByTitle('#{odata_escape_single_quote(list_name)}')/Fields/getByTitle('#{field_name}')"
  easy.perform
  parsed_response_body = JSON.parse(easy.response_body)
  return 304 if parsed_response_body['d']['Indexed']
  update_object_metadata parsed_response_body['d']['__metadata'], { 'Indexed' => true }, site_path
end
private
# Root server URL, taken from the client configuration.
def base_url
  config.uri
end

def base_api_url
  "#{base_url}/_api/"
end

# NOTE(review): appears superseded by computed_web_api_url — confirm callers.
def base_api_web_url
  "#{base_api_url}web/"
end

# Root API URL, optionally scoped to a site collection
# (site.present? comes from ActiveSupport).
def computed_api_url(site)
  if site.present?
    "#{base_url}/#{site}/_api/"
  else
    "#{base_url}/_api/"
  end
end

def computed_web_api_url(site)
  remove_double_slashes("#{computed_api_url(site)}/web/")
end

# An Ethon handle that asks for verbose-OData JSON responses.
def ethon_easy_json_requester
  easy = ethon_easy_requester
  easy.headers = { 'accept'=> 'application/json;odata=verbose' }
  easy
end

def ethon_easy_options
  config.ethon_easy_options || {}
end

# Base Ethon handle: NTLM auth with the configured credentials, following
# up to 5 redirects. Caller-supplied options can override these defaults.
def ethon_easy_requester
  easy = Ethon::Easy.new({ httpauth: :ntlm, followlocation: 1, maxredirs: 5 }.merge(ethon_easy_options))
  easy.username = config.username
  easy.password = config.password
  easy
end
# When you send a POST request, the request must include the form digest
# value in the X-RequestDigest header
def xrequest_digest(site_path=nil)
  easy = ethon_easy_json_requester
  url = remove_double_slashes("#{computed_api_url(site_path)}/contextinfo")
  # POSTing an empty body to /contextinfo returns a fresh digest token.
  easy.http_request(url, :post, { body: '' })
  easy.perform
  JSON.parse(easy.response_body)['d']["GetContextWebInformation"]["FormDigestValue"]
end
# Extracts the Location target of the last redirect in the raw response
# header stream, or nil when no redirect happened.
#
# NOTE(review): only the literal status line "HTTP/1.1 302" is recognised;
# 301/303/307/308 redirects and HTTP/2 status lines are ignored — confirm
# this matches the server's behaviour.
def last_location_header(ethon)
  last_redirect_idx = ethon.response_headers.rindex('HTTP/1.1 302')
  return if last_redirect_idx.nil?
  # Scan only the headers of the final redirect response.
  last_response_headers = ethon.response_headers[last_redirect_idx..-1]
  location = last_response_headers.match(/\r\n(Location:)(.+)\r\n/)[2].strip
  utf8_encode uri_unescape(location)
end
# Raises a descriptive error unless the performed request ended in a 2xx
# status; returns nil otherwise.
#
# @param ethon [Ethon::Easy] a request that has already been performed
# @raise [RuntimeError] when the response code is outside 200..299
def check_and_raise_failure(ethon)
  return if (200..299).cover?(ethon.response_code)

  raise "Request failed, received #{ethon.response_code}:\n#{ethon.url}\n#{ethon.response_body}"
end
# Builds the JSON-ish body SharePoint expects for metadata updates:
# "{ '__metadata': { 'type': <type> }, '<key>': '<value>', ... }".
# Keys and values are single-quote escaped via #json_escape_single_quote.
def prepare_metadata(metadata, type)
  pairs = metadata.map do |key, value|
    ", '#{json_escape_single_quote(key.to_s)}': '#{json_escape_single_quote(value.to_s)}'"
  end
  "{ '__metadata': { 'type': '#{type}' }#{pairs.join}" + " }"
end
# Escapes single quotes for embedding inside the single-quoted JSON-ish
# payload built by #prepare_metadata (' becomes \').
def json_escape_single_quote(s)
  s.gsub("'") { "\\'" }
end
# Doubles single quotes — the OData convention for escaping ' inside
# quoted literals in resource URLs.
def odata_escape_single_quote(s)
  s.gsub(/'/, "''")
end
# Splits a file path into its directory part and file name.
#
# Fixes two edge cases in the previous implementation:
# * a path whose only slash is the leading one ("/file.txt") used to
#   return the whole string as :path, because file_path[0..-1] wraps
#   around to the end of the string when last_slash_pos is 0;
# * a path with no slash at all used to crash (rindex returns nil).
#
# @param file_path [String] e.g. "folder/sub/file.txt"
# @return [Hash] { path: "folder/sub", name: "file.txt" }; :path is ""
#   when there is no directory component
def split_path(file_path)
  last_slash_pos = file_path.rindex('/')
  return { path: '', name: file_path } if last_slash_pos.nil?

  {
    path: file_path[0...last_slash_pos],
    name: file_path[last_slash_pos + 1..-1]
  }
end
# Decomposes a SharePoint document URL into server root, site path and
# file path, e.g. "https://host/sites/a/x/y.doc" =>
# { root: "https://host", site: "/sites/a", file: "/x/y.doc" }.
# :site is nil when the URL does not contain a "/sites/<name>" segment.
def extract_paths(url)
  # NOTE(review): string_unescape eval's its input — see that method.
  unescaped_url = string_unescape(url)
  uri = URI(uri_escape unescaped_url)
  path = utf8_encode uri_unescape(uri.path)
  sites_match = /\/sites\/[^\/]+/.match(path)
  site_path = sites_match[0] unless sites_match.nil?
  file_path = site_path.nil? ? path : path.sub(site_path, '')
  # Clearing the path leaves scheme://host[:port] as the root URL.
  uri.path = ''
  root_url = uri.to_s
  {
    root: root_url,
    site: site_path,
    file: file_path
  }
end
# Fails fast with a specific configuration error when credentials, URI or
# Ethon options are missing or malformed.
#
# @raise [Errors::UsernameConfigurationError, Errors::PasswordConfigurationError,
#   Errors::UriConfigurationError, Errors::EthonOptionsConfigurationError]
def validate_config!
  raise Errors::UsernameConfigurationError.new unless string_not_blank?(@config.username)
  raise Errors::PasswordConfigurationError.new unless string_not_blank?(@config.password)
  raise Errors::UriConfigurationError.new unless valid_config_uri?
  raise Errors::EthonOptionsConfigurationError.new unless ethon_easy_options.is_a?(Hash)
end
# True only for a non-empty String; nil, "" and non-String values are
# all considered blank.
def string_not_blank?(object)
  object.is_a?(String) && !object.empty?
end
# True when the configured URI is a String that parses to an HTTP(S) URI.
# (URI::HTTPS subclasses URI::HTTP, so the second check is belt-and-braces.)
def valid_config_uri?
  uri_value = @config.uri
  return false unless uri_value.is_a?(String)

  parsed = URI.parse(uri_value)
  parsed.kind_of?(URI::HTTP) || parsed.kind_of?(URI::HTTPS)
end
# Waiting for RFC 3986 to be implemented, we need to escape square brackets
# ourselves: the RFC 2396 default parser leaves [ and ] alone (they are
# legal in IPv6 hosts), but SharePoint paths need them percent-encoded.
def uri_escape(uri)
  URI::DEFAULT_PARSER.escape(uri).gsub(/[\[\]]/, '[' => '%5B', ']' => '%5D')
end
# Inverse of #uri_escape: restore square brackets, then percent-decode.
def uri_unescape(uri)
  restored = uri.gsub('%5B', '[').gsub('%5D', ']')
  URI::DEFAULT_PARSER.unescape(restored)
end
# Interprets escape sequences embedded in a doubly-escaped URL string by
# re-evaluating it as a Ruby double-quoted literal.
#
# NOTE(review): this eval's its input — a crafted string containing #{...}
# could execute arbitrary code. Consider replacing with a safe unescaper.
# Also note it mutates its argument in place (gsub!).
def string_unescape(s)
  s.gsub!(/\\(?:[abfnrtv])/, '') # drop two-char escape sequences like \n, \t
  s.gsub!('"', '\"') # escape double quotes so the literal below stays balanced
  eval %Q{"#{s}"}
end
# Tags the string as UTF-8 in place (no transcoding); passes nil through.
def utf8_encode(s)
  s&.force_encoding(Encoding::UTF_8)
end
# Replaces characters listed in FILENAME_INVALID_CHARS (defined elsewhere
# in this file) with '-' and caps the name at 128 characters, preserving
# the extension when truncating. Finally doubles single quotes for safe
# embedding in OData URLs.
#
# NOTE(review): if the extension itself is longer than ~127 characters,
# upper_bound goes negative and the slice behaviour is surprising —
# confirm such inputs cannot occur.
def sanitize_filename(filename)
  escaped = Regexp.escape(FILENAME_INVALID_CHARS)
  regexp = Regexp.new("[#{escaped}]")
  sanitized_filename = filename.gsub(regexp, '-')
  if sanitized_filename.length > 128
    dot_index = sanitized_filename.rindex('.')
    if dot_index.nil?
      sanitized_filename = sanitized_filename[0..127] # keep the first 128 chars
    else
      # Keep the extension; shorten the base name to fit within 128 chars.
      extension_length = sanitized_filename.length - dot_index
      upper_bound = 127 - extension_length
      sanitized_filename = sanitized_filename[0..upper_bound] + sanitized_filename[dot_index..sanitized_filename.length-1]
    end
  end
  odata_escape_single_quote(sanitized_filename)
end
# Assembles the KQL filter for document searches (documents only, no
# containers), optionally narrowed to a web and/or list.
# Returns the quoted, '+'-joined condition set.
def build_search_kql_conditions(options)
  conditions = %w[IsContainer<>true contentclass:STS_ListItem]
  conditions << "WebId=#{options[:web_id]}" unless options[:web_id].nil?
  conditions << "ListId:#{options[:list_id]}" unless options[:list_id].nil?
  "'#{conditions.join('+')}'"
end
# FQL range filter on the Write (last-modified) property: from start_at
# (inclusive) to end_at (inclusive) or to "max" when no end is given.
# Timestamps are rendered as UTC ISO-8601.
def build_search_fql_conditions(options)
  from = options[:start_at].utc.iso8601
  to = options[:end_at]
  if to.nil?
    "'write:range(#{from},max,from=\"ge\")'"
  else
    "'write:range(#{from},#{to.utc.iso8601},from=\"ge\",to=\"le\")'"
  end
end
# Builds the selectproperties clause: caller-supplied properties first,
# followed by the default set the response parsers rely on.
def build_search_properties(options)
  default_properties = %w(
    Write IsDocument IsContainer ListId WebId URL
    Created Title Author Size Path UniqueId contentclass
  )
  requested = options[:properties] || []
  "selectproperties='#{(requested + default_properties).join(',')}'"
end
# Pagination clause; defaults to the first row with a 500-result page.
def build_search_paging(options)
  first_row = options[:start_result] || 0
  page_size = options[:max_results] || 500
  "startrow=#{first_row}&rowlimit=#{page_size}"
end
# Converts a search API response into an array of OpenStructs, one per
# result row, with snake_case attribute names.
# NOTE: String#underscore comes from ActiveSupport, not the stdlib.
def parse_search_response(response_body)
  json_response = JSON.parse(response_body)
  search_results = json_response.dig('d', 'query', 'PrimaryQueryResult', 'RelevantResults', 'Table', 'Rows', 'results')
  records = []
  search_results.each do |result|
    record = {}
    # Each row is a list of {Key, Value} cells.
    result.dig('Cells', 'results').each do |result_attrs|
      key = result_attrs['Key'].underscore.to_sym
      record[key] = result_attrs['Value']
    end
    records << OpenStruct.new(record)
  end
  records
end

# Converts a list-items response into OpenStructs, skipping folders and
# flattening the nested File node (name, server_relative_url, length).
def parse_list_response(json_response, all_properties)
  results = json_response['d']['results']
  records = []
  results.each do |result|
    # Skip folders (FileSystemObjectType 0 == file)
    next unless result['FileSystemObjectType'].eql? 0
    record = {}
    # File and URL are handled specially below.
    (all_properties - ['File', 'URL']).each do |key|
      record[key.underscore.to_sym] = result[key]
    end
    file = result['File']
    %w( Name ServerRelativeUrl Length).each do |key|
      record[key.underscore.to_sym] = file[key]
    end
    record[:url] = result['URL'].nil? ? nil : result['URL']['Url']
    records << OpenStruct.new(record)
  end
  records
end

# Builds an OpenStruct with the default document properties plus any
# caller-requested custom properties, and a flattened :url.
def parse_get_document_response(response_body, custom_properties)
  all_props = JSON.parse(response_body)['d']
  default_properties = %w( GUID Title Created Modified )
  keys = default_properties + custom_properties
  props = {}
  keys.each do |key|
    props[key.underscore.to_sym] = all_props[key]
  end
  props[:url] = all_props['URL'].nil? ? nil : all_props['URL']['Url']
  OpenStruct.new(props)
end
# Collapses accidental double slashes in a built URL, then restores the
# '//' that belongs to the scheme separator.
def remove_double_slashes(str)
  collapsed = str.to_s.gsub('//', '/')
  %w[http https].each do |scheme|
    collapsed = collapsed.gsub("#{scheme}:/", "#{scheme}://")
  end
  collapsed
end
# Serializes an OData query hash into "$key=value&$key2=value2" form.
def build_query_params(query)
  query.map { |field, value| "$#{field}=#{value}" }.join('&')
end
# Maps each list entry in the response to an OpenStruct with snake_case
# keys (String#underscore is ActiveSupport).
def parse_lists_in_site_response(response_body)
  json_response = JSON.parse(response_body)
  results = json_response.dig('d', 'results')
  results.map do |result|
    OpenStruct.new(result.map { |k, v| [k.underscore.to_sym, v] }.to_h)
  end
end

# Merges new_metadata into the object identified by its __metadata node
# (uri + type) using a PATCH-over-POST request.
#
# @return [Fixnum] HTTP response code
def update_object_metadata(metadata, new_metadata, site_path = '')
  update_metadata_url = metadata['uri']
  prepared_metadata = prepare_metadata(new_metadata, metadata['type'])
  easy = ethon_easy_json_requester
  # If-Match: * — overwrite regardless of the item's current etag.
  easy.headers = { 'accept' => 'application/json;odata=verbose',
                   'content-type' => 'application/json;odata=verbose',
                   'X-RequestDigest' => xrequest_digest(site_path),
                   'X-Http-Method' => 'PATCH',
                   'If-Match' => "*" }
  easy.http_request(update_metadata_url,
                    :post,
                    { body: prepared_metadata })
  easy.perform
  check_and_raise_failure(easy)
  easy.response_code
end
end
end
| 38.123796 | 150 | 0.64764 |
4a4bf065d545a6bc048d8c500692024495209df4 | 1,028 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Version constant lives in lib/emil_seyidov_view_tool/version.rb.
require 'emil_seyidov_view_tool/version'

# Gem packaging metadata.
Gem::Specification.new do |spec|
  spec.name          = "emil_seyidov_view_tool"
  spec.version       = EmilSeyidovViewTool::VERSION
  spec.authors       = ["Emil Seyidov"]
  spec.email         = ["[email protected]"]

  spec.summary       = %q{Various view specific methods for applications I use.}
  spec.description   = %q{Provides generated HTML data for Rails applications.}
  spec.homepage      = "https://devcamp.com"
  spec.license       = "MIT"

  # Ship everything tracked by git except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.13"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
end
| 36.714286 | 80 | 0.65856 |
117fc753552151707168a1d0e1002bb6157f456f | 1,202 | require 'brainstem/api_docs/abstract_collection'
require 'brainstem/api_docs/controller'
module Brainstem
  module ApiDocs
    # Collection of Controller documentation objects, looked up and created
    # from Rails route hashes.
    class ControllerCollection < AbstractCollection
      attr_accessor :include_internal

      # Options accepted on top of those the parent collection understands.
      def valid_options
        super | [:include_internal]
      end

      #
      # Creates a new controller from a route object, appends it to the
      # collection and returns it.
      #
      def create_from_route(route)
        controller = Controller.new(
          atlas,
          const: route[:controller],
          name: route[:controller_name].split("/").last,
          include_internal: include_internal
        )
        self << controller
        controller
      end

      #
      # Finds a controller from a route object.
      #
      def find_by_route(route)
        find { |controller| controller.const == route[:controller] }
      end

      #
      # Finds a controller from a route object or creates one if it does not
      # exist.
      #
      def find_or_create_from_route(route)
        find_by_route(route) || create_from_route(route)
      end
      alias_method :find_or_create_by_route, :find_or_create_from_route
    end
  end
end
| 25.574468 | 76 | 0.623128 |
# Homebrew Cask for devolo Cockpit (powerline network management tool).
cask 'devolo-cockpit' do
  version '5.1.2'
  sha256 'ebec71ac589ac0fd8b9bf7c0209b8fba01203d52b7009c3073441592b2c4bd97'

  # The vendor's download URL embeds the version with hyphens, not dots.
  url "https://www.devolo.com/fileadmin/Web-Content/DE/products/hnw/devolo-cockpit/software/devolo-cockpit-v#{version.dots_to_hyphens}.dmg"
  appcast 'https://www.devolo.com/support/downloads/download/devolo-cockpit'
  name 'Devolo dLAN Cockpit'
  homepage 'https://www.devolo.com/internet-in-any-room/devolo-cockpit'

  # The vendor ships its own installer app rather than a plain .app bundle.
  installer manual: "#{staged_path}/devolo Cockpit Installation.app"

  # Uninstall via the vendor's bundled uninstaller; it requires root.
  uninstall script: {
                      executable: '/Applications/devolo/devolo Cockpit uninstall.app/Contents/MacOS/uninstall',
                      sudo: true,
                    }
end
| 41.705882 | 139 | 0.70945 |
1c3643aa161e19a69a3ce878fc9d0bf40805c559 | 1,083 | require 'spec_helper'
require 'support/feature_detection'
# Tests the automatic usage of `current_user` as the `whodunnit` attribute on the draft object
# Each action should stamp the draft's whodunnit with current_user's id.
# NOTE(review): '153' is presumably the id of a user stubbed by the test
# controller or spec_helper — confirm where it is configured.
describe WhodunnitsController, type: :controller do
  let(:trashable) { Trashable.create!(name: 'Bob') }

  describe 'create' do
    it 'records `current_user` via `user_for_draftsman' do
      post :create
      expect(Draftsman::Draft.last.whodunnit).to eql '153'
    end
  end

  describe 'update' do
    it 'records `current_user` via `user_for_draftsman' do
      # Rails 5+ controller test helpers require keyword arguments (params:).
      if request_test_helpers_require_keyword_args?
        put :update, params: { id: trashable.id }
      else
        put :update, id: trashable.id
      end

      expect(Draftsman::Draft.last.whodunnit).to eql '153'
    end
  end

  describe 'destroy' do
    it 'records `current_user` via `user_for_draftsman' do
      if request_test_helpers_require_keyword_args?
        delete :destroy, params: { id: trashable.id }
      else
        delete :destroy, id: trashable.id
      end

      expect(Draftsman::Draft.last.whodunnit).to eql '153'
    end
  end
end
| 27.769231 | 94 | 0.687904 |
module Intrigue
  module Entity
    # Entity type representing an individual person.
    class Person < Intrigue::Core::Model::Entity

      def self.metadata
        {
          name: "Person",
          description: "A Person",
          user_creatable: true,
          example: "Bazooka Joe"
        }
      end

      # Names are word characters, commas and whitespace.
      # NOTE(review): ^ and $ anchor per line, not the whole string.
      def validate_entity
        name =~ /^[[[:word:]]\,\s]+$/
      end

      def detail_string
        details["origin"].to_s
      end

      # Allow-listed entities are always in scope; deny-listed ones are
      # out; everything else is scoped by default.
      def scoped?
        return true if allow_list

        !deny_list
      end

    end
  end
end
| 14.59375 | 44 | 0.599572 |
f875460e0791d458a311fcf4df9515ca848bf822 | 919 | # frozen_string_literal: true
# Scans a TF2 server log file for TFTrue "log uploaded" notices and
# collects the logs.tf upload ids it finds.
class FindLogsTfUploadsInLog
  attr_accessor :log

  # Example line:
  # "L 10/10/20 - 12:00:00: [TFTrue] The log is available here: http://logs.tf/12345. Type !log to view it."
  LOGS_TF_UPLOADED_REGEX = %r{L (?'time'.*): \[TFTrue\] The log is available here: http://logs.tf/(?'logs_tf_id'\d+). Type !log to view it.}

  # Parses the file at +log+ and returns the ids found.
  #
  # @param log [String] path to the log file
  # @return [Array<Integer>] logs.tf upload ids, in order of appearance
  def self.perform(log)
    finder = new(log)
    begin
      finder.parse_log
    ensure
      finder.close # previously the File handle was leaked
    end
    finder.logs_tf_upload_ids
  end

  def initialize(log)
    @log = File.open(log)
  end

  # Releases the underlying file handle; safe to call more than once.
  def close
    log.close unless log.closed?
  end

  def parse_log
    log.each_line do |line|
      logs_tf_id = find_logs_tf_upload_in_line(line)
      logs_tf_upload_ids << logs_tf_id.to_i if logs_tf_id
    end
  end

  def logs_tf_upload_ids
    @logs_tf_upload_ids ||= []
  end

  # Returns the captured id string, or nil when the line does not match.
  # Lines with invalid byte sequences are tidied (ActiveSupport) and retried.
  def find_logs_tf_upload_in_line(line)
    begin
      match = line.match(LOGS_TF_UPLOADED_REGEX)
    rescue ArgumentError
      tidied_line = ActiveSupport::Multibyte::Chars.new(line).tidy_bytes
      match = tidied_line.match(LOGS_TF_UPLOADED_REGEX)
    end
    match[:logs_tf_id] if match
  end
end
| 23.564103 | 140 | 0.706202 |
# Devise-backed application user with avatar/images attachments and
# in-app notifications.
class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  devise :masqueradable, :database_authenticatable, :confirmable, :registerable, :trackable, :recoverable, :rememberable, :validatable, :omniauthable

  has_one_attached :avatar
  has_person_name # presumably from the name_of_person gem — confirm

  has_many_attached :images
  has_many :notifications, as: :recipient
  has_many :services
end
| 36.666667 | 149 | 0.790909 |
ab669358a082e07e9f3d6b07160334b2f68f26ba | 4,615 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2017_10_01
  module Models
    #
    # Inbound NAT pool of the load balancer.
    #
    # NOTE: AutoRest-generated model (see file header) — manual edits will
    # be lost when the SDK is regenerated.
    #
    class InboundNatPool < SubResource

      include MsRestAzure

      # @return [SubResource] A reference to frontend IP addresses.
      attr_accessor :frontend_ipconfiguration

      # @return [TransportProtocol] Possible values include: 'Udp', 'Tcp',
      # 'All'
      attr_accessor :protocol

      # @return [Integer] The first port number in the range of external ports
      # that will be used to provide Inbound Nat to NICs associated with a load
      # balancer. Acceptable values range between 1 and 65534.
      attr_accessor :frontend_port_range_start

      # @return [Integer] The last port number in the range of external ports
      # that will be used to provide Inbound Nat to NICs associated with a load
      # balancer. Acceptable values range between 1 and 65535.
      attr_accessor :frontend_port_range_end

      # @return [Integer] The port used for internal connections on the
      # endpoint. Acceptable values are between 1 and 65535.
      attr_accessor :backend_port

      # @return [String] Gets the provisioning state of the PublicIP resource.
      # Possible values are: 'Updating', 'Deleting', and 'Failed'.
      attr_accessor :provisioning_state

      # @return [String] The name of the resource that is unique within a
      # resource group. This name can be used to access the resource.
      attr_accessor :name

      # @return [String] A unique read-only string that changes whenever the
      # resource is updated.
      attr_accessor :etag


      #
      # Mapper for InboundNatPool class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'InboundNatPool',
          type: {
            name: 'Composite',
            class_name: 'InboundNatPool',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              frontend_ipconfiguration: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.frontendIPConfiguration',
                type: {
                  name: 'Composite',
                  class_name: 'SubResource'
                }
              },
              protocol: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.protocol',
                type: {
                  name: 'String'
                }
              },
              frontend_port_range_start: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.frontendPortRangeStart',
                type: {
                  name: 'Number'
                }
              },
              frontend_port_range_end: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.frontendPortRangeEnd',
                type: {
                  name: 'Number'
                }
              },
              backend_port: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.backendPort',
                type: {
                  name: 'Number'
                }
              },
              provisioning_state: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.provisioningState',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              etag: {
                client_side_validation: true,
                required: false,
                serialized_name: 'etag',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 32.5 | 79 | 0.52221 |
bf4d4a5ebf71acb949665831d58f29f524465d14 | 843 | # frozen_string_literal: true
require 'json'
# Parses an Elasticsearch query template into a Hash, first substituting
# any ##var## placeholders with values from +vars+. Returns nil when the
# resulting string is not valid JSON.
module EsArgParser
  def self.parse(query_string, vars = {})
    # Strip escaped newline sequences and protect literal percent signs
    # from Kernel#format-style interpolation.
    normalized = query_string.gsub(/\\n/, '').gsub(/\\r/, '').gsub(/%/, '%%')

    # Turn each ##key## marker into a %{key} format reference.
    interpolated = normalized.dup
    vars.each_key { |key| interpolated.gsub!("###{key}##", "%{#{key}}") }

    # Only run String#% when at least one marker was rewritten.
    interpolated = interpolated % vars unless interpolated == normalized

    # Unescape any percent signs that survived (no interpolation ran).
    interpolated.gsub!(/%%/, '%')

    JSON.parse(interpolated)
  rescue JSON::ParserError
    nil
  end
end
| 28.1 | 90 | 0.670225 |
01e41c7454b43187529dcde1ce34aa5ef692f81d | 544 | require './config/environment'
require 'rack-flash'
# Base Sinatra controller: app-wide configuration plus authentication
# helpers shared by all routes.
class ApplicationController < Sinatra::Base

  configure do
    set :public_folder, 'public'
    set :views, 'app/views'
    enable :sessions
    set :session_secret, "secret" # NOTE(review): hard-coded secret — move to ENV
    use Rack::Flash, sweep: true
  end

  # Home: logged-in users are sent to their profile, everyone else sees
  # the landing page.
  get "/" do
    if logged_in?
      # @user is memoized by current_user (invoked through logged_in?)
      redirect "/profile/#{@user.id}"
    else
      erb :"users/index"
    end
  end

  helpers do
    def logged_in?
      !!current_user
    end

    def current_user
      @user ||= User.find_by(id: session[:user_id])
    end
  end

end
| 15.111111 | 51 | 0.621324 |
18d039dccd52abb111042bed4c41c81d586faa3c | 700 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the iOS side of the clevertap_flutter plugin.
Pod::Spec.new do |s|
  s.name             = 'clevertap_flutter'
  s.version          = '0.0.1'
  s.summary          = 'Flutter Plugin For Clevertap'
  s.description      = <<-DESC
Flutter Plugin For Clevertap
                       DESC
  s.homepage         = 'http://example.com'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Raja Earla' => '[email protected]' }
  s.source           = { :path => '.' }
  s.source_files     = 'Classes/**/*'
  s.public_header_files = 'Classes/**/*.h'

  # Flutter engine plus the native CleverTap SDK this plugin wraps.
  s.dependency 'Flutter'
  s.dependency 'CleverTap-iOS-SDK'
  s.ios.deployment_target = '8.0'
end
| 31.818182 | 83 | 0.562857 |
91cb2776f999a68bfc1915bfd56ac645873c4269 | 1,980 | require 'rails_helper'
# Controller specs for MessagesController: index/new rendering, create
# validation/persistence, and index/show with an existing record.
RSpec.describe MessagesController, type: :controller do
  describe "GET #index" do
    it "returns http success" do
      get :index
      expect(response).to have_http_status(:success)
      expect(assigns(:messages)).to eq([])
      expect(response).to render_template(:index)
    end
  end

  describe "GET #new" do
    it "returns http success" do
      get :new
      expect(response).to have_http_status(:success)
      expect(assigns(:message)).to be_a_new(Message)
      expect(response).to render_template(:new)
    end
  end

  describe "POST #create" do
    it "creates an message and redirects to root with valid attributes" do
      expect {
        post :create, params: { message: { :to => '[email protected]', :subject => 'Test', :body => "This is a test" } }
      }.to change { Message.count }.from(0).to(1)
      expect(response).to have_http_status(:redirect)
      expect(response).to redirect_to(root_path)
    end

    it "renders new with invalid attributes" do
      # 'test' is not a valid address, so no record should be created.
      expect {
        post :create, params: { message: { to: 'test' } }
      }.to_not change { Message.count }
      expect(response).to have_http_status(:success)
      expect(response).to render_template(:new)
      expect(assigns(:message)).to be_a_new(Message)
    end
  end

  context 'with an email' do
    let(:message) { Message.create(:to => '[email protected]', :subject => 'hello', :body => 'hello') }

    describe "GET #index" do
      it "returns http success" do
        get :index
        expect(response).to have_http_status(:success)
        expect(assigns(:messages)).to eq([message])
        expect(response).to render_template(:index)
      end
    end

    describe "GET #show" do
      it "returns http success" do
        get :show, params: { id: message.id }
        expect(response).to have_http_status(:success)
        expect(assigns(:message)).to eq(message)
        expect(response).to render_template(:show)
      end
    end
  end
end
| 30 | 119 | 0.639394 |
03a79072a7043e4d8e16f1bcab1fe3d25e2b39bd | 271 | require 'active_support/concern'
# Mixin that enables PaperTrail auditing on create/update, ignoring
# timestamp columns, but only when an authenticated user is present.
module Versioned
  extend ActiveSupport::Concern

  included do
    # `if` guard: versions are only written for changes made by a known user.
    has_paper_trail on: [:create, :update], save_changes: true, ignore: [:updated_at, :created_at], if: proc { |_x| User.current.present? }, class_name: 'Version'
  end
end
| 27.1 | 162 | 0.734317 |
033df0f2874260f661a83ae9a3177afdab96feba | 3,243 | require 'legacy_spec_helper'
# Specs for PDFKit::Source: classification of the input (URL vs file vs
# raw HTML) and how it is rendered into a wkhtmltopdf command argument.
describe PDFKit::Source do
  describe "#url?" do
    it "returns true if passed a url like string" do
      source = PDFKit::Source.new('http://google.com')
      expect(source).to be_url
    end

    it "returns false if passed a file" do
      source = PDFKit::Source.new(File.new(__FILE__))
      expect(source).not_to be_url
    end

    it "returns false if passed HTML" do
      source = PDFKit::Source.new('<blink>Oh Hai!</blink>')
      expect(source).not_to be_url
    end

    # Guards against treating a document as a URL just because a URL
    # appears at the start of a later line.
    it "returns false if passed HTML with embedded urls at the beginning of a line" do
      source = PDFKit::Source.new("<blink>Oh Hai!</blink>\nhttp://www.google.com")
      expect(source).not_to be_url
    end
  end

  describe "#file?" do
    it "returns true if passed a file" do
      source = PDFKit::Source.new(::File.new(__FILE__))
      expect(source).to be_file
    end

    it "returns false if passed a url like string" do
      source = PDFKit::Source.new('http://google.com')
      expect(source).not_to be_file
    end

    it "returns false if passed HTML" do
      source = PDFKit::Source.new('<blink>Oh Hai!</blink>')
      expect(source).not_to be_file
    end
  end

  describe "#html?" do
    it "returns true if passed HTML" do
      source = PDFKit::Source.new('<blink>Oh Hai!</blink>')
      expect(source).to be_html
    end

    it "returns false if passed a file" do
      source = PDFKit::Source.new(::File.new(__FILE__))
      expect(source).not_to be_html
    end

    it "returns false if passed a url like string" do
      source = PDFKit::Source.new('http://google.com')
      expect(source).not_to be_html
    end
  end

  describe "#to_input_for_command" do
    it "URI escapes source URLs and encloses them in quotes to accomodate ampersands" do
      source = PDFKit::Source.new("https://www.google.com/search?q='cat<dev/zero>/dev/null'")
      expect(source.to_input_for_command).to eq "\"https://www.google.com/search?q='cat%3Cdev/zero%3E/dev/null'\""
    end

    # Escaping must be idempotent: %3C must not become %253C.
    it "does not URI escape previously escaped source URLs" do
      source = PDFKit::Source.new("https://www.google.com/search?q='cat%3Cdev/zero%3E/dev/null'")
      expect(source.to_input_for_command).to eq "\"https://www.google.com/search?q='cat%3Cdev/zero%3E/dev/null'\""
    end

    it "returns a '-' for HTML strings to indicate that we send that content through STDIN" do
      source = PDFKit::Source.new('<blink>Oh Hai!</blink>')
      expect(source.to_input_for_command).to eq '-'
    end

    it "returns the file path for file sources" do
      source = PDFKit::Source.new(::File.new(__FILE__))
      expect(source.to_input_for_command).to match 'spec/legacy/source_spec.rb'
    end
  end

  describe "#to_s" do
    it "returns the HTML if passed HTML" do
      source = PDFKit::Source.new('<blink>Oh Hai!</blink>')
      expect(source.to_s).to eq('<blink>Oh Hai!</blink>')
    end

    it "returns a path if passed a file" do
      source = PDFKit::Source.new(::File.new(__FILE__))
      expect(source.to_s).to eq(__FILE__)
    end

    it "returns the url if passed a url like string" do
      source = PDFKit::Source.new('http://google.com')
      expect(source.to_s).to eq('http://google.com')
    end
  end
end
| 32.757576 | 114 | 0.658033 |
11361104c5a238b38371883b58d8910575fc949e | 8,033 | # encoding: UTF-8
# frozen_string_literal: true
describe API::V2::Account::Withdraws, type: :request do
let(:member) { create(:member, :level_3) }
let(:token) { jwt_for(member) }
let(:level_0_member) { create(:member, :level_0) }
let(:level_0_member_token) { jwt_for(level_0_member) }
describe 'GET /api/v2/account/withdraws' do
let!(:btc_withdraws) { create_list(:btc_withdraw, 20, member: member) }
let!(:usd_withdraws) { create_list(:usd_withdraw, 20, member: member) }
it 'require authentication' do
get '/api/v2/account/withdraws'
expect(response.code).to eq '401'
end
it 'validates currency param' do
api_get '/api/v2/account/withdraws', params: { currency: 'FOO' }, token: token
expect(response.code).to eq '422'
expect(response.body).to eq '{"error":{"code":1001,"message":"currency does not have a valid value"}}'
end
it 'validates page param' do
api_get '/api/v2/account/withdraws', params: { page: -1 }, token: token
expect(response.code).to eq '422'
expect(response.body).to eq '{"error":{"code":1001,"message":"page page must be greater than zero."}}'
end
it 'validates limit param' do
api_get '/api/v2/account/withdraws', params: { limit: 9999 }, token: token
expect(response.code).to eq '422'
expect(response.body).to eq '{"error":{"code":1001,"message":"limit must be in range: 1..1000."}}'
end
it 'returns withdraws for all currencies by default' do
api_get '/api/v2/account/withdraws', params: { limit: 1000 }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).map { |x| x['currency'] }.uniq.sort).to eq %w[ btc usd ]
end
it 'returns withdraws specified currency' do
api_get '/api/v2/account/withdraws', params: { currency: 'BTC', limit: 1000 }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).map { |x| x['currency'] }.uniq.sort).to eq %w[ btc ]
end
it 'paginates withdraws' do
ordered_withdraws = btc_withdraws.sort_by(&:id).reverse
api_get '/api/v2/account/withdraws', params: { currency: 'BTC', limit: 10, page: 1 }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).first['id']).to eq ordered_withdraws[0].id
api_get '/api/v2/account/withdraws', params: { currency: 'BTC', limit: 10, page: 2 }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).first['id']).to eq ordered_withdraws[10].id
end
it 'sorts withdraws' do
ordered_withdraws = btc_withdraws.sort_by(&:id).reverse
api_get '/api/v2/account/withdraws', params: { currency: 'BTC', limit: 100 }, token: token
expect(response).to be_success
results = JSON.parse(response.body)
expect(results.map { |x| x['id'] }).to eq ordered_withdraws.map(&:id)
end
it 'denies access to unverified member' do
api_get '/api/v2/account/withdraws', token: level_0_member_token
expect(response.code).to eq '403'
expect(JSON.parse(response.body)['error']).to eq( {'code' => 2000, 'message' => 'Please, pass the corresponding verification steps to withdraw funds.'} )
end
end
describe 'create withdraw' do
let(:currency) { Currency.coins.sample }
let(:amount) { 0.1575 }
let :data do
{ uid: member.uid,
currency: currency.code,
amount: amount,
rid: Faker::Bitcoin.address,
otp: 123456 }
end
let(:account) { member.accounts.with_currency(currency).first }
let(:balance) { 1.2 }
before { account.plus_funds(balance) }
before { Vault::TOTP.stubs(:validate?).returns(true) }
context 'fiat withdrawal' do
before { data[:currency] = Currency.fiats.pluck(:id).sample }
it 'doesn\'t allow fiat' do
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"currency does not have a valid value"}}'
end
end
context 'crypto withdrawal' do
context 'disabled account withdrawal API' do
before { ENV['ENABLE_ACCOUNT_WITHDRAWAL_API'] = 'false' }
after { ENV['ENABLE_ACCOUNT_WITHDRAWAL_API'] = 'true' }
it 'doesn\'t allow account withdrawal API call' do
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":2000,"message":"Account withdrawal API is disabled"}}'
end
end
context 'extremely precise values' do
before { Currency.any_instance.stubs(:withdraw_fee).returns(BigDecimal(0)) }
before { Currency.any_instance.stubs(:precision).returns(16) }
it 'keeps precision for amount' do
currency.update!(precision: 16)
data[:amount] = '0.0000000123456789'
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(201)
expect(Withdraw.last.sum.to_s).to eq data[:amount]
end
end
it 'validates missing otp param' do
data.except!(:otp)
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"otp is missing, otp is empty"}}'
end
it 'requires otp' do
data[:otp] = nil
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"otp is empty"}}'
end
it 'validates otp code' do
Vault::TOTP.stubs(:validate?).returns(false)
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":2000,"message":"OTP code is invalid"}}'
end
it 'requires amount' do
data[:amount] = nil
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"amount must be positive"}}'
end
it 'validates negative balance' do
data[:amount] = -1
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"amount must be positive"}}'
end
it 'validates enough balance' do
data[:amount] = 100
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"errors":["Account balance is insufficient"]}'
end
it 'requires rid' do
data[:rid] = nil
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"rid is empty"}}'
end
it 'requires currency' do
data[:currency] = nil
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(422)
expect(response.body).to eq '{"error":{"code":1001,"message":"currency does not have a valid value"}}'
end
it 'creates new withdraw and immediately submits it' do
api_post '/api/v2/account/withdraws', params: data, token: token
expect(response).to have_http_status(201)
record = Withdraw.last
expect(record.sum).to eq 0.1575
expect(record.aasm_state).to eq 'submitted'
expect(record.account).to eq account
expect(record.account.balance).to eq(1.2 - amount)
expect(record.account.locked).to eq amount
end
end
end
end
| 41.621762 | 159 | 0.642599 |
e9f6137119e866aa61edec371492cdbbc1dde210 | 1,530 | #
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
require 'ingenico/connect/sdk/factory'
require 'ingenico/connect/sdk/domain/definitions/card_without_cvv'
require 'ingenico/connect/sdk/domain/payment/complete_payment_card_payment_method_specific_input'
require 'ingenico/connect/sdk/domain/payment/complete_payment_request'
# Short aliases for the deeply nested SDK domain namespaces used below.
Definitions = Ingenico::Connect::SDK::Domain::Definitions
Payment = Ingenico::Connect::SDK::Domain::Payment
# Demonstrates completing a payment: build the card details, wrap them in a
# CompletePaymentRequest, and submit it for a fixed merchant/payment pair.
def example
  get_client do |client|
    card_details = Definitions::CardWithoutCvv.new
    card_details.card_number = '67030000000000003'
    card_details.cardholder_name = 'Wile E. Coyote'
    card_details.expiry_date = '1299'

    method_input = Payment::CompletePaymentCardPaymentMethodSpecificInput.new
    method_input.card = card_details

    request = Payment::CompletePaymentRequest.new
    request.card_payment_method_specific_input = method_input

    response = client.merchant('merchantId').payments.complete('paymentId', request)
  end
end
# Yields a configured SDK client and guarantees it is closed afterwards.
# Credentials come from the environment, with example fallbacks.
def get_client
  key_id = ENV.fetch('connect.api.apiKeyId', 'someKey')
  secret = ENV.fetch('connect.api.secretApiKey', 'someSecret')
  config_file = File.join(__FILE__, '..', '..', 'example_configuration.yml')
  client = Ingenico::Connect::SDK::Factory.create_client_from_file(config_file, key_id, secret)
  yield client
ensure
  # Free networking resources when done
  client.close unless client.nil?
end
| 39.230769 | 125 | 0.793464 |
18f9f404ac9e21e353183c1671d587aecba5c9d0 | 2,827 | require 'test_helper'
# Exercises the annuity helpers with BigDecimal inputs.
#
# Fix: `BigDecimal.new` was deprecated in Ruby 2.5 and removed in Ruby 2.7
# (bigdecimal 2.0); the `BigDecimal()` Kernel conversion method is the
# supported replacement and behaves identically for string arguments.
class BigDecimalAnnuitiesTest < Minitest::Test
  include AnnuitiesHelper

  def test_improve_interest_rate
    # Based on Example 6 in http://oakroadsystems.com/math/loan.htm .
    assert_improve_interest_rate payment: BigDecimal('291'),
      periods: BigDecimal('48'), principal: BigDecimal('11200'),
      guess: BigDecimal('0.01'), expected: BigDecimal('0.0094295242'),
      delta: BigDecimal('0.00000001')
  end

  def test_interest_rate_stops_if_improvement_is_small
    # Based on Example 6 in http://oakroadsystems.com/math/loan.htm .
    assert_interest_rate_stops_if_improvement_is_small \
      payment: BigDecimal('291'), periods: BigDecimal('48'),
      principal: BigDecimal('11200'), guess: BigDecimal('0.01'),
      precision: BigDecimal('0.5'),
      expected: BigDecimal('0.0094295242'),
      delta: BigDecimal('0.00000001')
  end

  def test_interest_rate_does_multiple_iterations
    # Based on Example 6 in http://oakroadsystems.com/math/loan.htm .
    assert_interest_rate_does_multiple_iterations \
      payment: BigDecimal('291'), periods: BigDecimal('48'),
      principal: BigDecimal('11200'), guess: BigDecimal('0.01'),
      precision: BigDecimal('0'), max_decimals: 10, max_iterations: 4,
      expected: BigDecimal('0.0094007411'),
      delta: BigDecimal('0.0000000001')
  end

  def test_interest_rate_stops_if_max_iterations_reached
    # With zero iterations allowed the guess must come back unchanged.
    expected = BigDecimal('0.42')
    actual = Refinance::Annuities.interest_rate(0, 0, 0, expected, 0, 1, 0)
    assert_equal expected, actual
  end

  def test_payment
    # Based on Example 2 in http://oakroadsystems.com/math/loan.htm .
    assert_payment interest_rate: BigDecimal('0.0065'),
      periods: BigDecimal('360'), principal: BigDecimal('225000'),
      expected: BigDecimal('1619.708627'),
      delta: BigDecimal('0.000001')
  end

  def test_periods
    # Based on Example 3 in http://oakroadsystems.com/math/loan.htm .
    assert_periods interest_rate: BigDecimal('0.005'),
      payment: BigDecimal('100.0'), principal: BigDecimal('3500.0'),
      expected: BigDecimal('38.57'), delta: BigDecimal('0.01')
  end

  def test_principal
    # Based on Example 5 in http://oakroadsystems.com/math/loan.htm .
    assert_principal interest_rate: BigDecimal('0.014083'),
      payment: BigDecimal('60'), periods: BigDecimal('36'),
      expected: BigDecimal('1685.26'), delta: BigDecimal('0.01')
  end

  def test_effective_interest_rate
    assert_effective_interest_rate \
      nominal_annual_interest_rate: BigDecimal('0.1'),
      compounding_periods_per_year: BigDecimal('12'),
      expected: BigDecimal('0.10471'), delta: BigDecimal('0.00001')
  end
end
| 40.385714 | 78 | 0.713477 |
8775ba252c470e0af50db42abd4b7602cfb0b2d6 | 1,366 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module HackfestApp
  # Rails application definition; container-oriented defaults (STDOUT
  # logging, Sidekiq-backed Active Job) are configured here.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.1

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # We want to set up a custom logger which logs to STDOUT.
    # Docker expects your application to log to STDOUT/STDERR and to be ran
    # in the foreground.
    config.log_level = :debug
    config.log_tags = [:subdomain, :uuid]
    config.logger = ActiveSupport::TaggedLogging.new(Logger.new(STDOUT))

    # Since we're using Redis for Sidekiq, we might as well use Redis to back
    # our cache store. This keeps our application stateless as well.
    # config.cache_store = :redis_cache_store, ENV['CACHE_URL'], { namespace: 'drkiq::cache' }

    # If you've never dealt with background workers before, this is the Rails
    # way to use them through Active Job. We just need to tell it to use Sidekiq.
    config.active_job.queue_adapter = :sidekiq
  end
end
| 40.176471 | 94 | 0.732796 |
1adfe10a41d50a58f2e156503efbeb486056369e | 574 | require "isodoc"
require_relative "metadata"
require_relative "xref"
require_relative "i18n"
module IsoDoc
  module Csa
    # Shared initialisation hooks mixed into the CSA converters.
    module Init
      # Builds the CSA metadata helper for the requested language/script.
      def metadata_init(lang, script, i18n)
        @meta = Metadata.new(lang, script, i18n)
      end

      # Builds the cross-reference resolver, backed by an HTML converter
      # configured for the same language and script.
      def xref_init(lang, script, klass, i18n, options)
        converter = HtmlConvert.new(language: lang, script: script)
        @xrefs = Xref.new(lang, script, converter, i18n, options)
      end

      # Builds the I18n helper; an explicitly supplied YAML file wins over
      # the one already configured on the converter.
      def i18n_init(lang, script, i18nyaml = nil)
        yaml = i18nyaml || @i18nyaml
        @i18n = I18n.new(lang, script, yaml)
      end
    end
  end
end
| 22.96 | 62 | 0.648084 |
28b3e79c1ff9fb02a00f8972cd0c68fdfc9e8234 | 1,341 | require 'spec_helper'
describe 'environments routing' do
  let(:project) { create(:project) }

  # NOTE(review): the `environment` and `environments_route` lets are not
  # referenced by the examples below; lazy lets are never evaluated, but
  # consider removing them if they are truly unused.
  let(:environment) do
    create(:environment, project: project,
                         name: 'staging-1.0/review')
  end

  let(:environments_route) do
    "#{project.full_path}/environments/"
  end

  # Folder names may contain dots, so the route must split the format
  # extension correctly in each of these cases.
  describe 'routing environment folders' do
    context 'when using JSON format' do
      it 'correctly matches environment name and JSON format' do
        expect(get_folder('staging-1.0.json'))
          .to route_to(*folder_action(id: 'staging-1.0', format: 'json'))
      end
    end

    context 'when using HTML format' do
      it 'correctly matches environment name and HTML format' do
        expect(get_folder('staging-1.0.html'))
          .to route_to(*folder_action(id: 'staging-1.0', format: 'html'))
      end
    end

    context 'when using implicit format' do
      it 'correctly matches environment name' do
        expect(get_folder('staging-1.0'))
          .to route_to(*folder_action(id: 'staging-1.0'))
      end
    end
  end

  # Issues a GET against the folders endpoint for the given folder name.
  def get_folder(folder)
    get("#{project.full_path}/environments/folders/#{folder}")
  end

  # Expected controller/action pair plus route params for route_to.
  def folder_action(**opts)
    options = { namespace_id: project.namespace.path,
                project_id: project.path }

    ['projects/environments#folder', options.merge(opts)]
  end
end
| 27.367347 | 73 | 0.646532 |
# A single headword (citation form) of a lexeme. Orthographic forms are
# stored per-locale via Globalize; phonetic forms are attached through
# orthographs. The `acceptance` column is a bitfield combining the
# DESCRIPTIVE and PRESCRIPTIVE flags.
#
# Fixes: removed the useless `forms =` assignments in the two `best_*`
# class methods (the inject result was already the return value), renamed
# the misleading `hw2` block parameter, marked the unused callback
# parameter, and added parentheses to the parenless `def form` signature.
class Headword < ActiveRecord::Base
  has_many :orthographs
  has_many :phonetic_forms, :through => :orthographs
  has_many :notes, as: :annotatable

  translates :form, :fallbacks_for_empty_translations => true
  globalize_accessors :locales => (Language.defined_language_codes | [I18n.default_locale])

  accepts_nested_attributes_for :phonetic_forms, :allow_destroy => true, :reject_if => proc { |attributes| attributes.all? {|k,v| v.blank?} }
  accepts_nested_attributes_for :notes, :allow_destroy => true, :reject_if => proc { |attributes| attributes.all? {|k,v| v.blank?} }

  # Headwords whose form never appears as a parsed form.
  # NOTE(review): eager (non-lambda) scope and the dynamic finders below are
  # Rails 3 idioms; revisit before any Rails upgrade.
  scope :unattested, joins(['LEFT OUTER JOIN "parses" ON "parses"."parsed_form" = "form"']).where({:parses => {:parsed_form => nil}})

  belongs_to :lexeme
  # Delegates #senses to the lexeme; the string target makes the generated
  # method return nil when no lexeme is associated.
  delegate :senses, :to => '(lexeme or return nil)'
  belongs_to :language

  validate :any_form_present?
  before_save :set_defaults

  # Acceptance bitfield flags.
  DESCRIPTIVE = 1
  PRESCRIPTIVE = 2

  INCLUDE_TREE = { :headwords => [PhoneticForm::INCLUDE_TREE, :language, :translations] }

  after_initialize do |_headword|
    # Most likely default: assume the most acceptable forms are being entered.
    self.acceptance ||= DESCRIPTIVE | PRESCRIPTIVE
  end

  # Defaults the headword's language to the owning lexeme's language.
  def set_defaults
    default_language = lexeme.try(:language) || Language.new
    self.language ||= default_language
  end

  # All headwords whose form matches the parse's parsed form.
  def self.lookup_all_by_parse parse
    self.find_all_by_form(parse.parsed_form)
  end

  # The first headword whose form matches the parse's parsed form.
  def self.lookup_by_parse parse
    self.find_by_form(parse.parsed_form)
  end

  # Returns whether the headword has been marked as descriptively correct
  def descriptively_ok?
    acceptance & DESCRIPTIVE == DESCRIPTIVE
  end

  # Given 1 (true) or 0 (false), set the headword's descriptively-correct status
  def descriptively_ok=(status)
    self.acceptance = (acceptance & PRESCRIPTIVE) | (status.to_i * DESCRIPTIVE)
  end

  # Returns whether the headword has been marked as prescriptively correct
  def prescriptively_ok?
    acceptance & PRESCRIPTIVE == PRESCRIPTIVE
  end

  # Given 1 (true) or 0 (false), set the headword's prescriptively-correct status
  def prescriptively_ok=(status)
    self.acceptance = (acceptance & DESCRIPTIVE) | (status.to_i * PRESCRIPTIVE)
  end

  # Return the orthographic form appropriate to the given locale.
  # If no locale is given, return the default (current) locale, or the
  # first translation given.
  def form(loc = nil)
    if loc
      form_translations[loc]
    else
      self[:form] || form_translations.detect(->{[]}) {|k, v| v.present? }[1]
    end
  end

  # Return an array of all defined orthographic forms
  def orthographic_forms
    translations.inject([]) do |memo, obj|
      obj.form? ? memo | [obj.form] : memo
    end
  end

  # Returns an array containing the forms of the most acceptable headwords
  def self.best_headword_forms(headwords)
    return [] if headwords.empty?
    most_accepted = headwords.max_by {|hw| hw.acceptance }.acceptance
    headwords.select {|hw| hw.acceptance == most_accepted}.inject([]) do |memo, obj|
      memo | obj.orthographic_forms
    end
  end

  # Returns an array containing the phonetic forms of the most acceptable headwords
  def self.best_phonetic_forms(headwords)
    return [] if headwords.empty?
    most_accepted = headwords.max_by {|hw| hw.acceptance }.acceptance
    headwords.select {|hw| hw.acceptance == most_accepted}.inject([]) do |memo, obj|
      memo | obj.phonetic_forms.collect(&:phonetic_forms).flatten
    end
  end

  protected

  # Validation: at least one localized form attribute must be filled in.
  def any_form_present?
    if globalize_attribute_names.select {|k,v| k.to_s.start_with?("form")}.all? {|v| v.blank? }
      errors.add(:form, I18n.t('errors.messages.blank'))
    end
  end
end
| 33.654545 | 141 | 0.708266 |
91da79dc53f2b1de41e44000a99e9bf79d7c38c0 | 854 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
require 'msf/core/payload/generic'
module Metasploit3
include Msf::Payload::Single
def initialize(info = {})
super(merge_info(info,
'Name' => 'Generic x86 Debug Trap',
'Version' => '$Revision$',
'Description' => 'Generate a debug trap in the target process',
'Author' => 'robert <robertmetasploit [at] gmail.com>',
'Platform' => [ 'win', 'linux', 'bsd', 'solaris', 'bsdi', 'osx' ],
'License' => MSF_LICENSE,
'Arch' => ARCH_X86,
'Payload' =>
{
'Payload' =>
"\xcc"
}
))
end
end | 22.473684 | 72 | 0.619438 |
ab00e1c1d8e0d9795d2aff3f06097b834cfe7713 | 12,364 | require 'test_helper'
# Unit tests for the Moneris gateway integration. All network traffic is
# stubbed (ssl_post expectations / CommStub), so every example runs against
# the canned XML fixtures defined in the private section at the bottom.
class MonerisTest < Test::Unit::TestCase
  include CommStub

  def setup
    Base.mode = :test

    @gateway = MonerisGateway.new(
      :login => 'store1',
      :password => 'yesguy'
    )

    # Amount is in cents (100 => $1.00).
    @amount = 100
    @credit_card = credit_card('4242424242424242')
    @options = { :order_id => '1', :customer => '1', :billing_address => address}
  end

  def test_default_options
    assert_equal 7, @gateway.options[:crypt_type]
    assert_equal "store1", @gateway.options[:login]
    assert_equal "yesguy", @gateway.options[:password]
  end

  def test_successful_purchase
    @gateway.expects(:ssl_post).returns(successful_purchase_response)

    assert response = @gateway.authorize(100, @credit_card, @options)
    assert_success response
    # Authorization is "<TransID>;<ReceiptId>".
    assert_equal '58-0_3;1026.1', response.authorization
  end

  def test_failed_purchase
    @gateway.expects(:ssl_post).returns(failed_purchase_response)

    assert response = @gateway.authorize(100, @credit_card, @options)
    assert_failure response
  end

  # credit() with a transaction reference is deprecated in favor of refund().
  def test_deprecated_credit
    @gateway.expects(:ssl_post).with(anything, regexp_matches(/txn_number>123<\//), anything).returns("")
    @gateway.expects(:parse).returns({})
    assert_deprecation_warning(Gateway::CREDIT_DEPRECATION_MESSAGE) do
      @gateway.credit(@amount, "123;456", @options)
    end
  end

  def test_refund
    @gateway.expects(:ssl_post).with(anything, regexp_matches(/txn_number>123<\//), anything).returns("")
    @gateway.expects(:parse).returns({})
    @gateway.refund(@amount, "123;456", @options)
  end

  # amount() expects integer cents and rejects strings.
  def test_amount_style
   assert_equal '10.34', @gateway.send(:amount, 1034)

   assert_raise(ArgumentError) do
     @gateway.send(:amount, '10.34')
   end
  end

  # The generated request bodies must be well-formed XML of the expected size.
  def test_preauth_is_valid_xml
   params = {
     :order_id => "order1",
     :amount => "1.01",
     :pan => "4242424242424242",
     :expdate => "0303",
     :crypt_type => 7,
   }

   assert data = @gateway.send(:post_data, 'preauth', params)
   assert REXML::Document.new(data)
   assert_equal xml_capture_fixture.size, data.size
  end

  def test_purchase_is_valid_xml
   params = {
     :order_id => "order1",
     :amount => "1.01",
     :pan => "4242424242424242",
     :expdate => "0303",
     :crypt_type => 7,
   }

   assert data = @gateway.send(:post_data, 'purchase', params)
   assert REXML::Document.new(data)
   assert_equal xml_purchase_fixture.size, data.size
  end

  def test_capture_is_valid_xml
   params = {
     :order_id => "order1",
     :amount => "1.01",
     :pan => "4242424242424242",
     :expdate => "0303",
     :crypt_type => 7,
   }

   assert data = @gateway.send(:post_data, 'preauth', params)
   assert REXML::Document.new(data)
   assert_equal xml_capture_fixture.size, data.size
  end

  def test_supported_countries
    assert_equal ['CA'], MonerisGateway.supported_countries
  end

  def test_supported_card_types
    assert_equal [:visa, :master, :american_express, :diners_club, :discover], MonerisGateway.supported_cardtypes
  end

  def test_should_raise_error_if_transaction_param_empty_on_credit_request
    [nil, '', '1234'].each do |invalid_transaction_param|
      assert_raise(ArgumentError) { @gateway.void(invalid_transaction_param) }
    end
  end

  # -- Vault (stored card) tests. NOTE(review): the tests below call
  # test_successful_store directly to populate @data_key; this couples the
  # examples together via instance state.
  def test_successful_store
    @gateway.expects(:ssl_post).returns(successful_store_response)
    assert response = @gateway.store(@credit_card)
    assert_success response
    assert_equal "Successfully registered cc details", response.message
    assert response.params["data_key"].present?
    @data_key = response.params["data_key"]
  end

  def test_successful_unstore
    @gateway.expects(:ssl_post).returns(successful_unstore_response)
    test_successful_store
    assert response = @gateway.unstore(@data_key)
    assert_success response
    assert_equal "Successfully deleted cc details", response.message
    assert response.params["data_key"].present?
  end

  def test_update
    @gateway.expects(:ssl_post).returns(successful_update_response)
    test_successful_store
    assert response = @gateway.update(@data_key, @credit_card)
    assert_success response
    assert_equal "Successfully updated cc details", response.message
    assert response.params["data_key"].present?
  end

  def test_successful_purchase_with_vault
    @gateway.expects(:ssl_post).returns(successful_purchase_response)
    test_successful_store
    assert response = @gateway.purchase(100, @data_key, {:order_id => generate_unique_id, :customer => generate_unique_id})
    assert_success response
    assert_equal "Approved", response.message
    assert response.authorization.present?
  end

  def test_successful_authorization_with_vault
    @gateway.expects(:ssl_post).returns(successful_purchase_response)
    test_successful_store
    assert response = @gateway.authorize(100, @data_key, {:order_id => generate_unique_id, :customer => generate_unique_id})
    assert_success response
    assert_equal "Approved", response.message
    assert response.authorization.present?
  end

  def test_failed_authorization_with_vault
    @gateway.expects(:ssl_post).returns(failed_purchase_response)
    test_successful_store
    assert response = @gateway.authorize(100, @data_key, @options)
    assert_failure response
  end

  # -- CVV handling: cvd fields are only emitted when cvv_enabled is set.
  def test_gets_sent_when_its_enabled
    gateway = MonerisGateway.new(login: 'store1', password: 'yesguy', cvv_enabled: true)

    @credit_card.verification_value = "452"
    stub_comms(gateway) do
      gateway.purchase(@amount, @credit_card, @options)
    end.check_request do |endpoint, data, headers|
      assert_match(%r{cvd_indicator>1<}, data)
      assert_match(%r{cvd_value>452<}, data)
    end.respond_with(successful_purchase_response)
  end

  def test_no_cvv_specified_when_its_enabled
    gateway = MonerisGateway.new(login: 'store1', password: 'yesguy', cvv_enabled: true)

    @credit_card.verification_value = ""
    stub_comms(gateway) do
      gateway.purchase(@amount, @credit_card, @options)
    end.check_request do |endpoint, data, headers|
      assert_match(%r{cvd_indicator>0<}, data)
      assert_no_match(%r{cvd_value>}, data)
    end.respond_with(successful_purchase_response)
  end

  def test_passing_cvv_when_not_enabled
    @credit_card.verification_value = "452"
    stub_comms do
      @gateway.purchase(@amount, @credit_card, @options)
    end.check_request do |endpoint, data, headers|
      assert_no_match(%r{cvd_value>}, data)
      assert_no_match(%r{cvd_indicator>}, data)
    end.respond_with(successful_purchase_response)
  end

  def test_no_cvv_specified_when_not_enabled
    @credit_card.verification_value = ""
    stub_comms do
      @gateway.purchase(@amount, @credit_card, @options)
    end.check_request do |endpoint, data, headers|
      assert_no_match(%r{cvd_value>}, data)
      assert_no_match(%r{cvd_indicator>}, data)
    end.respond_with(successful_purchase_response)
  end

  # -- AVS handling: street/zip fields are emitted only when an address is given.
  def test_avs_information_present_with_address
    billing_address = address(address1: "1234 Anystreet", address2: "")
    stub_comms do
      @gateway.purchase(@amount, @credit_card, billing_address: billing_address, order_id: "1")
    end.check_request do |endpoint, data, headers|
      assert_match(%r{avs_street_number>1234<}, data)
      assert_match(%r{avs_street_name>Anystreet<}, data)
      assert_match(%r{avs_zipcode>#{billing_address[:zip]}<}, data)
    end.respond_with(successful_purchase_response_with_avs_result)
  end

  def test_avs_information_absent_with_no_address
    stub_comms do
      @gateway.purchase(@amount, @credit_card, @options.tap { |x| x.delete(:billing_address) })
    end.check_request do |endpoint, data, headers|
      assert_no_match(%r{avs_street_number>}, data)
      assert_no_match(%r{avs_street_name>}, data)
      assert_no_match(%r{avs_zipcode>}, data)
    end.respond_with(successful_purchase_response)
  end

  def test_avs_result_valid_with_address
    @gateway.expects(:ssl_post).returns(successful_purchase_response_with_avs_result)
    assert response = @gateway.purchase(100, @credit_card, @options)
    assert_equal(response.avs_result, {
      'code' => 'A',
      'message' => 'Street address matches, but 5-digit and 9-digit postal code do not match.',
      'street_match' => 'Y',
      'postal_match' => 'N'
    })
  end

  # cust_id falls back to the cardholder name when :customer is absent.
  def test_customer_can_be_specified
    stub_comms do
      @gateway.purchase(@amount, @credit_card, order_id: "3", customer: "Joe Jones")
    end.check_request do |endpoint, data, headers|
      assert_match(%r{cust_id>Joe Jones}, data)
    end.respond_with(successful_purchase_response)
  end

  def test_customer_not_specified_card_name_used
    stub_comms do
      @gateway.purchase(@amount, @credit_card, order_id: "3")
    end.check_request do |endpoint, data, headers|
      assert_match(%r{cust_id>Longbob Longsen}, data)
    end.respond_with(successful_purchase_response)
  end

  private

  # -- Canned gateway responses used by the stubs above.

  def successful_purchase_response
    <<-RESPONSE
<?xml version="1.0"?>
<response>
  <receipt>
    <ReceiptId>1026.1</ReceiptId>
    <ReferenceNum>661221050010170010</ReferenceNum>
    <ResponseCode>027</ResponseCode>
    <ISO>01</ISO>
    <AuthCode>013511</AuthCode>
    <TransTime>18:41:13</TransTime>
    <TransDate>2008-01-05</TransDate>
    <TransType>00</TransType>
    <Complete>true</Complete>
    <Message>APPROVED * =</Message>
    <TransAmount>1.00</TransAmount>
    <CardType>V</CardType>
    <TransID>58-0_3</TransID>
    <TimedOut>false</TimedOut>
  </receipt>
</response>
    RESPONSE
  end

  def successful_purchase_response_with_avs_result
    <<-RESPONSE
<?xml version="1.0"?>
<response>
  <receipt>
    <ReceiptId>9c7189ec64b58f541335be1ca6294d09</ReceiptId>
    <ReferenceNum>660110910011136190</ReferenceNum>
    <ResponseCode>027</ResponseCode>
    <ISO>01</ISO>
    <AuthCode>115497</AuthCode>
    <TransTime>15:20:51</TransTime>
    <TransDate>2014-06-18</TransDate>
    <TransType>00</TransType>
    <Complete>true</Complete><Message>APPROVED * =</Message>
    <TransAmount>10.10</TransAmount>
    <CardType>V</CardType>
    <TransID>491573-0_9</TransID>
    <TimedOut>false</TimedOut>
    <BankTotals>null</BankTotals>
    <Ticket>null</Ticket>
    <CorporateCard>false</CorporateCard>
    <AvsResultCode>A</AvsResultCode>
    <ITDResponse>null</ITDResponse>
    <IsVisaDebit>false</IsVisaDebit>
  </receipt>
</response>
    RESPONSE
  end

  def failed_purchase_response
    <<-RESPONSE
<?xml version="1.0"?>
<response>
  <receipt>
    <ReceiptId>1026.1</ReceiptId>
    <ReferenceNum>661221050010170010</ReferenceNum>
    <ResponseCode>481</ResponseCode>
    <ISO>01</ISO>
    <AuthCode>013511</AuthCode>
    <TransTime>18:41:13</TransTime>
    <TransDate>2008-01-05</TransDate>
    <TransType>00</TransType>
    <Complete>true</Complete>
    <Message>DECLINED * =</Message>
    <TransAmount>1.00</TransAmount>
    <CardType>V</CardType>
    <TransID>97-2-0</TransID>
    <TimedOut>false</TimedOut>
  </receipt>
</response>
    RESPONSE
  end

  def successful_store_response
    <<-RESPONSE
<?xml version="1.0"?>
<response>
  <receipt>
    <DataKey>1234567890</DataKey>
    <ResponseCode>027</ResponseCode>
    <Complete>true</Complete>
    <Message>Successfully registered cc details * =</Message>
  </receipt>
</response>
    RESPONSE
  end

  def successful_unstore_response
    <<-RESPONSE
<?xml version="1.0"?>
<response>
  <receipt>
    <DataKey>1234567890</DataKey>
    <ResponseCode>027</ResponseCode>
    <Complete>true</Complete>
    <Message>Successfully deleted cc details * =</Message>
  </receipt>
</response>
    RESPONSE
  end

  def successful_update_response
    <<-RESPONSE
<?xml version="1.0"?>
<response>
  <receipt>
    <DataKey>1234567890</DataKey>
    <ResponseCode>027</ResponseCode>
    <Complete>true</Complete>
    <Message>Successfully updated cc details * =</Message>
  </receipt>
</response>
    RESPONSE
  end

  def xml_purchase_fixture
   '<request><store_id>store1</store_id><api_token>yesguy</api_token><purchase><amount>1.01</amount><pan>4242424242424242</pan><expdate>0303</expdate><crypt_type>7</crypt_type><order_id>order1</order_id></purchase></request>'
  end

  def xml_capture_fixture
   '<request><store_id>store1</store_id><api_token>yesguy</api_token><preauth><amount>1.01</amount><pan>4242424242424242</pan><expdate>0303</expdate><crypt_type>7</crypt_type><order_id>order1</order_id></preauth></request>'
  end
end
| 31.380711 | 225 | 0.716839 |
e8cc56bedb3c1638e54b7702d95ccd355654c986 | 18,259 | class ServiceController < ApplicationController
include Mixins::GenericSessionMixin
include Mixins::GenericShowMixin
include Mixins::BreadcrumbsMixin
before_action :check_privileges
before_action :get_session_data
after_action :cleanup_action
after_action :set_session_data
SERVICE_X_BUTTON_ALLOWED_ACTIONS = {
'service_delete' => :service_delete,
'service_edit' => :service_edit,
'service_ownership' => :service_ownership,
'service_tag' => :service_tag_edit,
'service_retire' => :service_retire,
'service_retire_now' => :service_retire_now,
'service_reconfigure' => :service_reconfigure
}.freeze
def button
case params[:pressed]
when 'generic_object_tag'
tag(GenericObject)
when "custom_button"
@display == 'generic_objects' ? generic_object_custom_buttons : custom_buttons
end
end
def generic_object_custom_buttons
display_options = {}
ids = @lastaction == 'generic_object' ? @sb[:rec_id] : 'LIST'
display_options[:display] = @display
display_options[:record_id] = parse_nodetype_and_id(x_node).last
display_options[:display_id] = params[:id] if @lastaction == 'generic_object'
custom_buttons(ids, display_options)
end
def x_button
generic_x_button(SERVICE_X_BUTTON_ALLOWED_ACTIONS)
end
def title
_("My Services")
end
# Service show selected, redirect to proper controller
def show
@record = Service.find(params[:id].to_i)
@lastaction = "show"
@gtl_url = "/show"
set_display
case @display
when 'generic_objects'
show_generic_object
return
when 'custom_button_events'
display_nested_list(@display)
return
end
unless @explorer
tree_node_id = TreeBuilder.build_node_id(@record)
redirect_to(:controller => "service",
:action => "explorer",
:id => tree_node_id)
return
end
redirect_to(:action => 'show', :controller => @record.class.base_model.to_s.underscore, :id => @record.id)
end
def set_display
@display = params[:display]
@display ||= default_display unless pagination_or_gtl_request?
@display ||= 'generic_objects' if role_allows?(:feature => "generic_object_view") && @record.number_of(:generic_objects).positive?
end
def show_generic_object
if params[:generic_object_id]
show_single_generic_object
else
display_nested_list(@display)
end
end
def show_list
flash_to_session
redirect_to(:action => 'explorer')
end
def explorer
@explorer = true
@lastaction = "explorer"
# if AJAX request, replace right cell, and return
if request.xml_http_request?
replace_right_cell
return
end
x_node_to_set = nil
if params[:id] # Tree node id came in, show it in the tree.
@find_with_aggregates = true
nodetype, id = params[:id].split("-")
x_node_to_set = "#{nodetype}-#{id}"
end
@breadcrumbs.clear if @breadcrumbs.present?
build_accordions_and_trees(x_node_to_set)
params.instance_variable_get(:@parameters).merge!(session[:exp_parms]) if session[:exp_parms] # Grab any explorer parm overrides
session.delete(:exp_parms)
@in_a_form = false
render :layout => "application"
end
def identify_service(id = nil)
@st = @record = identify_record(id || params[:id])
end
# ST clicked on in the explorer right cell
def x_show
identify_service(params[:id])
generic_x_show
end
def service_edit
assert_privileges("service_edit")
@explorer = true
case params[:button]
when "cancel"
service = find_record_with_rbac(Service, params[:id])
add_flash(_("Edit of Service \"%{name}\" was cancelled by the user") % {:name => service.name})
replace_right_cell
when "save", "add"
service = find_record_with_rbac(Service, params[:id])
service_set_record_vars(service)
begin
service.save
rescue => bang
add_flash(_("Error during 'Service Edit': %{message}") % {:message => bang.message}, :error)
else
add_flash(_("Service \"%{name}\" was saved") % {:name => service.name})
end
replace_right_cell(:replace_trees => [:svcs])
when "reset", nil # Reset or first time in
checked = find_checked_items
checked[0] = params[:id] if checked.blank? && params[:id]
@service = find_record_with_rbac(Service, checked[0])
@in_a_form = true
replace_right_cell(:action => "service_edit")
return
end
end
def service_reconfigure
@explorer = true
service = Service.find_by(:id => params[:id])
service_template = service.service_template
resource_action = service_template.resource_actions.find_by(:action => 'Reconfigure') if service_template
@right_cell_text = _("Reconfigure Service \"%{name}\"") % {:name => service.name}
dialog_locals = {:resource_action_id => resource_action.id, :target_id => service.id}
replace_right_cell(:action => "reconfigure_dialog", :dialog_locals => dialog_locals)
end
def service_form_fields
service = Service.find(params[:id])
render :json => {
:name => service.name,
:description => service.description
}
end
# Display a single generic object belonging to the current service.
#
# Looks up the generic object identified by params[:generic_object_id] among
# @record's generic objects, builds the breadcrumb trail down to it, then
# swaps @record for the item so the generic show_item screen renders it.
def show_single_generic_object
  return unless init_show_variables
  @lastaction = 'generic_object'
  # params IDs arrive as strings; compare as integers
  @item = @record.generic_objects.find { |e| e[:id] == params[:generic_object_id].to_i }
  drop_breadcrumb(:name => _("%{name} (All Generic Objects)") % {:name => @record.name}, :url => show_link(@record, :display => 'generic_objects'))
  drop_breadcrumb(:name => @item.name, :url => show_link(@record, :display => 'generic_objects', :generic_object_id => params[:generic_object_id]))
  @view = get_db_view(GenericObject)
  @sb[:rec_id] = params[:generic_object_id]
  @record = @item # show_item renders @record, so point it at the generic object
  show_item
end
# Nested-list displays supported by this controller's show screens.
def self.display_methods
  ["generic_objects", "custom_button_events"]
end
# Render the nested list of generic objects for the current record.
def display_generic_objects
  nested_list(GenericObject)
end
private
# Model class this controller operates on (used by shared controller mixins).
def record_class
  Service
end
# Escape command output so it can be embedded safely in generated
# JavaScript/HTML templates: double quotes, single quotes and mustache-style
# braces are backslash-escaped. Already-escaped single quotes are normalized
# first so they don't get double-escaped.
def sanitize_output(stdout)
  escaped = stdout.gsub('"', '\"')
  replacements = [
    [/\\'/, "'"],     # un-escape \' so the next rule doesn't double it
    [/'/, "\\\\'"],   # escape every single quote
    [/{{/, '\{\{'],   # neutralize mustache open
    [/}}/, '\}\}']    # neutralize mustache close
  ]
  replacements.each do |pattern, replacement|
    escaped = escaped.gsub(pattern, replacement)
  end
  escaped
end
helper_method :sanitize_output
# Textual summary groups for the show screen, chosen by record type:
# generic objects, Ansible-based services and plain services each get a
# different layout. Returns an array of column arrays of group ids.
def textual_group_list
  if @item && @item.kind_of?(GenericObject)
    [%i[go_properties attribute_details_list methods go_relationships]]
  elsif %w[ServiceAnsiblePlaybook ServiceAnsibleTower].include?(@record.type)
    [%i[properties miq_custom_attributes], %i[lifecycle tags generic_objects]]
  else
    [%i[properties lifecycle relationships generic_objects miq_custom_attributes], %i[vm_totals tags]]
  end
end
helper_method :textual_group_list
# Summary groups (two columns) for the provisioning tab.
def textual_provisioning_group_list
  [%i[provisioning_results provisioning_plays], %i[provisioning_details provisioning_credentials]]
end
helper_method :textual_provisioning_group_list
# Summary groups (two columns) for the retirement tab.
def textual_retirement_group_list
  [%i[retirement_results retirement_plays], %i[retirement_details retirement_credentials]]
end
helper_method :textual_retirement_group_list
# Summary groups (two columns) for the Ansible Tower job tab.
def textual_tower_job_group_list
  [%i[tower_job_results tower_job_plays], %i[tower_job_details tower_job_credentials]]
end
helper_method :textual_tower_job_group_list
# Explorer navigation features for this controller: a single Services
# accordion, available to any role that grants "service".
def features
  [
    {
      :role     => "service",
      :role_any => true,
      :name     => :svcs,
      :title    => _("Services")
    }
  ].map { |hsh| ApplicationController::Feature.new_with_hash(hsh) }
end
# Open the Set Ownership screen for the selected service(s) in the explorer.
def service_ownership
  @explorer = true
  set_ownership
  replace_right_cell(:action => 'ownership')
end
# Open the tag-editing screen for the selected service(s) in the explorer.
def service_tag_edit
  @explorer = true
  service_tag
  replace_right_cell(:action => 'tag')
end
# Open the set/remove retirement date screen for the selected service(s).
def service_retire
  @explorer = true
  retirevms
  replace_right_cell(:action => 'retire')
end
# Immediately retire the selected service(s). replace_right_cell is
# intentionally not called here — presumably retirevms_now renders its own
# response; confirm against the shared retirement mixin.
def service_retire_now
  @explorer = true
  retirevms_now
end
# Copy the editable form fields from params onto the service record.
# Only attributes actually present in params are assigned.
def service_set_record_vars(svc)
  %i[name description].each do |attr|
    value = params[attr]
    svc.public_send("#{attr}=", value) if value
  end
end
# Delete one or more services.
#
# Invoked either from a service's details page (params[:id] set) or from a
# list with checkboxes. Afterwards the Services tree is refreshed.
def service_delete
  assert_privileges("service_delete")
  @explorer = true
  elements = []
  if params[:id] # delete service from its details page
    elements.push(params[:id])
    deleted_service = Service.find(params[:id].to_i) # service which is going to be deleted
    process_elements(elements, Service, 'destroy') unless elements.empty?
    self.x_node = deleted_service.retired ? "xx-rsrv" : "xx-asrv" # set x_node for returning to Active or Retired Services
  else # delete chosen service(s), selected by checking the appropriate checkbox(es)
    elements = find_checked_items
    if elements.empty?
      add_flash(_("No Services were selected for deletion"), :error)
    end
    process_elements(elements, Service, 'destroy') unless elements.empty?
  end
  params[:id] = nil # clear so the follow-up render shows the list, not the deleted record
  replace_right_cell(:replace_trees => [:svcs])
end
# Display name used for a service record in flash messages and titles.
def get_record_display_name(record)
  record.name
end
# Get all info for the node about to be displayed.
#
# treenodeid encodes a node-type prefix and an id; depending on the type this
# shows a single service, a filtered list (Active/Retired pseudo-folders), or
# the full/filtered list, and sets @right_cell_text accordingly.
def get_node_info(treenodeid, _show_list = true)
  @nodetype, id = parse_nodetype_and_id(valid_active_node(treenodeid))
  # resetting action that was stored during edit to determine what is being edited
  @sb[:action] = nil
  # Set session properly - the same as when the filter is cleared
  # No need to edit session here again if adv_search_clear was called
  listnav_search_selected(0) if session[:adv_search] && %w[adv_search_button adv_search_clear x_search_by_name].exclude?(params[:action])
  @edit = session[:edit]
  case TreeBuilder.get_model_for_prefix(@nodetype)
  when "Service"
    show_record(id)
    drop_breadcrumb(:name => _('Services'), :url => '/service/explorer') if @breadcrumbs.empty?
    @right_cell_text = _("Service \"%{name}\"") % {:name => @record.name}
    @no_checkboxes = true
    @gtl_type = "grid"
    @items_per_page = ONE_MILLION # effectively disables paging for the VM grid below
    @view, @pages = get_view(Vm, :parent => @record, :parent_method => :all_vms, :all_pages => true) # Get the records (into a view) and the paginator
  when "Hash"
    # pseudo-folder nodes: active ('asrv') vs retired ('rsrv') services
    case id
    when 'asrv'
      process_show_list(:named_scope => [[:retired, false], :displayed])
      @right_cell_text = _("Active Services")
    when 'rsrv'
      process_show_list(:named_scope => %i[retired displayed])
      @right_cell_text = _("Retired Services")
    end
  when "MiqSearch", nil # nil if applying a filter from Advanced search - and @nodetype is root
    load_adv_search unless @nodetype == "root" || %w[saveit].include?(params[:button]) # Select/load filter from Global/My Filters
    process_show_list
    @right_cell_text = _("All Services")
  end
  @right_cell_text += _(" (Names with \"%{search_text}\")") % {:search_text => @search_text} if @search_text.present? && @nodetype != 's'
  @right_cell_text += @edit[:adv_search_applied][:text] if x_tree && @edit && @edit[:adv_search_applied]
end
# Select/load filter from Global/My Filters.
#
# Rebuilds the advanced-search state for services; when the active node is a
# saved MiqSearch ("ms") node the filter is applied and, for quick searches
# triggered by a tree reload/select, the quick-search dialog is shown.
def load_adv_search
  adv_search_build("Service")
  session[:edit] = @edit
  @explorer = true
  @nodetype, id = parse_nodetype_and_id(valid_active_node(x_node))
  if @nodetype == "ms"
    listnav_search_selected(id) unless params.key?(:search_text)
    if @edit[:adv_search_applied] &&
       MiqExpression.quick_search?(@edit[:adv_search_applied][:exp]) &&
       %w[reload tree_select].include?(params[:action])
      self.x_node = params[:id]
      quick_search_show
    end
  end
end
# Set partial name, form action URL and cell header for the edit screens.
#
# Returns [partial, action, header]; all three come back nil for actions not
# listed here.
def set_right_cell_vars(action)
  case action
  when "dialog_provision"
    partial = "shared/dialogs/dialog_provision"
    header = @right_cell_text
    action = "dialog_form_button_pressed"
  when "ownership"
    partial = "shared/views/ownership"
    header = _("Set Ownership for Service")
    action = "ownership_update"
  when "retire"
    partial = "shared/views/retire"
    header = _("Set/Remove retirement date for Service")
    action = "retire"
  when "reconfigure_dialog"
    partial = "shared/dialogs/reconfigure_dialog"
    header = @right_cell_text
    action = nil # the dialog submits through its own endpoint
  when "service_edit"
    partial = "service_form"
    header = _("Editing Service \"%{name}\"") % {:name => @service.name}
    action = "service_edit"
  when "tag", 'service_tag'
    partial = "layouts/tagging"
    header = _("Edit Tags for Service")
    action = "service_tag"
  else
    action = nil # partial and header fall through as nil
  end
  return partial, action, header
end
# Replace the right cell of the explorer.
#
# options:
#   :action        - edit/dialog screen to render (see set_right_cell_vars)
#   :replace_trees - trees to rebuild (e.g. %i[svcs])
#   :dialog_locals - locals forwarded to dialog partials
#
# Builds an ExplorerPresenter describing the divs, toolbars and buttons to
# update and renders it as JSON for the client-side explorer.
def replace_right_cell(options = {})
  if @flash_array && @refresh_partial == "layouts/flash_msg"
    javascript_flash
    return
  end
  action, replace_trees = options.values_at(:action, :replace_trees)
  @explorer = true
  partial, action_url, @right_cell_text = set_right_cell_vars(action) if action # Set partial name, action and cell header
  get_node_info(x_node) if !action && !@in_a_form && !params[:display]
  replace_trees = @replace_trees if @replace_trees # get_node_info might set this
  type, _ = parse_nodetype_and_id(x_node)
  record_showing = type && ["Service"].include?(TreeBuilder.get_model_for_prefix(type))
  # Build toolbars only when showing a plain record/list (not a form)
  if x_active_tree == :svcs_tree && !@in_a_form && !@sb[:action]
    if record_showing && @sb[:action].nil?
      cb_tb = build_toolbar(Mixins::CustomButtons::Result.new(:single))
    else
      cb_tb = build_toolbar(Mixins::CustomButtons::Result.new(:list))
      v_tb = build_toolbar("x_gtl_view_tb")
    end
    c_tb = build_toolbar(center_toolbar_filename)
  end
  presenter = ExplorerPresenter.new(
    :active_tree => x_active_tree,
    :right_cell_text => @right_cell_text
  )
  reload_trees_by_presenter(presenter, build_replaced_trees(replace_trees, %i[svcs]))
  # Replace right cell divs
  presenter.update(
    :main_div,
    if %w[dialog_provision ownership retire service_edit tag service_tag reconfigure_dialog].include?(action)
      r[:partial => partial, :locals => options[:dialog_locals]]
    elsif params[:display]
      r[:partial => 'layouts/x_gtl', :locals => {:controller => "vm", :action_url => @lastaction}]
    elsif record_showing
      @selected_ids = [] # FIXME: hack to hide checkboxes
      r[:partial => "service/svcs_show", :locals => {:controller => "service"}]
    else
      r[:partial => "layouts/x_gtl"]
    end
  )
  # Form-button and paging visibility depends on which screen is shown
  if %w[ownership tag service_tag].include?(action)
    presenter.show(:form_buttons_div).remove_paging.hide(:toolbar).show(:paging_div)
    if %w[tag service_tag].include?(action)
      locals = {:action_url => action_url}
      locals[:multi_record] = true # need save/cancel buttons on edit screen even tho @record.id is not there
      locals[:record_id] = @sb[:rec_id] || @edit[:object_ids] && @edit[:object_ids][0]
    elsif action == "ownership"
      locals = {:action_url => action_url}
      locals[:multi_record] = true
      presenter.update(:form_buttons_div, r[:partial => "layouts/angular/paging_div_buttons"])
    else
      locals = {:record_id => @edit[:rec_id], :action_url => action_url}
      presenter.update(:form_buttons_div, r[:partial => "layouts/x_edit_buttons", :locals => locals])
    end
  elsif (action != "retire") && (record_showing ||
        (@pages && (@items_per_page == ONE_MILLION || @pages[:items] == 0)))
    # Added so buttons can be turned off even tho div is not being displayed it still pops up Abandon changes box
    # when trying to change a node on tree after saving a record
    presenter.hide(:form_buttons_div, :paging_div).show(:toolbar)
  else
    presenter.hide(:form_buttons_div).show(:toolbar, :paging_div)
  end
  # Clear the JS gtl_list_grid var if changing to a type other than list
  presenter[:clear_gtl_list_grid] = @gtl_type && @gtl_type != 'list'
  presenter.reload_toolbars(:center => c_tb, :view => v_tb, :custom => cb_tb)
  presenter.set_visibility(c_tb.present? || v_tb.blank?, :toolbar)
  presenter[:record_id] = determine_record_id_for_presenter
  presenter[:lock_sidebar] = @edit && @edit[:current]
  presenter[:osf_node] = x_node
  # Hide/show searchbox depending on if a list is showing
  presenter.set_visibility(!(@record || @in_a_form), :adv_searchbox_div)
  presenter[:clear_search_toggle] = clear_search_status
  presenter.update(:breadcrumbs, r[:partial => 'layouts/breadcrumbs'])
  render :json => presenter.for_render
end
# Load a service record (by id, or from params via identify_service) for the
# show screens and fetch its tag data; bails out early if the record no
# longer exists.
def show_record(id = nil)
  @display = params[:display] || "main" unless pagination_or_gtl_request?
  @lastaction = "show"
  @showtype = "config"
  identify_service(id)
  return if record_no_longer_exists?(@record)
  get_tagdata(@record)
end
# Overwriting from application controller
#
# FIXME: Find a more graceful way of adding .with_aggregates to the @record
def find_record_with_rbac(db, id)
  # @find_with_aggregates is set by callers that need aggregate columns loaded
  options = @find_with_aggregates ? { :named_scope => :with_aggregates } : {}
  super(db, id, options)
end
# True when tagging should use the explorer UI.
def tagging_explorer_controller?
  # this controller behaves explorer-like for services and non-explorer-like for GO
  @tagging == 'Service'
end
# Restore controller state from the session (layout plus provisioning options).
def get_session_data
  super
  @layout = "services"
  @options = session[:prov_options]
end
# Persist controller state back to the session; provisioning options are only
# written when present so an unset value doesn't clobber the stored one.
def set_session_data
  super
  session[:prov_options] = @options if @options
end
# Breadcrumb configuration for this controller.
#
# The "My services" crumb links back to the explorer only from the generic
# objects list; record info is suppressed on the generic objects list itself.
def breadcrumbs_options
  {
    :breadcrumbs => [
      {:title => _("Services")},
      {:title => _("My services"), :url => (url_for(:action => 'explorer', :controller => controller_name) if generic_objects_list?)},
    ],
    :record_info => (hide_record_info? ? {} : @service),
    :ancestry => Service,
    :not_tree => generic_objects_list?,
  }
end
# True when the current request displays the generic objects list.
def generic_objects_list?
  params[:display] == 'generic_objects'
end
# Hide record info on the generic objects list, but not when a single
# generic object is being shown.
def hide_record_info?
  generic_objects_list? && !params[:generic_object_id]
end
menu_section :svc
has_custom_buttons
end
| 33.258652 | 152 | 0.675502 |
795aed22b96704fb082db4d5ecdc1da97237d4bf | 59 | class Transaction < Sequel::Model
one_to_one :ride
end | 14.75 | 33 | 0.745763 |
ac6c9cb7f612ed89f4046b57e3e6d1b35daf9e43 | 315 | require "spec_helper"
# Spec for SolidusSixPayments::Terminal#to_hash.
RSpec.describe SolidusSixPayments::Terminal do
  subject { described_class.new }

  # Expected serialized form of a terminal.
  # NOTE(review): the TerminalID appears hard-coded to match the class's
  # default — confirm against SolidusSixPayments::Terminal.
  let(:hash) {
    {
      "TerminalID": '17925560'
    }
  }

  describe 'to_hash' do
    it 'returns the hash representation of the terminal id' do
      expect(subject.to_hash).to eq(hash)
    end
  end
end
| 17.5 | 62 | 0.666667 |
bfa9277b9a817c220fc7f45c3b459a6c9203aed7 | 620 | module Intrigue
module Ident
module Check
class ManageEngine < Intrigue::Ident::Check::Base
def generate_checks(url)
[
{
:type => "fingerprint",
:category => "application",
:tags => [""],
:vendor =>"ManageEngine",
:product =>"ServiceDesk Plus",
:match_details =>"cookie",
:version => nil,
:match_type => :content_cookies,
:match_content => /SDPSESSIONID=/i,
:paths => ["#{url}"],
:inference => false
}
]
end
end
end
end
end
| 22.142857 | 53 | 0.469355 |
017fc1172a7f770463a904fbc6a78ce99682ba75 | 242 | Sequel.migration do
# Add a required foreign key from discovery_response_services to
# sp_sso_descriptors; the operation is reversible, so `change` suffices.
change do
  alter_table :discovery_response_services do
    add_foreign_key :sp_sso_descriptor_id, :sp_sso_descriptors, null: false,
                    foreign_key_constraint_name: 'sp_drs_fkey'
  end
end
end
| 26.888889 | 78 | 0.719008 |
ffe58eef0d34e171274ade728afc6098d7743a84 | 2,195 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want to change the content of this file, edit
#
# /spec/fixtures/responses/whois.nic.ki/status_registered.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/record/parser/whois.nic.ki.rb'
# Generated spec: asserts each parsed property of the status_registered
# fixture for the whois.nic.ki parser (see header — do not edit manually).
describe Whois::Record::Parser::WhoisNicKi, "status_registered.expected" do
  before(:each) do
    file = fixture("responses", "whois.nic.ki/status_registered.txt")
    part = Whois::Record::Part.new(:body => File.read(file))
    @parser = klass.new(part)
  end

  describe "#domain" do
    it do
      @parser.domain.should == "google.ki"
    end
  end
  describe "#domain_id" do
    it do
      lambda { @parser.domain_id }.should raise_error(Whois::PropertyNotSupported)
    end
  end
  describe "#status" do
    it do
      @parser.status.should == :registered
    end
  end
  describe "#available?" do
    it do
      @parser.available?.should == false
    end
  end
  describe "#registered?" do
    it do
      @parser.registered?.should == true
    end
  end
  describe "#created_on" do
    it do
      @parser.created_on.should be_a(Time)
      @parser.created_on.should == Time.parse("2006-05-15")
    end
  end
  describe "#updated_on" do
    it do
      @parser.updated_on.should be_a(Time)
      @parser.updated_on.should == Time.parse("2010-03-21")
    end
  end
  describe "#expires_on" do
    it do
      @parser.expires_on.should be_a(Time)
      @parser.expires_on.should == Time.parse("2010-11-27")
    end
  end
  describe "#nameservers" do
    it do
      @parser.nameservers.should be_a(Array)
      @parser.nameservers.should have(4).items
      @parser.nameservers[0].should be_a(_nameserver)
      @parser.nameservers[0].name.should == "ns1.google.com"
      @parser.nameservers[1].should be_a(_nameserver)
      @parser.nameservers[1].name.should == "ns2.google.com"
      @parser.nameservers[2].should be_a(_nameserver)
      @parser.nameservers[2].name.should == "ns3.google.com"
      @parser.nameservers[3].should be_a(_nameserver)
      @parser.nameservers[3].name.should == "ns4.google.com"
    end
  end
end
| 26.768293 | 82 | 0.668793 |
ac272f71aefd72a175a781decf185c4989f4d39a | 866 | class Clac < Formula
desc "Command-line, stack-based calculator with postfix notation"
homepage "https://github.com/soveran/clac"
url "https://github.com/soveran/clac/archive/0.3.3.tar.gz"
sha256 "e751e31bd2d3cdf6daa80da0ea7761630767aa22df6954df35997d1fcc5fa8ae"
license "BSD-2-Clause"
bottle do
cellar :any_skip_relocation
sha256 "8582b92bdb32ca857fc3b8a5a33d0f19797edc1d1331ef79ffd9dc86ced96303" => :big_sur
sha256 "afa69f51c57695ae2e01a3ad9a44690fa746e925129ab2809550811cafe0fb3f" => :catalina
sha256 "af0e4194b84f969e0a701f32c102dd64a432f6f675e6913716b81b96e548d451" => :mojave
sha256 "5be5ed82ad0b8669aef16942e39e9d60341ab6201ee019b9b997ab5ee4ea76b1" => :high_sierra
end
# Build and install via the project's Makefile, pointing it at our prefix.
def install
  system "make", "PREFIX=#{prefix}", "install"
end
# Smoke test: "3 4 +" in postfix notation evaluates to 7.
test do
  assert_equal "7", shell_output("#{bin}/clac '3 4 +'").strip
end
end
| 36.083333 | 93 | 0.778291 |
e89371aa2a9607a333c58803a98c98a3878a22ba | 4,709 | # frozen_string_literal: true
# HESA degree-type code set: maps HESA DEGTYPE codes to display names.
module Hesa
  module CodeSets
    module DegreeTypes
      # https://www.hesa.ac.uk/collection/c21053/xml/c21053/c21053codelists.xsd
      # Frozen lookup table so the code set cannot be mutated at runtime.
      MAPPING = {
        "001" => "BEd",
        "002" => "BEd (Hons)",
        "003" => "BSc/Education",
        "004" => "BSc Hons /Education",
        "005" => "BTech/Education",
        "006" => "BTech (Hons) /Education",
        "007" => "BA/Education",
        "008" => "BA (Hons) /Education",
        "009" => "BA Combined Studies/Education of the Deaf",
        "010" => "BA (Hons) Combined Studies/Education of the Deaf",
        "012" => "BA with intercalated PGCE",
        "013" => "BSc (Hons) with intercalated PGCE",
        "014" => "BA (Hons) with intercalated PGCE",
        "051" => "Bachelor of Arts(BA)",
        "052" => "Bachelor of Arts Economics(BA ECON)",
        "053" => "Bachelor of Arts in Architecture(BA ARCH)",
        "054" => "Bachelor of the Art of Obstetrics (BAO)",
        "055" => "Bachelor of Architecture(BA ARCH)",
        "056" => "Bachelor of Applied Science(B ASC)",
        "057" => "Bachelor of Agriculture(B AG)",
        "058" => "Bachelor of Accountancy(B ACC)",
        "059" => "Bachelor of Administration(B ADMIN)",
        "060" => "Bachelor of Business Administration(BBA)",
        "061" => "Bachelor of Combined Studies (BCS)",
        "062" => "Bachelor of Commerce(B COMM)",
        "063" => "Bachelor of Divinity(BD)",
        "064" => "Bachelor of Dental Surgery(BDS)",
        "065" => "Bachelor of Engineering(B ENG)",
        "066" => "Bachelor of Engineering with Business Studies(B ENG/BS)",
        "067" => "Bachelor of Fine Art(BFA)",
        "068" => "Bachelor of General Studies(BGS)",
        "069" => "Bachelor of Humanities (BH)",
        "070" => "Bachelor of Hygiene (B HYG)",
        "071" => "Bachelor of Law(LLB)",
        "072" => "Bachelor of Librarianship (B LIB)",
        "073" => "Bachelor of Librarianship and Info. Studies(BLS)",
        "074" => "Bachelor of Literature (B LITT)",
        "075" => "Bachelor of Land Economy(BL ECON)",
        "076" => "Bachelor of Medical Science(B MED SC)",
        "077" => "Bachelor of Medicine(BM)",
        "078" => "Bachelor of Metallurgy (B MET)",
        "079" => "Bachelor of Metallurgy and Engineering(B MET/EN)",
        "080" => "Bachelor of Music (B MU)",
        "081" => "Bachelor of Nursing (BN)",
        "082" => "Bachelor of Pharmacy(B PHARM)",
        "083" => "Bachelor of Science (B SC)",
        "084" => "Bachelor of Science Economics (BSC/ECO)",
        "085" => "Bachelor of Science & Engineering (BSC/ENG)",
        "086" => "Bachelor of Science & Technology (BSC/TEC)",
        "087" => "Bachelor of Science in Social Science (BSC SS)",
        "088" => "Bachelor of Science in Speech Therapy (BSC SPT)",
        "089" => "Bachelor of Social Science (BSS)",
        "090" => "Bachelor of Surgery (BS)",
        "091" => "Bachelor of Chirurgiae (B CH)",
        "092" => "Bachelor of Technology (B TECH)",
        "093" => "Bachelor of Theology (B THEOL)",
        "094" => "Bachelor of Veterinary Medicine (B VMED)",
        "095" => "Bachelor of Veterinary Medicine and Surgery (BVMS)",
        "096" => "Bachelor of Veterinary Science (B VSC)",
        "097" => "Bachelor of Education Scotland & N Ireland (B ED)",
        "098" => "Bachelor of Philosophy (B PHIL)",
        "200" => "Master of Arts (MA)",
        "201" => "Master of Librarianship (M LIB)",
        "202" => "Master of Literature (M LITT)",
        "203" => "Master of Music (M MUS)",
        "204" => "Master of Philosophy (M PHIL)",
        "205" => "Master of Science (M SC)",
        "206" => "Master of Theology (M THEOL)",
        "207" => "Certificate of Membership of Cranfield Institute of Technology",
        "208" => "Master of Education (M ED)",
        "209" => "Master of Business Studies (M BS)",
        "210" => "Master of Social Studies (M SS)",
        "211" => "Master of Engineering (M ENG)",
        "212" => "Master of Law (M LAW)",
        "213" => "Master of Business Administration (MBA)",
        "214" => "Master of Chemistry (M CHEM)",
        "215" => "Master of Physics (M PHYS)",
        "300" => "Doctor of Divinity (DD)",
        "301" => "Doctor of Civil Law (DCL)",
        "302" => "Doctor of Medicine (MD)",
        "303" => "Doctor of Music (D MU)",
        "304" => "Doctor of Science (D SC)",
        "305" => "Doctor of Philosophy (D PHIL)",
        "306" => "PhD (PHD)",
        "400" => "First Degree",
        "401" => "Higher Degree",
        "402" => "Degree equivalent (inc. foreign qualifications)",
        "999" => "Unknown",
      }.freeze
    end
  end
end
| 46.623762 | 82 | 0.543852 |
ac2f04678c6837c259a6d65b9e32b67c4581dfed | 1,971 | class Pgpdump < Formula
desc "PGP packet visualizer"
homepage "https://www.mew.org/~kazu/proj/pgpdump/en/"
url "https://github.com/kazu-yamamoto/pgpdump/archive/v0.33.tar.gz"
sha256 "fe580ef43f651da59816c70f38f177ea4fa769d64e3d6883a9d1f661bb0a6952"
license "BSD-3-Clause"
head "https://github.com/kazu-yamamoto/pgpdump.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "e2be885245f927b5cc3203bff8af2f063d60853d2c7627100ef46f8cd4eb730d"
sha256 cellar: :any_skip_relocation, big_sur: "c87222c16b88f4f1a34504d503eb7bebd6559da8029cd4cd374d27bb694cbc88"
sha256 cellar: :any_skip_relocation, catalina: "60bbe481621cc653edc834b9d54b910deb3c1634cc7155dd1e9aca9e3f207ca4"
sha256 cellar: :any_skip_relocation, mojave: "8141ac85359c7be7ac5ef51075823612582ecd0e02f0048cace4b4bae2217771"
sha256 cellar: :any_skip_relocation, high_sierra: "2d5ad982f29c20cad30f5a90d4fcd8af3d369432e2c4ab4f35fcfa3b31712a1f"
sha256 cellar: :any_skip_relocation, sierra: "9c2ed5f4eb7e0c833a90d53fc8d96d613b781b36c3524959fa102ae62a4d167e"
sha256 cellar: :any_skip_relocation, el_capitan: "1cfd7cb5b0cdbc7e70031841d7efb1196ddbbd6f11f5af3cce4b38b6f7358ae2"
sha256 cellar: :any_skip_relocation, x86_64_linux: "8aff5b4553f8db403d33fac7202a3d59cfab9f19bf6b630736eef81453c33913"
end
# Configure with our prefix, then build and install.
def install
  system "./configure", "--prefix=#{prefix}"
  system "make", "install"
end
test do
(testpath/"sig.pgp").write <<~EOS
-----BEGIN PGP MESSAGE-----
Version: GnuPG v1.2.6 (NetBSD)
Comment: For info see https://www.gnupg.org
owGbwMvMwCSYq3dE6sEMJU7GNYZJLGmZOanWn4xaQzIyixWAKFEhN7W4ODE9VaEk
XyEpVaE4Mz0vNUUhqVIhwD1Aj6vDnpmVAaQeZogg060chvkFjPMr2CZNmPnwyebF
fJP+td+b6biAYb779N1eL3gcHUyNsjliW1ekbZk6wRwA
=+jUx
-----END PGP MESSAGE-----
EOS
output = shell_output("#{bin}/pgpdump sig.pgp")
assert_match("Key ID - 0x6D2EC41AE0982209", output)
end
end
| 46.928571 | 122 | 0.775748 |
21ed4ff96cec2fdce20919d6fa1b73f8400eac8a | 1,040 | $:.push File.expand_path("../lib", __FILE__)
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name        = "proxy_manager"
  s.version     = '1.0.1'
  s.authors     = ["Kirill Platonov"]
  s.licenses    = ['MIT']
  s.email       = ["[email protected]"]
  s.homepage    = "https://github.com/bloodyhistory/proxy_manager"
  s.summary     = "This is gem for easy usage proxy in your parsers/web-bots."
  s.description = <<-DESCRIPTION
    Proxy manager for easy usage proxy in your parsers/web-bots
  DESCRIPTION

  # Package every tracked file except repo housekeeping files.
  s.files = `git ls-files`.split("\n") - %w[.gitignore .travis.yml]
  s.test_files = Dir["spec/**/*"]

  # Development-only dependencies (test runner, guards, docs).
  s.add_development_dependency 'turn', '~> 0.9', '>= 0.9.7'
  s.add_development_dependency 'rspec', '~> 2.14', '>= 2.14.1'
  s.add_development_dependency 'guard-rspec', '~> 4.2', '>= 4.2.8'
  s.add_development_dependency 'growl', '~> 1.0', '>= 1.0.3'
  s.add_development_dependency 'fuubar', '~> 1.3', '>= 1.3.2'
  s.add_development_dependency 'yard', '~> 0.8', '>= 0.8.7.4'
end
| 40 | 78 | 0.630769 |
f755fb1b75fd1b91e13f9812b6704ca7b8049c2e | 155 | json.array!(@auditoria) do |auditorium|
json.extract! auditorium, :id, :name, :seating_capacity
json.url auditorium_url(auditorium, format: :json)
end
| 31 | 57 | 0.754839 |
acfa86a7c8412bc33ff8b79455250cfd27705f4d | 91 | require 'gather_content/error/request_error'
# Namespace module for GatherContent API error classes.
module GatherContent
  module Error
  end
end
| 15.166667 | 44 | 0.824176 |
189233a7c9d1b859f6e66c6d1acc0351752d2e4a | 23 | class DecOperations
end | 11.5 | 19 | 0.913043 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.