# ---- f7197a762be2f4aa838e004763b62fd2a2d4fdb2 (4,305 bytes) ----
class Opendata::Dataset
include Cms::Model::Page
include ::Workflow::Addon::Approver
include Opendata::Addon::Resource
include Opendata::Addon::UrlResource
include Opendata::Addon::Category
include Opendata::Addon::Area
include Opendata::Addon::DatasetGroup
include Opendata::Reference::Member
include Opendata::Common
include Opendata::Addon::CmsRef::Page
include Cms::Addon::Release
include Contact::Addon::Page
include Cms::Addon::RelatedPage
include Cms::Addon::GroupPermission
include Workflow::MemberPermission
include Opendata::DatasetSearchable
include Opendata::DatasetTemplateVariables
set_permission_name "opendata_datasets"
scope :formast_is, ->(word, *fields) {
options = fields.extract_options!
method = options[:method].presence || 'and'
operator = method == 'and' ? "$and" : "$or"
where(operator => [{ "$or" => fields.map { |field| { field => word.to_s } } } ])
}
scope :license_is, ->(id, *fields) {
options = fields.extract_options!
method = options[:method].presence || 'and'
operator = method == 'and' ? "$and" : "$or"
where(operator => [{ "$or" => fields.map { |field| { field => id.to_i } } } ])
}
set_permission_name "opendata_datasets"
field :text, type: String
field :point, type: Integer, default: "0"
field :tags, type: SS::Extensions::Words
field :downloaded, type: Integer
has_many :points, primary_key: :dataset_id, class_name: "Opendata::DatasetPoint",
dependent: :destroy
has_many :apps, foreign_key: :dataset_ids, class_name: "Opendata::App"
has_many :ideas, foreign_key: :dataset_ids, class_name: "Opendata::Idea"
validates :text, presence: true
permit_params :text, :tags, tags: []
before_save :seq_filename, if: ->{ basename.blank? }
after_save :on_state_changed, if: ->{ state_changed? }
default_scope ->{ where(route: "opendata/dataset") }
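# Public URLs for the dataset's point, point-members, related-apps and related-ideas sub pages.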
def point_url
get_url(url, "/point.html")
end
def point_members_url
get_url(url, "/point/members.html")
end
def dataset_apps_url
get_url(url, "/apps/show.html")
end
def dataset_ideas_url
get_url(url, "/ideas/show.html")
end
def contact_present?
return false if member_id.present?
super
end
private
def validate_filename
@basename.blank? ? nil : super
end
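# Assigns a sequential filename ("<id>.html", prefixed with dirname when present) when no basename was given.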
def seq_filename
self.filename = dirname ? "#{dirname}#{id}.html" : "#{id}.html"
end
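# Notifies attached resources and url_resources when the page state changes.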
def on_state_changed
resources.each do |r|
r.try(:state_changed)
end
url_resources.each do |r|
r.try(:state_changed)
end
end
class << self
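# Strips a known sub-page suffix (/point.html, /point/members.html, /apps/show.html, /ideas/show.html)
# and returns the dataset page path ending in ".html"; returns the path unchanged when no suffix matches.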
def to_dataset_path(path)
suffix = %w(/point.html /point/members.html /apps/show.html /ideas/show.html).find { |suffix| path.end_with? suffix }
return path if suffix.blank?
path[0..(path.length - suffix.length - 1)] + '.html'
end
def sort_options
[
[I18n.t("opendata.sort_options.released"), "released"],
[I18n.t("opendata.sort_options.popular"), "popular"],
[I18n.t("opendata.sort_options.attention"), "attention"]
]
end
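# Maps a sort key ("released", "popular", "attention", or a custom "<field> <direction>" string) to a Mongoid sort hash.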
def sort_hash(sort)
case sort
when "released"
{ released: -1, _id: -1 }
when "popular"
{ point: -1, _id: -1 }
when "attention"
{ downloaded: -1, _id: -1 }
else
return { released: -1 } if sort.blank?
{ sort.sub(/ .*/, "") => (sort =~ /-1$/ ? -1 : 1) }
end
end
def aggregate_field(name, opts = {})
Opendata::Common.get_aggregate_field(self, name, opts)
end
def aggregate_array(name, opts = {})
Opendata::Common.get_aggregate_array(self, name, opts)
end
def aggregate_resources(name, opts = {})
Opendata::Common.get_aggregate_resources(self, name, opts)
end
def get_tag_list(query)
Opendata::Common.get_tag_list(self, query)
end
def get_tag(tag_name)
Opendata::Common.get_tag(self, tag_name)
end
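# Aggregates the distinct resource formats across all datasets and returns [label, value] pairs for select options.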
def format_options
pipes = []
pipes << { "$match" => { "route" => "opendata/dataset" } }
pipes << { "$unwind" => "$resources" }
pipes << { "$group" => { "_id" => "$resources.format", "count" => { "$sum" => 1 } } }
self.collection.aggregate(pipes).map do |data|
format = data["_id"]
[format, format]
end
end
end
end
# ---- 6a03f0c3df4c963c113cda2ec6cf87357ef5948b (131 bytes) ----
class RenameNameAddress < ActiveRecord::Migration
def change
rename_column :spree_addresses, :full_name, :user_name
end
end
# ---- 019f208903bad49eb2ae13ed7c769c615e0aa95a (26,302 bytes) ----
require 'spec_helper'
require 'digest/md5'
describe 'nginx::resource::location' do
let :title do
'rspec-test'
end
let :facts do
{
:osfamily => 'Debian',
:operatingsystem => 'debian',
}
end
let :pre_condition do
[
'include ::nginx::params',
'include ::nginx::config',
]
end
describe 'os-independent items' do
describe 'basic assumptions' do
let :params do {
:www_root => "/var/www/rspec",
:vhost => 'vhost1',
} end
it { should contain_class("nginx::params") }
it { should contain_class("nginx::config") }
it { should contain_concat__fragment("f25e14942fb58942ee13b1465a4e1719").with_content(/location rspec-test/) }
it { should_not contain_file('/etc/nginx/fastcgi_params') }
it { should_not contain_concat__fragment("vhost1-800-rspec-test-ssl") }
it { should_not contain_file("/etc/nginx/rspec-test_htpasswd") }
end
describe "vhost/location_header template content" do
[
{
:title => 'should set the location',
:attr => 'location',
:value => 'my_location',
:match => ' location my_location {',
},
{
:title => 'should not set internal',
:attr => 'internal',
:value => false,
:notmatch => /internal;/
},
{
:title => 'should set internal',
:attr => 'internal',
:value => true,
:match => ' internal;'
},
{
:title => 'should set location_allow',
:attr => 'location_allow',
:value => %w( 127.0.0.1 10.0.0.1 ),
:match => [
' allow 127.0.0.1;',
' allow 10.0.0.1;',
],
},
{
:title => 'should set location_deny',
:attr => 'location_deny',
:value => %w( 127.0.0.1 10.0.0.1 ),
:match => [
' deny 127.0.0.1;',
' deny 10.0.0.1;',
],
},
{
:title => 'should contain ordered prepended directives',
:attr => 'location_cfg_prepend',
:value => { 'test1' => 'test value 1', 'test2' => ['test value 2a', 'test value 2b'],
'test3' => { 'subtest1' => ['"sub test value1a"', '"sub test value1b"'],
'subtest2' => '"sub test value2"' } },
:match => [
' test1 test value 1;',
' test2 test value 2a;',
' test2 test value 2b;',
' test3 subtest1 "sub test value1a";',
' test3 subtest1 "sub test value1b";',
' test3 subtest2 "sub test value2";',
],
},
{
:title => 'should contain custom prepended directives',
:attr => 'location_custom_cfg_prepend',
:value => { 'test1' => 'bar', 'test2' => ['foobar', 'barbaz'],
'test3' => { 'subtest1' => ['"sub test value1a"', '"sub test value1b"'],
'subtest2' => '"sub test value2"' } },
:match => [
/^[ ]+test1\s+bar/,
/^[ ]+test2\s+foobar/,
/^[ ]+test2\s+barbaz/,
/^[ ]+test3\s+subtest1 "sub test value1a"/,
/^[ ]+test3\s+subtest1 "sub test value1b"/,
/^[ ]+test3\s+subtest2 "sub test value2"/,
],
},
{
:title => 'should contain raw_prepend directives',
:attr => 'raw_prepend',
:value => [
'if (a) {',
' b;',
'}'
],
:match => /^\s+if \(a\) {\n\s++b;\n\s+\}/,
},
].each do |param|
context "when #{param[:attr]} is #{param[:value]}" do
let :default_params do { :location => 'location', :proxy => 'proxy_value', :vhost => 'vhost1' } end
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it param[:title] do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
matches = Array(param[:match])
if matches.all? { |m| m.is_a? Regexp }
matches.each { |item| should contain_concat__fragment(fragment).with_content(item) }
else
lines = subject.resource('concat::fragment', fragment).send(:parameters)[:content].split("\n")
(lines & matches).should == matches
end
Array(param[:notmatch]).each do |item|
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).without_content(item)
end
end
end
end
end
describe "vhost/location_footer template content" do
[
{
:title => 'should contain ordered appended directives',
:attr => 'location_cfg_append',
:value => { 'test1' => 'test value 1', 'test2' => ['test value 2a', 'test value 2b'],
'test3' => { 'subtest1' => ['"sub test value1a"', '"sub test value1b"'],
'subtest2' => '"sub test value2"' } },
:match => [
' test1 test value 1;',
' test2 test value 2a;',
' test2 test value 2b;',
' test3 subtest1 "sub test value1a";',
' test3 subtest1 "sub test value1b";',
' test3 subtest2 "sub test value2";',
],
},
{
:title => 'should contain custom appended directives',
:attr => 'location_custom_cfg_append',
:value => { 'test1' => 'bar', 'test2' => ['foobar', 'barbaz'],
'test3' => { 'subtest1' => ['"sub test value1a"', '"sub test value1b"'],
'subtest2' => '"sub test value2"' } },
:match => [
/^[ ]+test1\s+bar/,
/^[ ]+test2\s+foobar/,
/^[ ]+test2\s+barbaz/,
/^[ ]+test3\s+subtest1 "sub test value1a"/,
/^[ ]+test3\s+subtest1 "sub test value1b"/,
/^[ ]+test3\s+subtest2 "sub test value2"/,
],
},
{
:title => 'should contain raw_append directives',
:attr => 'raw_append',
:value => [
'if (a) {',
' b;',
'}'
],
:match => /^\s+if \(a\) {\n\s++b;\n\s+\}/,
},
].each do |param|
context "when #{param[:attr]} is #{param[:value]}" do
let :default_params do { :location => 'location', :proxy => 'proxy_value', :vhost => 'vhost1' } end
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it param[:title] do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
matches = Array(param[:match])
if matches.all? { |m| m.is_a? Regexp }
matches.each { |item| should contain_concat__fragment(fragment).with_content(item) }
else
lines = subject.resource('concat::fragment', fragment).send(:parameters)[:content].split("\n")
(lines & matches).should == matches
end
Array(param[:notmatch]).each do |item|
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).without_content(item)
end
end
it "should end with a closing brace" do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
content = subject.resource('concat::fragment', fragment).send(:parameters)[:content]
(content.split("\n").reject {|l| l =~ /^(\s*#|$)/ }.last).strip.should == '}'
end
end
end
end
describe "vhost_location_alias template content" do
let :default_params do
{ :location => 'location', :vhost => 'vhost1', :location_alias => 'value' }
end
context "when location_alias is 'value'" do
let :params do default_params end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")) }
it "should set alias" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")).
with_content(/^[ ]+alias\s+value;/)
end
end
context "when autoindex is 'on'" do
let :params do default_params.merge({ :autoindex => 'on' }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")) }
it "should set autoindex" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")).
with_content(/^[ ]+autoindex\s+on;/)
end
end
context "when autoindex is not set" do
let :params do default_params end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")) }
it "should not set autoindex" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")).
without_content(/^[ ]+autoindex[^;]+;/)
end
end
end
describe "vhost_location_directory template content" do
let :default_params do
{
:location => 'location',
:www_root => '/var/www/root',
:vhost => 'vhost1'
}
end
[
{
:title => 'should set www_root',
:attr => 'www_root',
:value => '/',
:match => ' root /;'
},
{
:title => 'should set try_file(s)',
:attr => 'try_files',
:value => ['name1','name2'],
:match => ' try_files name1 name2;',
},
{
:title => 'should set index_file(s)',
:attr => 'index_files',
:value => ['name1','name2'],
:match => ' index name1 name2;',
},
{
:title => 'should contain rewrite rules',
:attr => 'rewrite_rules',
:value => [
'^(/download/.*)/media/(.*)\..*$ $1/mp3/$2.mp3 last',
'^(/download/.*)/audio/(.*)\..*$ $1/mp3/$2.ra last',
'^/users/(.*)$ /show?user=$1? last',
],
:match => [
' rewrite ^(/download/.*)/media/(.*)\..*$ $1/mp3/$2.mp3 last;',
' rewrite ^(/download/.*)/audio/(.*)\..*$ $1/mp3/$2.ra last;',
' rewrite ^/users/(.*)$ /show?user=$1? last;',
],
},
{
:title => 'should not set rewrite_rules',
:attr => 'rewrite_rules',
:value => [],
:notmatch => /rewrite/
},
{
:title => 'should set auth_basic',
:attr => 'auth_basic',
:value => 'value',
:match => ' auth_basic "value";',
},
{
:title => 'should set auth_basic_user_file',
:attr => 'auth_basic_user_file',
:value => 'value',
:match => ' auth_basic_user_file value;',
},
].each do |param|
context "when #{param[:attr]} is #{param[:value]}" do
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it param[:title] do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
matches = Array(param[:match])
if matches.all? { |m| m.is_a? Regexp }
matches.each { |item| should contain_concat__fragment(fragment).with_content(item) }
else
lines = subject.resource('concat::fragment', fragment).send(:parameters)[:content].split("\n")
(lines & matches).should == matches
end
Array(param[:notmatch]).each do |item|
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).without_content(item)
end
end
end
end
context "when autoindex is 'on'" do
let :params do default_params.merge({ :autoindex => 'on' }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")) }
it "should set autoindex" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")).
with_content(/^[ ]+autoindex\s+on;/)
end
end
context "when autoindex is not set" do
let :params do default_params end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")) }
it "should not set autoindex" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")).
without_content(/^[ ]+autoindex[^;]+;/)
end
end
end
describe "vhost_location_empty template content" do
[
{
:title => 'should contain ordered config directives',
:attr => 'location_custom_cfg',
:value => { 'test1' => ['test value 1a', 'test value 1b'], 'test2' => 'test value 2', 'allow' => 'test value 3',
'test4' => { 'subtest1' => ['"sub test value1a"', '"sub test value1b"'],
'subtest2' => '"sub test value2"' } },
:match => [
' allow test value 3;',
' test1 test value 1a;',
' test1 test value 1b;',
' test2 test value 2;',
' test4 subtest1 "sub test value1a";',
' test4 subtest1 "sub test value1b";',
' test4 subtest2 "sub test value2";',
],
},
].each do |param|
context "when #{param[:attr]} is #{param[:value]}" do
let :default_params do { :location => 'location', :location_custom_cfg => {'test1'=>'value1'}, :vhost => 'vhost1' } end
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it param[:title] do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
matches = Array(param[:match])
if matches.all? { |m| m.is_a? Regexp }
matches.each { |item| should contain_concat__fragment(fragment).with_content(item) }
else
lines = subject.resource('concat::fragment', fragment).send(:parameters)[:content].split("\n")
(lines & matches).should == matches
end
Array(param[:notmatch]).each do |item|
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).without_content(item)
end
end
end
end
end
describe "vhost_location_fastcgi template content" do
let :default_params do
{
:location => 'location',
:fastcgi => 'localhost:9000',
:vhost => 'vhost1'
}
end
[
{
:title => 'should set www_root',
:attr => 'www_root',
:value => '/',
:match => ' root /;'
},
{
:title => 'should set fastcgi_split_path',
:attr => 'fastcgi_split_path',
:value => 'value',
:match => ' fastcgi_split_path_info value;'
},
{
:title => 'should set try_file(s)',
:attr => 'try_files',
:value => ['name1','name2'],
:match => ' try_files name1 name2;',
},
{
:title => 'should set fastcgi_params',
:attr => 'fastcgi_params',
:value => 'value',
:match => /^[ ]+include\s+value;/
},
{
:title => 'should set fastcgi_pass',
:attr => 'fastcgi',
:value => 'value',
:match => ' fastcgi_pass value;'
},
].each do |param|
context "when #{param[:attr]} is #{param[:value]}" do
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it param[:title] do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
matches = Array(param[:match])
if matches.all? { |m| m.is_a? Regexp }
matches.each { |item| should contain_concat__fragment(fragment).with_content(item) }
else
lines = subject.resource('concat::fragment', fragment).send(:parameters)[:content].split("\n")
(lines & matches).should == matches
end
Array(param[:notmatch]).each do |item|
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).without_content(item)
end
end
end
end
context "when fastcgi_script is 'value'" do
let :params do default_params.merge({ :fastcgi_script => 'value' }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it "should set fastcgi_script" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).
with_content(%r|^[ ]+fastcgi_param\s+SCRIPT_FILENAME\s+value;|)
end
end
context "when fastcgi_script is not set" do
let :params do default_params end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it "should not set fastcgi_script" do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).
without_content(/^[ ]+fastcgi_param\s+SCRIPT_FILENAME\s+.+?;/)
end
end
end
describe "vhost_location_proxy template content" do
[
{
:title => 'should set proxy_cache',
:attr => 'proxy_cache',
:value => 'value',
:match => /^[ ]+proxy_cache\s+value;/,
},
{
:title => 'should not set proxy_cache',
:attr => 'proxy_cache',
:value => false,
:notmatch => /proxy_cache\b/
},
{
:title => 'should set proxy_pass',
:attr => 'proxy',
:value => 'value',
:match => /^[ ]+proxy_pass\s+value;/,
},
{
:title => 'should set proxy_read_timeout',
:attr => 'proxy_read_timeout',
:value => 'value',
:match => ' proxy_read_timeout value;',
},
{
:title => 'should set proxy_connect_timeout',
:attr => 'proxy_connect_timeout',
:value => 'value',
:match => ' proxy_connect_timeout value;',
},
{
:title => 'should set proxy_read_timeout',
:attr => 'proxy_read_timeout',
:value => 'value',
:match => ' proxy_read_timeout value;',
},
{
:title => 'should set proxy headers',
:attr => 'proxy_set_header',
:value => [ 'X-TestHeader1 value1', 'X-TestHeader2 value2' ],
:match => [
/^[ ]+proxy_set_header\s+X-TestHeader1 value1;/,
/^[ ]+proxy_set_header\s+X-TestHeader2 value2;/,
]
},
{
:title => 'should set proxy_method',
:attr => 'proxy_method',
:value => 'value',
:match => ' proxy_method value;',
},
{
:title => 'should set proxy_set_body',
:attr => 'proxy_set_body',
:value => 'value',
:match => ' proxy_set_body value;',
},
{
:title => 'should contain rewrite rules',
:attr => 'rewrite_rules',
:value => [
'^(/download/.*)/media/(.*)\..*$ $1/mp3/$2.mp3 last',
'^(/download/.*)/audio/(.*)\..*$ $1/mp3/$2.ra last',
'^/users/(.*)$ /show?user=$1? last',
],
:match => [
' rewrite ^(/download/.*)/media/(.*)\..*$ $1/mp3/$2.mp3 last;',
' rewrite ^(/download/.*)/audio/(.*)\..*$ $1/mp3/$2.ra last;',
' rewrite ^/users/(.*)$ /show?user=$1? last;',
],
},
{
:title => 'should not set rewrite_rules',
:attr => 'rewrite_rules',
:value => [],
:notmatch => /rewrite/
},
].each do |param|
context "when #{param[:attr]} is #{param[:value]}" do
let :default_params do { :location => 'location', :proxy => 'proxy_value', :vhost => 'vhost1' } end
let :params do default_params.merge({ param[:attr].to_sym => param[:value] }) end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")) }
it param[:title] do
fragment = Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")
matches = Array(param[:match])
if matches.all? { |m| m.is_a? Regexp }
matches.each { |item| should contain_concat__fragment(fragment).with_content(item) }
else
lines = subject.resource('concat::fragment', fragment).send(:parameters)[:content].split("\n")
(lines & matches).should == matches
end
Array(param[:notmatch]).each do |item|
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).without_content(item)
end
end
end
end
context "when proxy_cache_valid is 10m" do
let :params do {
:location => 'location',
:proxy => 'proxy_value',
:vhost => 'vhost1',
:proxy_cache => 'true',
:proxy_cache_valid => '10m',
} end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-location")).with_content(/proxy_cache_valid 10m;/) }
end
end
describe "vhost_location_stub_status template content" do
let :params do { :location => 'location', :stub_status => true, :vhost => 'vhost1' } end
it do
should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-#{params[:location]}")).
with_content(/stub_status\s+on/)
end
end
context 'attribute resources' do
context 'when fastcgi => "localhost:9000"' do
let :params do { :fastcgi => 'localhost:9000', :vhost => 'vhost1' } end
it { should contain_file('/etc/nginx/fastcgi_params').with_mode('0770') }
end
context 'when ssl_only => true' do
let :params do { :ssl_only => true, :vhost => 'vhost1', :www_root => '/', } end
it { should_not contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-rspec-test")) }
end
context 'when ssl_only => false' do
let :params do { :ssl_only => false, :vhost => 'vhost1', :www_root => '/', } end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-500-rspec-test")) }
end
context 'when ssl => true' do
let :params do { :ssl => true, :vhost => 'vhost1', :www_root => '/', } end
it { should contain_concat__fragment(Digest::MD5.hexdigest("vhost1-800-rspec-test-ssl")) }
end
context 'when ssl => false' do
let :params do { :ssl => false, :vhost => 'vhost1', :www_root => '/', } end
it { should_not contain_concat__fragment(Digest::MD5.hexdigest("vhost1-800-rspec-test-ssl")) }
end
context 'when auth_basic_user_file => true' do
let :params do { :auth_basic_user_file => '/path/to/file', :vhost => 'vhost1', :www_root => '/', } end
it { should contain_file("/etc/nginx/rspec-test_htpasswd") }
end
context 'when ensure => absent' do
let :params do {
:www_root => '/',
:vhost => 'vhost1',
:ensure => 'absent',
:ssl => true,
:auth_basic_user_file => '/path/to/file',
} end
it { should contain_file("/etc/nginx/rspec-test_htpasswd").with_ensure('absent') }
end
context "vhost missing" do
let :params do {
:www_root => '/',
} end
it { expect { should contain_class('nginx::resource::location') }.to raise_error(Puppet::Error, /Cannot create a location reference without attaching to a virtual host/) }
end
context "location type missing" do
let :params do {
:vhost => 'vhost1',
} end
it { expect { should contain_class('nginx::resource::location') }.to raise_error(Puppet::Error, /Cannot create a location reference without a www_root, proxy, location_alias, fastcgi, stub_status, or location_custom_cfg defined/) }
end
context "www_root and proxy are set" do
let :params do {
:vhost => 'vhost1',
:www_root => '/',
:proxy => 'http://localhost:8000/uri/',
} end
it { expect { should contain_class('nginx::resource::location') }.to raise_error(Puppet::Error, /Cannot define both directory and proxy in a virtual host/) }
end
context 'when vhost name is sanitized' do
let :title do 'www.rspec-location.com' end
let :params do {
:vhost => 'www rspec-vhost com',
:www_root => '/',
:ssl => true,
} end
it { should contain_concat__fragment(Digest::MD5.hexdigest("www_rspec-vhost_com-500-www.rspec-location.com")).with_target('/etc/nginx/sites-available/www_rspec-vhost_com.conf') }
it { should contain_concat__fragment(Digest::MD5.hexdigest("www_rspec-vhost_com-800-www.rspec-location.com-ssl")).with_target('/etc/nginx/sites-available/www_rspec-vhost_com.conf') }
end
end
end
end
# ---- 4aa8907f5788c187f53352b2f61bbb6dca551be9 (775 bytes) ----
# frozen_string_literal: true
module Analytics
module UniqueVisitsHelper
extend ActiveSupport::Concern
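# Returns the visitor UUID stored in the cookie, creating a new 24-month cookie for logged-in users when missing.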
def visitor_id
return cookies[:visitor_id] if cookies[:visitor_id].present?
return unless current_user
uuid = SecureRandom.uuid
cookies[:visitor_id] = { value: uuid, expires: 24.months }
uuid
end
def track_visit(target_id)
return unless visitor_id
Gitlab::Analytics::UniqueVisits.new.track_visit(target_id, values: visitor_id)
end
class_methods do
def track_unique_visits(controller_actions, target_id:)
after_action only: controller_actions, if: -> { request.format.html? && request.headers['DNT'] != '1' } do
track_visit(target_id)
end
end
end
end
end
# ---- 6a828836622f0f09bfc3563cb57a2349a529a6cb (138 bytes) ----
# A simple pingtest gem
module Pingtest
# Ping test
#
# @return [String] "pong"
#
def self.ping
return "pong"
end
end
# ---- 03841339ce5ef90b7e3743cad2abe16dd2c0d7c3 (155 bytes) ----
class CreateTopics < ActiveRecord::Migration[5.0]
def change
create_table :topics do |t|
t.string :title
t.timestamps
end
end
end
# ---- f75d3d6d39082524ae87cebf5f9b4caec2f74ef4 (281 bytes) ----
workers Integer(ENV['WEB_CONCURRENCY'] || 3)
threads_count = Integer(ENV['MAX_THREADS'] || 5)
threads threads_count, threads_count
preload_app!
port ENV['PORT'] || 3000
environment ENV['RACK_ENV'] || 'development'
on_worker_boot do
ActiveRecord::Base.establish_connection
end
# ---- ff3de493c406d4a4ab5c81f0f9b8d14cee483ecb (4,396 bytes) ----
require 'support/config'
require 'support/test_helper'
require 'phusion_passenger/railz/application_spawner'
require 'ruby/rails/minimal_spawner_spec'
require 'ruby/spawn_server_spec'
require 'ruby/rails/spawner_privilege_lowering_spec'
require 'ruby/rails/spawner_error_handling_spec'
include PhusionPassenger
include PhusionPassenger::Railz
describe ApplicationSpawner do
include TestHelper
before :each do
@stub = setup_rails_stub('foobar')
@spawner = ApplicationSpawner.new(@stub.app_root,
"lowest_user" => CONFIG['lowest_user'])
@spawner.start
@server = @spawner
end
after :each do
@spawner.stop
@stub.destroy
end
it_should_behave_like "a spawn server"
def spawn_arbitrary_application
@spawner.spawn_application
end
end
describe ApplicationSpawner do
include TestHelper
describe "smart spawning" do
it_should_behave_like "a minimal spawner"
it_should_behave_like "handling errors in application initialization"
it "calls the starting_worker_process event, with forked=true, after a new worker process has been forked off" do
use_rails_stub('foobar') do |stub|
File.append(stub.environment_rb, %q{
PhusionPassenger.on_event(:starting_worker_process) do |forked|
File.append("result.txt", "forked = #{forked}\n")
end
File.append("result.txt", "end of environment.rb\n");
})
spawner = ApplicationSpawner.new(stub.app_root,
"lowest_user" => CONFIG['lowest_user'])
spawner.start
begin
spawner.spawn_application.close
spawner.spawn_application.close
ensure
spawner.stop
end
# Give some time for the starting_worker_process hook to be executed.
sleep 0.2
contents = File.read("#{stub.app_root}/result.txt")
contents.should == "end of environment.rb\n" +
"forked = true\n" +
"forked = true\n"
end
end
def spawn_stub_application(stub, extra_options = {})
options = { "lowest_user" => CONFIG['lowest_user'] }.merge(extra_options)
@spawner = ApplicationSpawner.new(stub.app_root, options)
begin
@spawner.start
return @spawner.spawn_application
ensure
@spawner.stop rescue nil
end
end
end
describe "conservative spawning" do
it_should_behave_like "a minimal spawner"
it_should_behave_like "handling errors in application initialization"
it "calls the starting_worker_process event, with forked=true, after environment.rb has been loaded" do
use_rails_stub('foobar') do |stub|
File.append(stub.environment_rb, %q{
PhusionPassenger.on_event(:starting_worker_process) do |forked|
File.append("result.txt", "forked = #{forked}\n")
end
File.append("result.txt", "end of environment.rb\n");
})
spawn_stub_application(stub).close
spawn_stub_application(stub).close
# Give some time for the starting_worker_process hook to be executed.
sleep 0.2
contents = File.read("#{stub.app_root}/result.txt")
contents.should == "end of environment.rb\n" +
"forked = false\n" +
"end of environment.rb\n" +
"forked = false\n"
end
end
def spawn_stub_application(stub, extra_options = {})
options = { "lowest_user" => CONFIG['lowest_user'] }.merge(extra_options)
@spawner = ApplicationSpawner.new(stub.app_root, options)
return @spawner.spawn_application!
end
end
end
Process.euid == ApplicationSpawner::ROOT_UID &&
describe("ApplicationSpawner privilege lowering support") do
include TestHelper
describe "regular spawning" do
it_should_behave_like "a spawner that supports lowering of privileges"
def spawn_stub_application(options = {})
options = {
"lower_privilege" => true,
"lowest_user" => CONFIG['lowest_user']
}.merge(options)
@spawner = ApplicationSpawner.new(@stub.app_root, options)
@spawner.start
begin
app = @spawner.spawn_application
yield app
ensure
app.close if app
@spawner.stop
end
end
end
describe "conservative spawning" do
it_should_behave_like "a spawner that supports lowering of privileges"
def spawn_stub_application(options = {})
options = {
"lower_privilege" => true,
"lowest_user" => CONFIG['lowest_user']
}.merge(options)
@spawner = ApplicationSpawner.new(@stub.app_root, options)
begin
app = @spawner.spawn_application!
yield app
ensure
app.close if app
end
end
end
end
# ---- 3856da150b05089167cefeb46c8a7f531fcdc4cd (224 bytes) ----
desc 'Continuous Integration task'
task 'ci' do
derive_versions
sh "bundle exec buildr package doc jacoco:report PRODUCT_VERSION=#{ENV['PRODUCT_VERSION']} PREVIOUS_PRODUCT_VERSION=#{ENV['PREVIOUS_PRODUCT_VERSION']}"
end
# ---- 8779c227578fdbf82d2757c2b35a34e7d9101925 (2,049 bytes) ----
require 'thread'
require 'listen/internals/thread_pool'
module Listen
module Adapter
# Adapter implementation for Mac OS X `FSEvents`.
#
class Darwin < Base
OS_REGEXP = /darwin(1.+)?$/i
# The default delay between checking for changes.
DEFAULTS = { latency: 0.1 }
private
# NOTE: each directory gets a DIFFERENT callback!
def _configure(dir, &callback)
require 'rb-fsevent'
opts = { latency: options.latency }
@workers ||= ::Queue.new
@workers << FSEvent.new.tap do |worker|
_log :debug, "fsevent: watching: #{dir.to_s.inspect}"
worker.watch(dir.to_s, opts, &callback)
end
end
def _run
first = @workers.pop
# NOTE: _run is called within a thread, so run every other
# worker in it's own thread
_run_workers_in_background(_to_array(@workers))
_run_worker(first)
end
def _process_event(dir, event)
_log :debug, "fsevent: processing event: #{event.inspect}"
event.each do |path|
new_path = Pathname.new(path.sub(/\/$/, ''))
_log :debug, "fsevent: #{new_path}"
# TODO: does this preserve symlinks?
rel_path = new_path.relative_path_from(dir).to_s
_queue_change(:dir, dir, rel_path, recursive: true)
end
end
def _run_worker(worker)
_log :debug, "fsevent: running worker: #{worker.inspect}"
worker.run
rescue
_log_exception 'fsevent: running worker failed: %s:%s called from: %s', caller
end
def _run_workers_in_background(workers)
workers.each do |worker|
# NOTE: while passing local variables to the block below is not
# thread safe, using 'worker' from the enumerator above is ok
Listen::Internals::ThreadPool.add { _run_worker(worker) }
end
end
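# Drains the queue of fsevent workers into an array.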
def _to_array(queue)
workers = []
workers << queue.pop until queue.empty?
workers
end
end
end
end
# ---- 2114db12ef1c6594f0c2bf1c9f44c28c378143ac (524 bytes) ----
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
helper_method :current_user
helper_method :users_daily_activities
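# Looks up and memoizes the logged-in user from the session, if any.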
def current_user
@current_user ||= User.find_by_id(session[:user_id]) unless session[:user_id] == nil
end
def logged_in?
!!session[:user_id]
end
def authorize
redirect_to '/login' unless current_user
end
def users_daily_activities
current_user.daily_activities.collect do |activity|
activity.name
end
end
end
# ---- ff6965a211c4e33035fceb4d65780b111773c275 (494 bytes) ----
class CreateMeritBadges < ActiveRecord::Migration
def change
create_table :merit_badges do |t|
t.string :name
t.string :year_created
t.boolean :eagle_required, default: false
t.boolean :discontinued, default: false
t.string :bsa_advancement_id
t.string :patch_image_url
t.string :mb_org_url
t.string :mb_org_worksheet_pdf_url
t.string :mb_org_worksheet_doc_url
t.timestamps
end
add_index :merit_badges, :name
end
end
# ---- 616e70d1731111a71af54ee78969dc238171ba02 (74 bytes) ----
class NonDefaultDirCell < Cell::ViewModel
def show
render
end
end
# ---- 2176cad22e1db8c3d04ecd2bad34c97bb4aa7c5b (379 bytes) ----
# == Schema Information
#
# Table name: societes
#
# id :integer not null, primary key
# name :string
# created_at :datetime not null
# updated_at :datetime not null
#
class Societe < ApplicationRecord
has_many :ticket
def make_exists(name)
find_or_create_by(name: name)
end
def subclassname
name if subclass
end
end
# ---- 87ea7578dce9af0f28e375ea720cdd251b5605aa (189 bytes) ----
require_dependency 'support/application_record'
module Support
module Common
class ActivityType < ApplicationRecord
self.table_name = 'common.activity_types'
end
end
end
# ---- 6218b8eb5d2fca56de5e3cce1469e17a1cc6a02b (1,631 bytes) ----
#
# Be sure to run `pod lib lint SignalBox.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'SignalBox'
s.version = '0.1.0'
s.summary = 'A short description of SignalBox.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/jimmy/SignalBox'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'jimmy' => '[email protected]' }
s.source = { :git => 'https://github.com/jimmy/SignalBox.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '11.0'
s.ios.private_header_files = "Pod/Classes/**/*.h"
s.source_files = 'SignalBox/Classes/**/*'
# s.resource_bundles = {
# 'SignalBox' => ['SignalBox/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
# ---- bb7061ceeac4ac8fe60b5ad090b1785518dd5ebf (1,238 bytes) ----
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe V0::FeedbacksController, type: :controller do
let(:params) do
{
description: 'I liked this page',
target_page: '/some/example/page.html',
owner_email: '[email protected]'
}
end
let(:missing_params) { params.reject { |k, _v| k == :target_page } }
let(:empty_params) { params.merge(description: '') }
before do
@request.env['HTTP_ACCEPT'] = 'application/json'
@request.env['CONTENT_TYPE'] = 'application/json'
end
it 'responds with 202' do
post :create, params
expect(response).to have_http_status(:accepted)
expect(response.header['Content-Type']).to include('application/json')
end
it 'responds with param error when required params are missing' do
post :create, missing_params
expect(response).to have_http_status(:bad_request)
expect(response.body).to include('The required parameter \\"target_page\\", is missing')
end
it 'responds with param error when required params are null or empty' do
post :create, empty_params
expect(response).to have_http_status(:bad_request)
expect(response.body).to include('The required parameter \\"description\\", is missing')
end
end
# ---- 21a39ae70a353fe84db43caefebfd40a24d692de (12,731 bytes) ----
require File.expand_path("../../base", __FILE__)
require "pathname"
require 'tempfile'
describe Vagrant::BoxCollection, :skip_windows do
include_context "unit"
let(:box_class) { Vagrant::Box }
let(:environment) { isolated_environment }
subject { described_class.new(environment.boxes_dir) }
it "should tell us the directory it is using" do
expect(subject.directory).to eq(environment.boxes_dir)
end
describe "#all" do
it "should return an empty array when no boxes are there" do
expect(subject.all).to eq([])
end
it "should return the boxes and their providers" do
# Create some boxes
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "1.0", :vmware)
environment.box3("bar", "0", :ec2)
environment.box3("foo-VAGRANTSLASH-bar", "1.0", :virtualbox)
environment.box3("foo-VAGRANTCOLON-colon", "1.0", :virtualbox)
# Verify some output
results = subject.all
expect(results.length).to eq(5)
expect(results.include?(["foo", "1.0", :virtualbox])).to be
expect(results.include?(["foo", "1.0", :vmware])).to be
expect(results.include?(["bar", "0", :ec2])).to be
expect(results.include?(["foo/bar", "1.0", :virtualbox])).to be
expect(results.include?(["foo:colon", "1.0", :virtualbox])).to be
end
it 'does not raise an exception when a file appears in the boxes dir' do
Tempfile.new('a_file', environment.boxes_dir)
expect { subject.all }.to_not raise_error
end
end
describe "#clean" do
it "removes the directory if no other versions of the box exists" do
# Create a few boxes, immediately destroy them
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "1.0", :vmware)
# Delete them all
subject.all.each do |parts|
subject.find(parts[0], parts[2], ">= 0").destroy!
end
# Cleanup
subject.clean("foo")
# Make sure the whole directory is empty
expect(environment.boxes_dir.children).to be_empty
end
it "doesn't remove the directory if a provider exists" do
# Create a few boxes, immediately destroy them
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "1.0", :vmware)
# Delete them all
subject.find("foo", :virtualbox, ">= 0").destroy!
# Cleanup
subject.clean("foo")
# Make sure the whole directory is not empty
expect(environment.boxes_dir.children).to_not be_empty
# Make sure the results still exist
results = subject.all
expect(results.length).to eq(1)
expect(results.include?(["foo", "1.0", :vmware])).to be
end
it "doesn't remove the directory if a version exists" do
# Create a few boxes, immediately destroy them
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "1.2", :virtualbox)
# Delete them all
subject.find("foo", :virtualbox, ">= 1.1").destroy!
# Cleanup
subject.clean("foo")
# Make sure the whole directory is not empty
expect(environment.boxes_dir.children).to_not be_empty
# Make sure the results still exist
results = subject.all
expect(results.length).to eq(1)
expect(results.include?(["foo", "1.0", :virtualbox])).to be
end
end
describe "#find" do
it "returns nil if the box does not exist" do
expect(subject.find("foo", :i_dont_exist, ">= 0")).to be_nil
end
it "returns a box if the box does exist" do
# Create the "box"
environment.box3("foo", "0", :virtualbox)
# Actual test
result = subject.find("foo", :virtualbox, ">= 0")
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.metadata_url).to be_nil
end
it "returns a box if the box does exist, with no constraints" do
# Create the "box"
environment.box3("foo", "0", :virtualbox)
# Actual test
result = subject.find("foo", :virtualbox, nil)
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.metadata_url).to be_nil
end
it "sets a metadata URL if it has one" do
# Create the "box"
environment.box3("foo", "0", :virtualbox,
metadata_url: "foourl")
# Actual test
result = subject.find("foo", :virtualbox, ">= 0")
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.metadata_url).to eq("foourl")
end
it "sets the metadata URL to an authenticated URL if it has one" do
hook = double("hook")
subject = described_class.new(environment.boxes_dir, hook: hook)
# Create the "box"
environment.box3("foo", "0", :virtualbox,
metadata_url: "foourl")
expect(hook).to receive(:call).with { |name, env|
expect(name).to eq(:authenticate_box_url)
expect(env[:box_urls]).to eq(["foourl"])
true
}.and_return(box_urls: ["bar"])
# Actual test
result = subject.find("foo", :virtualbox, ">= 0")
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.metadata_url).to eq("bar")
end
it "returns latest version matching constraint" do
# Create the "box"
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "1.5", :virtualbox)
# Actual test
result = subject.find("foo", :virtualbox, ">= 0")
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.version).to eq("1.5")
end
it "can satisfy complex constraints" do
# Create the "box"
environment.box3("foo", "0.1", :virtualbox)
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "2.1", :virtualbox)
# Actual test
result = subject.find("foo", :virtualbox, ">= 0.9, < 1.5")
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.version).to eq("1.0")
end
it "handles prerelease versions" do
# Create the "box"
environment.box3("foo", "0.1.0-alpha.1", :virtualbox)
environment.box3("foo", "0.1.0-alpha.2", :virtualbox)
# Actual test
result = subject.find("foo", :virtualbox, ">= 0")
expect(result).to_not be_nil
expect(result).to be_kind_of(box_class)
expect(result.name).to eq("foo")
expect(result.version).to eq("0.1.0-alpha.2")
end
it "returns nil if a box's constraints can't be satisfied" do
# Create the "box"
environment.box3("foo", "0.1", :virtualbox)
environment.box3("foo", "1.0", :virtualbox)
environment.box3("foo", "2.1", :virtualbox)
# Actual test
result = subject.find("foo", :virtualbox, "> 1.0, < 1.5")
expect(result).to be_nil
end
end
describe "#add" do
it "should add a valid box to the system" do
box_path = environment.box2_file(:virtualbox)
# Add the box
box = subject.add(box_path, "foo", "1.0", providers: :virtualbox)
expect(box).to be_kind_of(box_class)
expect(box.name).to eq("foo")
expect(box.provider).to eq(:virtualbox)
# Verify we can find it as well
expect(subject.find("foo", :virtualbox, "1.0")).to_not be_nil
end
it "should add a box with a name with '/' in it" do
box_path = environment.box2_file(:virtualbox)
# Add the box
box = subject.add(box_path, "foo/bar", "1.0")
expect(box).to be_kind_of(box_class)
expect(box.name).to eq("foo/bar")
expect(box.provider).to eq(:virtualbox)
# Verify we can find it as well
expect(subject.find("foo/bar", :virtualbox, "1.0")).to_not be_nil
end
it "should add a box without specifying a provider" do
box_path = environment.box2_file(:vmware)
# Add the box
box = subject.add(box_path, "foo", "1.0")
expect(box).to be_kind_of(box_class)
expect(box.name).to eq("foo")
expect(box.provider).to eq(:vmware)
end
it "should store a metadata URL" do
box_path = environment.box2_file(:virtualbox)
subject.add(
box_path, "foo", "1.0",
metadata_url: "bar")
box = subject.find("foo", :virtualbox, "1.0")
expect(box.metadata_url).to eq("bar")
end
it "should add a V1 box" do
# Create a V1 box.
box_path = environment.box1_file
# Add the box
box = subject.add(box_path, "foo", "1.0")
expect(box).to be_kind_of(box_class)
expect(box.name).to eq("foo")
expect(box.provider).to eq(:virtualbox)
end
it "should raise an exception if the box already exists" do
prev_box_name = "foo"
prev_box_provider = :virtualbox
prev_box_version = "1.0"
# Create the box we're adding
environment.box3(prev_box_name, "1.0", prev_box_provider)
# Attempt to add the box with the same name
box_path = environment.box2_file(prev_box_provider)
expect {
subject.add(box_path, prev_box_name,
prev_box_version, providers: prev_box_provider)
}.to raise_error(Vagrant::Errors::BoxAlreadyExists)
end
it "should replace the box if force is specified" do
prev_box_name = "foo"
prev_box_provider = :vmware
prev_box_version = "1.0"
# Setup the environment with the box pre-added
environment.box3(prev_box_name, prev_box_version, prev_box_provider)
# Attempt to add the box with the same name
box_path = environment.box2_file(prev_box_provider, metadata: { "replaced" => "yes" })
box = subject.add(box_path, prev_box_name, prev_box_version, force: true)
expect(box.metadata["replaced"]).to eq("yes")
end
it "should raise an exception if the box already exists and no provider is given" do
# Create some box file
box_name = "foo"
box_path = environment.box2_file(:vmware)
# Add it once, successfully
expect { subject.add(box_path, box_name, "1.0") }.to_not raise_error
# Add it again, and fail!
expect { subject.add(box_path, box_name, "1.0") }.
to raise_error(Vagrant::Errors::BoxAlreadyExists)
end
it "should raise an exception and not add the box if the provider doesn't match" do
box_name = "foo"
good_provider = :virtualbox
bad_provider = :vmware
# Create a VirtualBox box file
box_path = environment.box2_file(good_provider)
# Add the box but with an invalid provider, verify we get the proper
# error.
expect { subject.add(box_path, box_name, "1.0", providers: bad_provider) }.
to raise_error(Vagrant::Errors::BoxProviderDoesntMatch)
# Verify the box doesn't exist
expect(subject.find(box_name, bad_provider, "1.0")).to be_nil
end
it "should raise an exception if you add an invalid box file" do
# Tar Header information
CHECKSUM_OFFSET = 148
CHECKSUM_LENGTH = 8
f = Tempfile.new(['vagrant_testing', '.tar'])
begin
# Corrupt the tar by writing over the checksum field
f.seek(CHECKSUM_OFFSET)
f.write("\0"*CHECKSUM_LENGTH)
f.close
expect { subject.add(f.path, "foo", "1.0") }.
to raise_error(Vagrant::Errors::BoxUnpackageFailure)
ensure
f.close
f.unlink
end
end
end
describe "#upgrade_v1_1_v1_5" do
let(:boxes_dir) { environment.boxes_dir }
before do
# Create all the various box directories
@foo_path = environment.box2("foo", "virtualbox")
@vbox_path = environment.box2("precise64", "virtualbox")
@vmware_path = environment.box2("precise64", "vmware")
@v1_path = environment.box("v1box")
end
it "upgrades the boxes" do
subject.upgrade_v1_1_v1_5
# The old paths should not exist anymore
expect(@foo_path).to_not exist
expect(@vbox_path).to_not exist
expect(@vmware_path).to_not exist
expect(@v1_path.join("box.ovf")).to_not exist
# New paths should exist
foo_path = boxes_dir.join("foo", "0", "virtualbox")
vbox_path = boxes_dir.join("precise64", "0", "virtualbox")
vmware_path = boxes_dir.join("precise64", "0", "vmware")
v1_path = boxes_dir.join("v1box", "0", "virtualbox")
expect(foo_path).to exist
expect(vbox_path).to exist
expect(vmware_path).to exist
expect(v1_path).to exist
end
end
end
# ---- 392e755d019a1665d39b12299c80b62e374a0c6b (552 bytes) ----
module QDM
# app/models/qdm/patient_characteristic_payer.rb
class PatientCharacteristicPayer < DataElement
include Mongoid::Document
embedded_in :patient
field :relevantPeriod, type: QDM::Interval
field :qdmTitle, type: String, default: 'Patient Characteristic Payer'
field :hqmfOid, type: String, default: '2.16.840.1.113883.10.20.28.4.58'
field :qdmCategory, type: String, default: 'patient_characteristic'
field :qdmStatus, type: String, default: 'payer'
field :qdmVersion, type: String, default: '5.6'
end
end
# ---- 392a0b935ba4d3bbb0881c82b9e851194ba75159 (253 bytes) ----
require 'rack/test'
require 'rspec'
require 'pry'
ENV['RACK_ENV'] = 'test'
require File.expand_path '../../app.rb', __FILE__
module RSpecMixin
include Rack::Test::Methods
def app() MakeChange end
end
RSpec.configure { |c| c.include RSpecMixin }
# ---- 21cf147abba5e82308bb45118c95d846b8e69d17 (665 bytes) ----
#
# Cookbook Name:: runit_test
# Recipe:: default
#
# Copyright 2012, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'runit::default'
# ---- e93bdab8284a1df3668dcbaf775879d7d6eb4dd3 (706 bytes) ----
cask 'hockey' do
version '2.0.14'
sha256 '4cd6dc945378a520818ace687c89dd1356d5f975731a7848e37cad2e12278a31'
url 'https://rink.hockeyapp.net/api/2/apps/67503a7926431872c4b6c1549f5bd6b1/app_versions/396?format=zip'
appcast 'https://rink.hockeyapp.net/api/2/apps/67503a7926431872c4b6c1549f5bd6b1',
checkpoint: 'b27878765070c937cc053e44afa37680cf48c554506f8ddc72ec83cd4487bd9f'
name 'HockeyApp'
homepage 'http://hockeyapp.net/releases/mac/'
license :unknown # TODO: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'HockeyApp.app'
binary 'HockeyApp.app/Contents/Resources/puck'
postflight do
suppress_move_to_applications
end
end
# ---- ff368d18cedc015541c3657b66a460ec67d86d76 (1,404 bytes) ----
#-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
class CustomValue::VersionStrategy < CustomValue::ARObjectStrategy
private
def ar_class
Version
end
def ar_object(value)
Version.find_by(id: value)
end
end
# ---- 7aaf357dc7eb8bfe2350df2e0252b740689a76d3 (1,105 bytes) ----
require 'rails/generators'
require 'rails/generators/migration'
require 'rails/generators/active_record'
class VanityGenerator < Rails::Generators::Base
include Rails::Generators::Migration
source_root File.expand_path('../templates', __dir__)
def self.next_migration_number(path)
::ActiveRecord::Generators::Base.next_migration_number(path)
end
def create_migration_file
migration_template "vanity_migration.rb.erb", destination("vanity_migration.rb"), migration_version: migration_version
migration_template "add_unique_indexes_migration.rb.erb", destination("add_vanity_unique_indexes.rb"), migration_version: migration_version
migration_template "add_participants_unique_index_migration.rb.erb", destination("add_participants_unique_index_migration.rb"), migration_version: migration_version
end
private
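# Full destination path for a generated migration file under db/migrate.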
def destination(name)
File.join(Rails.root, 'db', 'migrate', name)
end
def versioned?
ActiveRecord::VERSION::MAJOR >= 5
end
def migration_version
"[#{ActiveRecord::VERSION::MAJOR}.#{ActiveRecord::VERSION::MINOR}]" if versioned?
end
end
# ---- d500df2c5c4471d62a01280cca33012a07533b8b (292 bytes) ----
# frozen_string_literal: true
class ConferenceScheduleUpdateMailJob < ApplicationJob
queue_as :default
def perform(conference)
conference.subscriptions.each do |subscription|
Mailbot.conference_schedule_update_mail(conference, subscription.user).deliver_now
end
end
end
# ---- 7a527b219820e57389866fe6960b0716142e848b (497 bytes) ----
class Hash
def to_ostruct
convert_to_ostruct_recursive(self)
end
private
def convert_to_ostruct_recursive(obj)
result = obj
if result.is_a? Hash
result = result.dup.symbolize_keys
result.each do |key, val|
result[key] = convert_to_ostruct_recursive(val)
end
result = OpenStruct.new(result)
elsif result.is_a? Array
result = result.map { |r| convert_to_ostruct_recursive(r) }
end
result
end
end
# ---- e8f882f17e199a907036c64f8fe604a10c683311 (334 bytes) ----
module Util
module Hashname
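# djb2-style string hash (seed 5381, hash * 33 + byte), wrapped into the unsigned 32-bit range as a Float.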
def self.calc_hashname(name)
name.chars.inject(5381) do |hash, ch|
max_int = (hash << 5) + hash + ch.ord
force_overflow_unsigned(max_int.to_f)
end
end
def self.force_overflow_unsigned(i)
i % (2**32).to_f # or equivalently: i & 0xffffffff
end
end
end
# ---- 26fbb4f2df8b2bd7aedc1099b71f2b3d9ed53d53 (163 bytes) ----
module CommonModels
class Reminder < ActiveRecord::Base
self.table_name = 'project_service.reminders'
belongs_to :project
belongs_to :user
end
end
# ---- 39765ceeb69b54d7e3f59e08d77290f753f7a46d (67 bytes) ----
module PolymerGoldRails
class Engine < ::Rails::Engine
end
end
# ---- 013bf3a3dcc201429aa06c576e4f0af515c5f40e (273 bytes) ----
def filter_by_prev_char(s, prev = 'a')
s.chars.each_cons(2).map do |p, c|
c if p == prev
end.compact.join
end
def filter_file(f_file, h_file, filter_prev_char = 'a')
f_file.each_line do |line|
h_file.puts filter_by_prev_char(line, filter_prev_char)
end
end
# ---- 01fdb1d872c04a49d8428285f7ca883711076546 (8,584 bytes) ----
## NewRelic instrumentation for DataMapper
#
# Instrumenting DM has different key challenges versus AR:
#
# 1. The hooking of SQL logging in DM is decoupled from any knowledge of the
# Model#method that invoked it. But on the positive side, the duration is
# already calculated for you (and it happens inside the C-based DO code, so
# it's faster than a Ruby equivalent).
#
# 2. There are a lot more entry points that need to be hooked in order to
# understand call flow: DM::Model (model class) vs. DM::Resource (model
# instance) vs. DM::Collection (collection of model instances). And
# others.
#
# 3. Strategic Eager Loading (SEL) combined with separately-grouped
# lazy-loaded attributes presents a unique problem for tying resulting
# SEL-invoked SQL calls to their proper scope.
#
# NOTE: On using "Database" versus "ActiveRecord" as base metric name
#
# Using "Database" as the metric name base seems to properly identify methods
# as being DB-related in call graphs, but certain New Relic views that show
# aggregations of DB CPM, etc still seem to rely solely on "ActiveRecord"
# being the base name, thus AFAICT "Database" calls to this are lost. (Though
# I haven't yet tested "Database/SQL/{find/save/destroy/all}" yet, as it seems
# like an intuitively good name to use.)
#
# So far I think these are the rules:
#
# - ActiveRecord/{find/save/destroy} populates "Database Throughput" and
# "Database Response Time" in the Database tab. [non-scoped]
#
# - ActiveRecord/all populates the main Overview tab of DB time. (still
# unsure about this one). [non-scoped]
#
# These metrics are represented as :push_scope => false or included as the
# non-first metric in trace_execution_scoped() (docs say only first counts
# towards scope) so they don't show up ine normal call graph/trace.
DependencyDetection.defer do
@name = :data_mapper
depends_on do
defined?(::DataMapper)
end
depends_on do
defined?(DataMapper::Model)
end
depends_on do
defined?(DataMapper::Resource)
end
depends_on do
defined?(DataMapper::Collection)
end
executes do
::NewRelic::Agent.logger.info 'Installing DataMapper instrumentation'
end
executes do
DataMapper::Model.class_eval do
add_method_tracer :get, 'ActiveRecord/#{self.name}/get'
add_method_tracer :first, 'ActiveRecord/#{self.name}/first'
add_method_tracer :last, 'ActiveRecord/#{self.name}/last'
add_method_tracer :all, 'ActiveRecord/#{self.name}/all'
add_method_tracer :create, 'ActiveRecord/#{self.name}/create'
add_method_tracer :create!, 'ActiveRecord/#{self.name}/create'
add_method_tracer :update, 'ActiveRecord/#{self.name}/update'
add_method_tracer :update!, 'ActiveRecord/#{self.name}/update'
add_method_tracer :destroy, 'ActiveRecord/#{self.name}/destroy'
add_method_tracer :destroy!, 'ActiveRecord/#{self.name}/destroy'
# For dm-aggregates and partial dm-ar-finders support:
for method in [ :aggregate, :find, :find_by_sql ] do
next unless method_defined? method
add_method_tracer(method, 'ActiveRecord/#{self.name}/' + method.to_s)
end
end
end
executes do
DataMapper::Resource.class_eval do
add_method_tracer :update, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/update'
add_method_tracer :update!, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/update'
add_method_tracer :save, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/save'
add_method_tracer :save!, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/save'
add_method_tracer :destroy, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/destroy'
add_method_tracer :destroy!, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/destroy'
end
end
executes do
DataMapper::Collection.class_eval do
# DM's Collection instance methods
add_method_tracer :get, 'ActiveRecord/#{self.name}/get'
add_method_tracer :first, 'ActiveRecord/#{self.name}/first'
add_method_tracer :last, 'ActiveRecord/#{self.name}/last'
add_method_tracer :all, 'ActiveRecord/#{self.name}/all'
add_method_tracer :lazy_load, 'ActiveRecord/#{self.name}/lazy_load'
add_method_tracer :create, 'ActiveRecord/#{self.name}/create'
add_method_tracer :create!, 'ActiveRecord/#{self.name}/create'
add_method_tracer :update, 'ActiveRecord/#{self.name}/update'
add_method_tracer :update!, 'ActiveRecord/#{self.name}/update'
add_method_tracer :destroy, 'ActiveRecord/#{self.name}/destroy'
add_method_tracer :destroy!, 'ActiveRecord/#{self.name}/destroy'
# For dm-aggregates support:
for method in [ :aggregate ] do
next unless method_defined? method
add_method_tracer(method, 'ActiveRecord/#{self.name}/' + method.to_s)
end
end
end
end
DependencyDetection.defer do
depends_on do
defined?(DataMapper) && defined?(DataMapper::Adapters) && defined?(DataMapper::Adapters::DataObjectsAdapter)
end
executes do
# Catch the two entry points into DM::Repository::Adapter that bypass CRUD
# (for when SQL is run directly).
DataMapper::Adapters::DataObjectsAdapter.class_eval do
add_method_tracer :select, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/select'
add_method_tracer :execute, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/execute'
end
end
end
DependencyDetection.defer do
depends_on do
defined?(DataMapper) && defined?(DataMapper::Validations) && defined?(DataMapper::Validations::ClassMethods)
end
# DM::Validations overrides Model#create, but currently in a way that makes it
# impossible to instrument from one place. I've got a patch pending inclusion
# to make it instrumentable by putting the create method inside ClassMethods.
# This will pick it up if/when that patch is accepted.
executes do
DataMapper::Validations::ClassMethods.class_eval do
next unless method_defined? :create
add_method_tracer :create, 'ActiveRecord/#{self.name}/create'
end
end
end
DependencyDetection.defer do
depends_on do
defined?(DataMapper) && defined?(DataMapper::Transaction)
end
# NOTE: DM::Transaction basically calls commit() twice, so as-is it will show
# up in traces twice -- second time subordinate to the first's scope. Works
# well enough.
executes do
DataMapper::Transaction.module_eval do
add_method_tracer :commit, 'ActiveRecord/#{self.class.name[/[^:]*$/]}/commit'
end
end
end
module NewRelic
module Agent
module Instrumentation
module DataMapperInstrumentation
# Unlike in AR, log is called in DM after the query actually ran,
# complete with metrics. Since DO has already calculated the
# duration, there's nothing more to measure, so just record and log.
#
# We rely on the assumption that all possible entry points have been
# hooked with tracers, ensuring that notice_sql attaches this SQL to
# the proper call scope.
def log(msg)
return unless NewRelic::Agent.is_execution_traced?
return unless operation = case NewRelic::Helper.correctly_encoded(msg.query)
when /^\s*select/i then 'find'
when /^\s*(update|insert)/i then 'save'
when /^\s*delete/i then 'destroy'
else nil
end
# FYI: self.to_s will yield connection URI string.
duration = msg.duration / 1000000.0
# Attach SQL to current segment/scope.
NewRelic::Agent.instance.transaction_sampler.notice_sql(msg.query, nil, duration)
# Record query duration associated with each of the desired metrics.
metrics = [ "ActiveRecord/#{operation}", 'ActiveRecord/all' ]
metrics.each do |metric|
NewRelic::Agent.instance.stats_engine.get_stats_no_scope(metric).trace_call(duration)
end
ensure
super
end
end # DataMapperInstrumentation
end # Instrumentation
end # Agent
end # NewRelic
DependencyDetection.defer do
depends_on do
defined?(DataObjects) && defined?(DataObjects::Connection)
end
executes do
DataObjects::Connection.class_eval do
include ::NewRelic::Agent::Instrumentation::DataMapperInstrumentation
end
end
end
| 37.160173 | 112 | 0.675792 |
e97fd97456685a889b97f43df22d20543960c2f7 | 2,022 | #! /usr/bin/env ruby
require 'spec_helper'
require 'puppet_spec/files'
describe Puppet::Settings do
include PuppetSpec::Files
def minimal_default_settings
{ :noop => {:default => false, :desc => "noop"} }
end
def define_settings(section, settings_hash)
settings.define_settings(section, minimal_default_settings.update(settings_hash))
end
let(:settings) { Puppet::Settings.new }
it "should be able to make needed directories" do
define_settings(:main,
:maindir => {
:default => tmpfile("main"),
:type => :directory,
:desc => "a",
}
)
settings.use(:main)
expect(File.directory?(settings[:maindir])).to be_truthy
end
it "should make its directories with the correct modes" do
define_settings(:main,
:maindir => {
:default => tmpfile("main"),
:type => :directory,
:desc => "a",
:mode => 0750
}
)
settings.use(:main)
expect(Puppet::FileSystem.stat(settings[:maindir]).mode & 007777).to eq(0750)
end
it "reparses configuration if configuration file is touched", :if => !Puppet.features.microsoft_windows? do
config = tmpfile("config")
define_settings(:main,
:config => {
:type => :file,
:default => config,
:desc => "a"
},
:environment => {
:default => 'dingos',
:desc => 'test',
}
)
Puppet[:filetimeout] = '1s'
File.open(config, 'w') do |file|
file.puts <<-EOF
[main]
environment=toast
EOF
end
settings.initialize_global_settings
expect(settings[:environment]).to eq('toast')
# First reparse establishes WatchedFiles
settings.reparse_config_files
sleep 1
File.open(config, 'w') do |file|
file.puts <<-EOF
[main]
environment=bacon
EOF
end
# Second reparse if later than filetimeout, reparses if changed
settings.reparse_config_files
expect(settings[:environment]).to eq('bacon')
end
end
| 22.466667 | 109 | 0.610287 |
edc0c37a82237b020fff1e6b4af176fe5caff79b | 193 | require_relative '../../spec_helper'
require_relative '../../shared/queue/num_waiting'
describe "Queue#num_waiting" do
it_behaves_like :queue_num_waiting, :num_waiting, -> { Queue.new }
end
| 27.571429 | 68 | 0.751295 |
398a8fc17196266abbe1224a5d7da8de1e61acb7 | 124 | require 'test_helper'
class AttractionTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.5 | 46 | 0.709677 |
bfc04845c9c684824f6a8c96dc1a3f063952c92c | 163 | # frozen_string_literal: true
require_relative "irb/version"
module All
module Irb
class Error < StandardError; end
# Your code goes here...
end
end
| 14.818182 | 36 | 0.717791 |
1dfb725b44c4674e634a4d15b360eba07774dcf2 | 2,315 |
class PreprocessinatorIncludesHandler
constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper
# shallow includes: only those headers a source file explicitly includes
def invoke_shallow_includes_list(filepath)
@task_invoker.invoke_test_shallow_include_lists( [@file_path_utils.form_preprocessed_includes_list_filepath(filepath)] )
end
# ask the preprocessor for a make-style dependency rule of only the headers the source file immediately includes
def form_shallow_dependencies_rule(filepath)
    # change the filename (prefix it with '_') to prevent the preprocessor from finding include files in the temp directory containing the file it's scanning
temp_filepath = @file_path_utils.form_temp_path(filepath, '_')
# read the file and replace all include statements with a decorated version
# (decorating the names creates file names that don't exist, thus preventing the preprocessor
# from snaking out and discovering the entire include path that winds through the code)
contents = @file_wrapper.read(filepath)
contents.gsub!( /#include\s+\"\s*(\S+)\s*\"/, "#include \"\\1\"\n#include \"@@@@\\1\"" )
@file_wrapper.write( temp_filepath, contents )
# extract the make-style dependency rule telling the preprocessor to
# ignore the fact that it can't find the included files
command = @tool_executor.build_command_line(@configurator.tools_test_includes_preprocessor, temp_filepath)
shell_result = @tool_executor.exec(command[:line], command[:options])
return shell_result[:output]
end
# headers only; ignore any crazy .c includes
def extract_shallow_includes(make_rule)
list = []
header_extension = @configurator.extension_header
headers = make_rule.scan(/(\S+#{'\\'+header_extension})/).flatten # escape slashes before dot file extension
headers.uniq!
headers.map! { |header| header.sub(/(@@@@)|(.+\/)/, '') }
headers.sort!
headers.each_with_index do |header, index|
break if (headers.size == (index-1))
list << header if (header == headers[index + 1])
end
return list
end
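  # Illustrative sketch of the decorate/extract pair above (hypothetical file
  # names, not part of the original): form_shallow_dependencies_rule rewrites
  # each '#include "foo.h"' so it is immediately followed by
  # '#include "@@@@foo.h"'; directly included headers therefore appear twice
  # in the resulting make rule, while transitively included ones appear once.
  # Given a rule such as
  #
  #   "test_foo.o: test_foo.c foo.h @@@@foo.h inc/transitive.h"
  #
  # extract_shallow_includes keeps only the headers that appear both decorated
  # and undecorated, returning ["foo.h"].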
def write_shallow_includes_list(filepath, list)
@yaml_wrapper.dump(filepath, list)
end
end
| 41.339286 | 137 | 0.711015 |
bf9b322f19912f136ced712279a504228be9da28 | 4,776 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "my_tasks_api_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 45.056604 | 114 | 0.762353 |
e84e9477a6ac75bc7a4fab12961eff2c53f6a0ab | 178 | class CreateMuscleGroups < ActiveRecord::Migration[5.0]
def change
create_table :muscle_groups do |t|
t.string :name
t.timestamps null: false
end
end
end
| 19.777778 | 55 | 0.691011 |
38b6448fc7c4070bf0bcd76d15c540f601082384 | 529 | require "quandl/data/version"
require 'quandl/support'
require "quandl/operation"
require 'quandl/babelfish'
require 'quandl/error/date_parse_error'
require 'quandl/data/attributes'
require 'quandl/data/cleaning'
require 'quandl/data/operations'
require 'quandl/data/format'
require 'quandl/data/validations'
require 'quandl/data/logging'
module Quandl
class Data
include Attributes
include Cleaning
include Operations
include Validations
include Logging if defined?(QUANDL_LOGGER) && QUANDL_LOGGER == true
end
end
| 21.16 | 69 | 0.799622 |
e994819a4fa123346fb051060c950e07ad799f73 | 968 | class VariantDetailPresenter < VariantIndexPresenter
def as_json(opts = {})
super.merge(
{
evidence_items: variant.evidence_items.map { |ei| EvidenceItemIndexPresenter.new(ei) },
variant_groups: variant.variant_groups.map { |vg| VariantGroupIndexPresenter.new(vg) },
assertions: variant.assertions.map { |a| AssertionIndexPresenter.new(a) },
variant_aliases: variant.variant_aliases.map(&:name),
hgvs_expressions: variant.hgvs_expressions.map(&:expression),
clinvar_entries: variant.clinvar_entries.map(&:clinvar_id),
lifecycle_actions: LifecyclePresenter.new(variant),
sources: variant.sources.map { |s| SourcePresenter.new(s) },
allele_registry_id: variant.allele_registry_id,
allele_registry_hgvs: HgvsExpression.allele_registry_hgvs(variant),
provisional_values: ProvisionalValuesPresenter.new(variant),
errors: variant.errors.to_h
}
)
end
end
| 46.095238 | 95 | 0.716942 |
38f6db2e8bffff170d01bc33b07730811cddc1d3 | 128 | module Approvals
module DSL
def verify(object, options = {})
Approval.new(object, options).verify
end
end
end
| 16 | 42 | 0.664063 |
28eb0d064349c0bafd6121004b70c63e8f270072 | 825 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Bigquery
module DataTransfer
module V1
VERSION = "0.2.1"
end
end
end
end
end
| 26.612903 | 74 | 0.72 |
e9eda2feee75e9e109f492e161edfc3d3f1bc1fc | 2,427 | require "test_helper"
class LinkValidatorTest < ActiveSupport::TestCase
class Dummy
include Mongoid::Document
field "body", type: String
field "assignee", type: String
GOVSPEAK_FIELDS = [:body].freeze
validates_with LinkValidator
end
context "links" do
should "not be verified for blank govspeak fields" do
doc = Dummy.new(body: nil)
assert_nothing_raised do
doc.valid?
end
assert_empty doc.errors
end
should "not contain empty array for errors on fields" do
doc = Dummy.new(body: "Nothing is invalid")
assert doc.valid?
assert_empty doc.errors[:body]
end
should "start with http[s]://, mailto: or /" do
doc = Dummy.new(body: "abc [external](external.com)")
assert doc.invalid?
assert_includes doc.errors.keys, :body
doc = Dummy.new(body: "abc [external](http://external.com)")
assert doc.valid?
doc = Dummy.new(body: "abc [internal](/internal)")
assert doc.valid?
end
should "not contain hover text" do
doc = Dummy.new(body: 'abc [foobar](http://foobar.com "hover")')
assert doc.invalid?
assert_includes doc.errors.keys, :body
end
should "validate smart quotes as normal quotes" do
doc = Dummy.new(body: "abc [foobar](http://foobar.com “hover”)")
assert doc.invalid?
assert_includes doc.errors.keys, :body
end
should "not set rel=external" do
doc = Dummy.new(body: 'abc [foobar](http://foobar.com){:rel="external"}')
assert doc.invalid?
assert_includes doc.errors.keys, :body
end
should "show multiple errors" do
doc = Dummy.new(body: 'abc [foobar](foobar.com "bar"){:rel="external"}')
assert doc.invalid?
assert_equal 3, doc.errors[:body].first.length
end
should "only show each error once" do
doc = Dummy.new(body: "abc [link1](foobar.com), ghi [link2](bazquux.com)")
assert doc.invalid?
assert_equal 1, doc.errors[:body].first.length
end
should "be validated when any attribute of the document changes" do
# already published document having link validation errors
doc = Dummy.new(body: "abc [link1](foobar.com), ghi [link2](bazquux.com)")
doc.save(validate: false)
doc.assignee = "4fdef0000000000000000001"
assert doc.invalid?
assert_equal 1, doc.errors[:body].first.length
end
end
end
| 28.552941 | 80 | 0.649361 |
62d4579df3f8e2778911383b76a3bddefbb3a6fb | 1,141 | class Libtirpc < Formula
desc "Port of Sun's Transport-Independent RPC library to Linux"
homepage "https://sourceforge.net/projects/libtirpc/"
url "https://downloads.sourceforge.net/project/libtirpc/libtirpc/1.3.1/libtirpc-1.3.1.tar.bz2"
sha256 "245895caf066bec5e3d4375942c8cb4366adad184c29c618d97f724ea309ee17"
license "BSD-3-Clause"
bottle do
cellar :any_skip_relocation
sha256 "3484de52349101c590fc661fd45209aa0f403f35303ca795078d3c3056134ac8" => :x86_64_linux
end
depends_on "krb5"
depends_on :linux
def install
system "./configure",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <rpc/des_crypt.h>
#include <stdio.h>
int main () {
char key[] = "My8digitkey1234";
if (sizeof(key) != 16)
return 1;
des_setparity(key);
printf("%lu\\n", sizeof(key));
return 0;
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-I#{include}/tirpc", "-ltirpc", "-o", "test"
system "./test"
end
end
| 27.829268 | 96 | 0.644172 |
21d0b7801d929b81a28f813c7dcdf0d7593643f5 | 147 | # frozen_string_literal: true
module DropboxApi::Metadata
class MetadataV2 < Base
field :metadata, DropboxApi::Metadata::Resource
end
end
| 18.375 | 51 | 0.768707 |
8767042950fd217cfe3986a966136475dc537868 | 13,153 | # frozen-string-literal: true
#
# The pg_hstore_ops extension adds support to Sequel's DSL to make
# it easier to call PostgreSQL hstore functions and operators.
#
# To load the extension:
#
# Sequel.extension :pg_hstore_ops
#
# The most common usage is taking an object that represents an SQL
# expression (such as a :symbol), and calling Sequel.hstore_op with it:
#
# h = Sequel.hstore_op(:hstore_column)
#
# If you have also loaded the pg_hstore extension, you can use
# Sequel.hstore as well:
#
# h = Sequel.hstore(:hstore_column)
#
# Also, on most Sequel expression objects, you can call the hstore
# method:
#
# h = Sequel[:hstore_column].hstore
#
# If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc],
# or you have loaded the core_refinements extension
# and have activated refinements for the file, you can also use Symbol#hstore:
#
# h = :hstore_column.hstore
#
# This creates a Sequel::Postgres::HStoreOp object that can be used
# for easier querying:
#
# h - 'a' # hstore_column - CAST('a' AS text)
# h['a'] # hstore_column -> 'a'
#
# h.concat(:other_hstore_column) # ||
# h.has_key?('a') # ?
# h.contain_all(:array_column) # ?&
# h.contain_any(:array_column) # ?|
# h.contains(:other_hstore_column) # @>
# h.contained_by(:other_hstore_column) # <@
#
# h.defined # defined(hstore_column)
# h.delete('a') # delete(hstore_column, 'a')
# h.each # each(hstore_column)
# h.keys # akeys(hstore_column)
# h.populate(:a) # populate_record(a, hstore_column)
# h.record_set(:a) # (a #= hstore_column)
# h.skeys # skeys(hstore_column)
# h.slice(:a) # slice(hstore_column, a)
# h.svals # svals(hstore_column)
# h.to_array # hstore_to_array(hstore_column)
# h.to_matrix # hstore_to_matrix(hstore_column)
# h.values # avals(hstore_column)
#
# Here are a couple examples for updating an existing hstore column:
#
# # Add a key, or update an existing key with a new value
# DB[:tab].update(h: Sequel.hstore_op(:h).concat('c'=>3))
#
# # Delete a key
# DB[:tab].update(h: Sequel.hstore_op(:h).delete('k1'))
#
# On PostgreSQL 14+, the hstore <tt>[]</tt> method will use subscripts instead of being
# the same as +get+, if the value being wrapped is an identifier:
#
# Sequel.hstore_op(:hstore_column)['a'] # hstore_column['a']
# Sequel.hstore_op(Sequel[:h][:s])['a'] # h.s['a']
#
# This support allows you to use hstore subscripts in UPDATE statements to update only
# part of a column:
#
# h = Sequel.hstore_op(:h)
# DB[:t].update(h['key1'] => 'val1', h['key2'] => 'val2')
# # UPDATE "t" SET "h"['key1'] = 'val1', "h"['key2'] = 'val2'
#
# See the PostgreSQL hstore function and operator documentation for more
# details on what these functions and operators do.
#
# If you are also using the pg_hstore extension, you should load it before
# loading this extension. Doing so will allow you to use HStore#op to get
# an HStoreOp, allowing you to perform hstore operations on hstore literals.
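#
# A small sketch of that combination (assuming both extensions are loaded):
#
#   Sequel.hstore('a'=>'b').op.keys # akeys() applied to the hstore literal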
#
# Some of these methods will accept ruby arrays and convert them automatically to
# PostgreSQL arrays if you have the pg_array extension loaded. Some of these methods
# will accept ruby hashes and convert them automatically to PostgreSQL hstores if the
# pg_hstore extension is loaded. Methods representing expressions that return
# PostgreSQL arrays will have the returned expression automatically wrapped in a
# Postgres::ArrayOp if the pg_array_ops extension is loaded.
#
# Related module: Sequel::Postgres::HStoreOp
#
module Sequel
module Postgres
# The HStoreOp class is a simple container for a single object that
# defines methods that yield Sequel expression objects representing
# PostgreSQL hstore operators and functions.
#
# In the method documentation examples, assume that:
#
# hstore_op = :hstore.hstore
class HStoreOp < Sequel::SQL::Wrapper
CONCAT = ["(".freeze, " || ".freeze, ")".freeze].freeze
CONTAIN_ALL = ["(".freeze, " ?& ".freeze, ")".freeze].freeze
CONTAIN_ANY = ["(".freeze, " ?| ".freeze, ")".freeze].freeze
CONTAINS = ["(".freeze, " @> ".freeze, ")".freeze].freeze
CONTAINED_BY = ["(".freeze, " <@ ".freeze, ")".freeze].freeze
HAS_KEY = ["(".freeze, " ? ".freeze, ")".freeze].freeze
LOOKUP = ["(".freeze, " -> ".freeze, ")".freeze].freeze
RECORD_SET = ["(".freeze, " #= ".freeze, ")".freeze].freeze
# Delete entries from an hstore using the subtraction operator:
#
# hstore_op - 'a' # (hstore - 'a')
def -(other)
other = if other.is_a?(String) && !other.is_a?(Sequel::LiteralString)
Sequel.cast_string(other)
else
wrap_input_array(wrap_input_hash(other))
end
HStoreOp.new(super)
end
# Lookup the value for the given key in an hstore:
#
# hstore_op['a'] # (hstore -> 'a')
def [](key)
if key.is_a?(Array) || (defined?(Sequel::Postgres::PGArray) && key.is_a?(Sequel::Postgres::PGArray)) || (defined?(Sequel::Postgres::ArrayOp) && key.is_a?(Sequel::Postgres::ArrayOp))
wrap_output_array(Sequel::SQL::PlaceholderLiteralString.new(LOOKUP, [value, wrap_input_array(key)]))
else
v = case @value
when Symbol, SQL::Identifier, SQL::QualifiedIdentifier
HStoreSubscriptOp.new(self, key)
else
Sequel::SQL::PlaceholderLiteralString.new(LOOKUP, [value, key])
end
Sequel::SQL::StringExpression.new(:NOOP, v)
end
end
# Check if the receiver contains all of the keys in the given array:
#
# hstore_op.contain_all(:a) # (hstore ?& a)
def contain_all(other)
bool_op(CONTAIN_ALL, wrap_input_array(other))
end
# Check if the receiver contains any of the keys in the given array:
#
# hstore_op.contain_any(:a) # (hstore ?| a)
def contain_any(other)
bool_op(CONTAIN_ANY, wrap_input_array(other))
end
# Check if the receiver contains all entries in the other hstore:
#
# hstore_op.contains(:h) # (hstore @> h)
def contains(other)
bool_op(CONTAINS, wrap_input_hash(other))
end
# Check if the other hstore contains all entries in the receiver:
#
# hstore_op.contained_by(:h) # (hstore <@ h)
def contained_by(other)
bool_op(CONTAINED_BY, wrap_input_hash(other))
end
# Check if the receiver contains a non-NULL value for the given key:
#
# hstore_op.defined('a') # defined(hstore, 'a')
def defined(key)
Sequel::SQL::BooleanExpression.new(:NOOP, function(:defined, key))
end
# Delete the matching entries from the receiver:
#
# hstore_op.delete('a') # delete(hstore, 'a')
def delete(key)
HStoreOp.new(function(:delete, wrap_input_array(wrap_input_hash(key))))
end
# Transform the receiver into a set of keys and values:
#
# hstore_op.each # each(hstore)
def each
function(:each)
end
# Check if the receiver contains the given key:
#
# hstore_op.has_key?('a') # (hstore ? 'a')
def has_key?(key)
bool_op(HAS_KEY, key)
end
alias include? has_key?
alias key? has_key?
alias member? has_key?
alias exist? has_key?
# Return the receiver.
def hstore
self
end
# Return the keys as a PostgreSQL array:
#
# hstore_op.keys # akeys(hstore)
def keys
wrap_output_array(function(:akeys))
end
alias akeys keys
# Merge a given hstore into the receiver:
#
# hstore_op.merge(:a) # (hstore || a)
def merge(other)
HStoreOp.new(Sequel::SQL::PlaceholderLiteralString.new(CONCAT, [self, wrap_input_hash(other)]))
end
alias concat merge
# Create a new record populated with entries from the receiver:
#
# hstore_op.populate(:a) # populate_record(a, hstore)
def populate(record)
SQL::Function.new(:populate_record, record, self)
end
# Update the values in a record using entries in the receiver:
#
# hstore_op.record_set(:a) # (a #= hstore)
def record_set(record)
Sequel::SQL::PlaceholderLiteralString.new(RECORD_SET, [record, value])
end
# Return the keys as a PostgreSQL set:
#
# hstore_op.skeys # skeys(hstore)
def skeys
function(:skeys)
end
# Return an hstore with only the keys in the given array:
#
# hstore_op.slice(:a) # slice(hstore, a)
def slice(keys)
HStoreOp.new(function(:slice, wrap_input_array(keys)))
end
# Return the values as a PostgreSQL set:
#
# hstore_op.svals # svals(hstore)
def svals
function(:svals)
end
# Return a flattened array of the receiver with alternating
# keys and values:
#
# hstore_op.to_array # hstore_to_array(hstore)
def to_array
wrap_output_array(function(:hstore_to_array))
end
# Return a nested array of the receiver, with arrays of
# 2 element (key/value) arrays:
#
# hstore_op.to_matrix # hstore_to_matrix(hstore)
def to_matrix
wrap_output_array(function(:hstore_to_matrix))
end
# Return the values as a PostgreSQL array:
#
# hstore_op.values # avals(hstore)
def values
wrap_output_array(function(:avals))
end
alias avals values
private
# Return a placeholder literal with the given str and args, wrapped
# in a boolean expression, used by operators that return booleans.
def bool_op(str, other)
Sequel::SQL::BooleanExpression.new(:NOOP, Sequel::SQL::PlaceholderLiteralString.new(str, [value, other]))
end
# Return a function with the given name, and the receiver as the first
# argument, with any additional arguments given.
def function(name, *args)
SQL::Function.new(name, self, *args)
end
# Wrap argument in a PGArray if it is an array
def wrap_input_array(obj)
if obj.is_a?(Array) && Sequel.respond_to?(:pg_array)
Sequel.pg_array(obj)
else
obj
end
end
# Wrap argument in an Hstore if it is a hash
def wrap_input_hash(obj)
if obj.is_a?(Hash) && Sequel.respond_to?(:hstore)
Sequel.hstore(obj)
else
obj
end
end
# Wrap argument in a PGArrayOp if supported
def wrap_output_array(obj)
if Sequel.respond_to?(:pg_array_op)
Sequel.pg_array_op(obj)
else
obj
end
end
end
# Represents hstore subscripts. This is abstracted because the
# subscript support depends on the database version.
class HStoreSubscriptOp < SQL::Expression
SUBSCRIPT = ["".freeze, "[".freeze, "]".freeze].freeze
# The expression being subscripted
attr_reader :expression
# The subscript to use
attr_reader :sub
# Set the expression and subscript to the given arguments
def initialize(expression, sub)
@expression = expression
@sub = sub
freeze
end
# Use subscripts instead of -> operator on PostgreSQL 14+
def to_s_append(ds, sql)
server_version = ds.db.server_version
frag = server_version && server_version >= 140000 ? SUBSCRIPT : HStoreOp::LOOKUP
ds.literal_append(sql, Sequel::SQL::PlaceholderLiteralString.new(frag, [@expression, @sub]))
end
# Support transforming of hstore subscripts
def sequel_ast_transform(transformer)
self.class.new(transformer.call(@expression), transformer.call(@sub))
end
end
module HStoreOpMethods
# Wrap the receiver in an HStoreOp so you can easily use the PostgreSQL
# hstore functions and operators with it.
def hstore
HStoreOp.new(self)
end
end
# :nocov:
if defined?(HStore)
# :nocov:
class HStore
# Wrap the receiver in an HStoreOp so you can easily use the PostgreSQL
# hstore functions and operators with it.
def op
HStoreOp.new(self)
end
end
end
end
module SQL::Builders
# Return the object wrapped in an Postgres::HStoreOp.
def hstore_op(v)
case v
when Postgres::HStoreOp
v
else
Postgres::HStoreOp.new(v)
end
end
end
class SQL::GenericExpression
include Sequel::Postgres::HStoreOpMethods
end
class LiteralString
include Sequel::Postgres::HStoreOpMethods
end
end
# :nocov:
if Sequel.core_extensions?
class Symbol
include Sequel::Postgres::HStoreOpMethods
end
end
if defined?(Sequel::CoreRefinements)
module Sequel::CoreRefinements
refine Symbol do
include Sequel::Postgres::HStoreOpMethods
end
end
end
# :nocov:
| 31.770531 | 189 | 0.632479 |
91b4e023c067a06a2838498101b0a5fc472a5d30 | 3,337 | require File.join(File.dirname(File.expand_path(__FILE__)), "spec_helper")
context "A paginated dataset" do
before do
@d = Sequel::Dataset.new(nil)
@d.meta_def(:count) {153}
@paginated = @d.paginate(1, 20)
end
specify "should raise an error if the dataset already has a limit" do
proc{@d.limit(10).paginate(1,10)}.should raise_error(Sequel::Error)
proc{@paginated.paginate(2,20)}.should raise_error(Sequel::Error)
end
specify "should set the limit and offset options correctly" do
@paginated.opts[:limit].should == 20
@paginated.opts[:offset].should == 0
end
specify "should set the page count correctly" do
@paginated.page_count.should == 8
@d.paginate(1, 50).page_count.should == 4
end
specify "should set the current page number correctly" do
@paginated.current_page.should == 1
@d.paginate(3, 50).current_page.should == 3
end
specify "should return the next page number or nil if we're on the last" do
@paginated.next_page.should == 2
@d.paginate(4, 50).next_page.should be_nil
end
specify "should return the previous page number or nil if we're on the first" do
@paginated.prev_page.should be_nil
@d.paginate(4, 50).prev_page.should == 3
end
specify "should return the page range" do
@paginated.page_range.should == (1..8)
@d.paginate(4, 50).page_range.should == (1..4)
end
specify "should return the record range for the current page" do
@paginated.current_page_record_range.should == (1..20)
@d.paginate(4, 50).current_page_record_range.should == (151..153)
@d.paginate(5, 50).current_page_record_range.should == (0..0)
end
specify "should return the record count for the current page" do
@paginated.current_page_record_count.should == 20
@d.paginate(3, 50).current_page_record_count.should == 50
@d.paginate(4, 50).current_page_record_count.should == 3
@d.paginate(5, 50).current_page_record_count.should == 0
end
specify "should know if current page is last page" do
@paginated.last_page?.should be_false
@d.paginate(2, 20).last_page?.should be_false
@d.paginate(5, 30).last_page?.should be_false
@d.paginate(6, 30).last_page?.should be_true
end
specify "should know if current page is first page" do
@paginated.first_page?.should be_true
@d.paginate(1, 20).first_page?.should be_true
@d.paginate(2, 20).first_page?.should be_false
end
specify "should work with fixed sql" do
ds = @d.clone(:sql => 'select * from blah')
ds.meta_def(:count) {150}
ds.paginate(2, 50).sql.should == 'SELECT * FROM (select * from blah) AS t1 LIMIT 50 OFFSET 50'
end
end
context "Dataset#each_page" do
before do
@d = Sequel::Dataset.new(nil).from(:items)
@d.meta_def(:count) {153}
end
specify "should raise an error if the dataset already has a limit" do
proc{@d.limit(10).each_page(10){}}.should raise_error(Sequel::Error)
end
specify "should iterate over each page in the resultset as a paginated dataset" do
a = []
@d.each_page(50) {|p| a << p}
a.map {|p| p.sql}.should == [
'SELECT * FROM items LIMIT 50 OFFSET 0',
'SELECT * FROM items LIMIT 50 OFFSET 50',
'SELECT * FROM items LIMIT 50 OFFSET 100',
'SELECT * FROM items LIMIT 50 OFFSET 150',
]
end
end
| 33.37 | 98 | 0.682349 |
f7440a5865c4eec0ad77a97f2f025703d8bf3f6a | 39,945 | module Sequel
class Dataset
# ---------------------
# :section: 2 - Methods that execute code on the database
# These methods all execute the dataset's SQL on the database.
# They don't return modified datasets, so if used in a method chain
# they should be the last method called.
# ---------------------
# Action methods defined by Sequel that execute code on the database.
ACTION_METHODS = (<<-METHS).split.map{|x| x.to_sym}
<< [] all avg count columns columns! delete each
empty? fetch_rows first first! get import insert interval last
map max min multi_insert paged_each range select_hash select_hash_groups select_map select_order_map
single_record single_value sum to_hash to_hash_groups truncate update
METHS
# Inserts the given argument into the database. Returns self so it
# can be used safely when chaining:
#
# DB[:items] << {:id=>0, :name=>'Zero'} << DB[:old_items].select(:id, name)
def <<(arg)
insert(arg)
self
end
# Returns the first record matching the conditions. Examples:
#
# DB[:table][:id=>1] # SELECT * FROM table WHERE (id = 1) LIMIT 1
# # => {:id=1}
def [](*conditions)
raise(Error, ARRAY_ACCESS_ERROR_MSG) if (conditions.length == 1 and conditions.first.is_a?(Integer)) or conditions.length == 0
first(*conditions)
end
# Returns an array with all records in the dataset. If a block is given,
# the array is iterated over after all items have been loaded.
#
# DB[:table].all # SELECT * FROM table
# # => [{:id=>1, ...}, {:id=>2, ...}, ...]
#
# # Iterate over all rows in the table
# DB[:table].all{|row| p row}
def all(&block)
_all(block){|a| each{|r| a << r}}
end
# Returns the average value for the given column/expression.
# Uses a virtual row block if no argument is given.
#
# DB[:table].avg(:number) # SELECT avg(number) FROM table LIMIT 1
# # => 3
# DB[:table].avg{function(column)} # SELECT avg(function(column)) FROM table LIMIT 1
# # => 1
def avg(column=Sequel.virtual_row(&Proc.new))
aggregate_dataset.get{avg(column).as(:avg)}
end
# Returns the columns in the result set in order as an array of symbols.
# If the columns are currently cached, returns the cached value. Otherwise,
# a SELECT query is performed to retrieve a single row in order to get the columns.
#
# If you are looking for all columns for a single table and maybe some information about
# each column (e.g. database type), see <tt>Database#schema</tt>.
#
# DB[:table].columns
# # => [:id, :name]
def columns
return @columns if @columns
ds = unfiltered.unordered.naked.clone(:distinct => nil, :limit => 1, :offset=>nil)
ds.each{break}
@columns = ds.instance_variable_get(:@columns)
@columns || []
end
# Ignore any cached column information and perform a query to retrieve
# a row in order to get the columns.
#
# DB[:table].columns!
# # => [:id, :name]
def columns!
@columns = nil
columns
end
# Returns the number of records in the dataset. If an argument is provided,
# it is used as the argument to count. If a block is provided, it is
# treated as a virtual row, and the result is used as the argument to
# count.
#
# DB[:table].count # SELECT count(*) AS count FROM table LIMIT 1
# # => 3
# DB[:table].count(:column) # SELECT count(column) AS count FROM table LIMIT 1
# # => 2
# DB[:table].count{foo(column)} # SELECT count(foo(column)) AS count FROM table LIMIT 1
# # => 1
def count(arg=(no_arg=true), &block)
if no_arg
if block
arg = Sequel.virtual_row(&block)
aggregate_dataset.get{count(arg).as(:count)}
else
aggregate_dataset.get{count{}.*.as(:count)}.to_i
end
elsif block
raise Error, 'cannot provide both argument and block to Dataset#count'
else
aggregate_dataset.get{count(arg).as(:count)}
end
end
# Deletes the records in the dataset. The returned value should be
# number of records deleted, but that is adapter dependent.
#
# DB[:table].delete # DELETE * FROM table
# # => 3
def delete(&block)
sql = delete_sql
if uses_returning?(:delete)
returning_fetch_rows(sql, &block)
else
execute_dui(sql)
end
end
# Iterates over the records in the dataset as they are yielded from the
# database adapter, and returns self.
#
# DB[:table].each{|row| p row} # SELECT * FROM table
#
# Note that this method is not safe to use on many adapters if you are
# running additional queries inside the provided block. If you are
# running queries inside the block, you should use +all+ instead of +each+
# for the outer queries, or use a separate thread or shard inside +each+.
def each
if row_proc = @row_proc
fetch_rows(select_sql){|r| yield row_proc.call(r)}
else
fetch_rows(select_sql){|r| yield r}
end
self
end
# Returns true if no records exist in the dataset, false otherwise
#
# DB[:table].empty? # SELECT 1 AS one FROM table LIMIT 1
# # => false
def empty?
ds = @opts[:order] ? unordered : self
ds.get(Sequel::SQL::AliasedExpression.new(1, :one)).nil?
end
    # If an integer argument is given, it is interpreted as a limit, and then returns all
# matching records up to that limit. If no argument is passed,
# it returns the first matching record. If any other type of
# argument(s) is passed, it is given to filter and the
# first matching record is returned. If a block is given, it is used
# to filter the dataset before returning anything.
#
# If there are no records in the dataset, returns nil (or an empty
# array if an integer argument is given).
#
# Examples:
#
# DB[:table].first # SELECT * FROM table LIMIT 1
# # => {:id=>7}
#
# DB[:table].first(2) # SELECT * FROM table LIMIT 2
# # => [{:id=>6}, {:id=>4}]
#
# DB[:table].first(:id=>2) # SELECT * FROM table WHERE (id = 2) LIMIT 1
# # => {:id=>2}
#
# DB[:table].first("id = 3") # SELECT * FROM table WHERE (id = 3) LIMIT 1
# # => {:id=>3}
#
# DB[:table].first("id = ?", 4) # SELECT * FROM table WHERE (id = 4) LIMIT 1
# # => {:id=>4}
#
# DB[:table].first{id > 2} # SELECT * FROM table WHERE (id > 2) LIMIT 1
# # => {:id=>5}
#
# DB[:table].first("id > ?", 4){id < 6} # SELECT * FROM table WHERE ((id > 4) AND (id < 6)) LIMIT 1
# # => {:id=>5}
#
# DB[:table].first(2){id < 2} # SELECT * FROM table WHERE (id < 2) LIMIT 2
# # => [{:id=>1}]
def first(*args, &block)
ds = block ? filter(&block) : self
if args.empty?
ds.single_record
else
args = (args.size == 1) ? args.first : args
if args.is_a?(Integer)
ds.limit(args).all
else
ds.filter(args).single_record
end
end
end
# Calls first. If first returns nil (signaling that no
# row matches), raise a Sequel::NoMatchingRow exception.
def first!(*args, &block)
first(*args, &block) || raise(Sequel::NoMatchingRow)
end
# Return the column value for the first matching record in the dataset.
# Raises an error if both an argument and block is given.
#
# DB[:table].get(:id) # SELECT id FROM table LIMIT 1
# # => 3
#
# ds.get{sum(id)} # SELECT sum(id) AS v FROM table LIMIT 1
# # => 6
#
# You can pass an array of arguments to return multiple arguments,
# but you must make sure each element in the array has an alias that
# Sequel can determine:
#
# DB[:table].get([:id, :name]) # SELECT id, name FROM table LIMIT 1
# # => [3, 'foo']
#
# DB[:table].get{[sum(id).as(sum), name]} # SELECT sum(id) AS sum, name FROM table LIMIT 1
# # => [6, 'foo']
def get(column=(no_arg=true; nil), &block)
ds = naked
if block
raise(Error, ARG_BLOCK_ERROR_MSG) unless no_arg
ds = ds.select(&block)
column = ds.opts[:select]
column = nil if column.is_a?(Array) && column.length < 2
else
ds = if column.is_a?(Array)
ds.select(*column)
else
ds.select(auto_alias_expression(column))
end
end
if column.is_a?(Array)
if r = ds.single_record
r.values_at(*hash_key_symbols(column))
end
else
ds.single_value
end
end
# Inserts multiple records into the associated table. This method can be
# used to efficiently insert a large number of records into a table in a
# single query if the database supports it. Inserts
# are automatically wrapped in a transaction.
#
# This method is called with a columns array and an array of value arrays:
#
# DB[:table].import([:x, :y], [[1, 2], [3, 4]])
# # INSERT INTO table (x, y) VALUES (1, 2)
# # INSERT INTO table (x, y) VALUES (3, 4)
#
# This method also accepts a dataset instead of an array of value arrays:
#
# DB[:table].import([:x, :y], DB[:table2].select(:a, :b))
# # INSERT INTO table (x, y) SELECT a, b FROM table2
#
# Options:
# :commit_every :: Open a new transaction for every given number of records.
# For example, if you provide a value of 50, will commit
# after every 50 records.
    # :return :: When set to :primary_key, returns an array of
# autoincremented primary key values for the rows inserted.
# :server :: Set the server/shard to use for the transaction and insert
# queries.
# :slice :: Same as :commit_every, :commit_every takes precedence.
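    #
    # For example (an illustrative sketch; +rows+ stands in for a large array
    # of value arrays):
    #
    #   DB[:table].import([:x, :y], rows, :commit_every=>50)
    #   # Inserts the rows in slices of 50, each slice in its own transaction.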
def import(columns, values, opts=OPTS)
return @db.transaction{insert(columns, values)} if values.is_a?(Dataset)
return if values.empty?
raise(Error, IMPORT_ERROR_MSG) if columns.empty?
ds = opts[:server] ? server(opts[:server]) : self
if slice_size = opts.fetch(:commit_every, opts.fetch(:slice, default_import_slice))
offset = 0
rows = []
while offset < values.length
rows << ds._import(columns, values[offset, slice_size], opts)
offset += slice_size
end
rows.flatten
else
ds._import(columns, values, opts)
end
end
# Inserts values into the associated table. The returned value is generally
# the value of the primary key for the inserted row, but that is adapter dependent.
#
# +insert+ handles a number of different argument formats:
# no arguments or single empty hash :: Uses DEFAULT VALUES
    # single hash :: Most common format, treats keys as columns and values as values
# single array :: Treats entries as values, with no columns
# two arrays :: Treats first array as columns, second array as values
# single Dataset :: Treats as an insert based on a selection from the dataset given,
# with no columns
# array and dataset :: Treats as an insert based on a selection from the dataset
# given, with the columns given by the array.
#
# Examples:
#
# DB[:items].insert
# # INSERT INTO items DEFAULT VALUES
#
# DB[:items].insert({})
# # INSERT INTO items DEFAULT VALUES
#
# DB[:items].insert([1,2,3])
# # INSERT INTO items VALUES (1, 2, 3)
#
# DB[:items].insert([:a, :b], [1,2])
# # INSERT INTO items (a, b) VALUES (1, 2)
#
# DB[:items].insert(:a => 1, :b => 2)
# # INSERT INTO items (a, b) VALUES (1, 2)
#
# DB[:items].insert(DB[:old_items])
# # INSERT INTO items SELECT * FROM old_items
#
# DB[:items].insert([:a, :b], DB[:old_items])
# # INSERT INTO items (a, b) SELECT * FROM old_items
def insert(*values, &block)
sql = insert_sql(*values)
if uses_returning?(:insert)
returning_fetch_rows(sql, &block)
else
execute_insert(sql)
end
end
# Returns the interval between minimum and maximum values for the given
# column/expression. Uses a virtual row block if no argument is given.
#
# DB[:table].interval(:id) # SELECT (max(id) - min(id)) FROM table LIMIT 1
# # => 6
# DB[:table].interval{function(column)} # SELECT (max(function(column)) - min(function(column))) FROM table LIMIT 1
# # => 7
def interval(column=Sequel.virtual_row(&Proc.new))
aggregate_dataset.get{(max(column) - min(column)).as(:interval)}
end
# Reverses the order and then runs #first with the given arguments and block. Note that this
# will not necessarily give you the last record in the dataset,
# unless you have an unambiguous order. If there is not
# currently an order for this dataset, raises an +Error+.
#
# DB[:table].order(:id).last # SELECT * FROM table ORDER BY id DESC LIMIT 1
# # => {:id=>10}
#
# DB[:table].order(Sequel.desc(:id)).last(2) # SELECT * FROM table ORDER BY id ASC LIMIT 2
# # => [{:id=>1}, {:id=>2}]
def last(*args, &block)
raise(Error, 'No order specified') unless @opts[:order]
reverse.first(*args, &block)
end
# Maps column values for each record in the dataset (if a column name is
# given), or performs the stock mapping functionality of +Enumerable+ otherwise.
# Raises an +Error+ if both an argument and block are given.
#
# DB[:table].map(:id) # SELECT * FROM table
# # => [1, 2, 3, ...]
#
# DB[:table].map{|r| r[:id] * 2} # SELECT * FROM table
# # => [2, 4, 6, ...]
#
# You can also provide an array of column names:
#
# DB[:table].map([:id, :name]) # SELECT * FROM table
# # => [[1, 'A'], [2, 'B'], [3, 'C'], ...]
def map(column=nil, &block)
if column
raise(Error, ARG_BLOCK_ERROR_MSG) if block
return naked.map(column) if row_proc
if column.is_a?(Array)
super(){|r| r.values_at(*column)}
else
super(){|r| r[column]}
end
else
super(&block)
end
end
# Returns the maximum value for the given column/expression.
# Uses a virtual row block if no argument is given.
#
# DB[:table].max(:id) # SELECT max(id) FROM table LIMIT 1
# # => 10
# DB[:table].max{function(column)} # SELECT max(function(column)) FROM table LIMIT 1
# # => 7
def max(column=Sequel.virtual_row(&Proc.new))
aggregate_dataset.get{max(column).as(:max)}
end
# Returns the minimum value for the given column/expression.
# Uses a virtual row block if no argument is given.
#
# DB[:table].min(:id) # SELECT min(id) FROM table LIMIT 1
# # => 1
# DB[:table].min{function(column)} # SELECT min(function(column)) FROM table LIMIT 1
# # => 0
def min(column=Sequel.virtual_row(&Proc.new))
aggregate_dataset.get{min(column).as(:min)}
end
# This is a front end for import that allows you to submit an array of
# hashes instead of arrays of columns and values:
#
# DB[:table].multi_insert([{:x => 1}, {:x => 2}])
# # INSERT INTO table (x) VALUES (1)
# # INSERT INTO table (x) VALUES (2)
#
# Be aware that all hashes should have the same keys if you use this calling method,
# otherwise some columns could be missed or set to null instead of to default
# values.
#
# This respects the same options as #import.
def multi_insert(hashes, opts=OPTS)
return if hashes.empty?
columns = hashes.first.keys
import(columns, hashes.map{|h| columns.map{|c| h[c]}}, opts)
end
    # Yields each row in the dataset, but internally uses multiple queries as needed to
# process the entire result set without keeping all rows in the dataset in memory,
# even if the underlying driver buffers all query results in memory.
#
# Because this uses multiple queries internally, in order to remain consistent,
# it also uses a transaction internally. Additionally, to work correctly, the dataset
# must have unambiguous order. Using an ambiguous order can result in an infinite loop,
# as well as subtler bugs such as yielding duplicate rows or rows being skipped.
#
# Sequel checks that the datasets using this method have an order, but it cannot
# ensure that the order is unambiguous.
#
# Options:
# :rows_per_fetch :: The number of rows to fetch per query. Defaults to 1000.
# :strategy :: The strategy to use for paging of results. By default this is :offset,
# for using an approach with a limit and offset for every page. This can
# be set to :filter, which uses a limit and a filter that excludes
# rows from previous pages. In order for this strategy to work, you must be
# selecting the columns you are ordering by, and none of the columns can contain
# NULLs. Note that some Sequel adapters have optimized implementations that will
# use cursors or streaming regardless of the :strategy option used.
# :filter_values :: If the :strategy=>:filter option is used, this option should be a proc
    #                   that accepts the last retrieved row for the previous page and an array of
# ORDER BY expressions, and returns an array of values relating to those
# expressions for the last retrieved row. You will need to use this option
# if your ORDER BY expressions are not simple columns, if they contain
# qualified identifiers that would be ambiguous unqualified, if they contain
# any identifiers that are aliased in SELECT, and potentially other cases.
#
# Examples:
#
# DB[:table].order(:id).paged_each{|row| }
# # SELECT * FROM table ORDER BY id LIMIT 1000
# # SELECT * FROM table ORDER BY id LIMIT 1000 OFFSET 1000
# # ...
#
# DB[:table].order(:id).paged_each(:rows_per_fetch=>100){|row| }
# # SELECT * FROM table ORDER BY id LIMIT 100
# # SELECT * FROM table ORDER BY id LIMIT 100 OFFSET 100
# # ...
#
# DB[:table].order(:id).paged_each(:strategy=>:filter){|row| }
# # SELECT * FROM table ORDER BY id LIMIT 1000
# # SELECT * FROM table WHERE id > 1001 ORDER BY id LIMIT 1000
# # ...
#
# DB[:table].order(:table__id).paged_each(:strategy=>:filter,
# :filter_values=>proc{|row, exprs| [row[:id]]}){|row| }
# # SELECT * FROM table ORDER BY id LIMIT 1000
# # SELECT * FROM table WHERE id > 1001 ORDER BY id LIMIT 1000
# # ...
def paged_each(opts=OPTS)
unless @opts[:order]
raise Sequel::Error, "Dataset#paged_each requires the dataset be ordered"
end
unless block_given?
return enum_for(:paged_each, opts)
end
total_limit = @opts[:limit]
offset = @opts[:offset]
if server = @opts[:server]
opts = opts.merge(:server=>server)
end
rows_per_fetch = opts[:rows_per_fetch] || 1000
strategy = if offset || total_limit
:offset
else
opts[:strategy] || :offset
end
db.transaction(opts) do
case strategy
when :filter
filter_values = opts[:filter_values] || proc{|row, exprs| exprs.map{|e| row[hash_key_symbol(e)]}}
base_ds = ds = limit(rows_per_fetch)
while ds
last_row = nil
ds.each do |row|
last_row = row
yield row
end
ds = (base_ds.where(ignore_values_preceding(last_row, &filter_values)) if last_row)
end
else
offset ||= 0
num_rows_yielded = rows_per_fetch
total_rows = 0
while num_rows_yielded == rows_per_fetch && (total_limit.nil? || total_rows < total_limit)
if total_limit && total_rows + rows_per_fetch > total_limit
rows_per_fetch = total_limit - total_rows
end
num_rows_yielded = 0
limit(rows_per_fetch, offset).each do |row|
num_rows_yielded += 1
total_rows += 1 if total_limit
yield row
end
offset += rows_per_fetch
end
end
end
self
end
# Returns a +Range+ instance made from the minimum and maximum values for the
# given column/expression. Uses a virtual row block if no argument is given.
#
    #   DB[:table].range(:id) # SELECT min(id) AS v1, max(id) AS v2 FROM table LIMIT 1
# # => 1..10
    #   DB[:table].range{function(column)} # SELECT min(function(column)) AS v1, max(function(column)) AS v2 FROM table LIMIT 1
# # => 0..7
def range(column=Sequel.virtual_row(&Proc.new))
if r = aggregate_dataset.select{[min(column).as(v1), max(column).as(v2)]}.first
(r[:v1]..r[:v2])
end
end
# Returns a hash with key_column values as keys and value_column values as
# values. Similar to to_hash, but only selects the columns given.
#
# DB[:table].select_hash(:id, :name) # SELECT id, name FROM table
# # => {1=>'a', 2=>'b', ...}
#
# You can also provide an array of column names for either the key_column,
# the value column, or both:
#
# DB[:table].select_hash([:id, :foo], [:name, :bar]) # SELECT * FROM table
# # {[1, 3]=>['a', 'c'], [2, 4]=>['b', 'd'], ...}
#
# When using this method, you must be sure that each expression has an alias
# that Sequel can determine. Usually you can do this by calling the #as method
# on the expression and providing an alias.
def select_hash(key_column, value_column)
_select_hash(:to_hash, key_column, value_column)
end
# Returns a hash with key_column values as keys and an array of value_column values.
# Similar to to_hash_groups, but only selects the columns given.
#
# DB[:table].select_hash_groups(:name, :id) # SELECT id, name FROM table
# # => {'a'=>[1, 4, ...], 'b'=>[2, ...], ...}
#
# You can also provide an array of column names for either the key_column,
# the value column, or both:
#
# DB[:table].select_hash_groups([:first, :middle], [:last, :id]) # SELECT * FROM table
# # {['a', 'b']=>[['c', 1], ['d', 2], ...], ...}
#
# When using this method, you must be sure that each expression has an alias
# that Sequel can determine. Usually you can do this by calling the #as method
# on the expression and providing an alias.
def select_hash_groups(key_column, value_column)
_select_hash(:to_hash_groups, key_column, value_column)
end
# Selects the column given (either as an argument or as a block), and
# returns an array of all values of that column in the dataset. If you
# give a block argument that returns an array with multiple entries,
# the contents of the resulting array are undefined. Raises an Error
# if called with both an argument and a block.
#
# DB[:table].select_map(:id) # SELECT id FROM table
# # => [3, 5, 8, 1, ...]
#
# DB[:table].select_map{id * 2} # SELECT (id * 2) FROM table
# # => [6, 10, 16, 2, ...]
#
# You can also provide an array of column names:
#
# DB[:table].select_map([:id, :name]) # SELECT id, name FROM table
# # => [[1, 'A'], [2, 'B'], [3, 'C'], ...]
#
# If you provide an array of expressions, you must be sure that each entry
# in the array has an alias that Sequel can determine. Usually you can do this
# by calling the #as method on the expression and providing an alias.
def select_map(column=nil, &block)
_select_map(column, false, &block)
end
# The same as select_map, but in addition orders the array by the column.
#
# DB[:table].select_order_map(:id) # SELECT id FROM table ORDER BY id
# # => [1, 2, 3, 4, ...]
#
# DB[:table].select_order_map{id * 2} # SELECT (id * 2) FROM table ORDER BY (id * 2)
# # => [2, 4, 6, 8, ...]
#
# You can also provide an array of column names:
#
# DB[:table].select_order_map([:id, :name]) # SELECT id, name FROM table ORDER BY id, name
# # => [[1, 'A'], [2, 'B'], [3, 'C'], ...]
#
# If you provide an array of expressions, you must be sure that each entry
# in the array has an alias that Sequel can determine. Usually you can do this
# by calling the #as method on the expression and providing an alias.
def select_order_map(column=nil, &block)
_select_map(column, true, &block)
end
# Returns the first record in the dataset, or nil if the dataset
# has no records. Users should probably use +first+ instead of
# this method.
def single_record
clone(:limit=>1).each{|r| return r}
nil
end
# Returns the first value of the first record in the dataset.
# Returns nil if dataset is empty. Users should generally use
# +get+ instead of this method.
def single_value
if r = ungraphed.naked.single_record
r.values.first
end
end
# Returns the sum for the given column/expression.
# Uses a virtual row block if no column is given.
#
# DB[:table].sum(:id) # SELECT sum(id) FROM table LIMIT 1
# # => 55
# DB[:table].sum{function(column)} # SELECT sum(function(column)) FROM table LIMIT 1
# # => 10
def sum(column=Sequel.virtual_row(&Proc.new))
aggregate_dataset.get{sum(column).as(:sum)}
end
# Returns a hash with one column used as key and another used as value.
# If rows have duplicate values for the key column, the latter row(s)
# will overwrite the value of the previous row(s). If the value_column
# is not given or nil, uses the entire hash as the value.
#
# DB[:table].to_hash(:id, :name) # SELECT * FROM table
# # {1=>'Jim', 2=>'Bob', ...}
#
# DB[:table].to_hash(:id) # SELECT * FROM table
# # {1=>{:id=>1, :name=>'Jim'}, 2=>{:id=>2, :name=>'Bob'}, ...}
#
# You can also provide an array of column names for either the key_column,
# the value column, or both:
#
# DB[:table].to_hash([:id, :foo], [:name, :bar]) # SELECT * FROM table
# # {[1, 3]=>['Jim', 'bo'], [2, 4]=>['Bob', 'be'], ...}
#
# DB[:table].to_hash([:id, :name]) # SELECT * FROM table
    #   # {[1, 'Jim']=>{:id=>1, :name=>'Jim'}, [2, 'Bob']=>{:id=>2, :name=>'Bob'}, ...}
def to_hash(key_column, value_column = nil)
h = {}
if value_column
return naked.to_hash(key_column, value_column) if row_proc
if value_column.is_a?(Array)
if key_column.is_a?(Array)
each{|r| h[r.values_at(*key_column)] = r.values_at(*value_column)}
else
each{|r| h[r[key_column]] = r.values_at(*value_column)}
end
else
if key_column.is_a?(Array)
each{|r| h[r.values_at(*key_column)] = r[value_column]}
else
each{|r| h[r[key_column]] = r[value_column]}
end
end
elsif key_column.is_a?(Array)
each{|r| h[r.values_at(*key_column)] = r}
else
each{|r| h[r[key_column]] = r}
end
h
end
# Returns a hash with one column used as key and the values being an
# array of column values. If the value_column is not given or nil, uses
# the entire hash as the value.
#
# DB[:table].to_hash_groups(:name, :id) # SELECT * FROM table
# # {'Jim'=>[1, 4, 16, ...], 'Bob'=>[2], ...}
#
# DB[:table].to_hash_groups(:name) # SELECT * FROM table
# # {'Jim'=>[{:id=>1, :name=>'Jim'}, {:id=>4, :name=>'Jim'}, ...], 'Bob'=>[{:id=>2, :name=>'Bob'}], ...}
#
# You can also provide an array of column names for either the key_column,
# the value column, or both:
#
# DB[:table].to_hash_groups([:first, :middle], [:last, :id]) # SELECT * FROM table
# # {['Jim', 'Bob']=>[['Smith', 1], ['Jackson', 4], ...], ...}
#
# DB[:table].to_hash_groups([:first, :middle]) # SELECT * FROM table
# # {['Jim', 'Bob']=>[{:id=>1, :first=>'Jim', :middle=>'Bob', :last=>'Smith'}, ...], ...}
def to_hash_groups(key_column, value_column = nil)
h = {}
if value_column
return naked.to_hash_groups(key_column, value_column) if row_proc
if value_column.is_a?(Array)
if key_column.is_a?(Array)
each{|r| (h[r.values_at(*key_column)] ||= []) << r.values_at(*value_column)}
else
each{|r| (h[r[key_column]] ||= []) << r.values_at(*value_column)}
end
else
if key_column.is_a?(Array)
each{|r| (h[r.values_at(*key_column)] ||= []) << r[value_column]}
else
each{|r| (h[r[key_column]] ||= []) << r[value_column]}
end
end
elsif key_column.is_a?(Array)
each{|r| (h[r.values_at(*key_column)] ||= []) << r}
else
each{|r| (h[r[key_column]] ||= []) << r}
end
h
end
# Truncates the dataset. Returns nil.
#
# DB[:table].truncate # TRUNCATE table
# # => nil
def truncate
execute_ddl(truncate_sql)
end
# Updates values for the dataset. The returned value is generally the
# number of rows updated, but that is adapter dependent. +values+ should
    # be a hash where the keys are columns to set and values are the values to
# which to set the columns.
#
# DB[:table].update(:x=>nil) # UPDATE table SET x = NULL
# # => 10
#
# DB[:table].update(:x=>Sequel.expr(:x)+1, :y=>0) # UPDATE table SET x = (x + 1), y = 0
# # => 10
def update(values=OPTS, &block)
sql = update_sql(values)
if uses_returning?(:update)
returning_fetch_rows(sql, &block)
else
execute_dui(sql)
end
end
# Run the given SQL and return an array of all rows. If a block is given,
# each row is yielded to the block after all rows are loaded. See with_sql_each.
def with_sql_all(sql, &block)
_all(block){|a| with_sql_each(sql){|r| a << r}}
end
# Execute the given SQL and return the number of rows deleted. This exists
# solely as an optimization, replacing with_sql(sql).delete. It's significantly
# faster as it does not require cloning the current dataset.
def with_sql_delete(sql)
execute_dui(sql)
end
alias with_sql_update with_sql_delete
# Run the given SQL and yield each returned row to the block.
#
# This method should not be called on a shared dataset if the columns selected
# in the given SQL do not match the columns in the receiver.
def with_sql_each(sql)
if row_proc = @row_proc
fetch_rows(sql){|r| yield row_proc.call(r)}
else
fetch_rows(sql){|r| yield r}
end
self
end
# Run the given SQL and return the first row, or nil if no rows were returned.
# See with_sql_each.
def with_sql_first(sql)
with_sql_each(sql){|r| return r}
nil
end
# Run the given SQL and return the first value in the first row, or nil if no
# rows were returned. For this to make sense, the SQL given should select
# only a single value. See with_sql_each.
def with_sql_single_value(sql)
if r = with_sql_first(sql)
r.values.first
end
end
# Execute the given SQL and (on most databases) return the primary key of the
# inserted row.
def with_sql_insert(sql)
execute_insert(sql)
end
protected
# Internals of #import. If primary key values are requested, use
# separate insert commands for each row. Otherwise, call #multi_insert_sql
# and execute each statement it gives separately.
def _import(columns, values, opts)
trans_opts = opts.merge(:server=>@opts[:server])
if opts[:return] == :primary_key
@db.transaction(trans_opts){values.map{|v| insert(columns, v)}}
else
stmts = multi_insert_sql(columns, values)
@db.transaction(trans_opts){stmts.each{|st| execute_dui(st)}}
end
end
# Return an array of arrays of values given by the symbols in ret_cols.
def _select_map_multiple(ret_cols)
map{|r| r.values_at(*ret_cols)}
end
# Returns an array of the first value in each row.
def _select_map_single
map{|r| r.values.first}
end
private
# Internals of all and with_sql_all
def _all(block)
a = []
yield a
post_load(a)
a.each(&block) if block
a
end
# Internals of +select_hash+ and +select_hash_groups+
def _select_hash(meth, key_column, value_column)
select(*(key_column.is_a?(Array) ? key_column : [key_column]) + (value_column.is_a?(Array) ? value_column : [value_column])).
send(meth, hash_key_symbols(key_column), hash_key_symbols(value_column))
end
# Internals of +select_map+ and +select_order_map+
def _select_map(column, order, &block)
ds = ungraphed.naked
columns = Array(column)
virtual_row_columns(columns, block)
select_cols = order ? columns.map{|c| c.is_a?(SQL::OrderedExpression) ? c.expression : c} : columns
ds = ds.order(*columns.map{|c| unaliased_identifier(c)}) if order
if column.is_a?(Array) || (columns.length > 1)
ds.select(*select_cols)._select_map_multiple(hash_key_symbols(select_cols))
else
ds.select(auto_alias_expression(select_cols.first))._select_map_single
end
end
# Automatically alias the given expression if it does not have an identifiable alias.
def auto_alias_expression(v)
case v
when LiteralString, Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression
v
else
SQL::AliasedExpression.new(v, :v)
end
end
# The default number of rows that can be inserted in a single INSERT statement via import.
# The default is for no limit.
def default_import_slice
nil
end
# Set the server to use to :default unless it is already set in the passed opts
def default_server_opts(opts)
@db.sharded? ? {:server=>@opts[:server] || :default}.merge(opts) : opts
end
# Execute the given select SQL on the database using execute. Use the
# :read_only server unless a specific server is set.
def execute(sql, opts=OPTS, &block)
db = @db
db.execute(sql, db.sharded? ? {:server=>@opts[:server] || :read_only}.merge(opts) : opts, &block)
end
# Execute the given SQL on the database using execute_ddl.
def execute_ddl(sql, opts=OPTS, &block)
@db.execute_ddl(sql, default_server_opts(opts), &block)
nil
end
# Execute the given SQL on the database using execute_dui.
def execute_dui(sql, opts=OPTS, &block)
@db.execute_dui(sql, default_server_opts(opts), &block)
end
# Execute the given SQL on the database using execute_insert.
def execute_insert(sql, opts=OPTS, &block)
@db.execute_insert(sql, default_server_opts(opts), &block)
end
# Return a plain symbol given a potentially qualified or aliased symbol,
# specifying the symbol that is likely to be used as the hash key
# for the column when records are returned. Return nil if no hash key
# can be determined
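    #
    # Examples (assuming the implicit :table__column___alias symbol syntax
    # supported by this version of Sequel; column names are illustrative):
    #
    #   _hash_key_symbol(:id)                          # => :id
    #   _hash_key_symbol(:items__id)                   # => :id
    #   _hash_key_symbol(:items__id___item_id)         # => :item_id
    #   _hash_key_symbol(Sequel.qualify(:items, :id))  # => :id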
def _hash_key_symbol(s, recursing=false)
case s
when Symbol
_, c, a = split_symbol(s)
(a || c).to_sym
when SQL::Identifier, SQL::Wrapper
_hash_key_symbol(s.value, true)
when SQL::QualifiedIdentifier
_hash_key_symbol(s.column, true)
when SQL::AliasedExpression
_hash_key_symbol(s.alias, true)
when String
s.to_sym if recursing
end
end
# Return a plain symbol given a potentially qualified or aliased symbol,
# specifying the symbol that is likely to be used as the hash key
# for the column when records are returned. Raise Error if the hash key
# symbol cannot be returned.
def hash_key_symbol(s)
if v = _hash_key_symbol(s)
v
else
raise(Error, "#{s.inspect} is not supported, should be a Symbol, SQL::Identifier, SQL::QualifiedIdentifier, or SQL::AliasedExpression")
end
end
# If s is an array, return an array with the given hash key symbols.
    # Otherwise, return a hash key symbol for the given expression.
# If a hash key symbol cannot be determined, raise an error.
def hash_key_symbols(s)
s.is_a?(Array) ? s.map{|c| hash_key_symbol(c)} : hash_key_symbol(s)
end
# Returns an expression that will ignore values preceding the given row, using the
# receiver's current order. This yields the row and the array of order expressions
# to the block, which should return an array of values to use.
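    # For example, with an ascending order on :a, :b and yielded row values
    # [1, 2], the generated condition is equivalent to:
    #
    #   (a > 1) OR ((a = 1) AND (b > 2))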
def ignore_values_preceding(row)
@opts[:order].map{|v| v.is_a?(SQL::OrderedExpression) ? v.expression : v}
order_exprs = @opts[:order].map do |v|
if v.is_a?(SQL::OrderedExpression)
descending = v.descending
v = v.expression
else
descending = false
end
[v, descending]
end
row_values = yield(row, order_exprs.map{|e| e.first})
last_expr = []
cond = order_exprs.zip(row_values).map do |(v, descending), value|
expr = last_expr + [SQL::BooleanExpression.new(descending ? :< : :>, v, value)]
last_expr += [SQL::BooleanExpression.new(:'=', v, value)]
Sequel.&(*expr)
end
Sequel.|(*cond)
end
# Modify the identifier returned from the database based on the
# identifier_output_method.
def output_identifier(v)
v = 'untitled' if v == ''
(i = identifier_output_method) ? v.to_s.send(i).to_sym : v.to_sym
end
# This is run inside .all, after all of the records have been loaded
# via .each, but before any block passed to all is called. It is called with
# a single argument, an array of all returned records. Does nothing by
# default, added to make the model eager loading code simpler.
def post_load(all_records)
end
# Called by insert/update/delete when returning is used.
# Yields each row as a plain hash to the block if one is given, or returns
# an array of plain hashes for all rows if a block is not given
def returning_fetch_rows(sql, &block)
if block
default_server.fetch_rows(sql, &block)
nil
else
rows = []
default_server.fetch_rows(sql){|r| rows << r}
rows
end
end
# Return the unaliased part of the identifier. Handles both
# implicit aliases in symbols, as well as SQL::AliasedExpression
# objects. Other objects are returned as is.
def unaliased_identifier(c)
case c
when Symbol
table, column, aliaz = split_symbol(c)
if aliaz
table ? SQL::QualifiedIdentifier.new(table, column) : Sequel.identifier(column)
else
c
end
when SQL::AliasedExpression
c.expression
when SQL::OrderedExpression
case expr = c.expression
when Symbol, SQL::AliasedExpression
SQL::OrderedExpression.new(unaliased_identifier(expr), c.descending, :nulls=>c.nulls)
else
c
end
else
c
end
end
end
end
| 38.00666 | 143 | 0.607861 |
261a4f07e816791defc3b8c6d9c0ba46fee529d6 | 794 | # compute NPV given cash flows and IRR
def npv (cf, irr)
(0...cf.length).inject(0) { |npv, t| npv + (cf[t]/(1+irr)**t) }
end
# compute derivative of the NPV with respect to IRR
# d(C_t / (1+IRR)**t)/dIRR = -t * C_t / (1+IRR)**(t+1)
#
def dnpv (cf, irr)
  (1...cf.length).inject(0) { |npv, t| npv - (t*cf[t]/(1+irr)**(t+1)) }
end
# solve for IRR with Newton's method: x_{n+1} = x_n - f(x_n) / f'(x_n)
def irr (cf)
irr = 0.5
it = 0
begin
begin
oirr = irr
irr -= npv(cf, irr) / dnpv(cf, irr)
it += 1
return nil if it > 50
end until (irr - oirr).abs < 0.0001
rescue ZeroDivisionError
return nil
end
irr
end
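# Sanity check for the two-flow cases below: the IRR solves C0 + C1/(1+r) = 0,
# i.e. r = -C1/C0 - 1, so irr([-1.0, 1.0]) should converge to ~0.0 and
# irr([-1000.0, 999.0]) to ~-0.001.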
puts irr([-100,30,35,40,45])
puts irr([-1.0,1.0])
puts irr([-1000.0,999.99])
puts irr([-1000.0,999.0])
puts irr([100,10,10,10])
puts irr([0.0])
puts irr([])
| 21.459459 | 70 | 0.573048 |
1d3ee9334bf4d5fbffc4b38b239b6dbe30228f10 | 2,454 | module Providers
class EditInitialAllocationsController < ApplicationController
def edit
flow = EditInitialRequestFlow.new(params: params)
if request.post? && flow.valid?
redirect_to flow.redirect_path
else
render flow.template, locals: flow.locals
end
end
def update
update_allocation
@allocation = Allocation.includes(:provider, :accredited_body)
.find(params[:id])
.first
redirect_to provider_recruitment_cycle_allocation_path(
provider_code: allocation.accredited_body.provider_code,
recruitment_cycle_year: recruitment_cycle.year,
training_provider_code: allocation.provider.provider_code,
id: allocation.id,
)
end
def delete
allocation.destroy
redirect_to confirm_deletion_provider_recruitment_cycle_allocation_path(
provider_code: provider.provider_code,
recruitment_cycle_year: recruitment_cycle.year,
training_provider_code: training_provider.provider_code,
)
end
def confirm_deletion
@allocation = Allocation.new(request_type: AllocationsView::RequestType::DECLINED)
@training_provider = training_provider
@provider = provider
@recruitment_cycle = recruitment_cycle
render template: "providers/allocations/show"
end
private
def update_allocation
allocation.number_of_places = params[:number_of_places].to_i
allocation.set_all_dirty!
allocation.save
end
def training_provider
return @training_provider if @training_provider
p = Provider.new(recruitment_cycle_year: recruitment_cycle.year, provider_code: params[:training_provider_code])
@training_provider = p.show_any(recruitment_cycle_year: recruitment_cycle.year).first
end
def provider
@provider ||= Provider
.where(recruitment_cycle_year: recruitment_cycle.year)
.find(params[:provider_code])
.first
end
def recruitment_cycle
return @recruitment_cycle if @recruitment_cycle
cycle_year = params.fetch(:year, Settings.current_cycle)
@recruitment_cycle = RecruitmentCycle.find(cycle_year).first
end
def allocation
@allocation ||= Allocation.includes(:provider, :accredited_body)
.find(params[:id])
.first
end
end
end
| 29.566265 | 118 | 0.684597 |
e9d197956687a3e19e46b38b697c0e88e7d753e5 | 12,137 | # frozen_string_literal: true
shared_examples 'event subscriber component' do
describe 'subscriber component behavior' do
describe '#subscribe_to_scope' do
let!(:deposit_event) { build_event_class('deposit') }
let!(:deposit_created_event) { build_event_class('deposit.created') }
let!(:deposit_created_tomorrow_event) { build_event_class('deposit.created.tomorrow') }
let!(:user_event) { build_event_class('user') }
let!(:user_created_event) { build_event_class('user.created') }
let!(:user_created_yesterday_event) { build_event_class('user.created.yesterday') }
it 'can subscribe an object to a list of events with passed event routing-based pattern' do
delegator = gen_symb(only_letters: true)
# subscribe to 'user.created' and 'user.created.yesterday' events
subscribeable.subscribe_to_scope 'user.created.#', delegator: delegator
expect(deposit_event.observers).to be_empty
expect(deposit_created_event.observers).to be_empty
expect(deposit_created_tomorrow_event.observers).to be_empty
expect(user_event.observers).to be_empty
expect(user_created_yesterday_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
        # subscribe to 'deposit', 'deposit.created' and 'deposit.created.tomorrow' events
subscribeable.subscribe_to_scope 'deposit.#', delegator: delegator
expect(user_event.observers).to be_empty
expect(deposit_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(deposit_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(deposit_created_tomorrow_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_yesterday_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
# subscribe to 'user' event only
subscribeable.subscribe_to_scope 'user', delegator: delegator
expect(deposit_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(deposit_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(deposit_created_tomorrow_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_yesterday_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
# subscribe to nothing
subscribeable.subscribe_to_scope gen_str, delegator: delegator
expect(deposit_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(deposit_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(deposit_created_tomorrow_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_yesterday_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
expect(user_created_event.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: delegator)
)
end
end
describe '#subscribe_to' do
let!(:event_class) { build_event_class('test_event') }
let!(:another_event_class) { build_event_class('another_test_event') }
it 'can subscribe an object to an event with an event class (by Class object)' do
# subscribe to Event class
subscribeable.subscribe_to event_class, delegator: :test_call
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :test_call)
)
expect(another_event_class.observers).to be_empty
# subscribe to Event class
subscribeable.subscribe_to another_event_class, delegator: :uber_call
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :test_call)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :uber_call)
)
# subscribe to non-event class
expect do
subscribeable.subscribe_to gen_class, delegator: :uber_call
end.to raise_error(EvilEvents::NonManagedEventClassError)
end
it 'can subscribe an object to an event with event type field (by String object)' do
# subscribe to existing event
subscribeable.subscribe_to event_class.type, delegator: :invoke
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :invoke)
)
expect(another_event_class.observers).to be_empty
# subscribe to existing event
subscribeable.subscribe_to another_event_class.type, delegator: :invoke
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :invoke)
)
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :invoke)
)
        # subscribe to a nonexistent event
expect do
subscribeable.subscribe_to gen_str, delegator: gen_symb
end.to raise_error(EvilEvents::NonManagedEventClassError)
end
it 'can subscribe to the list of events with event type alias pattern (by Regexp object)' do
# subscribe to test_event
subscribeable.subscribe_to /\Atest_[a-z]+\z/i, delegator: :process
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :process)
)
expect(another_event_class.observers).to be_empty
subscribeable.subscribe_to /\Aanother_.+\z/i, delegator: :invoke
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :process)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :invoke)
)
# subscribe to all
subscribeable.subscribe_to /.+/, delegator: :call
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :process),
have_attributes(source_object: subscribeable, delegator: :call)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :invoke),
have_attributes(source_object: subscribeable, delegator: :call)
)
# subscribe to nothing
subscribeable.subscribe_to /#{gen_str}/, delegator: gen_symb
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :process),
have_attributes(source_object: subscribeable, delegator: :call)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :invoke),
have_attributes(source_object: subscribeable, delegator: :call)
)
end
it 'can subscribe to the list of events with conditional proc (by Proc object)' do
expect(event_class.observers).to be_empty
expect(another_event_class.observers).to be_empty
        # true for all event types
subscribeable.subscribe_to -> (event_type) { event_type.match(/.+/) }, delegator: :boot
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot)
)
# false for all event types
subscribeable.subscribe_to -> (_event_type) { false }
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot)
)
# true for test_event only
subscribeable.subscribe_to -> (event_type) { event_type == 'test_event' }
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot),
have_attributes(source_object: subscribeable, delegator: :call)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot)
)
# true for another_test_event only
subscribeable.subscribe_to -> (event_type) { event_type == 'another_test_event' }
expect(event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot),
have_attributes(source_object: subscribeable, delegator: :call)
)
expect(another_event_class.observers).to contain_exactly(
have_attributes(source_object: subscribeable, delegator: :boot),
have_attributes(source_object: subscribeable, delegator: :call)
)
end
it 'delegator: can subscribe with globally preconfigured default delegator' do
global_delegator = gen_symb(only_letters: true)
EvilEvents::Core::Bootstrap[:config].configure do |config|
config.subscriber.default_delegator = global_delegator
end
expect do
subscribeable.subscribe_to event_class
end.to change { event_class.observers.size }.from(0).to(1)
expect(event_class.observers.last.delegator).to eq(global_delegator)
end
it 'raises ArgumentError for non-string/non-class event type argument' do
expect do
subscribeable.subscribe_to event_class.new
end.to raise_error(EvilEvents::ArgumentError)
expect(event_class.observers).to be_empty
end
it 'raises non-managed-error (without side effects) when the event class is not registered' do
expect do
subscribeable.subscribe_to BasicObject
end.to raise_error(EvilEvents::NonManagedEventClassError)
expect(event_class.observers).to be_empty
end
it 'raises non-managed-error (without side effects) when ' \
'an event with passed type isnt registered' do
expect do
subscribeable.subscribe_to gen_str
end.to raise_error(EvilEvents::NonManagedEventClassError)
expect(event_class.observers).to be_empty
end
end
end
end
| 43.192171 | 100 | 0.702398 |
f819df03b1980e6f56705d26fb45cfbaf6838118 | 37 | class UnzipError < StandardError
end
| 12.333333 | 32 | 0.837838 |
b95b046adecdb921320e209653b5a25cb182fa38 | 500 | # frozen_string_literal: true
require File.expand_path('boot', __dir__)
require "rails"
require "active_model/railtie"
require "action_controller/railtie"
require "action_view/railtie"
require "action_view/storybook/engine"
require "sprockets/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Dummy
class Application < Rails::Application
config.secret_key_base = "foo"
end
end
| 23.809524 | 56 | 0.784 |
bb24b10db132b15a9b14fe2218f68aa690128cd0 | 2,417 | # frozen_string_literal: true
require 'injection_vulnerability_library'
module ApiTester
# Tests injection cases
module InjectionModule
def self.go(contract)
reports = []
contract.endpoints.each do |endpoint|
endpoint.methods.each do |method|
reports.concat inject_payload contract.base_url, endpoint, method
end
end
reports
end
def self.inject_payload(base_url, endpoint, method)
reports = []
sql_injections = InjectionVulnerabilityLibrary.sql_vulnerabilities
method.request.fields.each do |field|
sql_injections.each do |injection|
injection_value = "#{field.default_value}#{injection}"
payload = method.request.altered_payload field_name: field.name,
value: injection_value
response = endpoint.call base_url: base_url,
method: method,
payload: payload,
headers: method.request.default_headers
next if check_response(response, endpoint)
reports << InjectionReport.new('sql',
endpoint.url,
payload,
response)
end
end
reports
end
def self.check_response(response, endpoint)
response.code == 200 || check_error(response, endpoint)
end
def self.check_error(response, endpoint)
evaluator = ApiTester::ResponseEvaluator.new(
actual_body: response.body,
expected_fields: endpoint.bad_request_response
)
missing_fields = evaluator.missing_fields
extra_fields = evaluator.extra_fields
response.code == endpoint.bad_request_response.code &&
missing_fields.size.zero? && extra_fields.size.zero?
end
end
end
# Report for InjectionModule
class InjectionReport
attr_accessor :injection_type, :url, :payload, :response
def initialize(injection_type, url, payload, response)
self.injection_type = injection_type
self.url = url
self.payload = payload
self.response = response
end
def print
puts "Found potential #{injection_type}: "
puts " Requested #{url} with payload:"
puts " #{payload}"
puts ' Received: '
puts " #{response}"
end
end
| 30.594937 | 75 | 0.609433 |
1d01a3b29eae5741830d97e381f5873b3e966443 | 1,158 | # frozen_string_literal: true
module Masterfiles
module TargetMarkets
module Country
class New
def self.call(parent_id, form_values: nil, form_errors: nil, remote: true)
ui_rule = UiRules::Compiler.new(:country, :new, form_values: form_values, region_id: parent_id)
rules = ui_rule.compile
form_action = if parent_id.nil?
'/masterfiles/target_markets/destination_countries'
else
"/masterfiles/target_markets/destination_regions/#{parent_id}/destination_countries"
end
layout = Crossbeams::Layout::Page.build(rules) do |page|
page.form_object ui_rule.form_object
page.form_values form_values
page.form_errors form_errors
page.form do |form|
form.action form_action
form.remote! if remote
form.add_field :destination_region_id
form.add_field :country_name
form.add_field :iso_country_code
end
end
layout
end
end
end
end
end
| 32.166667 | 110 | 0.588083 |
1858420c3dcf9a2ad6954ba7144e3cbcdf56c5c1 | 1,908 | require "spec_helper"
require "json"
describe PastInterlibraryLoans do
let(:query) { {"$filter" => "RequestType eq 'Loan' and (TransactionStatus eq 'Request Finished' or startswith(TransactionStatus, 'Cancelled'))", "$top" => "15"} }
context "three loans" do
before(:each) do
stub_illiad_get_request(url: "Transaction/UserRequests/testhelp", body: File.read("./spec/fixtures/illiad_requests.json"), query: query)
end
subject do
PastInterlibraryLoans.for(uniqname: "testhelp", count: 25)
end
context "#count" do
it "returns total request item count" do
expect(subject.count).to eq(25)
end
end
context "#each" do
it "iterates over request objects" do
items = ""
subject.each do |item|
items += item.class.name
end
expect(items).to eq("PastInterlibraryLoan" * 5)
end
end
context "#empty?" do
it "returns a boolean" do
expect(subject.empty?).to eq(false)
end
end
context "#item_text" do
it "returns 'item' if there is only one loan, or 'items' if there is not" do
expect(subject.item_text).to eq("items")
end
end
end
context "no count given" do
before(:each) do
stub_illiad_get_request(url: "Transaction/UserRequests/testhelp", body: File.read("./spec/fixtures/illiad_requests.json"), query: {"$filter": query["$filter"]})
end
subject do
PastInterlibraryLoans.for(uniqname: "testhelp", limit: "1", count: nil)
end
context "#count" do
it "returns total number of transactions" do
expect(subject.count).to eq(5)
end
end
context "#each" do
it "returns limit number of Loan objects" do
items = ""
subject.each do |item|
items += item.class.name
end
expect(items).to eq("PastInterlibraryLoan" * 1)
end
end
end
end
| 31.278689 | 166 | 0.627883 |
b9b22f93959109db2f4d0989fac72bd58dd1d5fc | 2,029 | require 'test_helper'
class TxnTest < Test::Unit::TestCase
def setup
mkdir File.join(File.dirname(__FILE__), 'tmp')
@env = Bdb::Env.new(0)
env_flags = Bdb::DB_CREATE | # Create the environment if it does not already exist.
Bdb::DB_INIT_TXN | # Initialize transactions
Bdb::DB_INIT_LOCK | # Initialize locking.
Bdb::DB_INIT_LOG | # Initialize logging
Bdb::DB_INIT_MPOOL # Initialize the in-memory cache.
@env.open(File.join(File.dirname(__FILE__), 'tmp'), env_flags, 0);
@db = @env.db
@db.open(nil, 'db1.db', nil, Bdb::Db::BTREE, Bdb::DB_CREATE | Bdb::DB_AUTO_COMMIT, 0)
end
def teardown
@db.close(0)
@env.close
rm_rf File.join(File.dirname(__FILE__), 'tmp')
end
def test_commit
txn = @env.txn_begin(nil, 0)
@db.put(txn, 'key', 'value', 0)
txn.commit(0)
assert_equal 'value', @db.get(nil, 'key', nil, 0)
end
def test_abort
txn = @env.txn_begin(nil, 0)
@db.put(txn, 'key', 'value', 0)
txn.abort
assert_nil @db.get(nil, 'key', nil, 0)
end
def test_id
txn = @env.txn_begin(nil, 0)
@db.put(txn, 'key', 'value', 0)
assert txn.tid
txn.commit(0)
assert_equal 'value', @db.get(nil, 'key', nil, 0)
end
def test_timeout
txn = @env.txn_begin(nil, 0)
txn.set_timeout(10, Bdb::DB_SET_TXN_TIMEOUT)
@db.put(txn, 'key', 'value', 0)
txn.commit(0)
assert_equal 'value', @db.get(nil, 'key', nil, 0)
end
def test_stat
txn = @env.txn_begin(nil, 0)
@db.put(txn, 'key', 'value', 0)
txn.commit(0)
txn_stat = @env.txn_stat(0)
assert txn_stat
assert txn_stat['st_ncommits'] > 0
assert_equal 'value', @db.get(nil, 'key', nil, 0)
end
def test_stat_active
txn = @env.txn_begin(nil, 0)
@db.put(txn, 'key', 'value', 0)
txn_stat = @env.txn_stat(0)
txn.commit(0)
assert_equal 1, txn_stat['st_txnarray'].length
assert_equal 'value', @db.get(nil, 'key', nil, 0)
end
end
| 27.053333 | 93 | 0.59931 |
f795ef7fe3dc59d587e2feeab377f850a61562fd | 934 | class Unshield < Formula
desc "Extract files from InstallShield cabinet files"
homepage "https://github.com/twogood/unshield"
url "https://github.com/twogood/unshield/archive/1.4.3.tar.gz"
sha256 "aa8c978dc0eb1158d266eaddcd1852d6d71620ddfc82807fe4bf2e19022b7bab"
license "MIT"
revision 1
head "https://github.com/twogood/unshield.git"
bottle do
sha256 "d64e0c93743d7d50858bd5c46c76b8fa79183b5ee4643361202f53378a88cc05" => :catalina
sha256 "ec5db176e7f9557645cfdb63062802d37a8e516f39f1e53037e37ed398992b3b" => :mojave
sha256 "c68a5391b55e5101979c69d174160564d88edc7263afa140fd69ce289c6662ed" => :high_sierra
sha256 "96cc0aa68d191d1bc98d09a48abaa44b58b4e979bfcec3b2abc384c30d56684d" => :sierra
end
depends_on "cmake" => :build
depends_on "[email protected]"
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
end
test do
system bin/"unshield", "-V"
end
end
| 32.206897 | 93 | 0.770878 |
ed675718f2682e55e5cf26bfe147a9b023f915ba | 393 | class HelpController < ApplicationController
skip_before_filter :authenticated?, :only => [:master_authentication]
before_filter :authenticated_master?, :only => [:master_authentication]
def authentication
respond_with(["authorized"], :status => 200, :location => nil)
end
def master_authentication
respond_with(["authorized"], :status => 200, :location => nil)
end
end
| 32.75 | 73 | 0.732824 |
398e5c5259d218839bc67378d100de26118e835e | 4,807 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_10_05_072051) do
create_table "connections", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.integer "source_id"
t.integer "target_id"
t.string "color"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["color"], name: "index_connections_on_color"
t.index ["source_id"], name: "index_connections_on_source_id_and_source_type"
t.index ["target_id"], name: "index_connections_on_target_id_and_target_type"
end
create_table "contracts", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "contracts_users", id: false, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.bigint "contract_id", null: false
t.bigint "user_id", null: false
t.index ["contract_id", "user_id"], name: "index_contracts_users_on_contract_id_and_user_id"
t.index ["user_id", "contract_id"], name: "index_contracts_users_on_user_id_and_contract_id"
end
create_table "nodes", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.string "type"
t.string "name"
t.string "text"
t.string "timestamp"
t.string "parameter"
t.float "number_from"
t.float "number_to"
t.date "date_from"
t.date "date_to"
t.bigint "statement_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["statement_id"], name: "index_nodes_on_statement_id"
end
create_table "statements", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.bigint "contract_id"
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "diagram_xml", limit: 4294967295
t.index ["contract_id"], name: "index_statements_on_contract_id"
end
create_table "taggings", id: :integer, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.integer "tag_id"
t.string "taggable_type"
t.integer "taggable_id"
t.string "tagger_type"
t.integer "tagger_id"
t.string "context", limit: 128
t.datetime "created_at"
t.index ["context"], name: "index_taggings_on_context"
t.index ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true
t.index ["tag_id"], name: "index_taggings_on_tag_id"
t.index ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context"
t.index ["taggable_id", "taggable_type", "tagger_id", "context"], name: "taggings_idy"
t.index ["taggable_id"], name: "index_taggings_on_taggable_id"
t.index ["taggable_type"], name: "index_taggings_on_taggable_type"
t.index ["tagger_id", "tagger_type"], name: "index_taggings_on_tagger_id_and_tagger_type"
t.index ["tagger_id"], name: "index_taggings_on_tagger_id"
end
create_table "tags", id: :integer, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.string "name", collation: "utf8_bin"
t.integer "taggings_count", default: 0
t.index ["name"], name: "index_tags_on_name", unique: true
end
create_table "users", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "role"
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
end
| 44.509259 | 129 | 0.722696 |
e8efe55c2363f61f906b4a55390b2707c930898a | 237 | class Ping < SlackRubyBotServer::Events::AppMentions::Mention
mention 'ping'
def self.call(data)
client = Slack::Web::Client.new(token: data.team.token)
client.chat_postMessage(channel: data.channel, text: 'pong')
end
end
| 26.333333 | 64 | 0.725738 |
abd66340419cc34857f925ef1ad6961075aba2fa | 159 | # frozen_string_literal: true
require_relative "utilities/enum"
require_relative "utilities/boolean_attributes"
require_relative "utilities/common_functions"
| 26.5 | 47 | 0.867925 |
e28dc30a8c95ed81de20d28ed56de5decc99f7ae | 38 | module WebStat
VERSION = "0.5.1"
end
 | 12.666667 | 19 | 0.684211 |
26f5f3ead567a255dbb1f2dfa8893cadc828873d | 210 | class String
def robust_split
case self
when /,/
self.split(/,/).collect(&:strip)
when /\s/
self.split(/\s/).collect(&:strip)
else
[self.strip]
end
end
end
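# Behaviour implied by the branches above:
#   "a, b, c".robust_split  # => ["a", "b", "c"]
#   "a b c".robust_split    # => ["a", "b", "c"]
#   "abc".robust_split      # => ["abc"]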
| 16.153846 | 41 | 0.514286 |
333c8a2508cca75ea970cd1b1b0d4c739627779a | 871 | # frozen_string_literal: true
class Survey::OptionsType
@@options_types = { multi_choices: 1,
single_choice: 2,
number: 3,
text: 4,
multi_choices_with_text: 5,
single_choice_with_text: 6,
multi_choices_with_number: 7,
single_choice_with_number: 8,
large_text: 9 }
def self.options_types
@@options_types
end
def self.options_types_title
titled = {}
Survey::OptionsType.options_types.each { |k, v| titled[k.to_s.titleize] = v }
titled
end
def self.options_type_ids
@@options_types.values
end
def self.options_type_keys
@@options_types.keys
end
@@options_types.each do |key, val|
define_singleton_method key.to_s do
val
end
end
end
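# Illustrative calls (values follow the @@options_types table above):
#   Survey::OptionsType.single_choice        # => 2
#   Survey::OptionsType.options_type_keys    # => [:multi_choices, :single_choice, ...]
#   Survey::OptionsType.options_types_title  # => {"Multi Choices"=>1, "Single Choice"=>2, ...}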
| 22.921053 | 81 | 0.576349 |
1a497e0f3045d0c2aa187c7a6e992183cf749244 | 1,084 | require_dependency 'google_sign_in/redirect_protector'
class GoogleSignIn::CallbacksController < GoogleSignIn::BaseController
def show
    redirect_to proceed_to_url, flash: { google_sign_in: google_sign_in_response }
rescue GoogleSignIn::RedirectProtector::Violation => error
logger.error error.message
head :bad_request
end
private
def proceed_to_url
flash[:proceed_to].tap { |url| GoogleSignIn::RedirectProtector.ensure_same_origin(url, request.url) }
end
def google_sign_in_response
if valid_request? && params[:code].present?
{ id_token: id_token }
else
{ error: error_message_for(params[:error]) }
end
rescue OAuth2::Error => error
{ error: error_message_for(error.code) }
end
def valid_request?
flash[:state].present? && params[:state] == flash[:state]
end
def id_token
client.auth_code.get_token(params[:code])['id_token']
end
def error_message_for(error_code)
error_code.presence_in(GoogleSignIn::OAUTH2_ERRORS) || "invalid_request"
end
end
| 28.526316 | 107 | 0.70572 |
1c82b116550f9ba1a858737dd9893e0a7a591eef | 794 | # frozen_string_literal: true
class Accounts::SessionsController < Devise::SessionsController
# before_action :configure_sign_in_params, only: [:create]
# GET /resource/sign_in
# def new
# super
# end
# POST /resource/sign_in
# def create
# super
# end
# DELETE /resource/sign_out
# def destroy
# super
# end
def demo
demo_account = Account.create_demo_account
    # Schedule a job to delete this demo account later
RemoveDemoAccountJob.set(wait: 1.hour).perform_later(demo_account.id)
redirect_to action: 'new', try_demo: true, demo_account: demo_account.email
end
# protected
# If you have extra params to permit, append them to the sanitizer.
# def configure_sign_in_params
# devise_parameter_sanitizer.permit(:sign_in, keys: [:attribute])
# end
end
| 21.459459 | 79 | 0.711587 |
7aca467dfbc68f16d52559107023101ea2931cc6 | 923 | # coding: utf-8
# vim: et ts=2 sw=2
require 'hrr_rb_ssh/logger'
module HrrRbSsh
class Authentication
class Method
class Password < Method
NAME = 'password'
PREFERENCE = 10
def initialize transport, options
@logger = Logger.new(self.class.name)
@authenticator = options.fetch( 'authentication_password_authenticator', Authenticator.new { false } )
end
def authenticate userauth_request_message
@logger.info { "authenticate" }
@logger.debug { "userauth request: " + userauth_request_message.inspect }
username = userauth_request_message[:'user name']
password = userauth_request_message[:'plaintext password']
context = Context.new(username, password)
@authenticator.authenticate context
end
end
end
end
end
require 'hrr_rb_ssh/authentication/method/password/context'
| 28.84375 | 112 | 0.664139 |
1a3bdb5ced26658a558fb07c6d4eb26fb7a11328 | 2,167 | class MasterMind
#Written by Jeremy Herzberg; [email protected]; www.jeremyherzberg.com
#MasterMind algorithm derived by notable researchers (https://en.wikipedia.org/wiki/Mastermind_(board_game)#Algorithms)
#This algorithm works by first producing all permutations of possible answers and then eliminating possibilities based
#on whether the possibility produces the same peg result when tested against the guess as when the guess is tested against the answer
#AFTER ALL, THE SOLUTION WILL PRODUCE THE SAME PEG RESULT WHEN TESTED AGAINST THE GUESS AS WHEN THE GUESS IS TESTED AGAINST THE SOLUTION
#set all attr_accessor to attr_reader upon deployment; set to accessor for testing only... I will leave it like this for you
attr_accessor :human_solution,:guess,:number_of_guesses
def initialize(human_solution)
@human_solution = human_solution
@guess = [1,1,2,2]
end
def read_pegs(guess, solution)
pegs = {B: 0, W: 0}
    accounting_array = [0,0,0,0,0,0,0] #positions 1-6 in this array correspond to each possible number in a solution, position 0 is used to hold values that shouldn't be counted per the rules of mastermind
guess_copy = guess.dup
#this loop assigns black pegs and adds remaining pegs to the solution array
solution.each_index { |x|
if guess_copy[x] == solution[x] #this is a black peg
pegs[:B] += 1
guess_copy[x] = 0 #to make sure we don't count this value again in the next loop
else #else update the accounting array
accounting_array[solution[x]] += 1
end
}
    #this loop assigns white pegs by finding guess values that still have a matching entry in the accounting array
guess_copy.each_index { |x|
      if accounting_array[guess_copy[x]] != 0 #a remaining entry means this value occurs elsewhere in the solution
accounting_array[guess_copy[x]] -= 1
pegs[:W] += 1
end
}
return pegs
end
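  # Worked example for the scoring above: read_pegs([1,1,2,2], [1,2,3,4])
  # scores one black peg (the 1 in position 0) and one white peg (a 2 present
  # but in the wrong position), i.e. {B: 1, W: 1}.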
def guess_solution
@number_of_guesses = 1
all_permutations = [1,2,3,4,5,6].repeated_permutation(4).to_a
until read_pegs(@guess,@human_solution) == {B:4, W:0}
all_permutations = all_permutations.select {|x| read_pegs(x,@guess) == read_pegs(@guess,@human_solution)}
@guess = all_permutations.sample
@number_of_guesses += 1
end
end
end
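# A minimal usage sketch (the secret code here is just an illustration):
#   game = MasterMind.new([1, 3, 5, 2])
#   game.guess_solution
#   game.guess              # => [1, 3, 5, 2] once solved
#   game.number_of_guesses  # => how many guesses the solver needed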
| 30.097222 | 204 | 0.721274 |
ac40aa58832e89767482a038dbec1490f157a08f | 9,767 | =begin
#Custom Workflow Actions
#Create custom workflow actions
The version of the OpenAPI document: v4
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'date'
module Hubspot
module Automation
module Actions
class ExtensionActionDefinitionPatch
        # The URL that will accept an HTTPS request each time a workflow executes the custom action.
attr_accessor :action_url
# Whether this custom action is published to customers.
attr_accessor :published
# The list of input fields to display in this custom action.
attr_accessor :input_fields
attr_accessor :object_request_options
# A list of dependencies between the input fields. These configure when the input fields should be visible.
attr_accessor :input_field_dependencies
# The user-facing labels for the custom action.
attr_accessor :labels
# The object types that this custom action supports.
attr_accessor :object_types
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'action_url' => :'actionUrl',
:'published' => :'published',
:'input_fields' => :'inputFields',
:'object_request_options' => :'objectRequestOptions',
:'input_field_dependencies' => :'inputFieldDependencies',
:'labels' => :'labels',
:'object_types' => :'objectTypes'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'action_url' => :'String',
:'published' => :'Boolean',
:'input_fields' => :'Array<InputFieldDefinition>',
:'object_request_options' => :'ObjectRequestOptions',
:'input_field_dependencies' => :'Array<OneOfSingleFieldDependencyConditionalSingleFieldDependency>',
:'labels' => :'Hash<String, ActionLabels>',
:'object_types' => :'Array<String>'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Hubspot::Automation::Actions::ExtensionActionDefinitionPatch` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Hubspot::Automation::Actions::ExtensionActionDefinitionPatch`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'action_url')
self.action_url = attributes[:'action_url']
end
if attributes.key?(:'published')
self.published = attributes[:'published']
end
if attributes.key?(:'input_fields')
if (value = attributes[:'input_fields']).is_a?(Array)
self.input_fields = value
end
end
if attributes.key?(:'object_request_options')
self.object_request_options = attributes[:'object_request_options']
end
if attributes.key?(:'input_field_dependencies')
if (value = attributes[:'input_field_dependencies']).is_a?(Array)
self.input_field_dependencies = value
end
end
if attributes.key?(:'labels')
if (value = attributes[:'labels']).is_a?(Hash)
self.labels = value
end
end
if attributes.key?(:'object_types')
if (value = attributes[:'object_types']).is_a?(Array)
self.object_types = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
action_url == o.action_url &&
published == o.published &&
input_fields == o.input_fields &&
object_request_options == o.object_request_options &&
input_field_dependencies == o.input_field_dependencies &&
labels == o.labels &&
object_types == o.object_types
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[action_url, published, input_fields, object_request_options, input_field_dependencies, labels, object_types].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
Hubspot::Automation::Actions.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
end
end
| 35.007168 | 242 | 0.576738 |
790f72264229808f76ba55feb7bb325f649c685e | 3,786 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "congross_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.152174 | 102 | 0.756999 |
7a7f98d2af1d5b5e66ec2c8d0889ca2717df7a0a | 1,359 | #!/usr/bin/env ruby
require "bundler/setup"
require "ray_tracer"
require 'benchmark/ips'
SPINNER = '+/-\|/-\\'.split("").cycle
module RT
module_function
def render_sphere_ch06
light_source = RT::Light.new(RT::Point[10, 10, -10], Color[1.0, 0.2, 1.0])
canvas = Canvas.new(200, 200, default_color: RT::Color[0.25, 0.25, 0.25])
sphere = Sphere.new
canvas.iterator do |x, y|
world_point = RT::Point[(x - 100).to_f, (y - 150).to_f, 80.0]
light_vector = (world_point - light_source.position).to_vector
ray = RT::Ray.new(light_source.position, light_vector.normalize)
xs = sphere.intersect(ray)
if RT::Intersection.hit?(xs)
putc SPINNER.next
putc 0x08
hit = RT::Intersection.hit(xs)
point = ray.position(hit.t)
normalv = hit.object.normal_at(point) # hit.object == sphere
eyev = -ray.direction
color = hit.object.material.lighting(light_source, point, eyev, normalv)
canvas[x + 75, y] = color
else
#canvas[x + 75, 150 - y] = RT::WHITE
end
end
return canvas
end
end
c = RT.render_sphere_ch06
file = "artifacts/ch06-sphere.ppm"
File.open(file, "w") { |f| f.write c.to_ppm(4096.0) }
system("open", file)
#Benchmark.ips do |bm|
# bm.config(time: 15, warmup: 2)
# bm.report("render") { RT.render_circle_ch05}
#end
#
| 26.647059 | 80 | 0.628403 |
e21b922e833706b2d7b17019fd372a098530bb47 | 8,823 | require 'rails_helper'
RSpec.describe Bookings::ProfileAttributesConvertor, type: :model do
subject { described_class.new({}) } # Default empty profile
describe "#profile_attrs" do
context 'with completed profile' do
let(:completed_attrs) do
build(:school_profile, :completed).attributes
end
subject do
described_class.new(completed_attrs).attributes
end
it { is_expected.to include(dbs_requires_check: true) }
it { is_expected.to include(dbs_policy_details: 'Must have recent dbs check') }
it { is_expected.to include(individual_requirements: /Must be applying to or have applied to our, or a partner school/) }
it { is_expected.to include(individual_requirements: /Must have a degree/) }
it { is_expected.to include(individual_requirements: /They must live within 8 miles of the school/) }
it { is_expected.to include(individual_requirements: /Make sure photo is clear/) }
it { is_expected.to include(individual_requirements: /Some other requirements/) }
it { is_expected.to include(description_details: 'Horse archery') }
it { is_expected.to include(dress_code_business: true) }
it { is_expected.to include(dress_code_cover_tattoos: true) }
it { is_expected.to include(dress_code_remove_piercings: true) }
it { is_expected.to include(dress_code_smart_casual: true) }
it { is_expected.to include(dress_code_other_details: 'Must have nice hat') }
it { is_expected.to include(admin_contact_email: '[email protected]') }
it { is_expected.to include(admin_contact_email_secondary: '[email protected]') }
it { is_expected.to include(admin_contact_phone: '+441234567890') }
it { is_expected.to include(primary_phase: true) }
it { is_expected.to include(secondary_phase: true) }
it { is_expected.to include(college_phase: true) }
it { is_expected.to include(key_stage_early_years: true) }
it { is_expected.to include(key_stage_1: true) }
it { is_expected.to include(key_stage_2: true) }
it { is_expected.to include(start_time: '8:15am') }
it { is_expected.to include(end_time: '4:30pm') }
it { is_expected.to include(flexible_on_times: true) }
it { is_expected.to include(flexible_on_times_details: 'We are very accommodating') }
it { is_expected.to include(experience_details: 'Mostly teaching') }
it { is_expected.to include(parking_provided: true) }
it { is_expected.to include(parking_details: 'Plenty of spaces') }
it { is_expected.to include(teacher_training_info: 'We offer teach training in house') }
it { is_expected.to include(teacher_training_url: 'https://example.com') }
it { is_expected.to include(administration_fee_amount_pounds: 123.45) }
it { is_expected.to include(administration_fee_description: 'General administration') }
it { is_expected.to include(administration_fee_interval: 'Daily') }
it { is_expected.to include(administration_fee_payment_method: 'Travelers Cheques') }
it { is_expected.to include(dbs_fee_amount_pounds: 200.00) }
it { is_expected.to include(dbs_fee_description: 'DBS check') }
it { is_expected.to include(dbs_fee_interval: 'One-off') }
it { is_expected.to include(dbs_fee_payment_method: 'Ethereum') }
it { is_expected.to include(other_fee_amount_pounds: 444.44) }
it { is_expected.to include(other_fee_description: 'Owl repellent / other protective gear') }
it { is_expected.to include(other_fee_interval: 'One-off') }
it { is_expected.to include(other_fee_payment_method: 'Stamps') }
it { is_expected.to include(supports_access_needs: true) }
it { is_expected.to include(access_needs_description: 'Here are some details') }
it { is_expected.to include(disability_confident: true) }
it { is_expected.to include(has_access_needs_policy: true) }
it { is_expected.to include(access_needs_policy_url: 'https://example.com/access-needs-policy') }
end
context 'with completed profile with blank fields' do
let(:model_attrs) do
model = build(:school_profile, :completed)
model.dbs_requirement_requires_check = false
model.dbs_requirement_dbs_policy_details = ''
model.dbs_requirement_no_dbs_policy_details = ''
model.candidate_requirement_dbs_requirement = 'never'
model.candidate_requirements_selection_on_teacher_training_course = false
model.candidate_requirements_selection_live_locally = false
model.candidate_requirements_selection_maximum_distance_from_school = false
model.candidate_requirements_selection_other = false
model.candidate_requirements_selection_not_on_another_training_course = false
model.candidate_requirements_selection_has_or_working_towards_degree = false
model.candidate_requirements_selection_provide_photo_identification = false
model.description_details = ' '
model.candidate_experience_detail_disabled_facilities = false
model.candidate_experience_detail_other_dress_requirements = false
model.admin_contact_email = ' '
model.phases_list_primary = false
model.phases_list_secondary = false
model.phases_list_college = true
model.phases_list_secondary_and_college = true
model.candidate_experience_detail_parking_provided = false
model.candidate_experience_detail_nearby_parking_details = 'somewhere further away'
model.experience_outline_provides_teacher_training = false
model.fees_administration_fees = false
model.fees_dbs_fees = false
model.fees_other_fees = false
model.access_needs_support_supports_access_needs = false
model.attributes
end
subject do
described_class.new(model_attrs).attributes
end
it { is_expected.to include(dbs_requires_check: false) }
it { is_expected.to include(dbs_policy_details: nil) }
it { is_expected.to include(individual_requirements: 'None') }
it { is_expected.to include(description_details: nil) }
it { is_expected.to include(dress_code_other_details: nil) }
it { is_expected.to include(admin_contact_email: nil) }
it { is_expected.to include(admin_contact_email_secondary: nil) }
it { is_expected.to include(admin_contact_phone: nil) }
it { is_expected.to include(primary_phase: false) }
it { is_expected.to include(secondary_phase: true) }
it { is_expected.to include(college_phase: true) }
it { is_expected.to include(key_stage_early_years: false) }
it { is_expected.to include(key_stage_1: false) }
it { is_expected.to include(key_stage_2: false) }
it { is_expected.to include(parking_provided: false) }
it { is_expected.to include(parking_details: 'somewhere further away') }
it { is_expected.to include(teacher_training_info: nil) }
it { is_expected.to include(teacher_training_url: nil) }
it { is_expected.to include(administration_fee_amount_pounds: nil) }
it { is_expected.to include(administration_fee_description: nil) }
it { is_expected.to include(administration_fee_interval: nil) }
it { is_expected.to include(administration_fee_payment_method: nil) }
it { is_expected.to include(dbs_fee_amount_pounds: nil) }
it { is_expected.to include(dbs_fee_description: nil) }
it { is_expected.to include(dbs_fee_interval: nil) }
it { is_expected.to include(dbs_fee_payment_method: nil) }
it { is_expected.to include(other_fee_amount_pounds: nil) }
it { is_expected.to include(other_fee_description: nil) }
it { is_expected.to include(other_fee_interval: nil) }
it { is_expected.to include(other_fee_payment_method: nil) }
      it { is_expected.to include(supports_access_needs: false) } # rename this; it clashed with the form model
it { is_expected.to include(access_needs_description: nil) }
it { is_expected.to include(disability_confident: nil) }
it { is_expected.to include(has_access_needs_policy: nil) }
it { is_expected.to include(access_needs_policy_url: nil) }
end
end
describe '#phase_ids' do
before { @early_years = create(:bookings_phase, :early_years) }
before { @primary = create(:bookings_phase, :primary) }
before { @secondary = create(:bookings_phase, :secondary) }
before { @college = create(:bookings_phase, :college) }
let(:attrs) { build(:school_profile, :completed).attributes }
subject { described_class.new(attrs).phase_ids }
it { is_expected.to include(@early_years.id) }
it { is_expected.to include(@primary.id) }
it { is_expected.to include(@secondary.id) }
it { is_expected.to include(@college.id) }
end
end
| 56.197452 | 127 | 0.725377 |
bf1446c50ed77627eb2fb02911af78e2ec94965d | 3,731 | module ActiveRecord
module Associations
class Preloader
class Association #:nodoc:
attr_reader :owners, :reflection, :preload_scope, :model, :klass
def initialize(klass, owners, reflection, preload_scope)
@klass = klass
@owners = owners
@reflection = reflection
@preload_scope = preload_scope
@model = owners.first && owners.first.class
@scope = nil
@owners_by_key = nil
end
def run
unless owners.first.association(reflection.name).loaded?
preload
end
end
def preload
raise NotImplementedError
end
def scope
@scope ||= build_scope
end
def records_for(ids)
scope.where(association_key.in(ids))
end
def table
klass.arel_table
end
# The name of the key on the associated records
def association_key_name
raise NotImplementedError
end
# This is overridden by HABTM as the condition should be on the foreign_key column in
# the join table
def association_key
table[association_key_name]
end
# The name of the key on the model which declares the association
def owner_key_name
raise NotImplementedError
end
# We're converting to a string here because postgres will return the aliased association
# key in a habtm as a string (for whatever reason)
def owners_by_key
@owners_by_key ||= owners.group_by do |owner|
key = owner[owner_key_name]
key && key.to_s
end
end
def options
reflection.options
end
private
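        # Builds a hash mapping each owner record to the array of associated records
        # preloaded for it, querying in id slices when the adapter limits IN-clause length.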
def associated_records_by_owner
owners_map = owners_by_key
owner_keys = owners_map.keys.compact
if klass.nil? || owner_keys.empty?
records = []
else
# Some databases impose a limit on the number of ids in a list (in Oracle it's 1000)
# Make several smaller queries if necessary or make one query if the adapter supports it
sliced = owner_keys.each_slice(klass.connection.in_clause_length || owner_keys.size)
records = sliced.map { |slice| records_for(slice).to_a }.flatten
end
# Each record may have multiple owners, and vice-versa
records_by_owner = Hash[owners.map { |owner| [owner, []] }]
records.each do |record|
owner_key = record[association_key_name].to_s
owners_map[owner_key].each do |owner|
records_by_owner[owner] << record
end
end
records_by_owner
end
def reflection_scope
@reflection_scope ||= reflection.scope ? klass.unscoped.instance_exec(nil, &reflection.scope) : klass.unscoped
end
def build_scope
scope = klass.unscoped
scope.default_scoped = true
values = reflection_scope.values
preload_values = preload_scope.values
scope.where_values = Array(values[:where]) + Array(preload_values[:where])
scope.references_values = Array(values[:references]) + Array(preload_values[:references])
scope._select! preload_values[:select] || values[:select] || table[Arel.star]
scope.includes! preload_values[:includes] || values[:includes]
if options[:as]
scope.where!(klass.table_name => { reflection.type => model.base_class.sti_name })
end
scope
end
end
end
end
end
| 30.581967 | 120 | 0.590458 |
e8bb7ab71a7c508537986ea494027c42fffb4b4e | 213 | class CreateVisitors < ActiveRecord::Migration[5.2]
def change
create_table :visitors do |t|
t.string :remote_ip
t.timestamps
end
add_index :visitors, :remote_ip, unique: true
end
end
| 19.363636 | 51 | 0.685446 |
391444d1d4f2f69b06ee87528ed31590480b5823 | 1,151 | class Pmd < Formula
desc "Source code analyzer for Java, JavaScript, and more"
homepage "https://pmd.github.io"
url "https://github.com/pmd/pmd/releases/download/pmd_releases/6.36.0/pmd-bin-6.36.0.zip"
sha256 "a3aa27cfa8f72ca56aaaa1f56468ea1decfb1b0d1b57005b4f3c386cb80be7fe"
license "BSD-4-Clause"
bottle do
sha256 cellar: :any_skip_relocation, x86_64_linux: "bb838507485f5d8c8f275d3364c6a2b3addb86dbc1eb395e184cc856c3e07283"
end
depends_on "openjdk"
def install
rm Dir["bin/*.bat"]
libexec.install Dir["*"]
(bin/"pmd").write_env_script libexec/"bin/run.sh", Language::Java.overridable_java_home_env
end
def caveats
<<~EOS
Run with `pmd` (instead of `run.sh` as described in the documentation).
EOS
end
test do
(testpath/"java/testClass.java").write <<~EOS
public class BrewTestClass {
// dummy constant
public String SOME_CONST = "foo";
public boolean doTest () {
return true;
}
}
EOS
system "#{bin}/pmd", "pmd", "-d", "#{testpath}/java", "-R",
"rulesets/java/basic.xml", "-f", "textcolor", "-l", "java"
end
end
| 27.404762 | 121 | 0.664639 |
ab902b4702e6bfb814a79d2970dc1b47836a29ec | 3,707 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
# The counts of monitor groups per different criteria.
class MonitorGroupSearchResponseCounts
include BaseGenericModel
# Whether the object has unparsed attributes
# @!visibility private
attr_accessor :_unparsed
# Search facets.
attr_accessor :status
# Search facets.
attr_accessor :type
# Attribute mapping from ruby-style variable name to JSON key.
# @!visibility private
def self.attribute_map
{
:'status' => :'status',
:'type' => :'type'
}
end
# Returns all the JSON keys this model knows about
# @!visibility private
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
# @!visibility private
def self.openapi_types
{
:'status' => :'Array<Object>',
:'type' => :'Array<Object>'
}
end
# List of attributes with nullable: true
# @!visibility private
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param attributes [Hash] Model attributes in the form of hash
# @!visibility private
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::MonitorGroupSearchResponseCounts` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::MonitorGroupSearchResponseCounts`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'status')
if (value = attributes[:'status']).is_a?(Array)
self.status = value
end
end
if attributes.key?(:'type')
if (value = attributes[:'type']).is_a?(Array)
self.type = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
# @!visibility private
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
# @!visibility private
def valid?
true
end
# Checks equality by comparing each attribute.
# @param o [Object] Object to be compared
# @!visibility private
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
status == o.status &&
type == o.type
end
# @see the `==` method
# @param o [Object] Object to be compared
# @!visibility private
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
# @!visibility private
def hash
[status, type].hash
end
end
end
| 27.664179 | 232 | 0.652279 |
e8ffd15c76e7d0e71e703b71caadc27a6ae2889a | 354 | # coding : utf-8
# Allow negative integers and decimal values to be entered
require 'quickpack/validation'
class DecimalNumericCheckValidator < ActiveModel::EachValidator
include Quickpack::Validation
def validate_each(record, attribute, value)
record.errors.add(attribute, I18n.t('errors.messages.decimal_numeric_check')) unless value =~ /^(-?\d+(\.\d+)?$)/
end
end
| 25.285714 | 118 | 0.714689 |
0155fab90ceaa01183cd4e46120476e9aeebf457 | 6,982 | require 'rails_helper'
context 'when NOMIS is missing information' do
let(:prison_code) { 'LEI' }
let(:offender_no) { 'A1' }
let(:stub_keyworker_host) { Rails.configuration.keyworker_api_host }
let(:staff_id) { 111_111 }
describe 'when logged in as a POM' do
before do
stub_poms = [{ staffId: staff_id, position: RecommendationService::PRISON_POM }]
stub_request(:post, "#{ApiHelper::AUTH_HOST}/auth/oauth/token").
with(query: { grant_type: 'client_credentials' }).
to_return(body: {}.to_json)
stub_request(:get, "#{ApiHelper::T3}/users/example_user").
to_return(body: { staffId: staff_id }.to_json)
stub_request(:get, "#{ApiHelper::T3}/staff/#{staff_id}/emails").
to_return(body: [].to_json)
stub_request(:get, "#{ApiHelper::T3}/staff/roles/#{prison_code}/role/POM").
to_return(body: stub_poms.to_json)
signin_pom_user
stub_user staff_id: staff_id
end
describe 'the caseload page' do
context 'with an NPS offender with a determinate sentence, but no release dates' do
before do
stub_offenders = [build(:nomis_offender, offenderNo: offender_no,
imprisonmentStatus: 'SEC91',
sentence: build(:nomis_sentence_detail,
releaseDate: 30.years.from_now.iso8601,
sentenceStartDate: Time.zone.now.iso8601))]
stub_offenders_for_prison(prison_code, stub_offenders)
create(:allocation, nomis_offender_id: offender_no, primary_pom_nomis_id: staff_id)
create(:case_information, nomis_offender_id: offender_no, case_allocation: 'NPS')
end
it 'does not error' do
visit prison_staff_caseload_index_path(prison_code, staff_id)
expect(page).to have_content('Showing 1 - 1 of 1 results')
end
end
end
describe 'the prisoner page' do
before do
offender = build(:nomis_offender, offenderNo: offender_no, sentence: build(:nomis_sentence_detail, conditionalReleaseDate: Time.zone.today + 22.months))
stub_request(:get, "#{ApiHelper::T3}/staff/#{staff_id}").
to_return(body: { staffId: staff_id, firstName: "TEST", lastName: "MOIC" }.to_json)
stub_request(:post, "#{ApiHelper::T3}/offender-assessments/CATEGORY").
to_return(body: {}.to_json)
stub_request(:get, "#{stub_keyworker_host}/key-worker/#{prison_code}/offender/#{offender_no}").
to_return(body: {}.to_json)
stub_offender(offender)
create(:allocation, nomis_offender_id: offender_no, primary_pom_nomis_id: staff_id)
create(:case_information, nomis_offender_id: offender_no, case_allocation: 'NPS')
end
it 'does not error' do
visit prison_prisoner_path(prison_code, offender_no)
expect(page).to have_content('Prisoner information')
earliest_release_date = find('#earliest_release_date').text
expect(Date.parse(earliest_release_date)).to eq(Time.zone.today + 22.months)
end
end
describe 'the handover start page' do
before do
stub_offenders = [build(:nomis_offender, offenderNo: offender_no)]
stub_offenders_for_prison(prison_code, stub_offenders)
end
it 'does not error' do
create(:allocation, nomis_offender_id: offender_no, primary_pom_nomis_id: staff_id)
visit prison_staff_caseload_handover_start_path(prison_code, staff_id)
expect(page).to have_content('All cases for start of handover to the community in the next 30 days')
end
end
end
context 'when logged in as an SPO' do
before do
stub_request(:post, "#{ApiHelper::AUTH_HOST}/auth/oauth/token").
with(query: { grant_type: 'client_credentials' }).
to_return(body: {}.to_json)
signin_spo_user('example_SPO')
stub_request(:get, "#{ApiHelper::T3}/users/example_SPO").
to_return(body: { 'staffId': 754_732 }.to_json)
stub_request(:get, "#{ApiHelper::T3}/staff/754732/emails").
to_return(body: [].to_json)
end
context 'with an NPS offender with an indeterminate sentence, but no release dates' do
let(:booking_id) { 4 }
before do
stub_offender = build(:nomis_offender, offenderNo: offender_no)
stub_offenders_for_prison(prison_code, [stub_offender])
stub_request(:get, "#{ApiHelper::T3}/prisoners/#{offender_no}").
to_return(body: [stub_offender].to_json)
stub_request(:post, "#{ApiHelper::T3}/offender-assessments/CATEGORY").
to_return(body: {}.to_json)
stub_request(:get, "#{ApiHelper::T3}/bookings/#{booking_id}/mainOffence").
to_return(body: {}.to_json)
stub_poms = [{ staffId: staff_id, position: RecommendationService::PRISON_POM }]
stub_request(:get, "#{ApiHelper::T3}/staff/roles/#{prison_code}/role/POM").
to_return(body: stub_poms.to_json)
stub_request(:get, "#{ApiHelper::T3}/staff/#{staff_id}").
to_return(body: {}.to_json)
stub_request(:get, "#{ApiHelper::T3}/staff/#{staff_id}/emails").
to_return(body: [].to_json)
stub_request(:get, "#{stub_keyworker_host}/key-worker/#{prison_code}/offender/#{offender_no}").
to_return(body: {}.to_json)
create(:allocation, nomis_offender_id: offender_no, primary_pom_nomis_id: staff_id)
create(
:case_information,
nomis_offender_id: offender_no,
case_allocation: 'NPS',
welsh_offender: welsh
)
end
describe 'the pom details page' do
before { visit prison_pom_path(prison_code, staff_id) }
context 'with a welsh offender' do
let(:welsh) { 'Yes' }
context 'with a sentence start date post-policy' do
let(:sentence_start) { Time.zone.now }
it 'shows their allocated case' do
expect(page).to have_content(offender_no)
end
end
context 'with a sentence start date pre-policy' do
let(:sentence_start) { '01 Jan 2010'.to_date }
it 'shows their allocated case' do
expect(page).to have_content(offender_no)
end
end
end
context 'with an english offender' do
let(:welsh) { 'No' }
context 'with a sentence start date post-policy' do
let(:sentence_start) { Time.zone.now }
it 'shows their allocated case' do
expect(page).to have_content(offender_no)
end
end
context 'with a sentence start date pre-policy' do
let(:sentence_start) { '01 Jan 2010'.to_date }
it 'shows their allocated case' do
expect(page).to have_content(offender_no)
end
end
end
end
end
end
end
| 35.262626 | 160 | 0.631911 |
6132eb6142787c548697f6ee2ae3b35645be1b38 | 2,874 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170726145619) do
create_table "comments", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
t.bigint "ticket_id"
t.text "message"
t.string "action"
t.bigint "user_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["ticket_id"], name: "index_comments_on_ticket_id"
t.index ["user_id"], name: "index_comments_on_user_id"
end
create_table "customers", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "tickets", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
t.string "title"
t.text "message"
t.string "status"
t.bigint "user_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "closed_at"
t.index ["user_id"], name: "index_tickets_on_user_id"
end
create_table "users", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
t.string "name"
t.bigint "customer_id"
t.string "role"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.string "auth_token"
t.index ["auth_token"], name: "index_users_on_auth_token"
t.index ["customer_id"], name: "index_users_on_customer_id"
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
add_foreign_key "comments", "tickets"
add_foreign_key "comments", "users"
add_foreign_key "tickets", "users"
add_foreign_key "users", "customers"
end
| 40.478873 | 97 | 0.724426 |
aba26e8b5c8e9eb31fe8de4fbfb92d113885fa87 | 7,950 | require "test/unit"
require_relative "../lib/more_ruby"
class TestArray < Test::Unit::TestCase
def test_random
a = (1 .. 20).to_a
randoms = []
count = 20
count.times do
randoms << a.random
end
assert_equal(count, randoms.size)
randoms.compact!
assert_equal(count, randoms.size, ".random returned nils")
randoms.uniq!
assert_empty(randoms - a, ".random returned values not in the original array")
# Extremely unlikely to happen, but rand may roll in order for every call in this test
assert_not_equal(a, randoms, ".random did not return values in a random order")
end
def test_delete_random_size
orig_size = 20
a = (1 .. orig_size).to_a
count = 10
count.times do |i|
deleted = a.delete_random
assert_not_equal(orig_size, a.size, ".delete_random is not deleting from self")
assert_equal(orig_size - (i + 1), a.size, ".delete_random is deleting too many from self")
# TODO would be nice to test randomness, but that's pretty difficult
end
end
def test_delete_random_return
b = [3, 3, 3]
deleted = b.delete_random
assert_not_empty(b, ".delete_random is deleting by object, not by index")
assert_not_equal(b.uniq, b, ".delete_random is deleting by object, not by index")
assert_equal(deleted, b.random, ".delete_random is not returning the deleted item")
assert_not_nil(deleted, ".delete_random is not returning the deleted item")
end
def test_mean
a = (1 .. 6).to_a # mean is 3.5
expected = 3.5
mean = a.mean
assert_kind_of(Float, mean, ".mean did not return a Float")
assert_equal(mean, expected, ".mean is not returning the correct mean")
end
def test_av
a = (1 .. 6).to_a # mean is 3.5
expected = 3.5
mean = a.av
assert_kind_of(Float, mean, ".av did not return a Float")
assert_equal(mean, expected, ".av is not returning the correct mean")
end
def test_mean_mixed_numerics
a = [1, 2.0, -6789, 432.123456, 0, 0x04B, 01000, 0b1000]
expected = -719.859568
mean = a.mean
assert_kind_of(Float, mean, ".mean did not return a Float")
assert_equal(mean, expected, ".mean is not returning the correct mean")
end
def test_non_numeric_mean
assert_exception_message_correct_non_numeric_mean ["a", "b", "c"]
assert_exception_message_correct_non_numeric_mean ["1", "2", "c"]
assert_exception_message_correct_non_numeric_mean [1, 2, "c", 4]
assert_exception_message_correct_non_numeric_mean [1, 2, "3", 4]
assert_exception_message_correct_non_numeric_mean [1, nil, 3, 4]
assert_exception_message_correct_non_numeric_mean [1, false, true, 4]
end
def assert_exception_message_correct_non_numeric_mean(array)
exception = assert_raise(TypeError) {array.mean}
assert_equal("Cannot determine the mean of an array that contains non-Numeric objects.", exception.message)
end
def test_sum
a = (1 .. 6).to_a
expected = 21
sum = a.sum
assert_kind_of(Integer, sum, ".sum did not return a Integer")
assert_equal(sum, expected, ".sum is not returning the correct sum")
end
def test_sum_floats
a = (1 .. 6).to_a
a << 4.32
expected = 25.32
sum = a.sum
assert_kind_of(Float, sum, ".sum did not return a Float")
assert_equal(sum, expected, ".sum is not returning the correct sum")
end
def test_sum_non_numeric
a = (1 .. 6).to_a
a << "4.32"
exception = assert_raise(TypeError) {a.sum}
assert_equal("Array contained non-numeric non-nil elements; cannot sum contents.", exception.message)
end
def test_sum_with_nil
a = (1 .. 6).to_a
a << nil
a << 4.32
a << -26
expected = -0.68
sum = a.sum.signif(8) # need to work around floating-point precision issues
assert_kind_of(Float, sum, ".sum did not return a Float")
assert_equal(sum, expected, ".sum is not returning the correct sum")
end
def test_insert_flat
a = [1, 2, 3]
b = [4, 5]
expected = [4, 5, 1, 2, 3]
a.insert_flat(0, b)
assert_equal(expected, a, ".insert_flat failed")
end
def test_insert_flat_preserving_subarrays
a = [1, [2.1, 2.2], 3]
b = [4, 5]
expected = [1, [2.1, 2.2], 4, 5, 3]
a.insert_flat(-2, b)
assert_equal(expected.size, a.size, ".insert_flat failed to preserve preexisting subarray")
assert_equal(expected, a, ".insert_flat failed")
end
def test_all_kind_of
a = ["A string", :smybol, false, 1]
assert_false(a.all_kind_of?(String), ".all_kind_of? returned true when there were distinctly different types in the array")
assert(a.all_kind_of?(Object), ".all_kind_of? returned true when there were distinctly different types in the array")
b = (1 .. 4).to_a
assert(b.all_kind_of?(Numeric), ".all_kind_of? returned false when the array's contents were all subclasses of the questioned class")
assert(b.all_kind_of?(Integer), ".all_kind_of? returned false when the array's contents were all subclasses of the questioned class")
assert(b.all_kind_of?(Fixnum), ".all_kind_of? returned false when the array's contents were all instances of the questioned class")
b.insert(2, 2.0)
assert(b.all_kind_of?(Numeric), ".all_kind_of? returned false when the array's contents were all subclasses of the questioned class")
assert_false(b.all_kind_of?(Integer), ".all_kind_of? returned true when there were distinctly different types in the array")
assert_false(b.all_kind_of?(Fixnum), ".all_kind_of? returned true when there were distinctly different types in the array")
end
def test_all_instance_of
a = ["A string", :smybol, false, 1]
assert_false(a.all_instance_of?(String), ".all_instance_of? returned true when there were distinctly different types in the array")
assert_false(a.all_instance_of?(Object), ".all_instance_of? returned true when there were distinctly different types in the array")
b = (1 .. 4).to_a
assert_false(b.all_instance_of?(Numeric), ".all_instance_of? returned true when the array's contents were all subclasses of the questioned class")
assert_false(b.all_instance_of?(Integer), ".all_instance_of? returned true when the array's contents were all subclasses of the questioned class")
assert(b.all_instance_of?(Fixnum), ".all_instance_of? returned false when the array's contents were all instances of the questioned class")
b.insert(2, 2.0)
assert_false(b.all_instance_of?(Numeric), ".all_instance_of? returned true when the array's contents were all subclasses of the questioned class")
end
def test_wrap_fetch
a = [:a, :b, :c, :d]
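    # Out-of-range indices wrap modulo the array length, so 4 maps back to :a and 494 to :c;
    # an empty array returns nil.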
assert_equal(:a, a.wrap_fetch(0), "wrap_fetch failed")
assert_equal(:d, a.wrap_fetch(3), "wrap_fetch failed")
assert_equal(:d, a.wrap_fetch(-1), "wrap_fetch failed")
assert_equal(:a, a.wrap_fetch(-4), "wrap_fetch failed")
assert_equal(:a, a.wrap_fetch(4), "wrap_fetch failed")
assert_equal(:b, a.wrap_fetch(5), "wrap_fetch failed")
assert_equal(:c, a.wrap_fetch(494), "wrap_fetch failed")
assert_equal(:c, a.wrap_fetch(-494), "wrap_fetch failed")
b = []
assert_nil(b.wrap_fetch(0), "wrap_fetch failed")
end
def test_modulo_fetch
a = [:a, :b, :c, :d]
assert_equal(:b, a.modulo_fetch(5), "modulo_fetch failed")
end
end | 41.19171 | 162 | 0.639245 |
392e876b64ba9aa4c2d4126bb946e4cdc5cf8da8 | 417 | require_relative '../achievement'
class TopTenMonth < Achievement
def initialize
super 'Top 10 month', 'top_ten_month', 'Place yourself top 10 for a month', 'Hardcore', 25
end
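  # Achieved when the user appears among the top 10 user sessions for the previous calendar month.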
def achieved? user
from = Date.today.last_month.beginning_of_month
to = Date.today.last_month.end_of_month
UserSession.includes(:user).time_between(from, to).take(10).map(&:user).include?(user)
end
end | 27.8 | 98 | 0.707434 |
ace62e1191cc8c52e2116584fd687fb894c324d7 | 1,155 | cask '[email protected]' do
version '2018.4.13f1,497f083a43af'
sha256 :no_check
url "https://download.unity3d.com/download_unity/497f083a43af/MacEditorTargetInstaller/UnitySetup-Facebook-Games-Support-for-Editor-2018.4.13f1.pkg"
name 'Facebook Gameroom Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-Facebook-Games-Support-for-Editor-2018.4.13f1.pkg'
depends_on cask: '[email protected]'
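  # The pkg installs into /Applications/Unity, so preflight temporarily moves the versioned
  # install there (stashing any existing Unity); postflight moves everything back afterwards.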
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2018.4.13f1"
FileUtils.move "/Applications/Unity-2018.4.13f1", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2018.4.13f1"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2018.4.13f1/PlaybackEngines/Facebook'
end
| 32.083333 | 150 | 0.722078 |
28cc7a280e7979d42e17ac0f2fc0c092f99cf725 | 4,137 | require File.join(File.dirname(__FILE__), "spec_helper")
describe "::ORI::Tools" do
mod = ::ORI::Tools
describe ".ansi" do
meth = :ansi
it "returns empty string if no attrs are given" do
mod.send(meth).should == ""
end
it "refuses to take unknown attributes" do
proc do
mod.send(meth, :kaka)
end.should raise_error(ArgumentError)
end
it "generally works" do
mod.send(meth, :red).should == "\e[31m"
mod.send(meth, :red, :on_green).should == "\e[31;42m"
end
end # .ansi
describe ".get_methods" do
meth = :get_methods
it "allows to fetch own methods only" do
ar = mod.send(meth, ::Sample::BasicInheritance::Son, :inspector_arg => false)
##p "ar", ar
h = Hash[*ar.flatten(1)]
##p "h", h
h["public_instance_methods"].should == ["son_public"]
h["protected_instance_methods"].should == ["son_protected"]
h["private_instance_methods"].should == ["son_private"]
h["public_methods"].should include("son_public_singleton", "papa_public_singleton", "grandpa_public_singleton")
h["protected_methods"].should include("son_protected_singleton", "papa_protected_singleton", "grandpa_protected_singleton")
h["private_methods"].should include("son_private_singleton", "papa_private_singleton", "grandpa_private_singleton")
end
it "supports MAV mode" do
ar = mod.send(meth, ::Sample::BasicInheritance::Son, :to_mav => true)
##p "ar", ar
ar.should include(["son_public", "#", :public], ["son_protected", "#", :protected], ["son_private", "#", :private])
ar.should include(["son_public_singleton", "::", :public], ["son_protected_singleton", "::", :protected], ["son_private_singleton", "::", :private])
ar.should include(["papa_public", "#", :public], ["papa_protected", "#", :protected], ["papa_private", "#", :private])
ar.should include(["papa_public_singleton", "::", :public], ["papa_protected_singleton", "::", :protected], ["papa_private_singleton", "::", :private])
ar = mod.send(meth, ::Sample::BasicExtension::OtherMo, :to_mav => true)
ar.should include(["public_meth", "::", :public], ["protected_meth", "::", :protected], ["private_meth", "::", :private])
ar = mod.send(meth, ::Sample::BasicExtension::Klass, :to_mav => true)
ar.should include(["public_meth", "::", :public], ["protected_meth", "::", :protected], ["private_meth", "::", :private])
end
end # .get_methods
describe ".get_module_name" do
meth = :get_module_name
it "works for normal classes and modules" do
mod.send(meth, Kernel).should == "Kernel"
mod.send(meth, String).should == "String"
end
it "works for class singletons" do
klass = class << String; self; end
mod.send(meth, klass).should == "String"
end
it "works for module singletons" do
klass = class << Enumerable; self; end
mod.send(meth, klass).should == "Enumerable"
end
it "works for instance singletons" do
klass = class << "kk"; self; end
mod.send(meth, klass).should == "String"
klass = class << []; self; end
mod.send(meth, klass).should == "Array"
klass = class << (class << []; self; end); self; end
mod.send(meth, klass).should == "Class"
end
it "works for namespaced names" do
klass = class << ::Sample::BasicExtension::Mo; self; end
mod.send(meth, klass).should == "Sample::BasicExtension::Mo"
klass = class << ::Sample::BasicExtension::Klass; self; end
mod.send(meth, klass).should == "Sample::BasicExtension::Klass"
klass = class << ::Sample::BasicExtension::Klass.new; self; end
mod.send(meth, klass).should == "Sample::BasicExtension::Klass"
end
end # .get_module_name
describe ".shell_escape" do
meth = :shell_escape
it "generally works" do
mod.send(meth, "").should == "''"
mod.send(meth, "one two").should == "one\\ two"
mod.send(meth, "one\ntwo").should == "one'\n'two"
mod.send(meth, "Kernel#`").should == "Kernel\\#\\`"
end
end # .shell_escape
end # ::ORI::Tools
| 37.609091 | 157 | 0.624365 |
6ab8ec00e44adf89bf912b266a9974070d67260e | 812 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/handler/bind_tcp'
require 'msf/core/payload/windows/bind_tcp'
module MetasploitModule
CachedSize = 285
include Msf::Payload::Stager
include Msf::Payload::Windows::BindTcp
def initialize(info = {})
super(merge_info(info,
'Name' => 'Bind TCP Stager (Windows x86)',
'Description' => 'Listen for a connection (Windows x86)',
'Author' => ['hdm', 'skape', 'sf'],
'License' => MSF_LICENSE,
'Platform' => 'win',
'Arch' => ARCH_X86,
'Handler' => Msf::Handler::BindTcp,
'Convention' => 'sockedi',
'Stager' => { 'RequiresMidstager' => false }
))
end
end
| 27.066667 | 66 | 0.610837 |
110a3e2a93bf44fd62cb0548f75a9dc5f51ee5ef | 192 | class CreateDentists < ActiveRecord::Migration[5.2]
def change
create_table :dentists do |t|
t.string :name
t.string :email
t.string :password_digest
end
end
end
| 19.2 | 51 | 0.666667 |
6ab12b883ad0e2e8c4fb80ddb9ad91abcb2a3631 | 3,316 | class ConsensusGenomeCoverageService
include Callable
  # Maximum number of bins used when binning coverage data (fewer when the sequence is shorter)
MAX_NUM_BINS = 500
class NoDepthDataError < StandardError
def initialize(workflow_run)
super("No depth data available for workflow_run #{workflow_run.id}.")
end
end
def initialize(
workflow_run:,
max_num_bins: MAX_NUM_BINS,
cacheable_only: false
)
@workflow_run = workflow_run
@max_num_bins = max_num_bins
# cacheable_only will return cache-friendly data (e.g. basic stats)
# Included: coverage_breadth, coverage_depth, max_aligned_length, total_length
# Excluded: coverage, coverage_bin_size
@cacheable_only = cacheable_only
end
def call
return generate_coverage_viz
end
private
def generate_coverage_viz
depths = fetch_depths_data
return convert_to_coverage_data(depths)
end
def fetch_depths_data
depths = @workflow_run.output(ConsensusGenomeWorkflowRun::OUTPUT_DEPTHS)
raise NoDepthDataError, @workflow_run unless depths
depths = depths.split(/\n+/).map(&:to_i)
return depths
end
def convert_to_coverage_data(depths)
coverage_breadth = depths.count { |d| d > 0 }.to_f / depths.size
coverage_depth = depths.select { |d| d > 0 }.sum(0.0) / depths.size
max_aligned_length = depths.size
total_length = depths.size
if @cacheable_only
return {
coverage_breadth: coverage_breadth,
coverage_depth: coverage_depth,
max_aligned_length: max_aligned_length,
total_length: total_length,
}
end
# takes the histogram from depths file and converts to coverage viz data format
if depths.size <= @max_num_bins
num_bins = depths.size
bin_size = 1
else
num_bins = @max_num_bins
bin_size = depths.size.to_f / @max_num_bins
end
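    # When bin_size is fractional, a bin's first and last positions are only partially
    # covered; their depth values are given proportionally reduced weights below.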
coverage = (0...num_bins).map do |idx|
bin_start = idx * bin_size
start_fraction = 1 - bin_start.modulo(1)
idx_start = bin_start.floor
bin_end = (idx + 1) * bin_size
end_fraction = bin_end.modulo(1)
idx_end = bin_end.ceil - 1
# compute average depth accounting for partial start and end fraction
depth_arr = depths[idx_start..idx_end]
weights = Array.new(depth_arr.size, 1.0)
weights[0] = start_fraction
# set end fraction only if we loaded the last cell
weights[weights.size - 1] = end_fraction if end_fraction > 0
avg_depth = depth_arr.zip(weights).map { |x, y| x * y }.sum(0.0) / (bin_end - bin_start)
breadth_arr = depths[idx_start..idx_end].map { |v| v > 0 ? 1 : 0 }
avg_breadth = breadth_arr.zip(weights).map { |x, y| x * y }.sum(0.0) / (bin_end - bin_start)
[
idx, # bin index
avg_depth.round(3),
avg_breadth.round(3),
        # set number of contigs to 1 because we consider one possible contig that aligns to the reference
1,
# TODO(tiago): review this stat (number of reads): should it be the same as depth?
0,
]
end
return {
total_length: total_length,
coverage: coverage,
coverage_bin_size: bin_size,
# TODO: fix to match longer alignment
max_aligned_length: max_aligned_length,
coverage_depth: coverage_depth,
coverage_breadth: coverage_breadth,
}
end
end
| 30.145455 | 105 | 0.675513 |
1c4e01c4a3826b3276045460e8b0c24ea314a90c | 119 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe User::AuthenticateFromSSO, type: :service do
end
| 17 | 59 | 0.806723 |
ac5f11f59e35268b466fcef244338539941f44de | 10,232 | require 'test_helper'
class BeanstreamTest < Test::Unit::TestCase
def setup
Base.mode = :test
@gateway = BeanstreamGateway.new(
:login => 'merchant id',
:user => 'username',
:password => 'password'
)
@credit_card = credit_card
@check = check(
:institution_number => '001',
:transit_number => '26729'
)
@amount = 1000
@options = {
:order_id => '1234',
:billing_address => {
:name => 'xiaobo zzz',
:phone => '555-555-5555',
:address1 => '1234 Levesque St.',
:address2 => 'Apt B',
:city => 'Montreal',
:state => 'QC',
:country => 'CA',
:zip => 'H2C1X8'
},
:email => '[email protected]',
:subtotal => 800,
:shipping => 100,
:tax1 => 100,
:tax2 => 100,
:custom => 'reference one'
}
@recurring_options = @options.merge(
:interval => { :unit => :months, :length => 1 },
:occurrences => 5)
end
def test_successful_purchase
@gateway.expects(:ssl_post).returns(successful_purchase_response)
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert_equal '10000028;15.00;P', response.authorization
end
def test_successful_test_request_in_production_environment
Base.mode = :production
@gateway.expects(:ssl_post).returns(successful_test_purchase_response)
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert response.test?
end
def test_unsuccessful_request
@gateway.expects(:ssl_post).returns(unsuccessful_purchase_response)
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_failure response
end
def test_avs_result
@gateway.expects(:ssl_post).returns(successful_purchase_response)
response = @gateway.purchase(@amount, @credit_card, @options)
assert_equal 'R', response.avs_result['code']
end
def test_ccv_result
@gateway.expects(:ssl_post).returns(successful_purchase_response)
response = @gateway.purchase(@amount, @credit_card, @options)
assert_equal 'M', response.cvv_result['code']
end
def test_successful_check_purchase
@gateway.expects(:ssl_post).returns(successful_check_purchase_response)
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert_equal '10000072;15.00;D', response.authorization
assert_equal 'Approved', response.message
end
def test_successful_purchase_with_check
@gateway.expects(:ssl_post).returns(successful_purchase_response)
assert response = @gateway.purchase(@amount, @check, @options)
assert_success response
assert_equal '10000028;15.00;P', response.authorization
end
def test_successful_purchase_with_vault
@gateway.expects(:ssl_post).returns(successful_purchase_response)
vault = rand(100000)+10001
assert response = @gateway.purchase(@amount, vault, @options)
assert_success response
assert_equal '10000028;15.00;P', response.authorization
end
# Testing Non-American countries
def test_german_address_sets_state_to_the_required_dummy_value
@gateway.expects(:commit).with(german_address_params_without_state)
billing = @options[:billing_address]
billing[:country] = 'DE'
billing[:city] = 'Berlin'
billing[:zip] = '12345'
billing[:state] = nil
@options[:shipping_address] = billing
@gateway.purchase(@amount, @credit_card, @options)
end
def test_brazilian_address_sets_state_and_zip_to_the_required_dummy_values
@gateway.expects(:commit).with(brazilian_address_params_without_zip_and_state)
billing = @options[:billing_address]
billing[:country] = 'BR'
billing[:city] = 'Rio de Janeiro'
billing[:zip] = nil
billing[:state] = nil
@options[:shipping_address] = billing
@gateway.purchase(@amount, @credit_card, @options)
end
def test_successful_recurring
@gateway.expects(:ssl_post).returns(successful_recurring_response)
response = assert_deprecation_warning(Gateway::RECURRING_DEPRECATION_MESSAGE) do
@gateway.recurring(@amount, @credit_card, @recurring_options)
end
assert_success response
assert_equal 'Approved', response.message
end
def test_successful_update_recurring
@gateway.expects(:ssl_post).returns(successful_recurring_response)
response = assert_deprecation_warning(Gateway::RECURRING_DEPRECATION_MESSAGE) do
@gateway.recurring(@amount, @credit_card, @recurring_options)
end
assert_success response
assert_equal 'Approved', response.message
@gateway.expects(:ssl_post).returns(successful_update_recurring_response)
response = assert_deprecation_warning(Gateway::RECURRING_DEPRECATION_MESSAGE) do
@gateway.update_recurring(@amount, @credit_card, @recurring_options.merge(:account_id => response.params["rbAccountId"]))
end
assert_success response
assert_equal "Request successful", response.message
end
def test_successful_cancel_recurring
@gateway.expects(:ssl_post).returns(successful_recurring_response)
response = assert_deprecation_warning(Gateway::RECURRING_DEPRECATION_MESSAGE) do
@gateway.recurring(@amount, @credit_card, @recurring_options)
end
assert_success response
assert_equal 'Approved', response.message
@gateway.expects(:ssl_post).returns(successful_cancel_recurring_response)
response = assert_deprecation_warning(Gateway::RECURRING_DEPRECATION_MESSAGE) do
@gateway.cancel_recurring(:account_id => response.params["rbAccountId"])
end
assert_success response
assert_equal "Request successful", response.message
end
def test_ip_is_being_sent
@gateway.expects(:ssl_post).with do |url, data|
data =~ /customerIP=123\.123\.123\.123/
end.returns(successful_purchase_response)
@options[:ip] = "123.123.123.123"
@gateway.purchase(@amount, @credit_card, @options)
end
private
def successful_purchase_response
"cvdId=1&trnType=P&trnApproved=1&trnId=10000028&messageId=1&messageText=Approved&trnOrderNumber=df5e88232a61dc1d0058a20d5b5c0e&authCode=TEST&errorType=N&errorFields=&responseType=T&trnAmount=15%2E00&trnDate=6%2F5%2F2008+5%3A26%3A53+AM&avsProcessed=0&avsId=0&avsResult=0&avsAddrMatch=0&avsPostalMatch=0&avsMessage=Address+Verification+not+performed+f"
end
def successful_test_purchase_response
"merchant_id=100200000&trnId=11011067&authCode=TEST&trnApproved=1&avsId=M&cvdId=1&messageId=1&messageText=Approved&trnOrderNumber=1234"
end
def unsuccessful_purchase_response
"merchant_id=100200000&trnId=11011069&authCode=&trnApproved=0&avsId=0&cvdId=6&messageId=16&messageText=Duplicate+transaction&trnOrderNumber=1234"
end
def successful_check_purchase_response
"trnApproved=1&trnId=10000072&messageId=1&messageText=Approved&trnOrderNumber=5d9f511363a0f35d37de53b4d74f5b&authCode=&errorType=N&errorFields=&responseType=T&trnAmount=15%2E00&trnDate=6%2F4%2F2008+6%3A33%3A55+PM&avsProcessed=0&avsId=0&avsResult=0&avsAddrMatch=0&avsPostalMatch=0&avsMessage=Address+Verification+not+performed+for+this+transaction%2E&trnType=D&paymentMethod=EFT&ref1=reference+one&ref2=&ref3=&ref4=&ref5="
end
def brazilian_address_params_without_zip_and_state
{ :shipProvince => '--', :shipPostalCode => '000000', :ordProvince => '--', :ordPostalCode => '000000', :ordCountry => 'BR', :trnCardOwner => 'Longbob Longsen', :shipCity => 'Rio de Janeiro', :ordAddress1 => '1234 Levesque St.', :ordShippingPrice => '1.00', :deliveryEstimate => nil, :shipName => 'xiaobo zzz', :trnCardNumber => '4242424242424242', :trnAmount => '10.00', :trnType => 'P', :ordAddress2 => 'Apt B', :ordTax1Price => '1.00', :shipEmailAddress => '[email protected]', :trnExpMonth => '09', :ordCity => 'Rio de Janeiro', :shipPhoneNumber => '555-555-5555', :ordName => 'xiaobo zzz', :trnExpYear => next_year, :trnOrderNumber => '1234', :shipCountry => 'BR', :ordTax2Price => '1.00', :shipAddress1 => '1234 Levesque St.', :ordEmailAddress => '[email protected]', :trnCardCvd => '123', :trnComments => nil, :shippingMethod => nil, :ref1 => 'reference one', :shipAddress2 => 'Apt B', :ordPhoneNumber => '555-555-5555', :ordItemPrice => '8.00' }
end
def german_address_params_without_state
{ :shipProvince => '--', :shipPostalCode => '12345', :ordProvince => '--', :ordPostalCode => '12345', :ordCountry => 'DE', :trnCardOwner => 'Longbob Longsen', :shipCity => 'Berlin', :ordAddress1 => '1234 Levesque St.', :ordShippingPrice => '1.00', :deliveryEstimate => nil, :shipName => 'xiaobo zzz', :trnCardNumber => '4242424242424242', :trnAmount => '10.00', :trnType => 'P', :ordAddress2 => 'Apt B', :ordTax1Price => '1.00', :shipEmailAddress => '[email protected]', :trnExpMonth => '09', :ordCity => 'Berlin', :shipPhoneNumber => '555-555-5555', :ordName => 'xiaobo zzz', :trnExpYear => next_year, :trnOrderNumber => '1234', :shipCountry => 'DE', :ordTax2Price => '1.00', :shipAddress1 => '1234 Levesque St.', :ordEmailAddress => '[email protected]', :trnCardCvd => '123', :trnComments => nil, :shippingMethod => nil, :ref1 => 'reference one', :shipAddress2 => 'Apt B', :ordPhoneNumber => '555-555-5555', :ordItemPrice => '8.00' }
end
def next_year
(Time.now.year + 1).to_s[/\d\d$/]
end
def successful_recurring_response
"trnApproved=1&trnId=10000072&messageId=1&messageText=Approved&trnOrderNumber=5d9f511363a0f35d37de53b4d74f5b&authCode=&errorType=N&errorFields=&responseType=T&trnAmount=15%2E00&trnDate=6%2F4%2F2008+6%3A33%3A55+PM&avsProcessed=0&avsId=0&avsResult=0&avsAddrMatch=0&avsPostalMatch=0&avsMessage=Address+Verification+not+performed+for+this+transaction%2E&trnType=D&paymentMethod=EFT&ref1=reference+one&ref2=&ref3=&ref4=&ref5="
end
def successful_update_recurring_response
"<response><code>1</code><message>Request successful</message></response>"
end
def successful_cancel_recurring_response
"<response><code>1</code><message>Request successful</message></response>"
end
end
| 42.811715 | 965 | 0.718041 |
1d9dc792313d6778346de919e4e6ab19e4bac1d4 | 375 | # encoding: utf-8
#gem style
#require 'prawn_report.rb'
#dev style
require File.expand_path(File.dirname(__FILE__) + "/simple_listing_people.rb")
require 'yaml'
data = YAML::load( File.open( File.expand_path(File.dirname(__FILE__) + "/data/people.yml") ) )
f = PeopleListing.new
f.params[:filters] =
[['Somente Ativos',''],
['Sexo','Qualquer']
]
puts f.draw(data)
| 20.833333 | 95 | 0.693333 |
394f7cbacdb9a42e6842583e1973b9b7d3b30f4d | 3,372 | # This file is copied to spec/ when you run 'rails generate rspec:install'
require 'spec_helper'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../spec/dummy/config/environment', __dir__)
ActiveRecord::Migrator.migrations_paths = [File.expand_path('../spec/dummy/db/migrate', __dir__)]
ActiveRecord::Migrator.migrations_paths << File.expand_path('../db/migrate', __dir__)
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'rspec/rails'
require 'factory_bot_rails'
require 'support/controller_routes'
require 'support/devise_request_spec_helpers'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec', 'support', '**', '*.rb')].each { |f| require f }
# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove these lines.
begin
ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => e
puts e.to_s.strip
exit 1
end
RSpec.configure do |config|
config.include FactoryBot::Syntax::Methods
config.include ControllerRoutes, type: :controller
config.include ControllerRoutes, type: :routing
config.include DeviseRequestSpecHelpers, type: :request
config.include ExceptionHunter::Engine.routes.url_helpers
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
end
require 'shoulda/matchers'
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
| 41.121951 | 97 | 0.759193 |
bfa38bf3368cbf592cdf8d6878fd491f91dc9222 | 501 | # frozen_string_literal: true
module ActiveAuthorization
module ControllerConcern
module ClassMethods
private
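      # Registers a before_action that builds AuthorizationParams for the current user and
      # action, hands them to the given block (run in the controller instance), then enforces
      # model_authorize! with those params.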
def authorize_before_action!(**filter_options, &block)
before_action(**filter_options) do
params = AuthorizationParams.new(
seeker: current_user,
message_name: action_name,
receiver: nil
)
instance_exec(params, &block)
model_authorize!(**params)
end
end
end
end
end
| 21.782609 | 60 | 0.622754 |
62ae027667f9a72416ad5387e24891383e8359bf | 1,126 | class Mutations::BaseRejectOffer < Mutations::BaseMutation
null true
argument :offer_id, ID, required: true
argument :reject_reason, Types::CancelReasonTypeEnum, required: false
field :order_or_error, Mutations::OrderOrFailureUnionType, 'A union of success/failure', null: false
def resolve(offer_id:, reject_reason: nil)
offer = Offer.find(offer_id)
order = offer.order
authorize!(order)
# should check whether or not it's an offer-mode order
OrderValidator.validate_is_last_offer!(offer)
raise Errors::ValidationError, :cannot_reject_offer unless waiting_for_response?(offer)
OrderCancellationService.new(offer.order, current_user_id).reject!(sanitize_reject_reason(reject_reason))
{ order_or_error: { order: order.reload } }
rescue Errors::ApplicationError => e
{ order_or_error: { error: Types::ApplicationErrorType.from_application(e) } }
end
def authorize!(_order)
raise NotImplementedError
end
def waiting_for_response?(_offer)
raise NotImplementedError
end
def sanitize_reject_reason(_reject_reason)
raise NotImplementedError
end
end
| 29.631579 | 109 | 0.762877 |
91b5e5ad16c137cca12434ba928187c008e2eaad | 13,415 | require 'net/http'
require 'json'
require 'open-uri'
##Init Vars
$insuff_globe = "9062058446"
$insuff_tm = "9368723185"
$address = "9175744034"
$smart = "9213151819"
$sun = "9423532715"
$message = "TEST"
##SMS Normal APP
$uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/7117/requests")
$uri.query = "access_token=#{$accessToken}"
##SMS By App
$uriBp = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/3822/requests")
$uriBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
##Charging Normal APP
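# Next reference code is the last recorded reference_code + 1 (same pattern as the Bp charging block below)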
content = open('').read
json = JSON.parse(content)
$increment1 = json['result'].last['reference_code'].to_i+1
$uric = URI.parse("http://devapi.globelabs.com.ph/payment/v1/transactions/amount/")
$uric.query = "access_token=#{$access_token}"
##Charging Bp APP
content = open('http://devapi.globelabs.com.ph/payments/251').read
json = JSON.parse(content)
$increment2 = json['result'].last['reference_code'].to_i+1
$uricBp = URI.parse("http://devapi.globelabs.com.ph/payment/v1/transactions/amount/")
$uricBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
##Staging endpoints
#placeholder only
$Suri = URI.parse("http://devapi.globelabs.com.ph/staging/smsmessaging/v1/outbound/3822/requests")
$Suri.query = "access_token=#{$accessToken}"
$Suri = URI.parse("http://devapi.globelabs.com.ph/staging/smsmessaging/v1/outbound/3822/requests")
$Suri.query = "access_token=#{$accessToken}"
##Charging Staging
$Suric = URI.parse("http://devapi.globelabs.com.ph/payment/v1/transactions/amount/")
$Suric.query = "access_token=#{$access_token}"
##Charging Bp Staging
$SuricBp = URI.parse("http://devapi.globelabs.com.ph/payment/v1/transactions/amount/")
$SuricBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
def pushSms(param)
case param
when 'ok'
#filler
when 'sms160'
$message = ("A" * 160) + ("B" * 10)
when 'sms320'
$message = ("A" * 320) + ("B" * 10)
when 'prefix'
$uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/21587117/requests")
$uri.query = "access_token=#{$accessToken}"
when 'extended'
$uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/215871171234567/requests")
$uri.query = "access_token=#{$accessToken}"
end
Net::HTTP.post_form($uri, {'address' => $address, 'message' => $message})
end
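# Example invocations (a sketch; the access-token globals above must already be set):
#   pushSms('ok')      # sends $message to $address through short code 7117
#   pushSms('sms320')  # exercises the 320+ character message case
#   pushSms('prefix')  # switches to the 2158-prefixed short code first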
def pushSms_bp(param)
case param
when 'sms160'
$message = ("A" * 160) + ("B" * 10)
when 'sms320'
$message = ("A" * 320) + ("B" * 10)
when 'prefix'
$uriBp = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/21587117/requests")
$uriBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
when 'extended'
$uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/215871171234567/requests")
$uriBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
end
Net::HTTP.post_form($uriBp, {'address' => $address, 'message' => $message, 'passphrase' => $passphrase})
end
def xTelco(param)
case param
when 'ok'
#filler
when 'sms160'
$message = ("A" * 160) + ("B" * 10)
when 'sms320'
$message = ("A" * 320) + ("B" * 10)
when 'prefix'
$uriBp = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/21587117/requests")
$uriBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
when 'extended'
$uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/215871171234567/requests")
$uriBp.query = "app_secret=#{$appSecret}&app_id=#{$appId}"
end
Net::HTTP.post_form($uriBp, {'address' => $smart, 'message' => $message, 'passphrase' => $passphrase})
end
def err(param)
case param
when 'invalidToken'
uri = $uri ; uri.query = "access_token=A11Y0uRb@$3RBe1onG2Us"
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => $message})
when 'token'
uri = $uri ; uri.query = ""
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => $message})
when 'address'
uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/7117/requests") ; uri.query = "access_token=#{$accessToken}"
response = Net::HTTP.post_form(uri, {'message' => $message})
when 'message'
uri = $uri ; uri.query = "access_token=#{$accessToken}"
response = Net::HTTP.post_form($uri, {'address' => $address})
when 'invalidSub'
response = Net::HTTP.post_form($uri, {'address' => 9062058446, 'message' => $message})
when 'nil_message'
response = Net::HTTP.post_form($uri, {'address' => $address, 'message' => ''})
when 'extended_err'
uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/711712345678/requests")
uri.query = $uri.query
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => $message})
###new tests
when 'chargingLength'
    response = Net::HTTP.post_form($uric, {'description' => 'desc', 'amount' => "0.00", 'endUserId' => $address, 'referenceCode' => 3822100000000001,
'transactionOperationStatus' => 'charged'})
when 'invalidRate'
    response = Net::HTTP.post_form($uric, {'description' => 'desc', 'amount' => "0.50", 'endUserId' => $address, 'referenceCode' => 3822100000000001,
'transactionOperationStatus' => 'charged'})
when 'chargingToken'
    response = Net::HTTP.post_form($uric, {'description' => 'desc', 'amount' => "0.50", 'endUserId' => $address, 'referenceCode' => 3822100000000001,
'transactionOperationStatus' => 'charged'})
when 'insuff_globe'
    response = Net::HTTP.post_form($uric, {'description' => 'desc', 'amount' => "0.50", 'endUserId' => $insuff_globe, 'referenceCode' => 3822100000000001,
'transactionOperationStatus' => 'charged'})
when 'insuff_tm'
    response = Net::HTTP.post_form($uric, {'description' => 'desc', 'amount' => "0.50", 'endUserId' => $insuff_tm, 'referenceCode' => 3822100000000001,
'transactionOperationStatus' => 'charged'})
##LBS
when 'lbs_address'
when 'lbs_token'
when 'lbs_invalidToken'
when 'lbs_turnedOff'
##raven
when 'raven_invalid'
end
parseBody(response)
end
def err_bp(param)
case param
when 'invalidIdSecret'
uri = $uri ; uri.query = "app_id=A11Y0uRb@$3&app_secret=RBe1onG2Us"
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => $message, 'passphrase' => $passphrase})
when 'idSecret'
uri = $uri ; uri.query = ""
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => $message, 'passphrase' => $passphrase})
when 'address'
response = Net::HTTP.post_form($uri, {'message' => $message, 'passphrase' => $passphrase})
when 'message'
response = Net::HTTP.post_form($uri, {'address' => $address, 'passphrase' => $passphrase})
when 'nil_message'
uri = $uriBp ; uri.query = $uriBp.query
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => '', 'passphrase' => $passphrase})
when 'extended_err'
uri = URI.parse("http://devapi.globelabs.com.ph/smsmessaging/v1/outbound/711712345678/requests")
uri.query = $uri.query
response = Net::HTTP.post_form(uri, {'address' => $address, 'message' => $message, 'passphrase' => $passphrase})
when 'endUserId'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'amount' => "0.00", 'referenceCode' => $increment,
'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'endUserId'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'amount' => "0.00", 'referenceCode' => $increment,
'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'amount'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'amount'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'referenceCode'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $address,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'referenceCode'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $address,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'invalidRate'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'amount' => "0.50", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'invalidRate'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'amount' => "0.50", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'chargingLength'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'chargingLength'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'chargingFormat'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'chargingFormat'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $address, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'insuff_globe'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $insuff_globe, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'insuff_globe'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $insuff_globe, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'insuff_tm'
response = Net::HTTP.post_form($uric, {'description' => 'desc', 'endUserId' => $insuff_tm, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
when 'insuff_tm'
response = Net::HTTP.post_form($uricBp, {'description' => 'desc', 'endUserId' => $insuff_tm, 'referenceCode' => $increment,
'amount' => "0.00", 'transactionOperationStatus' => 'charged', 'passphrase' => ''})
end
parseBody(response)
end
def charge(param)
case param
when 'ok'
#filler
end
Net::HTTP.post_form($uric, {'description' => 'desc',
'endUserId' => $address, 'amount' => "0.00", 'referenceCode' => $increment1,
'transactionOperationStatus' => 'charged'})
end
def charge_bp(param)
case param
when 'ok'
#filler
end
Net::HTTP.post_form($uricBp, {'description' => 'desc',
'endUserId' => $address, 'amount' => "0.00", 'referenceCode' => $increment2,
'transactionOperationStatus' => 'charged', 'passphrase' => $passphrase})
end
##LBS
def lbs(param)
case param
when 'ok'
    res = HTTParty.get("http://devapi.globelabs.com.ph/location/v1/queries/location?access_token=#{$accessToken}&address=#{$address}&requestedAccuracy=100")
res.body
end
end
##raven
def raven(param)
case param
when 'ok'
    res = HTTParty.get("http://devapi.globelabs.com.ph/location/v1/queries/bal?access_token=#{$accessToken}&address=#{$address}")
end
end
def parseBody(response)
JSON.parse(response.body)['error']
end
# LOOK SON!!! AN AMATEUR!!!!!!
# ─────────────────────────▄▀▄
# ─────────────────────────█─█
# ─────────────────────────█─█
# ─────────────────────────█─█
# ─────────────────────────█─█
# ─────────────────────────█─█
# ─────────────────────────█─▀█▀█▄
# ─────────────────────────█──█──█
# ─────────────────────────█▄▄█──▀█
# ────────────────────────▄█──▄█▄─▀█
# ────────────────────────█─▄█─█─█─█
# ────────────────────────█──█─█─█─█
# ────────────────────────█──█─█─█─█
# ────▄█▄──▄█▄────────────█──▀▀█─█─█
# ──▄█████████────────────▀█───█─█▄▀
# ─▄███████████────────────██──▀▀─█
# ▄█████████████────────────█─────█
# ██████████───▀▀█▄─────────▀█────█
# ████████───▀▀▀──█──────────█────█
# ██████───────██─▀█─────────█────█
# ████──▄──────────▀█────────█────█
# ███──█──────▀▀█───▀█───────█────█
# ███─▀─██──────█────▀█──────█────█
# ███─────────────────▀█─────█────█
# ███──────────────────█─────█────█
# ███─────────────▄▀───█─────█────█
# ████─────────▄▄██────█▄────█────█
# ████────────██████────█────█────█
# █████────█──███████▀──█───▄█▄▄▄▄█
# ██▀▀██────▀─██▄──▄█───█───█─────█
# ██▄──────────██████───█───█─────█
# ─██▄────────────▄▄────█───█─────█
# ─███████─────────────▄█───█─────█
# ──██████─────────────█───█▀─────█
# ──▄███████▄─────────▄█──█▀──────█
# ─▄█─────▄▀▀▀█───────█───█───────█
# ▄█────────█──█────▄███▀▀▀▀──────█
# █──▄▀▀────────█──▄▀──█──────────█
# █────█─────────█─────█──────────█
# █────────▀█────█─────█─────────██
# █───────────────█──▄█▀─────────█
# █──────────██───█▀▀▀───────────█
# █───────────────█──────────────█
# █▄─────────────██──────────────█
# ─█▄────────────█───────────────█
# ──██▄────────▄███▀▀▀▀▀▄────────█
# ─█▀─▀█▄────────▀█──────▀▄──────█
# ─█────▀▀▀▀▄─────█────────▀─────█
# ─█─────────▀▄──▀───────────────█ | 42.053292 | 152 | 0.58196 |
e840e4f7cfc1d5a9c4f7880a579f36183a5f34df | 1,913 | class GoogleCalendar
def initialize(config, logger)
@config = config
@key = Google::APIClient::PKCS12.load_key(@config['google']['key_file'], @config['google']['key_secret'])
@client = Google::APIClient.new(application_name: "Huginn", application_version: "0.0.1")
@client.retries = 2
@logger ||= logger
@calendar = @client.discovered_api('calendar','v3')
@logger.info("Setup")
@logger.debug @calendar.inspect
end
def auth_as
@client.authorization = Signet::OAuth2::Client.new({
token_credential_uri: 'https://accounts.google.com/o/oauth2/token',
audience: 'https://accounts.google.com/o/oauth2/token',
scope: 'https://www.googleapis.com/auth/calendar',
issuer: @config['google']['service_account_email'],
signing_key: @key
});
@client.authorization.fetch_access_token!
end
# who - String: email of user to add event
# details - JSON String: see https://developers.google.com/google-apps/calendar/v3/reference/events/insert
def publish_as(who, details)
auth_as
@logger.info("Attempting to create event for " + who)
@logger.debug details.to_yaml
ret = @client.execute(
api_method: @calendar.events.insert,
parameters: {'calendarId' => who, 'sendNotifications' => true},
body: details.to_json,
headers: {'Content-Type' => 'application/json'}
)
@logger.debug ret.to_yaml
ret
end
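  # Usage sketch (hypothetical addresses; assumes `config` points at a valid
  # key file and service account email):
  #
  #   cal = GoogleCalendar.new(config, Logger.new($stdout))
  #   cal.publish_as('someone@example.com',
  #                  'summary' => 'Stand-up',
  #                  'start'   => { 'dateTime' => '2015-06-01T09:00:00Z' },
  #                  'end'     => { 'dateTime' => '2015-06-01T09:15:00Z' })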
  def events_as(who, date)
    auth_as
    date ||= Date.today
    @logger.info("Attempting to retrieve events for " + who)
    ret = @client.execute(
      api_method: @calendar.events.list,
      # timeMin scopes the listing to events starting on or after the given date
      parameters: {'calendarId' => who, 'timeMin' => date.to_datetime.rfc3339},
      headers: {'Content-Type' => 'application/json'}
    )
    @logger.debug ret.to_yaml
    ret
  end
end | 30.365079 | 109 | 0.64506 |
e8d5ca819f6a7ec3259026aabd2f5a81ec3d9576 | 652 | class AddReferenceToProductLists < ActiveRecord::Migration
def up
add_column :stall_product_lists, :reference, :string
add_index :stall_product_lists, :reference, unique: true
# Migrate all references stored in the JSON data columns to the new
# reference column
ProductList.update_all("reference = data->>'reference'")
# Remove all reference keys in the JSON data columns
    # This is not supported before PG 9.5 so we avoid using it
# ProductList.update_all("data = (data - 'reference')")
end
def down
remove_index :stall_product_lists, :reference
remove_column :stall_product_lists, :reference
end
end
| 34.315789 | 71 | 0.73773 |
394488034e93c1029f4b4cdd71fdc44ee62c7765 | 2,337 | class LoggedException < ActiveRecord::Base
class << self
def create_from_exception(controller, exception, data)
message = exception.message.inspect
message << "\n* Extra Data\n\n#{data}" unless data.blank?
create! \
:exception_class => exception.class.name,
:controller_name => controller.controller_name,
:action_name => controller.action_name,
:message => message,
:backtrace => exception.backtrace,
:request => controller.request
end
def find_exception_class_names
connection.select_values "SELECT DISTINCT exception_class FROM #{table_name} ORDER BY exception_class"
end
def find_exception_controllers_and_actions
find(:all, :select => "DISTINCT controller_name, action_name", :order => "controller_name, action_name").collect(&:controller_action)
end
def host_name
`hostname -s`.chomp
end
end
def backtrace=(backtrace)
backtrace = sanitize_backtrace(backtrace) * "\n" unless backtrace.is_a?(String)
write_attribute :backtrace, backtrace
end
def request=(request)
if request.is_a?(String)
write_attribute :request, request
else
max = request.env.keys.max { |a,b| a.length <=> b.length }
env = request.env.keys.sort.inject [] do |env, key|
env << '* ' + ("%-*s: %s" % [max.length, key, request.env[key].to_s.strip])
end
write_attribute(:environment, (env << "* Process: #{$$}" << "* Server : #{self.class.host_name}") * "\n")
write_attribute(:request, [
"* URL:#{" #{request.method.to_s.upcase}" unless request.get?} #{request.protocol}#{request.env["HTTP_HOST"]}#{request.request_uri}",
"* Format: #{request.format.to_s}",
"* Parameters: #{request.parameters.inspect}",
"* Rails Root: #{rails_root}"
] * "\n")
end
end
def controller_action
@controller_action ||= "#{controller_name.camelcase}/#{action_name}"
end
private
@@rails_root = Pathname.new(RAILS_ROOT).cleanpath.to_s
@@backtrace_regex = /^#{Regexp.escape(@@rails_root)}/
def sanitize_backtrace(trace)
trace.collect { |line| Pathname.new(line.gsub(@@backtrace_regex, "[RAILS_ROOT]")).cleanpath.to_s }
end
def rails_root
@@rails_root
end
end | 34.880597 | 141 | 0.638425 |
91ad88928ff381473de1d59a7e9db4acb851fb80 | 5,977 | module Watir
class Capabilities
attr_reader :options
def initialize(browser, options = {})
@options = options.dup
Watir.logger.info "Creating Browser instance of #{browser} with user provided options: #{@options.inspect}"
@browser = if browser == :remote && @options.key?(:browser)
@options.delete(:browser)
elsif browser == :remote && @options.key?(:desired_capabilities)
@options[:desired_capabilities].browser_name.to_sym
else
browser.to_sym
end
@selenium_browser = browser == :remote || options[:url] ? :remote : browser
@selenium_opts = {}
end
def to_args
[@selenium_browser, process_arguments]
end
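    # Rough shape of the result (a sketch; exact contents depend on the
    # Selenium version and the options passed in):
    #
    #   Watir::Capabilities.new(:chrome, headless: true, client_timeout: 90).to_args
    #   # => [:chrome, { http_client: #<Selenium::WebDriver::Remote::Http::Default ...>,
    #   #                options: #<Selenium::WebDriver::Chrome::Options ...>,
    #   #                desired_capabilities: #<Selenium::WebDriver::Remote::Capabilities ...> }]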
private
def process_arguments
url = @options.delete(:url)
@selenium_opts[:url] = url if url
create_http_client
@selenium_opts[:port] = @options.delete(:port) if @options.key?(:port)
@selenium_opts[:driver_opts] = @options.delete(:driver_opts) if @options.key?(:driver_opts)
@selenium_opts[:listener] = @options.delete(:listener) if @options.key?(:listener)
process_browser_options
process_capabilities
Watir.logger.info "Creating Browser instance with Watir processed options: #{@selenium_opts.inspect}"
@selenium_opts
end
def create_http_client
client_timeout = @options.delete(:client_timeout)
open_timeout = @options.delete(:open_timeout)
read_timeout = @options.delete(:read_timeout)
http_client = @options.delete(:http_client)
%i[open_timeout read_timeout client_timeout].each do |t|
next if http_client.nil? || !respond_to?(t)
msg = "You can pass #{t} value directly into Watir::Browser opt without needing to use :http_client"
Watir.logger.warn msg, ids: %i[http_client use_capabilities]
end
http_client ||= Selenium::WebDriver::Remote::Http::Default.new
http_client.timeout = client_timeout if client_timeout
http_client.open_timeout = open_timeout if open_timeout
http_client.read_timeout = read_timeout if read_timeout
@selenium_opts[:http_client] = http_client
end
# TODO: - this will get addressed with Capabilities Update
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/MethodLength
# rubocop:disable Metrics/PerceivedComplexity:
# rubocop:disable Metrics/CyclomaticComplexity::
def process_browser_options
browser_options = @options.delete(:options) || {}
case @selenium_browser
when :chrome
if @options.key?(:args) || @options.key?(:switches)
browser_options ||= {}
browser_options[:args] = (@options.delete(:args) || @options.delete(:switches)).dup
end
if @options.delete(:headless)
browser_options ||= {}
browser_options[:args] ||= []
browser_options[:args] += ['--headless', '--disable-gpu']
end
@selenium_opts[:options] = browser_options if browser_options.is_a? Selenium::WebDriver::Chrome::Options
@selenium_opts[:options] ||= Selenium::WebDriver::Chrome::Options.new(browser_options)
when :firefox
profile = @options.delete(:profile)
if browser_options.is_a? Selenium::WebDriver::Firefox::Options
@selenium_opts[:options] = browser_options
if profile
msg = 'Initializing Browser with both :profile and :option', ':profile as a key inside :option'
Watir.logger.deprecate msg, ids: [:firefox_profile]
end
end
if @options.delete(:headless)
browser_options ||= {}
browser_options[:args] ||= []
browser_options[:args] += ['--headless']
end
@selenium_opts[:options] ||= Selenium::WebDriver::Firefox::Options.new(browser_options)
@selenium_opts[:options].profile = profile if profile
when :safari
Selenium::WebDriver::Safari.technology_preview! if @options.delete(:technology_preview)
when :remote
if @browser == :chrome && @options.delete(:headless)
args = @options.delete(:args) || @options.delete(:switches) || []
@options['chromeOptions'] = {'args' => args + ['--headless', '--disable-gpu']}
end
if @browser == :firefox && @options.delete(:headless)
args = @options.delete(:args) || @options.delete(:switches) || []
@options[Selenium::WebDriver::Firefox::Options::KEY] = {'args' => args + ['--headless']}
end
if @browser == :safari && @options.delete(:technology_preview)
@options['safari.options'] = {'technologyPreview' => true}
end
when :ie
if @options.key?(:args)
browser_options ||= {}
browser_options[:args] = @options.delete(:args).dup
end
unless browser_options.is_a? Selenium::WebDriver::IE::Options
ie_caps = browser_options.select { |k| Selenium::WebDriver::IE::Options::CAPABILITIES.include?(k) }
browser_options = Selenium::WebDriver::IE::Options.new(browser_options)
ie_caps.each { |k, v| browser_options.add_option(k, v) }
end
@selenium_opts[:options] = browser_options
end
end
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/MethodLength
# rubocop:enable Metrics/PerceivedComplexity:
# rubocop:enable Metrics/CyclomaticComplexity::
def process_capabilities
caps = @options.delete(:desired_capabilities)
if caps
msg = 'You can pass values directly into Watir::Browser opt without needing to use :desired_capabilities'
Watir.logger.warn msg,
ids: [:use_capabilities]
@selenium_opts.merge!(@options)
else
caps = Selenium::WebDriver::Remote::Capabilities.send @browser, @options
end
@selenium_opts[:desired_capabilities] = caps
end
end
end
| 39.846667 | 113 | 0.641292 |
79a647f8bb2613c18b3bceb4c2232ffc9971db9d | 22,767 | require 'spec_helper'
require 'request_spec_shared_examples'
RSpec.describe 'v3 service bindings' do
let(:app_model) { VCAP::CloudController::AppModel.make }
let(:space) { app_model.space }
let(:user) { make_developer_for_space(space) }
let(:user_headers) { headers_for(user, user_name: user_name) }
let(:user_name) { 'room' }
let(:rails_logger) { instance_double(ActiveSupport::Logger, info: nil) }
before do
allow(ActiveSupport::Logger).to receive(:new).and_return(rails_logger)
allow(VCAP::CloudController::TelemetryLogger).to receive(:v3_emit).and_call_original
VCAP::CloudController::TelemetryLogger.init('fake-log-path')
end
describe 'POST /v3/service_bindings' do
context 'managed service instance' do
let(:service_instance) { VCAP::CloudController::ManagedServiceInstance.make(space: space, name: 'service-instance-name') }
before do
allow(VCAP::Services::ServiceBrokers::V2::Client).to receive(:new) do |*args, **kwargs, &block|
fb = FakeServiceBrokerV2Client.new(*args, **kwargs, &block)
fb.credentials = { 'username' => 'managed_username' }
fb.syslog_drain_url = 'syslog://mydrain.example.com'
fb.volume_mounts = [{ 'stuff' => 'thing', 'container_dir' => 'some-path' }]
fb
end
end
it 'creates a service binding' do
request_body = {
type: 'app',
data: { parameters: { potato: 'tomato' } },
relationships: {
app: {
data: {
guid: app_model.guid
}
},
service_instance: {
data: {
guid: service_instance.guid
}
},
}
}.to_json
post '/v3/service_bindings', request_body, user_headers
parsed_response = MultiJson.load(last_response.body)
guid = parsed_response['guid']
expected_response = {
'guid' => guid,
'type' => 'app',
'data' => {
'binding_name' => nil,
'credentials' => {
'username' => 'managed_username'
},
'instance_name' => 'service-instance-name',
'name' => 'service-instance-name',
'syslog_drain_url' => 'syslog://mydrain.example.com',
'volume_mounts' => [
{
'container_dir' => 'some-path',
}
]
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => {
'href' => "#{link_prefix}/v3/service_bindings/#{guid}"
},
'service_instance' => {
'href' => "#{link_prefix}/v2/service_instances/#{service_instance.guid}"
},
'app' => {
'href' => "#{link_prefix}/v3/apps/#{app_model.guid}"
}
}
}
expect(last_response.status).to eq(201), last_response.body
expect(parsed_response).to be_a_response_like(expected_response)
expect(VCAP::CloudController::ServiceBinding.find(guid: guid)).to be_present
event = VCAP::CloudController::Event.last
expect(event.values).to match(
hash_including({
type: 'audit.service_binding.create',
actee: guid,
actee_type: 'service_binding',
actee_name: '',
actor: user.guid,
actor_type: 'user',
actor_username: user_name,
space_guid: space.guid,
organization_guid: space.organization.guid
})
)
expect(event.metadata).to eq({
'request' => {
'type' => 'app',
'relationships' => {
'app' => {
'data' => {
'guid' => app_model.guid,
},
},
'service_instance' => {
'data' => {
'guid' => service_instance.guid,
},
},
},
'data' => '[PRIVATE DATA HIDDEN]'
}
})
end
context 'telemetry' do
it 'should log the required fields when a service is bound' do
Timecop.freeze do
request_body = {
type: 'app',
data: { parameters: { potato: 'tomato' } },
relationships: {
app: {
data: {
guid: app_model.guid
}
},
service_instance: {
data: {
guid: service_instance.guid
}
},
}
}.to_json
post '/v3/service_bindings', request_body, user_headers
expected_json = {
'telemetry-source' => 'cloud_controller_ng',
'telemetry-time' => Time.now.to_datetime.rfc3339,
'bind-service' => {
'api-version' => 'v3',
'service-id' => Digest::SHA256.hexdigest(service_instance.service_plan.service.guid),
'service-instance-id' => Digest::SHA256.hexdigest(service_instance.guid),
'app-id' => Digest::SHA256.hexdigest(app_model.guid),
'user-id' => Digest::SHA256.hexdigest(user.guid),
}
}
expect(last_response.status).to eq(201), last_response.body
expect(rails_logger).to have_received(:info).with(JSON.generate(expected_json))
end
end
end
end
context 'user provided service instance' do
let(:service_instance) do
VCAP::CloudController::UserProvidedServiceInstance.make(
space: space,
credentials: { 'username': 'user_provided_username' },
syslog_drain_url: 'syslog://drain.url.com',
name: 'service-instance-name'
)
end
it 'creates a service binding' do
request_body = {
type: 'app',
relationships: {
app: {
data: { guid: app_model.guid }
},
service_instance: {
data: { guid: service_instance.guid }
},
}
}.to_json
post '/v3/service_bindings', request_body, user_headers
parsed_response = MultiJson.load(last_response.body)
guid = parsed_response['guid']
expected_response = {
'guid' => guid,
'type' => 'app',
'data' => {
'binding_name' => nil,
'credentials' => {
'username' => 'user_provided_username'
},
'instance_name' => 'service-instance-name',
'name' => 'service-instance-name',
'syslog_drain_url' => 'syslog://drain.url.com',
'volume_mounts' => []
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => {
'href' => "#{link_prefix}/v3/service_bindings/#{guid}"
},
'service_instance' => {
'href' => "#{link_prefix}/v2/service_instances/#{service_instance.guid}"
},
'app' => {
'href' => "#{link_prefix}/v3/apps/#{app_model.guid}"
}
}
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(201)
expect(parsed_response).to be_a_response_like(expected_response)
expect(VCAP::CloudController::ServiceBinding.find(guid: guid)).to be_present
end
context 'telemetry' do
it 'should log the required fields when a service is bound' do
Timecop.freeze do
request_body = {
type: 'app',
data: { parameters: { potato: 'tomato' } },
relationships: {
app: {
data: {
guid: app_model.guid
}
},
service_instance: {
data: {
guid: service_instance.guid
}
},
}
}.to_json
post '/v3/service_bindings', request_body, user_headers
expected_json = {
'telemetry-source' => 'cloud_controller_ng',
'telemetry-time' => Time.now.to_datetime.rfc3339,
'bind-service' => {
'api-version' => 'v3',
'service-id' => Digest::SHA256.hexdigest('user-provided'),
'service-instance-id' => Digest::SHA256.hexdigest(service_instance.guid),
'app-id' => Digest::SHA256.hexdigest(app_model.guid),
'user-id' => Digest::SHA256.hexdigest(user.guid),
}
}
expect(last_response.status).to eq(201), last_response.body
expect(rails_logger).to have_received(:info).with(JSON.generate(expected_json))
end
end
end
end
end
describe 'DELETE /v3/service_bindings/:guid' do
let(:service_binding) { VCAP::CloudController::ServiceBinding.make(service_instance: service_instance) }
context 'managed service instance' do
let(:service_instance) { VCAP::CloudController::ManagedServiceInstance.make(space: space) }
before do
allow(VCAP::Services::ServiceBrokers::V2::Client).to receive(:new) do |*args, **kwargs, &block|
FakeServiceBrokerV2Client.new(*args, **kwargs, &block)
end
end
it 'deletes the service binding and returns a 204' do
delete "/v3/service_bindings/#{service_binding.guid}", nil, user_headers
expect(last_response.status).to eq(204)
expect(service_binding.exists?).to be_falsey
event = VCAP::CloudController::Event.last
expect(event.type).to eq('audit.service_binding.delete')
expect(event.actee).to eq(service_binding.guid)
expect(event.actee_type).to eq('service_binding')
expect(event.actee_name).to eq('')
expect(event.actor).to eq(user.guid)
expect(event.actor_type).to eq('user')
expect(event.actor_username).to eq(user_name)
expect(event.space_guid).to eq(space.guid)
expect(event.organization_guid).to eq(space.organization.guid)
expect(event.metadata).to eq(
'request' => {
'app_guid' => service_binding.app_guid,
'service_instance_guid' => service_binding.service_instance_guid,
}
)
end
end
context 'user provided service instance' do
let(:service_instance) { VCAP::CloudController::UserProvidedServiceInstance.make(space: space) }
it 'deletes the service binding and returns a 204' do
delete "/v3/service_bindings/#{service_binding.guid}", nil, user_headers
expect(last_response.status).to eq(204)
expect(service_binding.exists?).to be_falsey
event = VCAP::CloudController::Event.last
expect(event.type).to eq('audit.service_binding.delete')
expect(event.actee).to eq(service_binding.guid)
expect(event.actee_type).to eq('service_binding')
expect(event.actee_name).to eq('')
expect(event.actor).to eq(user.guid)
expect(event.actor_type).to eq('user')
expect(event.actor_username).to eq(user_name)
expect(event.space_guid).to eq(space.guid)
expect(event.organization_guid).to eq(space.organization.guid)
expect(event.metadata).to eq(
'request' => {
'app_guid' => service_binding.app_guid,
'service_instance_guid' => service_binding.service_instance_guid,
}
)
end
end
end
describe 'GET /v3/service_bindings/:guid' do
let(:service_instance) { VCAP::CloudController::ManagedServiceInstance.make(space: space, name: 'service-instance-name') }
let(:service_binding) do
VCAP::CloudController::ServiceBinding.make(
service_instance: service_instance,
app: app_model,
credentials: { 'username' => 'managed_username' },
name: 'binding-name',
syslog_drain_url: 'syslog://mydrain.example.com',
volume_mounts: [{ 'stuff' => 'thing', 'container_dir' => 'some-path' }],
)
end
it 'returns a service_binding' do
get "/v3/service_bindings/#{service_binding.guid}", nil, user_headers
parsed_response = MultiJson.load(last_response.body)
expected_response = {
'guid' => service_binding.guid,
'type' => 'app',
'data' => {
'binding_name' => 'binding-name',
'credentials' => {
'username' => 'managed_username'
},
'instance_name' => 'service-instance-name',
'name' => 'binding-name',
'syslog_drain_url' => 'syslog://mydrain.example.com',
'volume_mounts' => [{ 'container_dir' => 'some-path' }]
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => {
'href' => "#{link_prefix}/v3/service_bindings/#{service_binding.guid}"
},
'service_instance' => {
'href' => "#{link_prefix}/v2/service_instances/#{service_instance.guid}"
},
'app' => {
'href' => "#{link_prefix}/v3/apps/#{app_model.guid}"
}
}
}
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
it 'redacts credentials for space auditors' do
auditor = VCAP::CloudController::User.make
space.organization.add_user(auditor)
space.add_auditor(auditor)
get "/v3/service_bindings/#{service_binding.guid}", nil, headers_for(auditor)
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['data']['credentials']).to eq({ 'redacted_message' => '[PRIVATE DATA HIDDEN]' })
end
end
describe 'GET /v3/service_bindings' do
let(:service_instance1) { VCAP::CloudController::ManagedServiceInstance.make(space: space, name: 'service-instance-1') }
let(:service_instance2) { VCAP::CloudController::ManagedServiceInstance.make(space: space, name: 'service-instance-2') }
let(:service_instance3) { VCAP::CloudController::ManagedServiceInstance.make(space: space, name: 'service-instance-3') }
let!(:service_binding1) do
VCAP::CloudController::ServiceBinding.make(
service_instance: service_instance1,
app: app_model,
credentials: { 'binding1' => 'shtuff' },
syslog_drain_url: 'syslog://binding1.example.com',
volume_mounts: [{ 'stuff' => 'thing', 'container_dir' => 'some-path' }],
)
end
let!(:service_binding2) do
VCAP::CloudController::ServiceBinding.make(
service_instance: service_instance2,
app: app_model,
credentials: { 'binding2' => 'things' },
syslog_drain_url: 'syslog://binding2.example.com',
volume_mounts: [{ 'stuff2' => 'thing2', 'container_dir' => 'some-path' }],
)
end
before { VCAP::CloudController::ServiceBinding.make(service_instance: service_instance3, app: app_model) }
it_behaves_like 'request_spec_shared_examples.rb list query endpoint' do
let(:request) { 'v3/service_bindings' }
let(:message) { VCAP::CloudController::ServiceBindingsListMessage }
let(:user_header) { headers_for(user) }
let(:params) do
{
service_instance_guids: ['foo', 'bar'],
app_guids: ['foo', 'bar'],
per_page: '10',
page: 2,
order_by: 'updated_at',
}
end
end
it 'returns a paginated list of service_bindings' do
get '/v3/service_bindings?per_page=2', nil, user_headers
expected_response = {
'pagination' => {
'total_results' => 3,
'total_pages' => 2,
'first' => { 'href' => "#{link_prefix}/v3/service_bindings?page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/service_bindings?page=2&per_page=2" },
'next' => { 'href' => "#{link_prefix}/v3/service_bindings?page=2&per_page=2" },
'previous' => nil,
},
'resources' => [
{
'guid' => service_binding1.guid,
'type' => 'app',
'data' => {
'binding_name' => nil,
'instance_name' => 'service-instance-1',
'name' => 'service-instance-1',
'credentials' => {
'redacted_message' => '[PRIVATE DATA HIDDEN IN LISTS]'
},
'syslog_drain_url' => 'syslog://binding1.example.com',
'volume_mounts' => [{ 'container_dir' => 'some-path' }]
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => {
'href' => "#{link_prefix}/v3/service_bindings/#{service_binding1.guid}"
},
'service_instance' => {
'href' => "#{link_prefix}/v2/service_instances/#{service_instance1.guid}"
},
'app' => {
'href' => "#{link_prefix}/v3/apps/#{app_model.guid}"
}
}
},
{
'guid' => service_binding2.guid,
'type' => 'app',
'data' => {
'binding_name' => nil,
'instance_name' => 'service-instance-2',
'name' => 'service-instance-2',
'credentials' => {
'redacted_message' => '[PRIVATE DATA HIDDEN IN LISTS]'
},
'syslog_drain_url' => 'syslog://binding2.example.com',
'volume_mounts' => [{ 'container_dir' => 'some-path' }]
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => {
'href' => "#{link_prefix}/v3/service_bindings/#{service_binding2.guid}"
},
'service_instance' => {
'href' => "#{link_prefix}/v2/service_instances/#{service_instance2.guid}"
},
'app' => {
'href' => "#{link_prefix}/v3/apps/#{app_model.guid}"
}
}
}
]
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
context 'faceted list' do
context 'by app_guids' do
let(:app_model2) { VCAP::CloudController::AppModel.make(space: space) }
let!(:another_apps_service_binding) do
VCAP::CloudController::ServiceBinding.make(service_instance: service_instance1,
app: app_model2,
credentials: { 'utako' => 'secret' },
syslog_drain_url: 'syslog://example.com',
volume_mounts: [{ 'stuff' => 'thing', 'container_dir' => 'some-path' }],
)
end
let(:app_model3) { VCAP::CloudController::AppModel.make(space: space) }
let!(:another_apps_service_binding2) do
VCAP::CloudController::ServiceBinding.make(service_instance: service_instance1,
app: app_model3,
credentials: { 'amelia' => 'apples' },
syslog_drain_url: 'www.neopets.com',
volume_mounts: [{ 'stuff2' => 'thing2', 'container_dir' => 'some-path' }],
)
end
it 'returns only the matching service bindings' do
get "/v3/service_bindings?per_page=2&app_guids=#{app_model2.guid},#{app_model3.guid}", nil, user_headers
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].map { |r| r['guid'] }).to eq([another_apps_service_binding.guid, another_apps_service_binding2.guid])
expect(parsed_response['pagination']).to be_a_response_like(
{
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/service_bindings?app_guids=#{app_model2.guid}%2C#{app_model3.guid}&page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/service_bindings?app_guids=#{app_model2.guid}%2C#{app_model3.guid}&page=1&per_page=2" },
'next' => nil,
'previous' => nil,
}
)
end
end
context 'by service instance guids' do
it 'returns only the matching service bindings' do
get "/v3/service_bindings?per_page=2&service_instance_guids=#{service_instance1.guid},#{service_instance2.guid}", nil, user_headers
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].map { |r| r['guid'] }).to eq([service_binding1.guid, service_binding2.guid])
expect(parsed_response['pagination']).to be_a_response_like(
{
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/service_bindings?page=1&per_page=2&service_instance_guids=#{service_instance1.guid}%2C#{service_instance2.guid}" },
'last' => { 'href' => "#{link_prefix}/v3/service_bindings?page=1&per_page=2&service_instance_guids=#{service_instance1.guid}%2C#{service_instance2.guid}" },
'next' => nil,
'previous' => nil,
}
)
end
end
end
end
end
| 39.321244 | 171 | 0.526552 |
112b6c94437d301bca0da2163cf85e95a9b24c75 | 367 | module Fog
module Hadoop
class HDFS
class Namenodes < Fog::Collection
model Fog::Hadoop::HDFS::Namenode
def all
namenodes = []
service.list_namenodes.each do |namenode|
namenodes << Fog::Hadoop::HDFS::Namenode.parse(namenode)
end
load(namenodes)
end
end
end
end
end
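# Hypothetical collection usage (assumes the HDFS service object exposes this
# collection and implements list_namenodes):
#   hdfs = Fog::Hadoop::HDFS.new(connection_options) # connection details assumed
#   hdfs.namenodes.all                               # wraps list_namenodes results in Namenode models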
| 20.388889 | 68 | 0.564033 |