hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
182bf4fce59427c3144711e3b5953dd746337c0c | 12,523 | require 'spec_helper'
describe VCR::Configuration do
describe '#cassette_library_dir=' do
let(:tmp_dir) { VCR::SPEC_ROOT + '/../tmp/cassette_library_dir/new_dir' }
after(:each) { FileUtils.rm_rf tmp_dir }
it 'creates the directory if it does not exist' do
expect { subject.cassette_library_dir = tmp_dir }.to change { File.exist?(tmp_dir) }.from(false).to(true)
end
it 'does not raise an error if given nil' do
expect { subject.cassette_library_dir = nil }.to_not raise_error
end
it 'resolves the given directory to an absolute path, so VCR continues to work even if the current directory changes' do
relative_dir = 'tmp/cassette_library_dir/new_dir'
subject.cassette_library_dir = relative_dir
absolute_dir = File.join(VCR::SPEC_ROOT.sub(/\/spec\z/, ''), relative_dir)
expect(subject.cassette_library_dir).to eq(absolute_dir)
end
end
describe '#default_cassette_options' do
it 'has a hash with some defaults' do
expect(subject.default_cassette_options).to eq({
:match_requests_on => VCR::RequestMatcherRegistry::DEFAULT_MATCHERS,
:allow_unused_http_interactions => true,
:record => :once,
:record_on_error => true,
:serialize_with => :yaml,
:persist_with => :file_system
})
end
it "returns #{VCR::RequestMatcherRegistry::DEFAULT_MATCHERS.inspect} for :match_requests_on when other defaults have been set" do
subject.default_cassette_options = { :record => :none }
expect(subject.default_cassette_options).to include(:match_requests_on => VCR::RequestMatcherRegistry::DEFAULT_MATCHERS)
end
it "returns :once for :record when other defaults have been set" do
  # Override an unrelated option (:erb) to prove :record still defaults
  # to :once. Fix: the value was the symbol :true instead of boolean true.
  subject.default_cassette_options = { :erb => true }
  expect(subject.default_cassette_options).to include(:record => :once)
end
it "allows defaults to be overriden" do
subject.default_cassette_options = { :record => :all }
expect(subject.default_cassette_options).to include(:record => :all)
end
it "allows other keys to be set" do
subject.default_cassette_options = { :re_record_interval => 10 }
expect(subject.default_cassette_options).to include(:re_record_interval => 10)
end
end
describe '#register_request_matcher' do
it 'registers the given request matcher' do
expect {
VCR.request_matchers[:custom]
}.to raise_error(VCR::UnregisteredMatcherError)
matcher_run = false
subject.register_request_matcher(:custom) { |r1, r2| matcher_run = true }
VCR.request_matchers[:custom].matches?(:r1, :r2)
expect(matcher_run).to be true
end
end
describe '#hook_into' do
it 'requires the named library hook' do
expect(subject).to receive(:require).with("vcr/library_hooks/webmock")
expect(subject).to receive(:require).with("vcr/library_hooks/excon")
subject.hook_into :webmock, :excon
end
it 'raises an error for unsupported stubbing libraries' do
expect {
subject.hook_into :unsupported_library
}.to raise_error(ArgumentError, /unsupported_library is not a supported VCR HTTP library hook/i)
end
it 'invokes the after_library_hooks_loaded hooks' do
called = false
subject.after_library_hooks_loaded { called = true }
subject.hook_into :webmock
expect(called).to be true
end
end
describe '#ignore_hosts' do
it 'delegates to the current request_ignorer instance' do
expect(VCR.request_ignorer).to receive(:ignore_hosts).with('example.com', 'example.net')
subject.ignore_hosts 'example.com', 'example.net'
end
end
describe '#unignore_hosts' do
it 'delegates to the current request_ignorer instance' do
expect(VCR.request_ignorer).to receive(:unignore_hosts).with('example.com', 'example.net')
subject.unignore_hosts 'example.com', 'example.net'
end
end
describe '#ignore_localhost=' do
it 'delegates to the current request_ignorer instance' do
expect(VCR.request_ignorer).to receive(:ignore_localhost=).with(true)
subject.ignore_localhost = true
end
end
describe '#ignore_request' do
let(:uri){ URI('http://foo.com') }
it 'registers the given block with the request ignorer' do
block_called = false
subject.ignore_request { |r| block_called = true }
VCR.request_ignorer.ignore?(double(:parsed_uri => uri))
expect(block_called).to be true
end
end
describe '#allow_http_connections_when_no_cassette=' do
[true, false].each do |val|
it "sets the allow_http_connections_when_no_cassette to #{val} when set to #{val}" do
subject.allow_http_connections_when_no_cassette = val
expect(subject.allow_http_connections_when_no_cassette?).to eq(val)
end
end
end
describe "request/configuration interactions", :with_monkey_patches => :webmock do
specify 'the request on the yielded interaction is not typed even though the request given to before_http_request is' do
before_record_req = before_request_req = nil
VCR.configure do |c|
c.before_http_request { |r| before_request_req = r }
c.before_record { |i| before_record_req = i.request }
end
VCR.use_cassette("example") do
::Net::HTTP.get_response(URI("http://localhost:#{VCR::SinatraApp.port}/foo"))
end
expect(before_record_req).not_to respond_to(:type)
expect(before_request_req).to respond_to(:type)
end unless (RUBY_VERSION =~ /^1\.8/ || RUBY_INTERPRETER == :jruby)
specify 'the filter_sensitive_data option works even when it modifies the URL in a way that makes it an invalid URI' do
VCR.configure do |c|
c.filter_sensitive_data('<HOST>') { 'localhost' }
end
2.times do
VCR.use_cassette("example") do
::Net::HTTP.get_response(URI("http://localhost:#{VCR::SinatraApp.port}/foo"))
end
end
end
end
[:before_record, :before_playback].each do |hook_type|
describe "##{hook_type}" do
it 'sets up a tag filter' do
called = false
VCR.configuration.send(hook_type, :my_tag) { called = true }
VCR.configuration.invoke_hook(hook_type, double, double(:tags => []))
expect(called).to be false
VCR.configuration.invoke_hook(hook_type, double, double(:tags => [:my_tag]))
expect(called).to be true
end
end
end
%w[ filter_sensitive_data define_cassette_placeholder ].each do |method|
describe "##{method}" do
let(:interaction) { double('interaction').as_null_object }
before(:each) { allow(interaction).to receive(:filter!) }
it 'adds a before_record hook that replaces the string returned by the block with the given string' do
subject.send(method, 'foo', &lambda { 'bar' })
expect(interaction).to receive(:filter!).with('bar', 'foo')
subject.invoke_hook(:before_record, interaction, double.as_null_object)
end
it 'adds a before_playback hook that replaces the given string with the string returned by the block' do
subject.send(method, 'foo', &lambda { 'bar' })
expect(interaction).to receive(:filter!).with('foo', 'bar')
subject.invoke_hook(:before_playback, interaction, double.as_null_object)
end
it 'tags the before_record hook when given a tag' do
expect(subject).to receive(:before_record).with(:my_tag)
subject.send(method, 'foo', :my_tag) { 'bar' }
end
it 'tags the before_playback hook when given a tag' do
expect(subject).to receive(:before_playback).with(:my_tag)
subject.send(method, 'foo', :my_tag) { 'bar' }
end
it 'yields the interaction to the block for the before_record hook' do
yielded_interaction = nil
subject.send(method, 'foo', &lambda { |i| yielded_interaction = i; 'bar' })
subject.invoke_hook(:before_record, interaction, double.as_null_object)
expect(yielded_interaction).to equal(interaction)
end
it 'yields the interaction to the block for the before_playback hook' do
yielded_interaction = nil
subject.send(method, 'foo', &lambda { |i| yielded_interaction = i; 'bar' })
subject.invoke_hook(:before_playback, interaction, double.as_null_object)
expect(yielded_interaction).to equal(interaction)
end
end
end
describe "#after_http_request" do
let(:raw_request) { VCR::Request.new }
let(:response) { VCR::Response.new }
def request(type)
VCR::Request::Typed.new(raw_request, type)
end
it 'handles symbol request predicate filters properly' do
yielded = false
subject.after_http_request(:stubbed_by_vcr?) { |req| yielded = true }
subject.invoke_hook(:after_http_request, request(:stubbed_by_vcr), response)
expect(yielded).to be true
yielded = false
subject.invoke_hook(:after_http_request, request(:ignored), response)
expect(yielded).to be false
end
end
describe "#cassette_serializers" do
let(:custom_serializer) { double }
it 'allows a custom serializer to be registered' do
expect { subject.cassette_serializers[:custom] }.to raise_error(ArgumentError)
subject.cassette_serializers[:custom] = custom_serializer
expect(subject.cassette_serializers[:custom]).to be(custom_serializer)
end
end
describe "#cassette_persisters" do
let(:custom_persister) { double }
it 'allows a custom persister to be registered' do
expect { subject.cassette_persisters[:custom] }.to raise_error(ArgumentError)
subject.cassette_persisters[:custom] = custom_persister
expect(subject.cassette_persisters[:custom]).to be(custom_persister)
end
end
describe "#uri_parser=" do
let(:custom_parser) { double }
it 'allows a custom uri parser to be set' do
subject.uri_parser = custom_parser
expect(subject.uri_parser).to eq(custom_parser)
end
it "uses Ruby's standard library `URI` as a default" do
expect(subject.uri_parser).to eq(URI)
end
end
describe "#preserve_exact_body_bytes_for?" do
def message_for(body)
double(:body => body)
end
context "default hook" do
it "returns false when there is no current cassette" do
expect(subject.preserve_exact_body_bytes_for?(message_for "string")).to be false
end
it "returns false when the current cassette has been created without the :preserve_exact_body_bytes option" do
VCR.insert_cassette('foo')
expect(subject.preserve_exact_body_bytes_for?(message_for "string")).to be false
end
it 'returns true when the current cassette has been created with the :preserve_exact_body_bytes option' do
VCR.insert_cassette('foo', :preserve_exact_body_bytes => true)
expect(subject.preserve_exact_body_bytes_for?(message_for "string")).to be true
end
end
it "returns true when the configured block returns true" do
subject.preserve_exact_body_bytes { |msg| msg.body == "a" }
expect(subject.preserve_exact_body_bytes_for?(message_for "a")).to be true
expect(subject.preserve_exact_body_bytes_for?(message_for "b")).to be false
end
it "returns true when any of the registered blocks returns true" do
called_hooks = []
subject.preserve_exact_body_bytes { called_hooks << :hook_1; false }
subject.preserve_exact_body_bytes { called_hooks << :hook_2; true }
expect(subject.preserve_exact_body_bytes_for?(message_for "a")).to be true
expect(called_hooks).to eq([:hook_1, :hook_2])
end
it "invokes the configured hook with the http message and the current cassette" do
VCR.use_cassette('example') do |cassette|
expect(cassette).to be_a(VCR::Cassette)
message = double(:message)
yielded_objects = nil
subject.preserve_exact_body_bytes { |a, b| yielded_objects = [a, b] }
subject.preserve_exact_body_bytes_for?(message)
expect(yielded_objects).to eq([message, cassette])
end
end
end
describe "#configure_rspec_metadata!" do
it "only configures the underlying metadata once, no matter how many times it is called" do
expect(VCR::RSpec::Metadata).to receive(:configure!).once
VCR.configure do |c|
c.configure_rspec_metadata!
end
VCR.configure do |c|
c.configure_rspec_metadata!
end
end
end
end
| 38.296636 | 133 | 0.69025 |
870d8b4673f829833ef61b05029d94735ce7f447 | 6,853 | #
# Helper - S3
#
require 'kumogata/template/helper'
# Normalizes a user-supplied deletion-policy alias to the CloudFormation
# DeletionPolicy value. nil defaults to "Retain"; anything not matched
# below is validated by _valid_values (falling back to "Retain").
#
# Bug fix: the case previously switched on the literal string "value",
# so every alias fell through to the else branch; the "shapshot" typo
# also made the Snapshot alias unreachable.
def _s3_to_deletion_policy(value)
  return "Retain" if value.nil?
  case value
  when "delete"
    "Delete"
  when "retain"
    "Retain"
  when "snapshot"
    "Snapshot"
  else
    _valid_values(value, %w( Delete Retain Snapshot ), "Retain")
  end
end
# Maps a short access-control alias (e.g. "public read") to the S3
# canned-ACL name CloudFormation expects. nil defaults to "Private";
# unrecognized values are passed through unchanged.
def _s3_to_access(value)
  return "Private" if value.nil?

  canned_acls = {
    "auth read"          => "AuthenticatedRead",
    "aws exec read"      => "AwsExecRead",
    "owner read"         => "BucketOwnerRead",
    "owner full"         => "BucketOwnerFullControl",
    "log delivery write" => "LogDeliveryWrite",
    "private"            => "Private",
    "public read"        => "PublicRead",
    "public read write"  => "PublicReadWrite",
  }
  # Unknown aliases fall back to the raw value so already-canonical
  # ACL names keep working.
  canned_acls.fetch(value, value)
end
# Builds the CorsConfiguration fragment for an S3 bucket from
# args[:cors], an array of rule hashes. Returns [] when no rules are
# given so the caller can omit the property.
def _s3_cors(args)
  rules = args[:cors] || []
  array = []
  rules.each do |rule|
    array << _{
      # Optional properties are emitted only when the key is present.
      AllowedHeaders _array(rule[:headers]) if rule.key? :headers
      AllowedMethods _array(rule[:methods])
      AllowedOrigins _array(rule[:origins])
      ExposedHeaders _array(rule[:exposed_headers]) if rule.key? :exposed_headers
      Id rule[:id] if rule.key? :id
      MaxAge rule[:max_age] if rule.key? :max_age
    }
  end
  return [] if array.empty?
  _{
    CorsRules array
  }
end
# Builds the LifecycleConfiguration fragment from args[:lifecycle].
# Each rule may combine expiration, noncurrent-version and transition
# settings; Status is validated to Enabled/Disabled (default Enabled).
# Returns [] when no rules are configured.
def _s3_lifecycle(args)
  rules = args[:lifecycle] || []
  status_values = %w( Enabled Disabled )
  array = []
  rules.each do |rule|
    abort_incomplete_multipart_upload = _s3_lifecycle_abort_incomplete_multipart_upload(rule)
    noncurrent_transitions = _s3_lifecycle_noncurrent_version_transition(rule)
    status = _valid_values(rule[:status], status_values, "Enabled")
    transitions = _s3_lifecycle_transition(rule)
    array << _{
      # Sub-fragments are only emitted when actually configured.
      AbortIncompleteMultipartUpload abort_incomplete_multipart_upload if abort_incomplete_multipart_upload
      ExpirationDate rule[:expiration_date] if rule.key? :expiration_date
      ExpirationInDays rule[:expiration_in_days] if rule.key? :expiration_in_days
      Id rule[:id] if rule.key? :id
      NoncurrentVersionExpirationInDays rule[:noncurrent_version_expiration_in_days] if rule.key? :noncurrent_version_expiration_in_days
      NoncurrentVersionTransitions noncurrent_transitions unless noncurrent_transitions.empty?
      Prefix rule[:prefix] if rule.key? :prefix
      Status status
      Transitions transitions unless transitions.empty?
    }
  end
  return [] if array.empty?
  _{
    Rules array
  }
end
# AbortIncompleteMultipartUpload sub-fragment. Returns nil when the
# rule does not configure it; callers guard on truthiness.
def _s3_lifecycle_abort_incomplete_multipart_upload(args)
  rule = args[:abort_incomplete_multipart_upload]
  if rule
    _{
      DaysAfterInitiation rule[:days]
    }
  end
end
# NoncurrentVersionTransitions array; [] when none are configured.
def _s3_lifecycle_noncurrent_version_transition(args)
  transitions = args[:noncurrent_version_transitions] || []
  array = []
  transitions.each do |transition|
    array << _{
      StorageClass transition[:storage]
      TransitionInDays transition[:in_days]
    }
  end
  array
end
# Transitions array for a lifecycle rule; :date and :in_days are both
# optional per entry. Returns [] when none are configured.
def _s3_lifecycle_transition(args)
  transitions = args[:transitions] || []
  array = []
  transitions.each do |transition|
    array << _{
      StorageClass transition[:storage]
      TransitionDate transition[:date] if transition.key? :date
      TransitionInDays transition[:in_days] if transition.key? :in_days
    }
  end
  array
end
# LoggingConfiguration fragment. Returns "" when :logging is absent —
# presumably an empty string makes the caller omit the property; verify
# against the Kumogata template conventions.
def _s3_logging(args)
  return "" unless args.key? :logging
  logging = args[:logging]
  _{
    DestinationBucketName logging[:destination]
    LogFilePrefix logging[:prefix] || ""
  }
end
# NotificationConfiguration fragment combining Lambda, SQS queue and
# SNS topic configurations; "" when :notification is absent.
def _s3_notification(args)
  return "" unless args.key? :notification
  notification = args[:notification]
  # NOTE(review): the local name shadows Kernel#lambda inside this method.
  lambda = _s3_notification_configuration(notification, :lambda)
  queue = _s3_notification_configuration(notification, :queue)
  topic = _s3_notification_configuration(notification, :topic)
  _{
    LambdaConfigurations lambda unless lambda.empty?
    QueueConfigurations queue unless queue.empty?
    TopicConfigurations topic unless topic.empty?
  }
end
# Builds one of the three notification configuration arrays (selected
# by +key+: :lambda, :queue or :topic) from args[key]. Each entry may
# carry S3 key filter rules given as an array of {name => value} hashes.
def _s3_notification_configuration(args, key)
  values = args[key] || []
  array = []
  values.each do |value|
    array << _{
      Event value[:event]
      Filter _{
        S3Key _{
          # Flatten each {name => value} pair into Name/Value rule nodes.
          Rules value[:filters].collect{|v|
            filter = []
            v.each_pair do |kk, vv|
              filter << _{
                Name kk
                Value vv
              }
            end
            filter
          }.flatten
        }
      } if value.key? :filters
      # The target property name depends on which configuration type
      # is being built.
      case key
      when :lambda
        Function value[:function]
      when :queue
        Queue value[:queue]
      when :topic
        Topic value[:topic]
      end
    }
  end
  array
end
# ReplicationConfiguration fragment (IAM role plus rules); "" when
# :replication is absent.
def _s3_replication(args)
  return "" unless args.key? :replication
  replication = args[:replication]
  rules = _s3_replication_rules(replication)
  _{
    Role replication[:role]
    Rules rules
  }
end
# Builds the replication Rules array; each rule carries a destination
# sub-fragment plus id/prefix/status taken verbatim from the input.
def _s3_replication_rules(args)
  rules = args[:rules] || []
  array = []
  rules.each do |rule|
    destination = _s3_replication_rules_destination(rule[:destination])
    array << _{
      Destination destination
      Id rule[:id]
      Prefix rule[:prefix]
      Status rule[:status]
    }
  end
  array
end
# Destination sub-fragment for a replication rule (target bucket and
# storage class).
def _s3_replication_rules_destination(args)
  _{
    Bucket args[:bucket]
    StorageClass args[:storage]
  }
end
# VersioningConfiguration fragment; Status is validated against
# Enabled/Disabled with Enabled as the default. "" when :versioning
# is absent.
def _s3_versioning(args)
  return "" unless args.key? :versioning
  versioning = args[:versioning]
  status_values = %w( Enabled Disabled )
  status = _valid_values(versioning[:status], status_values, "Enabled")
  _{
    Status status
  }
end
# WebsiteConfiguration fragment with defaulted error/index documents;
# "" when :website is absent.
def _s3_website(args)
  return "" unless args.key? :website
  website = args[:website]
  redirect = _s3_website_redirect_all_request(website)
  routing = _s3_website_routing_rules(website)
  _{
    ErrorDocument website[:error] || "404.html"
    IndexDocument website[:index] || "index.html"
    RedirectAllRequestsTo redirect unless redirect.empty?
    RoutingRules routing unless routing.empty?
  }
end
# RedirectAllRequestsTo sub-fragment; protocol is validated to
# http/https (default http). "" when :redirect is absent.
def _s3_website_redirect_all_request(args)
  return "" unless args.key? :redirect
  redirect = args[:redirect] || {}
  _{
    HostName redirect[:hostname]
    Protocol _valid_values(redirect[:protocol], %w( http https ), "http")
  }
end
# Builds the website RoutingRules array: each route pairs a
# RedirectRule with its RoutingRuleCondition.
def _s3_website_routing_rules(args)
  routing = args[:routing] || []
  array = []
  routing.each do |route|
    array << _{
      RedirectRule do
        redirect = route[:redirect] || {}
        HostName redirect[:host] if redirect.key? :host
        HttpRedirectCode redirect[:http] if redirect.key? :http
        Protocol redirect[:protocol] if redirect.key? :protocol
        ReplaceKeyPrefixWith redirect[:replace_key_prefix] if redirect.key? :replace_key_prefix
        ReplaceKeyWith redirect[:replace_key_with] if redirect.key? :replace_key_with
      end
      RoutingRuleCondition do
        # NOTE(review): this local shadows the outer `routing` array;
        # harmless as written, but confusing — consider renaming.
        routing = route[:routing] || {}
        HttpErrorCodeReturnedEquals routing[:http]
        KeyPrefixEquals routing[:key_prefix]
      end
    }
  end
  array
end
| 24.215548 | 136 | 0.684226 |
912c54e6c4f589bd18ccec01fcd2ce6bb8f90fe4 | 1,840 | # frozen_string_literal: true
# Handles creation and live preview of comments on an article.
class CommentsController < BaseController
  before_action :set_article, only: [:create, :preview]

  # Builds a comment from submitted params merged over request-derived
  # defaults, optionally attributes it to the signed-in user, remembers
  # author details in cookies, then renders a success/failure partial
  # (JS) or redirects back to the article (HTML).
  def create
    options = new_comment_defaults.merge comment_params.to_h
    @comment = @article.add_comment(options)
    unless current_user.nil? || session[:user_id].nil?
      # maybe useless, but who knows ?
      @comment.user_id = current_user.id if current_user.id == session[:user_id]
    end
    remember_author_info_for @comment
    # Assume failure; switch to the success partial only when the
    # captcha check passes and the comment persists.
    partial = "/articles/comment_failed"
    partial = "/articles/comment" if recaptcha_ok_for?(@comment) && @comment.save
    respond_to do |format|
      format.js { render partial }
      format.html { redirect_to URI.parse(@article.permalink_url).path }
    end
  end

  # Renders a non-persisted preview of the comment body; blank bodies
  # get an empty 200, closed articles a plain-text notice.
  def preview
    return render plain: "Comments are closed" if @article.comments_closed?
    if comment_params[:body].blank?
      head :ok
      return
    end
    @comment = @article.comments.build(comment_params)
  end

  protected

  # True when recaptcha is disabled for the blog, or enabled and verified.
  def recaptcha_ok_for?(comment)
    use_recaptcha = comment.blog.use_recaptcha
    ((use_recaptcha && verify_recaptcha(model: comment)) || !use_recaptcha)
  end

  # Request-derived defaults applied beneath the user-supplied params.
  def new_comment_defaults
    { ip: request.remote_ip,
      author: "Anonymous",
      user: @current_user,
      user_agent: request.env["HTTP_USER_AGENT"],
      referrer: request.env["HTTP_REFERER"],
      permalink: @article.permalink_url }.stringify_keys
  end

  # Persists author details in cookies so the comment form can be
  # pre-filled on the visitor's next visit.
  def remember_author_info_for(comment)
    add_to_cookies(:author, comment.author)
    add_to_cookies(:url, comment.url)
    add_to_cookies(:gravatar_id, Digest::MD5.hexdigest(comment.email.strip)) if comment.email.present?
  end

  def set_article
    @article = Article.find(params[:article_id])
  end

  # Strong-params whitelist for comment attributes (memoized per request).
  def comment_params
    @comment_params ||= params.require(:comment).permit(:body, :author, :email, :url)
  end
end
| 27.878788 | 102 | 0.709783 |
1df0ea73a49d95b89fa5bbf76307f390220da887 | 1,552 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
# Post module that mounts a Volume Shadow Copy device onto a directory
# path using `cmd.exe /C mklink /D`. Requires local admin privileges
# and UAC already bypassed on the target.
class MetasploitModule < Msf::Post
  include Msf::Post::Windows::Priv
  include Msf::Post::Windows::ShadowCopy

  def initialize(info={})
    super(update_info(info,
      'Name' => "Windows Manage Mount Shadow Copy",
      'Description' => %q{
        This module will attempt to mount a Volume Shadow Copy
        on the system. This is based on the VSSOwn Script
        originally posted by Tim Tomes and Mark Baggett.
        Works on win2k3 and later.
      },
      'License' => MSF_LICENSE,
      'Platform' => ['win'],
      'SessionTypes' => ['meterpreter'],
      'Author' => ['theLightCosine'],
      'References' => [
        [ 'URL', 'http://pauldotcom.com/2011/11/safely-dumping-hashes-from-liv.html' ]
      ]
    ))
    register_options(
      [
        OptString.new('DEVICE', [ true, 'DeviceObject of Shadowcopy to mount.' ]),
        OptString.new('PATH', [ true, 'Path to mount it to.' ])
      ])
  end

  # Preconditions are checked in order: admin rights, UAC bypassed,
  # VSS service running; then the symlink is created.
  def run
    unless is_admin?
      print_error("This module requires admin privs to run")
      return
    end
    if is_uac_enabled?
      print_error("This module requires UAC to be bypassed first")
      return
    end
    unless start_vss
      return
    end
    # NOTE(review): the return value of execute is assigned but never
    # used — confirm before removing the assignment.
    r = session.sys.process.execute("cmd.exe /C mklink /D #{datastore['DEVICE']} #{datastore['PATH']}", nil, {'Hidden' => true})
  end
end
| 28.740741 | 128 | 0.591495 |
b907eb93dacd7d1d2ac8df02b3ca24de9f4dec5f | 276 | class ApplicationHelper::Button::TemplateProvision < ApplicationHelper::Button::Basic
def calculate_properties
super
self[:title] = _("Selected item is not eligible for Provisioning") if disabled?
end
# The button is disabled when the selected template record does not
# support provisioning.
def disabled?
  !@record.supports_provisioning?
end
end
| 25.090909 | 85 | 0.753623 |
ffcaeb3c8f513b7b6f8ca64ca92f2d31ec6b6f4d | 206 | class CreateJoinTableGameTeam < ActiveRecord::Migration[6.0]
# Creates the games/teams join table with composite indexes in both
# directions so lookups by team or by game can each use an index.
def change
  create_join_table :games, :teams do |t|
    t.index [:team_id, :game_id]
    t.index [:game_id, :team_id]
  end
end
end
| 22.888889 | 60 | 0.674757 |
f8314612960d081de707af8a9f4e3160d4f9856e | 974 | require File.dirname(__FILE__) + '/../spec_helper'
# Pending specs for VimSdk::VmomiSupport — every example below is
# body-less, so RSpec reports them all as "not yet implemented".
describe VimSdk::VmomiSupport do
  describe :qualified_wsdl_name do
    it "should provide qualified WSDL names for builtin types"
    it "should provide qualified WSDL names for array types"
    it "should provide qualified WSDL names for object types"
  end

  describe :wsdl_name do
    it "should provide WSDL names for builtin types"
    it "should provide WSDL names for array types"
    it "should provide WSDL names for object types"
  end

  describe :guess_wsdl_type do
    it "should guess the WSDL type based on an unqualified name"
  end

  describe :compatible_type do
    it "should return itself if the version is compatible"
    it "should return a compatible type if this is not available in the current version"
  end

  describe :wsdl_namespace do
    it "should provide the WSDL namespace for a version"
  end

  describe :version_namespace do
    it "should provide the version namespace"
  end
end
| 28.647059 | 88 | 0.74538 |
79326699c05db48cbc6d340646e7f9bcff688a04 | 975 | require 'json'
# Pod metadata is read from the sibling package.json so the podspec
# stays in sync with the npm package.
package = JSON.parse(File.read(File.join(__dir__, '..', 'package.json')))

Pod::Spec.new do |s|
  s.name = 'EXCamera'
  s.version = package['version']
  s.summary = package['description']
  s.description = package['description']
  s.license = package['license']
  s.author = package['author']
  s.homepage = package['homepage']
  s.platform = :ios, '11.0'
  s.source = { :git => "https://github.com/expo/expo.git" }
  s.dependency 'UMCore'
  s.dependency 'ExpoModulesCore'
  s.dependency 'UMImageLoaderInterface'
  s.dependency 'UMPermissionsInterface'

  # Ship the prebuilt xcframework (headers only as source files) unless
  # building from source is requested via $ExpoUseSources or
  # EXPO_USE_SOURCE, the archive is missing, or CocoaPods is too old.
  if !$ExpoUseSources&.include?(package['name']) && ENV['EXPO_USE_SOURCE'].to_i == 0 && File.exist?("#{s.name}.xcframework") && Gem::Version.new(Pod::VERSION) >= Gem::Version.new('1.10.0')
    s.source_files = "#{s.name}/**/*.h"
    s.vendored_frameworks = "#{s.name}.xcframework"
  else
    s.source_files = "#{s.name}/**/*.{h,m}"
  end
end
| 34.821429 | 188 | 0.612308 |
1139ce9d672e25bbb70d670cfe13fc9630f032e4 | 615 | # frozen_string_literal: true
module Stupidedi
module Versions
module FortyTen
module SegmentDefs
s = Schema
e = ElementDefs
r = ElementReqs
TDS = s::SegmentDef.build(:TDS, "Total Monetary Value Summary",
"Total Monetary Value Summary",
e::E610.simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E610.simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E610.simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E610.simple_use(r::Optional, s::RepeatCount.bounded(1))
)
end
end
end
end | 30.75 | 71 | 0.611382 |
abaf6b0a7a9bf3400ade81a21900a43bad39b85e | 2,009 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
# Work package filter that matches when EITHER the subject contains the
# value or the id equals it (sub-filters are OR-ed together by
# OrFilterForWpMixin).
class Queries::WorkPackages::Filter::SubjectOrIdFilter <
  Queries::WorkPackages::Filter::WorkPackageFilter
  include Queries::WorkPackages::Filter::OrFilterForWpMixin

  # Operators applied to the two sub-filters below.
  CONTAINS_OPERATOR = '~'.freeze
  EQUALS_OPERATOR = '='.freeze

  # Subject is matched with "contains", id with equality.
  FILTERS = [
    Queries::WorkPackages::Filter::FilterConfiguration.new(
      Queries::WorkPackages::Filter::SubjectFilter,
      :subject,
      CONTAINS_OPERATOR
    ),
    Queries::WorkPackages::Filter::FilterConfiguration.new(
      Queries::WorkPackages::Filter::IdFilter,
      :id,
      EQUALS_OPERATOR
    )
  ].freeze

  def self.key
    :subject_or_id
  end

  def name
    :subject_or_id
  end

  # Presented as a free-text search filter in the UI.
  def type
    :search
  end

  def human_name
    I18n.t('label_subject_or_id')
  end

  def filter_configurations
    FILTERS
  end
end
| 28.295775 | 91 | 0.735689 |
616fa3f2e586a4f0c6f1bc965fd939508f7e8ac4 | 4,579 | # Copyright 2011, Dell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "utils"
# Install the distro-specific DHCP server package; `pkg` records the
# package base name. NOTE(review): pkg is not referenced later in this
# recipe chunk — confirm before removing.
pkg = ""
case node[:platform_family]
when "debian"
  pkg = "dhcp3"
  package "dhcp3-server"
when "rhel"
  pkg = "dhcp"
  package "dhcp"
when "suse"
  pkg = "dhcp-server"
  package "dhcp-server"
end
directory "/etc/dhcp3"
directory "/etc/dhcp3/groups.d"
directory "/etc/dhcp3/subnets.d"
directory "/etc/dhcp3/hosts.d"
file "/etc/dhcp3/groups.d/group_list.conf" do
owner "root"
group "root"
mode 0644
end
file "/etc/dhcp3/subnets.d/subnet_list.conf" do
owner "root"
group "root"
mode 0644
end
file "/etc/dhcp3/hosts.d/host_list.conf" do
owner "root"
group "root"
mode 0644
end
# Generate the OMAPI shared secret once (guarded by not_if) so dhcpd
# can be controlled remotely. NOTE(review): dnssec-keygen with HMAC-MD5
# is removed from newer BIND releases — confirm availability on the
# targeted platforms.
bash "build omapi key" do
  code <<-EOH
cd /etc/dhcp3
dnssec-keygen -r /dev/urandom -a HMAC-MD5 -b 512 -n HOST omapi_key
KEY=`cat /etc/dhcp3/Komapi_key*.private|grep ^Key|cut -d ' ' -f2-`
echo $KEY > /etc/dhcp3/omapi.key
EOH
  not_if "test -f /etc/dhcp3/omapi.key"
end
# This needs to be evaled.
intfs = [Chef::Recipe::Barclamp::Inventory.get_network_by_type(node, "admin").interface]
address = Chef::Recipe::Barclamp::Inventory.get_network_by_type(node, "admin").address
d_opts = node[:dhcp][:options]
case node[:platform_family]
when "debian"
case node[:lsb][:codename]
when "natty","oneiric","precise"
template "/etc/dhcp/dhcpd.conf" do
owner "root"
group "root"
mode 0644
source "dhcpd.conf.erb"
variables(options: d_opts)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
template "/etc/default/isc-dhcp-server" do
owner "root"
group "root"
mode 0644
source "dhcp3-server.erb"
variables(interfaces: intfs)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
else
template "/etc/dhcp3/dhcpd.conf" do
owner "root"
group "root"
mode 0644
source "dhcpd.conf.erb"
variables(options: d_opts)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
template "/etc/default/dhcp3-server" do
owner "root"
group "root"
mode 0644
source "dhcp3-server.erb"
variables(interfaces: intfs)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
end
when "rhel"
dhcp_config_file = case
when node[:platform_version].to_f >= 6
"/etc/dhcp/dhcpd.conf"
else
"/etc/dhcpd.conf"
end
template dhcp_config_file do
owner "root"
group "root"
mode 0644
source "dhcpd.conf.erb"
variables(options: d_opts)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
template "/etc/sysconfig/dhcpd" do
owner "root"
group "root"
mode 0644
source "redhat-sysconfig-dhcpd.erb"
variables(interfaces: intfs)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
when "suse"
template "/etc/dhcpd.conf" do
owner "root"
group "root"
mode 0644
source "dhcpd.conf.erb"
variables(options: d_opts)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
template "/etc/sysconfig/dhcpd" do
owner "root"
group "root"
mode 0644
source "suse-sysconfig-dhcpd.erb"
variables(interfaces: intfs)
if node[:provisioner][:enable_pxe]
notifies :restart, "service[dhcp3-server]"
end
end
end
# Manage the DHCP service; the init service name differs per platform
# and Ubuntu release. When PXE is disabled the service is stopped and
# disabled entirely.
service "dhcp3-server" do
  if %w(suse rhel).include?(node[:platform_family])
    service_name "dhcpd"
  elsif node[:platform] == "ubuntu"
    case node[:lsb][:codename]
    when "maverick"
      service_name "dhcp3-server"
    when "natty", "oneiric", "precise"
      service_name "isc-dhcp-server"
    end
  end
  supports restart: true, status: true, reload: true
  action node[:provisioner][:enable_pxe] ? "enable" : ["disable", "stop"]
end
utils_systemd_service_restart "dhcp3-server"
| 24.88587 | 88 | 0.666521 |
037b8f78752669f0e0d65a7db0fec68e0121a1d1 | 1,626 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
#
# frozen_string_literal: true
module Twilio
module REST
class Authy
class V1 < Version
##
# Initialize the V1 version of Authy
def initialize(domain)
  super
  @version = 'v1'
  # List contexts are built lazily and memoized by the readers below.
  @services = nil
  @forms = nil
end
##
# @param [String] sid A 34 character string that uniquely identifies this Service.
# @return [Twilio::REST::Authy::V1::ServiceContext] if sid was passed.
# @return [Twilio::REST::Authy::V1::ServiceList]
# @raise [ArgumentError] if an explicit nil sid is given.
def services(sid=:unset)
  if sid.nil?
    # Explicit nil is a caller bug; :unset means "no argument given".
    raise ArgumentError, 'sid cannot be nil'
  elsif sid == :unset
    # No sid: return the (memoized) list resource.
    @services ||= ServiceList.new self
  else
    ServiceContext.new(self, sid)
  end
end
##
# @param [form.FormTypes] form_type The Type of this Form. One of `form-app-push`,
# `form-sms` or `form-totp`.
# @return [Twilio::REST::Authy::V1::FormContext] if form_type was passed.
# @return [Twilio::REST::Authy::V1::FormList]
# @raise [ArgumentError] if an explicit nil form_type is given.
def forms(form_type=:unset)
  if form_type.nil?
    # Explicit nil is a caller bug; :unset means "no argument given".
    raise ArgumentError, 'form_type cannot be nil'
  elsif form_type == :unset
    # No form_type: return the (memoized) list resource.
    @forms ||= FormList.new self
  else
    FormContext.new(self, form_type)
  end
end
##
# Provide a user friendly representation
def to_s
'<Twilio::REST::Authy::V1>'
end
end
end
end
end | 27.559322 | 90 | 0.52706 |
619392c159bee3615a1cb008bd973467b35d7387 | 196 | # -*- encoding: utf-8 -*-
module BrBoleto
module Retorno
module Cnab240
class Cecred < BrBoleto::Retorno::Cnab240::Base
# O Banco Cecred segue o padrão da FEBRABAN.
end
end
end
end | 19.6 | 50 | 0.688776 |
111e5486706433ebe4fe82ecb38e87881f1fdf1e | 306 | # frozen_string_literal: true
# Copyright (c) 2019 Danil Pismenny <[email protected]>
# Be sure to restart your server when you modify this file.
# ActiveSupport::Reloader.to_prepare do
# ApplicationController.renderer.defaults.merge!(
# http_host: 'example.org',
# https: false
# )
# end
| 23.538462 | 59 | 0.72549 |
ff7cd58d88a38c0d2714b31b3f837c6090847a86 | 962 | require File.expand_path('../boot', __FILE__)
require 'rack/cors'
require 'rails/all'
Bundler.require(*Rails.groups)
require "devise_token_auth"
module Dummy
  # Minimal host application used to exercise the engine under test.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Make classes under lib/ autoloadable.
    config.autoload_paths << Rails.root.join('lib')
  end
end
| 38.48 | 99 | 0.721414 |
210ce1c4f1d6cefafc2e6ae5c588a1387ff8d05e | 791 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v4/resources/topic_view.proto
require 'google/protobuf'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/api/annotations_pb'
# Registers the TopicView message definition (a single optional string
# field, `resource_name`) in the shared protobuf descriptor pool.
# Generated code — do not hand-edit the descriptor contents.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/ads/googleads/v4/resources/topic_view.proto", :syntax => :proto3) do
    add_message "google.ads.googleads.v4.resources.TopicView" do
      optional :resource_name, :string, 1
    end
  end
end
module Google
  module Ads
    module GoogleAds
      module V4
        module Resources
          # Ruby message class looked up from the descriptor pool where the
          # TopicView proto was registered earlier in this file.
          TopicView = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v4.resources.TopicView").msgclass
        end
      end
    end
  end
end
| 28.25 | 134 | 0.740834 |
4a060b6671f410aa4e75e875f9b42f514242a639 | 394 | module Kernel
def calling_method
caller[1][/`([^']*)'/, 1].to_sym
end
# Returns the object's singleton class (exists in Ruby 1.9.2)
def singleton_class; class << self; self; end; end unless method_defined?(:singleton_class)
# class_eval on an object acts like singleton_class.class_eval.
def class_eval(*args, &block)
singleton_class.class_eval(*args, &block)
end
end
| 24.625 | 93 | 0.708122 |
26352e64aacfa56245bf42df91a6d51e17d2c487 | 52,728 | require "securerandom"
require_relative '../client'
require_relative '../du/du_client'
require_relative '../du/upload_file'
require_relative 'app_version_common'
require_relative 'app_version_ref'
require_relative 'availability'
require_relative 'errors'
require_relative 'iap_subscription_pricing_tier'
require_relative 'pricing_tier'
require_relative 'territory'
require_relative 'user_detail'
module Spaceship
# rubocop:disable Metrics/ClassLength
class TunesClient < Spaceship::Client
# Legacy support
ITunesConnectError = Tunes::Error
ITunesConnectTemporaryError = Tunes::TemporaryError
ITunesConnectPotentialServerError = Tunes::PotentialServerError
attr_reader :du_client
def initialize
super
@du_client = DUClient.new
end
class << self
# trailer preview screenshots are required to have a specific size
def video_preview_resolution_for(device, is_portrait)
resolutions = {
'iphone4' => [1136, 640],
'iphone6' => [1334, 750],
'iphone6Plus' => [2208, 1242],
'iphone58' => [2436, 1125],
'ipad' => [1024, 768],
'ipadPro' => [2732, 2048]
}
r = resolutions[device]
r = [r[1], r[0]] if is_portrait
r
end
end
#####################################################
# @!group Init and Login
#####################################################
def self.hostname
"https://itunesconnect.apple.com/WebObjects/iTunesConnect.woa/"
end
# Shows a team selection for the user in the terminal. This should not be
# called on CI systems
def select_team
t_id = (ENV['FASTLANE_ITC_TEAM_ID'] || '').strip
t_name = (ENV['FASTLANE_ITC_TEAM_NAME'] || '').strip
if t_name.length > 0 && t_id.length.zero? # we prefer IDs over names, they are unique
puts("Looking for iTunes Connect Team with name #{t_name}") if Spaceship::Globals.verbose?
teams.each do |t|
t_id = t['contentProvider']['contentProviderId'].to_s if t['contentProvider']['name'].casecmp(t_name.downcase).zero?
end
puts("Could not find team with name '#{t_name}', trying to fallback to default team") if t_id.length.zero?
end
t_id = teams.first['contentProvider']['contentProviderId'].to_s if teams.count == 1
if t_id.length > 0
puts("Looking for iTunes Connect Team with ID #{t_id}") if Spaceship::Globals.verbose?
# actually set the team id here
self.team_id = t_id
return
end
# user didn't specify a team... #thisiswhywecanthavenicethings
loop do
puts("Multiple #{'iTunes Connect teams'.yellow} found, please enter the number of the team you want to use: ")
if ENV["FASTLANE_HIDE_TEAM_INFORMATION"].to_s.length == 0
puts("Note: to automatically choose the team, provide either the iTunes Connect Team ID, or the Team Name in your fastlane/Appfile:")
puts("Alternatively you can pass the team name or team ID using the `FASTLANE_ITC_TEAM_ID` or `FASTLANE_ITC_TEAM_NAME` environment variable")
first_team = teams.first["contentProvider"]
puts("")
puts(" itc_team_id \"#{first_team['contentProviderId']}\"")
puts("")
puts("or")
puts("")
puts(" itc_team_name \"#{first_team['name']}\"")
puts("")
end
# We're not using highline here, as spaceship doesn't have a dependency to fastlane_core or highline
teams.each_with_index do |team, i|
puts("#{i + 1}) \"#{team['contentProvider']['name']}\" (#{team['contentProvider']['contentProviderId']})")
end
unless Spaceship::Client::UserInterface.interactive?
puts("Multiple teams found on iTunes Connect, Your Terminal is running in non-interactive mode! Cannot continue from here.")
puts("Please check that you set FASTLANE_ITC_TEAM_ID or FASTLANE_ITC_TEAM_NAME to the right value.")
raise "Multiple iTunes Connect Teams found; unable to choose, terminal not ineractive!"
end
selected = ($stdin.gets || '').strip.to_i - 1
team_to_use = teams[selected] if selected >= 0
if team_to_use
self.team_id = team_to_use['contentProvider']['contentProviderId'].to_s # actually set the team id here
break
end
end
end
def send_login_request(user, password)
clear_user_cached_data
result = send_shared_login_request(user, password)
store_cookie
return result
end
# Sometimes we get errors or info nested in our data
# This method allows you to pass in a set of keys to check for
# along with the name of the sub_section of your original data
# where we should check
# Returns a mapping of keys to data array if we find anything, otherwise, empty map
def fetch_errors_in_data(data_section: nil, sub_section_name: nil, keys: nil)
if data_section && sub_section_name
sub_section = data_section[sub_section_name]
else
sub_section = data_section
end
unless sub_section
return {}
end
error_map = {}
keys.each do |key|
errors = sub_section.fetch(key, [])
error_map[key] = errors if errors.count > 0
end
return error_map
end
# rubocop:disable Metrics/PerceivedComplexity
# If the response is coming from a flaky api, set flaky_api_call to true so we retry a little.
# Patience is a virtue.
def handle_itc_response(raw, flaky_api_call: false)
return unless raw
return unless raw.kind_of?(Hash)
data = raw['data'] || raw # sometimes it's with data, sometimes it isn't
error_keys_to_check = [
"sectionErrorKeys",
"sectionInfoKeys",
"sectionWarningKeys",
"validationErrors"
]
errors_in_data = fetch_errors_in_data(data_section: data, keys: error_keys_to_check)
errors_in_version_info = fetch_errors_in_data(data_section: data, sub_section_name: "versionInfo", keys: error_keys_to_check)
# If we have any errors or "info" we need to treat them as warnings or errors
if errors_in_data.count == 0 && errors_in_version_info.count == 0
logger.debug("Request was successful")
end
# We pass on the `current_language` so that the error message tells the user
# what language the error was caused in
handle_response_hash = lambda do |hash, current_language = nil|
errors = []
if hash.kind_of?(Hash)
current_language ||= hash["language"]
hash.each do |key, value|
errors += handle_response_hash.call(value, current_language)
next unless key == 'errorKeys' and value.kind_of?(Array) and value.count > 0
# Prepend the error with the language so it's easier to understand for the user
errors += value.collect do |current_error_message|
current_language ? "[#{current_language}]: #{current_error_message}" : current_error_message
end
end
elsif hash.kind_of?(Array)
hash.each do |value|
errors += handle_response_hash.call(value)
end
# else: We don't care about simple values
end
return errors
end
errors = handle_response_hash.call(data)
# Search at data level, as well as "versionInfo" level for errors
error_keys = ["sectionErrorKeys", "validationErrors"]
errors_in_data = fetch_errors_in_data(data_section: data, keys: error_keys)
errors_in_version_info = fetch_errors_in_data(data_section: data, sub_section_name: "versionInfo", keys: error_keys)
errors += errors_in_data.values if errors_in_data.values
errors += errors_in_version_info.values if errors_in_version_info.values
errors = errors.flat_map { |value| value }
# Sometimes there is a different kind of error in the JSON response
# e.g. {"warn"=>nil, "error"=>["operation_failed"], "info"=>nil}
different_error = raw.fetch('messages', {}).fetch('error', nil)
errors << different_error if different_error
if errors.count > 0 # they are separated by `.` by default
# Sample `error` content: [["Forbidden"]]
if errors.count == 1 and errors.first == "You haven't made any changes."
# This is a special error which we really don't care about
elsif errors.count == 1 and errors.first.include?("try again later")
raise ITunesConnectTemporaryError.new, errors.first
elsif errors.count == 1 and errors.first.include?("Forbidden")
raise_insuffient_permission_error!
elsif flaky_api_call
raise ITunesConnectPotentialServerError.new, errors.join(' ')
else
raise ITunesConnectError.new, errors.join(' ')
end
end
# Search at data level, as well as "versionInfo" level for info and warnings
info_keys = ["sectionInfoKeys", "sectionWarningKeys"]
info_in_data = fetch_errors_in_data(data_section: data, keys: info_keys)
info_in_version_info = fetch_errors_in_data(data_section: data, sub_section_name: "versionInfo", keys: info_keys)
info_in_data.each do |info_key, info_value|
puts(info_value)
end
info_in_version_info.each do |info_key, info_value|
puts(info_value)
end
return data
end
# rubocop:enable Metrics/PerceivedComplexity
#####################################################
# @!group Applications
#####################################################
def applications
r = request(:get, 'ra/apps/manageyourapps/summary/v2')
parse_response(r, 'data')['summaries']
end
def app_details(app_id)
r = request(:get, "ra/apps/#{app_id}/details")
parse_response(r, 'data')
end
def update_app_details!(app_id, data)
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/details")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
# Creates a new application on iTunes Connect
# @param name (String): The name of your app as it will appear on the App Store.
# This can't be longer than 255 characters.
# @param primary_language (String): If localized app information isn't available in an
# App Store territory, the information from your primary language will be used instead.
# @param version *DEPRECATED: Use `Spaceship::Tunes::Application.ensure_version!` method instead*
# (String): The version number is shown on the App Store and should match the one you used in Xcode.
# @param sku (String): A unique ID for your app that is not visible on the App Store.
# @param bundle_id (String): The bundle ID must match the one you used in Xcode. It
# can't be changed after you submit your first build.
def create_application!(name: nil, primary_language: nil, version: nil, sku: nil, bundle_id: nil, bundle_id_suffix: nil, company_name: nil, platform: nil, itunes_connect_users: nil)
puts("The `version` parameter is deprecated. Use `Spaceship::Tunes::Application.ensure_version!` method instead") if version
# First, we need to fetch the data from Apple, which we then modify with the user's values
primary_language ||= "English"
platform ||= "ios"
r = request(:get, "ra/apps/create/v2/?platformString=#{platform}")
data = parse_response(r, 'data')
# Now fill in the values we have
# some values are nil, that's why there is a hash
data['name'] = { value: name }
data['bundleId'] = { value: bundle_id }
data['primaryLanguage'] = { value: primary_language }
data['primaryLocaleCode'] = { value: primary_language.to_itc_locale }
data['vendorId'] = { value: sku }
data['bundleIdSuffix'] = { value: bundle_id_suffix }
data['companyName'] = { value: company_name } if company_name
data['enabledPlatformsForCreation'] = { value: [platform] }
data['initialPlatform'] = platform
data['enabledPlatformsForCreation'] = { value: [platform] }
unless itunes_connect_users.nil?
data['iTunesConnectUsers']['grantedAllUsers'] = false
data['iTunesConnectUsers']['grantedUsers'] = data['iTunesConnectUsers']['availableUsers'].select { |user| itunes_connect_users.include?(user['username']) }
end
# Now send back the modified hash
r = request(:post) do |req|
req.url('ra/apps/create/v2')
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
data = parse_response(r, 'data')
handle_itc_response(data)
end
def create_version!(app_id, version_number, platform = 'ios')
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/platforms/#{platform}/versions/create/")
req.body = {
version: {
value: version_number.to_s
}
}.to_json
req.headers['Content-Type'] = 'application/json'
end
data = parse_response(r, 'data')
handle_itc_response(data)
end
def get_resolution_center(app_id, platform)
r = request(:get, "ra/apps/#{app_id}/platforms/#{platform}/resolutionCenter?v=latest")
parse_response(r, 'data')
end
def get_ratings(app_id, platform, version_id = '', storefront = '')
# if storefront or version_id is empty api fails
rating_url = "ra/apps/#{app_id}/platforms/#{platform}/reviews/summary"
params = {}
params['storefront'] = storefront unless storefront.empty?
params['version_id'] = version_id unless version_id.empty?
r = request(:get, rating_url, params)
parse_response(r, 'data')
end
def get_reviews(app_id, platform, storefront, version_id)
index = 0
per_page = 100 # apple default
all_reviews = []
loop do
rating_url = "ra/apps/#{app_id}/platforms/#{platform}/reviews?"
rating_url << "sort=REVIEW_SORT_ORDER_MOST_RECENT"
rating_url << "&index=#{index}"
rating_url << "&storefront=#{storefront}" unless storefront.empty?
rating_url << "&version_id=#{version_id}" unless version_id.empty?
r = request(:get, rating_url)
all_reviews.concat(parse_response(r, 'data')['reviews'])
if all_reviews.count < parse_response(r, 'data')['reviewCount']
index += per_page
else
break
end
end
all_reviews
end
#####################################################
# @!group AppVersions
#####################################################
def app_version(app_id, is_live, platform: nil)
raise "app_id is required" unless app_id
# First we need to fetch the IDs for the edit / live version
r = request(:get, "ra/apps/#{app_id}/overview")
platforms = parse_response(r, 'data')['platforms']
platform = Spaceship::Tunes::AppVersionCommon.find_platform(platforms, search_platform: platform)
return nil unless platform
version_id = Spaceship::Tunes::AppVersionCommon.find_version_id(platform, is_live)
return nil unless version_id
version_platform = platform['platformString']
app_version_data(app_id, version_platform: version_platform, version_id: version_id)
end
def app_version_data(app_id, version_platform: nil, version_id: nil)
raise "app_id is required" unless app_id
raise "version_platform is required" unless version_platform
raise "version_id is required" unless version_id
r = request(:get, "ra/apps/#{app_id}/platforms/#{version_platform}/versions/#{version_id}")
parse_response(r, 'data')
end
def update_app_version!(app_id, version_id, data)
raise "app_id is required" unless app_id
raise "version_id is required" unless version_id.to_i > 0
with_tunes_retry do
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/platforms/ios/versions/#{version_id}")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body, flaky_api_call: true)
end
end
#####################################################
# @!group Members
#####################################################
def members
r = request(:get, "ra/users/itc")
parse_response(r, 'data')["users"]
end
def reinvite_member(email)
request(:post, "ra/users/itc/#{email}/resendInvitation")
end
def delete_member!(user_id, email)
payload = []
payload << {
dsId: user_id,
email: email
}
request(:post) do |req|
req.url("ra/users/itc/delete")
req.body = payload.to_json
req.headers['Content-Type'] = 'application/json'
end
end
def create_member!(firstname: nil, lastname: nil, email_address: nil, roles: [], apps: [])
r = request(:get, "ra/users/itc/create")
data = parse_response(r, 'data')
data["user"]["firstName"] = { value: firstname }
data["user"]["lastName"] = { value: lastname }
data["user"]["emailAddress"] = { value: email_address }
roles << "admin" if roles.length == 0
data["user"]["roles"] = []
roles.each do |role|
# find role from template
data["roles"].each do |template_role|
if template_role["value"]["name"] == role
data["user"]["roles"] << template_role
end
end
end
if apps.length == 0
data["user"]["userSoftwares"] = { value: { grantAllSoftware: true, grantedSoftwareAdamIds: [] } }
else
data["user"]["userSoftwares"] = { value: { grantAllSoftware: false, grantedSoftwareAdamIds: apps } }
end
# send the changes back to Apple
r = request(:post) do |req|
req.url("ra/users/itc/create")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
def update_member_roles!(member, roles: [], apps: [])
r = request(:get, "ra/users/itc/#{member.user_id}/roles")
data = parse_response(r, 'data')
roles << "admin" if roles.length == 0
data["user"]["roles"] = []
roles.each do |role|
# find role from template
data["roles"].each do |template_role|
if template_role["value"]["name"] == role
data["user"]["roles"] << template_role
end
end
end
if apps.length == 0
data["user"]["userSoftwares"] = { value: { grantAllSoftware: true, grantedSoftwareAdamIds: [] } }
else
data["user"]["userSoftwares"] = { value: { grantAllSoftware: false, grantedSoftwareAdamIds: apps } }
end
# send the changes back to Apple
r = request(:post) do |req|
req.url("ra/users/itc/#{member.user_id}/roles")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
#####################################################
# @!group Pricing
#####################################################
def update_price_tier!(app_id, price_tier)
r = request(:get, "ra/apps/#{app_id}/pricing/intervals")
data = parse_response(r, 'data')
first_price = (data["pricingIntervalsFieldTO"]["value"] || []).count == 0 # first price
data["pricingIntervalsFieldTO"]["value"] ||= []
data["pricingIntervalsFieldTO"]["value"] << {} if data["pricingIntervalsFieldTO"]["value"].count == 0
data["pricingIntervalsFieldTO"]["value"].first["tierStem"] = price_tier.to_s
effective_date = (first_price ? nil : Time.now.to_i * 1000)
data["pricingIntervalsFieldTO"]["value"].first["priceTierEffectiveDate"] = effective_date
data["pricingIntervalsFieldTO"]["value"].first["priceTierEndDate"] = nil
data["countriesChanged"] = first_price
data["theWorld"] = true
if first_price # first price, need to set all countries
data["countries"] = supported_countries.collect do |c|
c.delete('region') # we don't care about le region
c
end
end
# send the changes back to Apple
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/pricing/intervals")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
def price_tier(app_id)
r = request(:get, "ra/apps/#{app_id}/pricing/intervals")
data = parse_response(r, 'data')
begin
data["pricingIntervalsFieldTO"]["value"].first["tierStem"]
rescue
nil
end
end
# Returns an array of all available pricing tiers
#
# @note Although this information is publicly available, the current spaceship implementation requires you to have a logged in client to access it
#
# @return [Array] the PricingTier objects (Spaceship::Tunes::PricingTier)
# [{
# "tierStem": "0",
# "tierName": "Free",
# "pricingInfo": [{
# "country": "United States",
# "countryCode": "US",
# "currencySymbol": "$",
# "currencyCode": "USD",
# "wholesalePrice": 0.0,
# "retailPrice": 0.0,
# "fRetailPrice": "$0.00",
# "fWholesalePrice": "$0.00"
# }, {
# ...
# }, {
# ...
def pricing_tiers
@pricing_tiers ||= begin
r = request(:get, 'ra/apps/pricing/matrix')
data = parse_response(r, 'data')['pricingTiers']
data.map { |tier| Spaceship::Tunes::PricingTier.factory(tier) }
end
end
#####################################################
# @!group Availability
#####################################################
# Updates the availability
#
# @note Although this information is publicly available, the current spaceship implementation requires you to have a logged in client to access it
# @param app_id (String): The id of your app
# @param availability (Availability): The availability update
#
# @return [Spaceship::Tunes::Availability] the new Availability
def update_availability!(app_id, availability)
r = request(:get, "ra/apps/#{app_id}/pricing/intervals")
data = parse_response(r, 'data')
data["countriesChanged"] = true
data["countries"] = availability.territories.map { |territory| { 'code' => territory.code } }
data["theWorld"] = availability.include_future_territories.nil? ? true : availability.include_future_territories
# send the changes back to Apple
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/pricing/intervals")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
data = parse_response(r, 'data')
Spaceship::Tunes::Availability.factory(data)
end
def availability(app_id)
r = request(:get, "ra/apps/#{app_id}/pricing/intervals")
data = parse_response(r, 'data')
Spaceship::Tunes::Availability.factory(data)
end
# Returns an array of all supported territories
#
# @note Although this information is publicly available, the current spaceship implementation requires you to have a logged in client to access it
#
# @return [Array] the Territory objects (Spaceship::Tunes::Territory)
def supported_territories
data = supported_countries
data.map { |country| Spaceship::Tunes::Territory.factory(country) }
end
# An array of supported countries
# [{
# "code": "AL",
# "name": "Albania",
# "region": "Europe"
# }, {
# ...
def supported_countries
r = request(:get, "ra/apps/pricing/supportedCountries")
parse_response(r, 'data')
end
def available_languages
r = request(:get, "ra/apps/storePreview/regionCountryLanguage")
response = parse_response(r, 'data')
response.flat_map { |region| region["storeFronts"] }
.flat_map { |storefront| storefront["supportedLocaleCodes"] }
.uniq
end
#####################################################
# @!group App Icons
#####################################################
# Uploads a large icon
# @param app_version (AppVersion): The version of your app
# @param upload_image (UploadFile): The icon to upload
# @return [JSON] the response
def upload_large_icon(app_version, upload_image)
raise "app_version is required" unless app_version
raise "upload_image is required" unless upload_image
du_client.upload_large_icon(app_version, upload_image, content_provider_id, sso_token_for_image)
end
# Uploads a watch icon
# @param app_version (AppVersion): The version of your app
# @param upload_image (UploadFile): The icon to upload
# @return [JSON] the response
def upload_watch_icon(app_version, upload_image)
raise "app_version is required" unless app_version
raise "upload_image is required" unless upload_image
du_client.upload_watch_icon(app_version, upload_image, content_provider_id, sso_token_for_image)
end
# Uploads an In-App-Purchase Review screenshot
# @param app_id (AppId): The id of the app
# @param upload_image (UploadFile): The icon to upload
# @return [JSON] the screenshot data, ready to be added to an In-App-Purchase
def upload_purchase_review_screenshot(app_id, upload_image)
data = du_client.upload_purchase_review_screenshot(app_id, upload_image, content_provider_id, sso_token_for_image)
{
"value" => {
"assetToken" => data["token"],
"sortOrder" => 0,
"type" => du_client.get_picture_type(upload_image),
"originalFileName" => upload_image.file_name,
"size" => data["length"],
"height" => data["height"],
"width" => data["width"],
"checksum" => data["md5"]
}
}
end
# Uploads a screenshot
# @param app_version (AppVersion): The version of your app
# @param upload_image (UploadFile): The image to upload
# @param device (string): The target device
# @param is_messages (Bool): True if the screenshot is for iMessage
# @return [JSON] the response
def upload_screenshot(app_version, upload_image, device, is_messages)
raise "app_version is required" unless app_version
raise "upload_image is required" unless upload_image
raise "device is required" unless device
du_client.upload_screenshot(app_version, upload_image, content_provider_id, sso_token_for_image, device, is_messages)
end
# Uploads an iMessage screenshot
# @param app_version (AppVersion): The version of your app
# @param upload_image (UploadFile): The image to upload
# @param device (string): The target device
# @return [JSON] the response
def upload_messages_screenshot(app_version, upload_image, device)
raise "app_version is required" unless app_version
raise "upload_image is required" unless upload_image
raise "device is required" unless device
du_client.upload_messages_screenshot(app_version, upload_image, content_provider_id, sso_token_for_image, device)
end
# Uploads the transit app file
# @param app_version (AppVersion): The version of your app
# @param upload_file (UploadFile): The image to upload
# @return [JSON] the response
def upload_geojson(app_version, upload_file)
raise "app_version is required" unless app_version
raise "upload_file is required" unless upload_file
du_client.upload_geojson(app_version, upload_file, content_provider_id, sso_token_for_image)
end
# Uploads the transit app file
# @param app_version (AppVersion): The version of your app
# @param upload_trailer (UploadFile): The trailer to upload
# @return [JSON] the response
def upload_trailer(app_version, upload_trailer)
raise "app_version is required" unless app_version
raise "upload_trailer is required" unless upload_trailer
du_client.upload_trailer(app_version, upload_trailer, content_provider_id, sso_token_for_video)
end
# Uploads the trailer preview
# @param app_version (AppVersion): The version of your app
# @param upload_trailer_preview (UploadFile): The trailer preview to upload
# @param device (string): The target device
# @return [JSON] the response
def upload_trailer_preview(app_version, upload_trailer_preview, device)
raise "app_version is required" unless app_version
raise "upload_trailer_preview is required" unless upload_trailer_preview
raise "device is required" unless device
du_client.upload_trailer_preview(app_version, upload_trailer_preview, content_provider_id, sso_token_for_image, device)
end
# Fetches the App Version Reference information from ITC
# @return [AppVersionRef] the response
def ref_data
r = request(:get, '/WebObjects/iTunesConnect.woa/ra/apps/version/ref')
data = parse_response(r, 'data')
Spaceship::Tunes::AppVersionRef.factory(data)
end
# Fetches the User Detail information from ITC. This gets called often and almost never changes
# so we cache it
# @return [UserDetail] the response
def user_detail_data
@_cached_user_detail_data ||= Spaceship::Tunes::UserDetail.factory(user_details_data, self)
end
#####################################################
# @!group CandiateBuilds
#####################################################
def candidate_builds(app_id, version_id)
r = request(:get, "ra/apps/#{app_id}/versions/#{version_id}/candidateBuilds")
parse_response(r, 'data')['builds']
end
#####################################################
# @!group Build Trains
#####################################################
# rubocop:disable Metrics/BlockNesting
# @param (testing_type) internal or external
def build_trains(app_id, testing_type, tries = 5, platform: nil)
raise "app_id is required" unless app_id
url = "ra/apps/#{app_id}/trains/?testingType=#{testing_type}"
url += "&platform=#{platform}" unless platform.nil?
r = request(:get, url)
return parse_response(r, 'data')
rescue Spaceship::Client::UnexpectedResponse => ex
# Build trains fail randomly very often
# we need to catch those errors and retry
# https://github.com/fastlane/fastlane/issues/6419
retry_error_messages = [
"ITC.response.error.OPERATION_FAILED",
"Internal Server Error",
"Service Unavailable"
].freeze
if retry_error_messages.any? { |message| ex.to_s.include?(message) }
tries -= 1
if tries > 0
logger.warn("Received temporary server error from iTunes Connect. Retrying the request...")
sleep(3) unless Object.const_defined?("SpecHelper")
retry
end
end
raise Spaceship::Client::UnexpectedResponse, "Temporary iTunes Connect error: #{ex}"
end
# rubocop:enable Metrics/BlockNesting
def update_build_trains!(app_id, testing_type, data)
raise "app_id is required" unless app_id
# The request fails if this key is present in the data
data.delete("dailySubmissionCountByPlatform")
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/testingTypes/#{testing_type}/trains/")
req.body = data.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
def remove_testflight_build_from_review!(app_id: nil, train: nil, build_number: nil, platform: 'ios')
r = request(:post) do |req|
req.url("ra/apps/#{app_id}/platforms/#{platform}/trains/#{train}/builds/#{build_number}/reject")
req.body = {}.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
# All build trains, even if there is no TestFlight
def all_build_trains(app_id: nil, platform: 'ios')
platform = 'ios' if platform.nil?
r = request(:get, "ra/apps/#{app_id}/buildHistory?platform=#{platform}")
handle_itc_response(r.body)
end
def all_builds_for_train(app_id: nil, train: nil, platform: 'ios')
platform = 'ios' if platform.nil?
r = request(:get, "ra/apps/#{app_id}/trains/#{train}/buildHistory?platform=#{platform}")
handle_itc_response(r.body)
end
def build_details(app_id: nil, train: nil, build_number: nil, platform: nil)
r = request(:get, "ra/apps/#{app_id}/platforms/#{platform || 'ios'}/trains/#{train}/builds/#{build_number}/details")
handle_itc_response(r.body)
end
def update_build_information!(app_id: nil,
train: nil,
build_number: nil,
# optional:
whats_new: nil,
description: nil,
feedback_email: nil,
platform: 'ios')
url = "ra/apps/#{app_id}/platforms/#{platform}/trains/#{train}/builds/#{build_number}/testInformation"
build_info = get_build_info_for_review(app_id: app_id, train: train, build_number: build_number, platform: platform)
build_info["details"].each do |current|
current["whatsNew"]["value"] = whats_new if whats_new
current["description"]["value"] = description if description
current["feedbackEmail"]["value"] = feedback_email if feedback_email
end
review_user_name = build_info['reviewUserName']['value']
review_password = build_info['reviewPassword']['value']
build_info['reviewAccountRequired']['value'] = (review_user_name.to_s + review_password.to_s).length > 0
# Now send everything back to iTC
r = request(:post) do |req| # same URL, but a POST request
req.url(url)
req.body = build_info.to_json
req.headers['Content-Type'] = 'application/json'
end
handle_itc_response(r.body)
end
# rubocop:disable Metrics/ParameterLists
# Submits one TestFlight build for external beta review, filling in the
# localized test information and the export compliance questionnaire.
def submit_testflight_build_for_review!(app_id: nil, train: nil, build_number: nil, platform: 'ios',
                                        # Required Metadata:
                                        changelog: nil,
                                        description: nil,
                                        feedback_email: nil,
                                        marketing_url: nil,
                                        first_name: nil,
                                        last_name: nil,
                                        review_email: nil,
                                        phone_number: nil,
                                        significant_change: false,
                                        # Optional Metadata:
                                        privacy_policy_url: nil,
                                        review_user_name: nil,
                                        review_password: nil,
                                        review_notes: nil,
                                        encryption: false,
                                        encryption_updated: false,
                                        is_exempt: false,
                                        proprietary: false,
                                        third_party: false)
  build_info = get_build_info_for_review(app_id: app_id, train: train, build_number: build_number, platform: platform)
  # Now fill in the values provided by the user
  # First the localized values:
  build_info['details'].each do |current|
    current['whatsNew']['value'] = changelog if changelog
    current['description']['value'] = description if description
    current['feedbackEmail']['value'] = feedback_email if feedback_email
    current['marketingUrl']['value'] = marketing_url if marketing_url
    current['privacyPolicyUrl']['value'] = privacy_policy_url if privacy_policy_url
    current['pageLanguageValue'] = current['language'] # There is no valid reason why we need this, only iTC being iTC
  end

  # NOTE(review): first_name, last_name, review_email, phone_number,
  # review_user_name, review_password and review_notes are accepted but not
  # forwarded in this payload — confirm whether they are still required.
  review_info = {
    "significantChange" => {
      "value" => significant_change
    },
    "buildTestInformationTO" => build_info,
    # Export compliance questionnaire answers
    "exportComplianceTO" => {
      "usesEncryption" => {
        "value" => encryption
      },
      "encryptionUpdated" => {
        "value" => encryption_updated
      },
      "isExempt" => {
        "value" => is_exempt
      },
      "containsProprietaryCryptography" => {
        "value" => proprietary
      },
      "containsThirdPartyCryptography" => {
        "value" => third_party
      }
    }
  }

  r = request(:post) do |req| # same URL, but a POST request
    req.url("ra/apps/#{app_id}/platforms/#{platform}/trains/#{train}/builds/#{build_number}/review/submit")
    req.body = review_info.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  handle_itc_response(r.body)
end
# rubocop:enable Metrics/ParameterLists
# Fetches the editable TestFlight test information for one build and
# returns the raw "data" hash (callers mutate it and POST it back to the
# same endpoint, see update_build_information!).
def get_build_info_for_review(app_id: nil, train: nil, build_number: nil, platform: 'ios')
  url = "ra/apps/#{app_id}/platforms/#{platform}/trains/#{train}/builds/#{build_number}/testInformation"
  r = request(:get) do |req|
    req.url(url)
    req.headers['Content-Type'] = 'application/json'
  end
  handle_itc_response(r.body)

  r.body['data']
end
#####################################################
# @!group Submit for Review
#####################################################

# Fetches the submission summary for an app version; the returned "data"
# hash is what send_app_submission later posts back.
def prepare_app_submissions(app_id, version)
  raise "app_id is required" unless app_id
  raise "version is required" unless version

  r = request(:get) do |req|
    req.url("ra/apps/#{app_id}/versions/#{version}/submit/summary")
    req.headers['Content-Type'] = 'application/json'
  end
  handle_itc_response(r.body)
  parse_response(r, 'data')
end
# Completes the App Store review submission for one app version.
# +data+ is the submission summary previously fetched through
# prepare_app_submissions, possibly adjusted by the caller.
#
# Raises when iTunes Connect reports Ad ID section errors, or when the
# response does not confirm a successful POST.
def send_app_submission(app_id, version, data)
  raise "app_id is required" unless app_id
  raise "version is required" unless version # consistent with prepare_app_submissions

  # ra/apps/1039164429/version/submit/complete
  r = request(:post) do |req|
    req.url("ra/apps/#{app_id}/versions/#{version}/submit/complete")
    req.body = data.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  handle_itc_response(r.body)

  # iTunes Connect still returns a success status code even if the submission
  # failed because of Ad ID info. This checks for any section error
  # keys in returned adIdInfo and prints them out.
  ad_id_error_keys = r.body.fetch('data').fetch('adIdInfo').fetch('sectionErrorKeys')
  if ad_id_error_keys.any?
    raise "Something wrong with your Ad ID information: #{ad_id_error_keys}."
  elsif r.body.fetch('messages').fetch('info').last == "Successful POST"
    # success
  else
    raise "Something went wrong when submitting the app for review. Make sure to pass valid options to submit your app for review"
  end

  parse_response(r, 'data')
end
#####################################################
# @!group release
#####################################################

# Releases an approved app version to the App Store.
def release!(app_id, version)
  raise "app_id is required" unless app_id
  raise "version is required" unless version
  r = request(:post) do |req|
    req.url("ra/apps/#{app_id}/versions/#{version}/releaseToStore")
    req.headers['Content-Type'] = 'application/json'
    # The endpoint expects the raw app id as the request body
    req.body = app_id.to_s
  end
  handle_itc_response(r.body)
  parse_response(r, 'data')
end
#####################################################
# @!group in-app-purchases
#####################################################

# Returns list of all available In-App-Purchases
def iaps(app_id: nil)
  response = request(:get, "ra/apps/#{app_id}/iaps")
  response.body["data"]
end
# Returns list of all available Families
def iap_families(app_id: nil)
  response = request(:get, "ra/apps/#{app_id}/iaps/families")
  response.body["data"]
end
# Deletes an In-App-Purchase by its purchase id.
def delete_iap!(app_id: nil, purchase_id: nil)
  r = request(:delete, "ra/apps/#{app_id}/iaps/#{purchase_id}")
  # NOTE(review): sibling methods pass `r.body` to handle_itc_response;
  # passing the raw response here may bypass error handling — confirm intent.
  handle_itc_response(r)
end
# Loads the full record of a single In-App-Purchase.
def load_iap(app_id: nil, purchase_id: nil)
  response = request(:get, "ra/apps/#{app_id}/iaps/#{purchase_id}")
  parse_response(response, 'data')
end
# Loads the full record of a single In-App-Purchase family.
def load_iap_family(app_id: nil, family_id: nil)
  response = request(:get, "ra/apps/#{app_id}/iaps/family/#{family_id}")
  parse_response(response, 'data')
end
# Loads the full In-App-Purchases-Pricing-Matrix
# note: the matrix is the same for any app_id
#
# Memoized on the client: since the matrix is app-independent, the first
# app_id used wins and later calls return the cached result.
#
# @param app_id (String) The Apple ID of any app
# @return ([Spaceship::Tunes::IAPSubscriptionPricingTier]) An array of pricing tiers
def subscription_pricing_tiers(app_id)
  @subscription_pricing_tiers ||= begin
    r = request(:get, "ra/apps/#{app_id}/iaps/pricing/matrix/recurring")
    data = parse_response(r, "data")["pricingTiers"]
    data.map { |tier| Spaceship::Tunes::IAPSubscriptionPricingTier.factory(tier) }
  end
end
# updates an In-App-Purchases-Family
# Sends the (modified) family record back via PUT, retrying on transient
# iTunes Connect errors through with_tunes_retry.
def update_iap_family!(app_id: nil, family_id: nil, data: nil)
  with_tunes_retry do
    r = request(:put) do |req|
      req.url("ra/apps/#{app_id}/iaps/family/#{family_id}/")
      req.body = data.to_json
      req.headers['Content-Type'] = 'application/json'
    end
    handle_itc_response(r.body)
  end
end
# updates an In-App-Purchases
# Sends the (modified) purchase record back via PUT, retrying on transient
# iTunes Connect errors through with_tunes_retry.
def update_iap!(app_id: nil, purchase_id: nil, data: nil)
  with_tunes_retry do
    r = request(:put) do |req|
      req.url("ra/apps/#{app_id}/iaps/#{purchase_id}")
      req.body = data.to_json
      req.headers['Content-Type'] = 'application/json'
    end
    handle_itc_response(r.body)
  end
end
# Replaces the recurring subscription pricing intervals of one IAP,
# retrying on transient iTunes Connect errors.
def update_recurring_iap_pricing!(app_id: nil, purchase_id: nil, pricing_intervals: nil)
  with_tunes_retry do
    payload = {}
    payload["subscriptions"] = pricing_intervals
    response = request(:post) do |req|
      req.url("ra/apps/#{app_id}/iaps/#{purchase_id}/pricing/subscriptions")
      req.body = payload.to_json
      req.headers['Content-Type'] = 'application/json'
    end
    handle_itc_response(response.body)
  end
end
# Loads the pricing currently configured for a recurring IAP.
def load_recurring_iap_pricing(app_id: nil, purchase_id: nil)
  response = request(:get, "ra/apps/#{app_id}/iaps/#{purchase_id}/pricing")
  parse_response(response, 'data')
end
# Creates a new In-App-Purchase family: fetches the iTC template, fills in
# the first add-on's identifiers plus the localized versions, and posts it.
def create_iap_family(app_id: nil, name: nil, product_id: nil, reference_name: nil, versions: [])
  r = request(:get, "ra/apps/#{app_id}/iaps/family/template")
  data = parse_response(r, 'data')

  data['activeAddOns'][0]['productId'] = { value: product_id }
  data['activeAddOns'][0]['referenceName'] = { value: reference_name }
  data['name'] = { value: name }
  data["details"]["value"] = versions

  r = request(:post) do |req|
    req.url("ra/apps/#{app_id}/iaps/family/")
    req.body = data.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  handle_itc_response(r.body)
end
# returns pricing goal array
def iap_subscription_pricing_target(app_id: nil, purchase_id: nil, currency: nil, tier: nil)
  equalize_url = "ra/apps/#{app_id}/iaps/#{purchase_id}/pricing/equalize/#{currency}/#{tier}"
  parse_response(request(:get, equalize_url), 'data')
end
# Creates an In-App-Purchases
#
# Fetches the iTC template for the requested IAP +type+ (defaults to
# "consumable"), fills in the caller-provided values, optionally uploads a
# review screenshot, and POSTs the result back.
def create_iap!(app_id: nil, type: nil, versions: nil, reference_name: nil, product_id: nil, cleared_for_sale: true, review_notes: nil, review_screenshot: nil, pricing_intervals: nil, family_id: nil, subscription_duration: nil, subscription_free_trial: nil)
  # Load IAP Template based on Type
  type ||= "consumable"
  r = request(:get, "ra/apps/#{app_id}/iaps/#{type}/template")
  data = parse_response(r, 'data')

  # Now fill in the values we have
  # some values are nil, that's why there is a hash
  data['familyId'] = family_id.to_s if family_id
  data['productId'] = { value: product_id }
  data['referenceName'] = { value: reference_name }
  data['clearedForSale'] = { value: cleared_for_sale }
  data['pricingDurationType'] = { value: subscription_duration } if subscription_duration
  data['freeTrialDurationType'] = { value: subscription_free_trial } if subscription_free_trial

  # pricing tier
  if pricing_intervals
    data['pricingIntervals'] = []
    pricing_intervals.each do |interval|
      data['pricingIntervals'] << {
        value: {
          country: interval[:country] || "WW",
          tierStem: interval[:tier].to_s,
          priceTierEndDate: interval[:end_date],
          priceTierEffectiveDate: interval[:begin_date]
        }
      }
    end
  end

  # One localized name/description entry per locale key in +versions+
  versions_array = []
  versions.each do |k, v|
    versions_array << {
      value: {
        description: { value: v[:description] },
        name: { value: v[:name] },
        localeCode: k.to_s
      }
    }
  end
  data["versions"][0]["details"]["value"] = versions_array
  data['versions'][0]["reviewNotes"] = { value: review_notes }

  if review_screenshot
    # Upload Screenshot:
    upload_file = UploadFile.from_path(review_screenshot)
    screenshot_data = upload_purchase_review_screenshot(app_id, upload_file)
    data["versions"][0]["reviewScreenshot"] = screenshot_data
  end

  # Now send back the modified hash
  r = request(:post) do |req|
    req.url("ra/apps/#{app_id}/iaps")
    req.body = data.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  handle_itc_response(r.body)
end
#####################################################
# @!group Sandbox Testers
#####################################################

# Lists every sandbox tester of the given tester class.
def sandbox_testers(tester_class)
  index_url = tester_class.url[:index]
  parse_response(request(:get, index_url), 'data')
end
# Creates a sandbox tester account of the given class and returns the
# created user payload. Birthday and secret question/answer are filled
# with placeholder values since they are mandatory but irrelevant here.
def create_sandbox_tester!(tester_class: nil, email: nil, password: nil, first_name: nil, last_name: nil, country: nil)
  url = tester_class.url[:create]
  r = request(:post) do |req|
    req.url(url)
    req.body = {
      user: {
        emailAddress: { value: email },
        password: { value: password },
        confirmPassword: { value: password },
        firstName: { value: first_name },
        lastName: { value: last_name },
        storeFront: { value: country },
        birthDay: { value: 1 },
        birthMonth: { value: 1 },
        secretQuestion: { value: SecureRandom.hex },
        secretAnswer: { value: SecureRandom.hex },
        sandboxAccount: nil
      }
    }.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  response_object = parse_response(r, 'data')
  errors = response_object['sectionErrorKeys']
  # Surface inline validation failures reported by iTC
  raise ITunesConnectError, errors.join(' ') unless errors.empty?
  response_object['user']
end
# Deletes the sandbox testers matching the given email addresses.
# Always returns true; the response body is not inspected here.
def delete_sandbox_testers!(tester_class, emails)
  url = tester_class.url[:delete]
  request(:post) do |req|
    req.url(url)
    req.body = emails.map do |email|
      {
        emailAddress: {
          value: email
        }
      }
    end.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  true
end
#####################################################
# @!group State History
#####################################################

# Returns the state history entries for every version of the app.
def versions_history(app_id, platform)
  response = request(:get, "ra/apps/#{app_id}/stateHistory?platform=#{platform}")
  parse_response(response, 'data')['versions']
end
# Returns the state history of one specific version.
def version_states_history(app_id, platform, version_id)
  response = request(:get, "ra/apps/#{app_id}/versions/#{version_id}/stateHistory?platform=#{platform}")
  parse_response(response, 'data')
end
#####################################################
# @!group Promo codes
#####################################################

# Lists the versions for which promo codes can be requested.
def app_promocodes(app_id: nil)
  response = request(:get, "ra/apps/#{app_id}/promocodes/versions")
  parse_response(response, 'data')['versions']
end
# Requests +quantity+ promo codes for one app version and returns the
# resulting request record.
def generate_app_version_promocodes!(app_id: nil, version_id: nil, quantity: nil)
  data = [{
    numberOfCodes: quantity,
    agreedToContract: true,
    versionId: version_id
  }]
  url = "ra/apps/#{app_id}/promocodes/versions"
  r = request(:post) do |req|
    req.url(url)
    req.body = data.to_json
    req.headers['Content-Type'] = 'application/json'
  end
  parse_response(r, 'data')
end
# Returns all previous promo code requests of the app.
def app_promocodes_history(app_id: nil)
  response = request(:get, "ra/apps/#{app_id}/promocodes/history")
  parse_response(response, 'data')['requests']
end
#####################################################
# @!group reject
#####################################################

# Rejects (cancels) a developer-submitted version waiting for review.
def reject!(app_id, version)
  raise "app_id is required" unless app_id
  raise "version is required" unless version

  r = request(:post) do |req|
    req.url("ra/apps/#{app_id}/versions/#{version}/reject")
    req.headers['Content-Type'] = 'application/json'
    # The endpoint expects the raw app id as the request body
    req.body = app_id.to_s
  end
  handle_itc_response(r.body)
  parse_response(r, 'data')
end
private
# Runs the given block, retrying on known transient iTunes Connect errors.
#
# @param tries [Integer] retry budget for ITunesConnectTemporaryError
#   (waits 60s between attempts)
# @param potential_server_error_tries [Integer] retry budget for
#   ITunesConnectPotentialServerError (waits 10s between attempts)
# @return the block's return value
# @raise the original exception once the relevant retry budget is exhausted
def with_tunes_retry(tries = 5, potential_server_error_tries = 3, &_block)
  return yield
rescue Spaceship::TunesClient::ITunesConnectTemporaryError => ex
  unless (tries -= 1).zero?
    msg = "iTunes Connect temporary error received: '#{ex.message}'. Retrying after 60 seconds (remaining: #{tries})..."
    puts(msg)
    logger.warn(msg)
    sleep(60) unless Object.const_defined?("SpecHelper") # don't stall the test suite
    retry
  end
  raise ex # re-raise the exception
rescue Spaceship::TunesClient::ITunesConnectPotentialServerError => ex
  unless (potential_server_error_tries -= 1).zero?
    # Bug fix: report the remaining *server error* retries, not `tries`
    msg = "Potential server error received: '#{ex.message}'. Retrying after 10 seconds (remaining: #{potential_server_error_tries})..."
    puts(msg)
    logger.warn(msg)
    sleep(10) unless Object.const_defined?("SpecHelper")
    retry
  end
  raise ex
end
# Drops all per-user cached values so they are re-fetched on next access.
def clear_user_cached_data
  @content_provider_id = @sso_token_for_image = @sso_token_for_video = nil
end
# The contentProviderId found in the UserDetail instance.
# Memoized; cleared by clear_user_cached_data.
def content_provider_id
  @content_provider_id ||= user_detail_data.content_provider_id
end
# the ssoTokenForImage found in the AppVersionRef instance
# Memoized; cleared by clear_user_cached_data.
def sso_token_for_image
  @sso_token_for_image ||= ref_data.sso_token_for_image
end
# the ssoTokenForVideo found in the AppVersionRef instance
# Memoized; cleared by clear_user_cached_data.
def sso_token_for_video
  @sso_token_for_video ||= ref_data.sso_token_for_video
end
# Adds or removes one tester from an app by toggling the +testing+ flag
# through the tester class's update_by_app endpoint.
def update_tester_from_app!(tester, app_id, testing)
  url = tester.class.url(app_id)[:update_by_app]
  data = {
    users: [
      {
        emailAddress: {
          value: tester.email
        },
        firstName: {
          value: tester.first_name
        },
        lastName: {
          value: tester.last_name
        },
        testing: {
          value: testing
        }
      }
    ]
  }

  r = request(:post) do |req|
    req.url(url)
    req.body = data.to_json
    req.headers['Content-Type'] = 'application/json'
  end

  data = parse_response(r, 'data')
  handle_itc_response(data)
end
end
# rubocop:enable Metrics/ClassLength
end
| 37.743737 | 261 | 0.608178 |
ed7193b97776613839d6864cf9114eaebb53d243 | 4,897 | require 'spec_helper'
# Feature coverage for the "Merge When Pipeline Succeeds" merge request
# widget: enabling the automatic merge, cancelling it, removing the source
# branch, and the merge that happens once the pipeline passes.
feature 'Merge When Pipeline Succeeds', :feature, :js do
  let(:user) { create(:user) }
  let(:project) { create(:project, :public) }

  let(:merge_request) do
    create(:merge_request_with_diffs, source_project: project,
                                      author: user,
                                      title: 'Bug NS-04')
  end

  let(:pipeline) do
    create(:ci_pipeline, project: project,
                         sha: merge_request.diff_head_sha,
                         ref: merge_request.source_branch)
  end

  before { project.team << [user, :master] }

  context 'when there is active pipeline for merge request' do
    background do
      create(:ci_build, pipeline: pipeline)
    end

    before do
      login_as user
      visit_merge_request(merge_request)
    end

    it 'displays the Merge When Pipeline Succeeds button' do
      expect(page).to have_button "Merge When Pipeline Succeeds"
    end

    describe 'enabling Merge When Pipeline Succeeds' do
      shared_examples 'Merge When Pipeline Succeeds activator' do
        it 'activates the Merge When Pipeline Succeeds feature' do
          click_button "Merge When Pipeline Succeeds"

          expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds."
          expect(page).to have_content "The source branch will not be removed."
          expect(page).to have_link "Cancel Automatic Merge"
          visit_merge_request(merge_request) # Needed to refresh the page
          expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i
        end
      end

      context "when enabled immediately" do
        it_behaves_like 'Merge When Pipeline Succeeds activator'
      end

      context 'when enabled after pipeline status changed' do
        before do
          pipeline.run!

          # We depend on merge request widget being reloaded
          # so we have to wait for asynchronous call to reload it
          # and have_content expectation handles that.
          #
          expect(page).to have_content "Pipeline ##{pipeline.id} running"
        end

        it_behaves_like 'Merge When Pipeline Succeeds activator'
      end

      context 'when enabled after it was previously canceled' do
        before do
          click_button "Merge When Pipeline Succeeds"
          click_link "Cancel Automatic Merge"
        end

        it_behaves_like 'Merge When Pipeline Succeeds activator'
      end

      context 'when it was enabled and then canceled' do
        let(:merge_request) do
          create(:merge_request_with_diffs,
                 :merge_when_pipeline_succeeds,
                 source_project: project,
                 title: 'Bug NS-04',
                 author: user,
                 merge_user: user)
        end

        before do
          click_link "Cancel Automatic Merge"
        end

        it_behaves_like 'Merge When Pipeline Succeeds activator'
      end
    end
  end

  context 'when merge when pipeline succeeds is enabled' do
    let(:merge_request) do
      create(:merge_request_with_diffs, :simple, source_project: project,
                                                 author: user,
                                                 merge_user: user,
                                                 title: 'MepMep',
                                                 merge_when_pipeline_succeeds: true)
    end

    let!(:build) do
      create(:ci_build, pipeline: pipeline)
    end

    before do
      login_as user
      visit_merge_request(merge_request)
    end

    it 'allows to cancel the automatic merge' do
      click_link "Cancel Automatic Merge"

      expect(page).to have_button "Merge When Pipeline Succeeds"

      visit_merge_request(merge_request) # refresh the page

      expect(page).to have_content "canceled the automatic merge"
    end

    it "allows the user to remove the source branch" do
      expect(page).to have_link "Remove Source Branch When Merged"

      click_link "Remove Source Branch When Merged"
      expect(page).to have_content "The source branch will be removed"
    end

    context 'when pipeline succeeds' do
      background { build.success }

      it 'merges merge request' do
        visit_merge_request(merge_request) # refresh the page

        expect(page).to have_content 'The changes were merged'
        expect(merge_request.reload).to be_merged
      end
    end
  end

  context 'when pipeline is not active' do
    it "does not allow to enable merge when pipeline succeeds" do
      visit_merge_request(merge_request)

      expect(page).not_to have_link 'Merge When Pipeline Succeeds'
    end
  end

  # Navigates to the MR page under the project's namespace.
  def visit_merge_request(merge_request)
    visit namespace_project_merge_request_path(merge_request.project.namespace, merge_request.project, merge_request)
  end
end
| 32.217105 | 117 | 0.633653 |
030f443cb7546423c7f6c28ef99cb7528b702bd2 | 366 | # frozen_string_literal: true
module Resolvers
  # Concern that adds an optional `schema` option to a resolver, narrowing
  # results by a "namespace.name" schema identifier.
  module FiltersBySchemaName
    extend ActiveSupport::Concern

    included do
      # Blank values are a no-op (scope.all); present values delegate to the
      # model scope filtered_by_schema_version.
      option :schema, type: [String, { null: false }], description: "Filter by a namespace.name schema identifier" do |scope, value|
        value.present? ? scope.filtered_by_schema_version(value) : scope.all
      end
    end
  end
end
| 26.142857 | 132 | 0.713115 |
f8b843f9f4cab4d2dad4d3aaeb9491390dc42ab8 | 608 | cask "koodo-reader" do
# Apple Silicon artifacts carry an "-arm64" suffix in the file name
arch = Hardware::CPU.intel? ? "" : "-arm64"

version "1.4.3"

# Per-architecture checksums for the two dmg variants
if Hardware::CPU.intel?
  sha256 "fc5b491c5f800de6855288dfa3ef796970ced5c082dcf324f4ace84fc3d3a0cb"
else
  sha256 "8e11951614d196971f8c7e3cad37531464c40b11f32860d5ec295eacd77f14c0"
end

url "https://github.com/troyeguo/koodo-reader/releases/download/v#{version}/Koodo-Reader-#{version}#{arch}.dmg",
    verified: "github.com/troyeguo/koodo-reader/"
name "Koodo Reader"
desc "Open-source Epub reader with backup and restore support"
homepage "https://koodo.960960.xyz/"

app "Koodo Reader.app"
end
| 30.4 | 114 | 0.741776 |
e256a7890094dfcd27db9ddca2d7116f82a44e07 | 197 | # frozen_string_literal: true
module Fields::Validations
  # Validation bundle for multiple-nested-form fields: layers the shared
  # presence and length validators on top of the field options.
  class MultipleNestedFormField < FieldOptions
    prepend Fields::Validations::Presence
    prepend Fields::Validations::Length
  end
end
| 21.888889 | 46 | 0.791878 |
399948da78ae2b38ed32e05ebedf94dc854e2a72 | 501 | Rails.application.routes.draw do
get 'sessions/new'
get 'users/new'

root 'static_pages#home'

# Static pages
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'

# Signup and session management
get '/signup', to: 'users#new'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
resources :users
end
| 31.3125 | 102 | 0.692615 |
1d917bb646e2734ce97658cac2f7360181462e1e | 685 | module ShipStation
# Payload builder for ShipStation "internationalOptions": customs contents,
# line items, and the non-delivery policy. Attribute names mirror the
# camelCase JSON keys ShipStation expects.
class InternationalOptions
  attr_accessor :contents,
                :customsItems,
                :nonDelivery

  def initialize
    # ShipStation defaults: undeliverable parcels come back, contents are goods
    self.nonDelivery = "return_to_sender"
    self.contents = "merchandise"
  end

  # Appends a customs line item. +value+ is the total value for the line;
  # it is stored as a per-unit value rounded to 2 decimal places.
  def add_item(description, quantity, value, country)
    self.customsItems ||= []
    # Bug fix: coerce to Float so integer totals don't truncate
    # (e.g. value 7, quantity 2 must yield 3.5, not 3).
    unit_value = (value.to_f / quantity).round(2)
    item = { description: description, quantity: quantity, value: unit_value, countryOfOrigin: country }
    self.customsItems << item
  end

  # Serializes every instance variable into a JSON object keyed without "@".
  # +t+ is the generator state passed along by #to_json callers; unused.
  def to_json(t)
    hash = {}
    self.instance_variables.each do |var|
      hash[var.to_s.gsub("@", "").to_sym] = self.instance_variable_get var
    end
    hash.to_json
  end
end
end
| 20.147059 | 121 | 0.646715 |
d56cbadc3d5d47debc09684b030d391666f9fbba | 690 | cask "singularity" do
version "1.8.9.8709"
sha256 "ef40901d39389588be972b6f72ec58d1a4f0d982dbb87f0ea1eeeab1d388763b"

url "https://bitbucket.org/router_gray/singularityviewer/downloads/Singularity_Alpha_#{version.dots_to_underscores}_x86_64.dmg",
    verified: "bitbucket.org/router_gray/singularityviewer/"
name "Singularity Viewer"
desc "Client for Second Life and OpenSim"
homepage "http://www.singularityviewer.org/"

# Scrape the downloads page for the newest Alpha dmg and convert the
# underscore-separated version back to dots.
livecheck do
  url "http://www.singularityviewer.org/downloads"
  strategy :page_match do |page|
    # NOTE(review): `v` is nil when the page has no match, making `tr` raise —
    # confirm whether livecheck tolerates that.
    v = page[/Singularity[._-]?Alpha[._-]?(\d+(?:_\d+)*)[._-]?x86_64\.dmg/i, 1]
    v.tr("_", ".")
  end
end

app "SingularityAlpha.app"
end
| 32.857143 | 130 | 0.718841 |
1a706e353d6c14f3cde5e03ad42a8a49b2e7768f | 158 | class DropCloudProvider < ActiveRecord::Migration[4.2]
# Drops the projects→cloud_providers association and the table itself.
# NOTE(review): as written (no column type / table definition given) this
# migration cannot be auto-reverted by Rails — confirm that is acceptable.
def change
  remove_column :projects, :cloud_provider_id
  drop_table :cloud_providers
end
end
| 22.571429 | 54 | 0.778481 |
03bb13c013b93dd7c01ca62b362be69ce748ba34 | 1,796 | class SourcesController < ApplicationController
# Skip authentication for the public read-only and voting actions.
before_filter :authenticate, except: [:index, :show, :up, :down]

# Lists sources: full-text search via ?query=, tag filter via ?tag=,
# otherwise all sources ordered by rating. ?view switches 10/20 per page.
def index
  if params[:query]
    @sources = Source.text_search(params[:query])
  elsif params[:tag]
    @sources = Source.tagged_with(params[:tag]).order('rating DESC').page(params[:page]).per_page(params[:view].nil? ? 10 : 20)
  else
    @sources = Source.order('rating DESC').page(params[:page]).per_page(params[:view].nil? ? 10 : 20)
  end
end
# Displays a single source.
def show
  @source = Source.find(params[:id])
end

# Renders the form for a new source.
def new
  @source = Source.new
end
# Persists a new source from the submitted attributes; returns to the
# index on success and re-renders the form otherwise.
def create
  @source = Source.new(params[:source])
  unless @source.save
    render action: 'new'
    return
  end
  flash[:notice] = "Successfully created source."
  redirect_to sources_url
end
# Renders the edit form for an existing source.
def edit
  @source = Source.find(params[:id])
end
# Applies the submitted attribute changes to an existing source; shows the
# source on success and re-renders the edit form otherwise.
def update
  @source = Source.find(params[:id])
  unless @source.update_attributes(params[:source])
    render action: 'edit'
    return
  end
  flash[:notice] = "Successfully updated source."
  redirect_to @source
end
# Removes the source and returns to the index with a notice.
def destroy
  @source = Source.find(params[:id])
  @source.destroy
  flash[:notice] = "Successfully destroyed source."
  redirect_to sources_url
end
# Upvotes the source once per session, then renders the matching JS view.
def up
  adjust_source_rating(:increment!)
end

# Downvotes the source once per session, then renders the matching JS view.
def down
  adjust_source_rating(:decrement!)
end

private

# Shared vote handler: applies a one-time +/-1 rating change for the source
# in params[:id], guarded by a per-source session flag, and responds via JS.
def adjust_source_rating(direction)
  @source = Source.find(params[:id])
  vote_key = "has_voted_source_#{@source.id}".to_sym
  if session[vote_key] != true
    @source.public_send(direction, :rating)
    session[vote_key] = true
  end
  respond_to do |format|
    format.js
  end
end
end
| 23.946667 | 129 | 0.641425 |
0871c92841b7e028743f01707faf8539a634d32a | 688 | require 'serverspec'
require 'docker'
require 'spec_helper'
# Builds the image from the Dockerfile named in $DOCKERFILE and runs the
# shared serverspec example collections against it.
describe "Dockerfile" do
  before(:all) do
    @image = Docker::Image.build_from_dir('.', { 'dockerfile' => ENV['DOCKERFILE'] })

    set :docker_image, @image.id
  end

  include_examples 'collection::bootstrap'

  include_examples 'collection::base'
  include_examples 'collection::base-app'

  include_examples 'collection::php5::development'
  include_examples 'collection::php-fpm5'
  include_examples 'collection::php-fpm5::local-only'
  include_examples 'collection::php-tools'

  include_examples 'collection::nginx'

  include_examples 'collection::php-fpm5::webserver-test::development'
end
| 31.272727 | 89 | 0.72093 |
b9989de77560f7e6611a457580142c2eeb122f17 | 192 | $LOAD_PATH.unshift(File.expand_path(File.dirname(__FILE__)))
# Make lib/ requireable, then load the gem under test and WebMock for specs.
$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib')))

require 'moss_ruby'
require 'webmock/rspec'
| 32 | 84 | 0.760417 |
0888e5f9cc6adad433286040b5dd3c3440c40edc | 688 | # frozen_string_literal: true
require 'test_helper'
# Exercises dbname configuration: the default nil value and a user override,
# restoring the original configuration around each scenario.
class ConnectionTest < Minitest::Test
  # Snapshots the configured dbname, runs the block, and always restores the
  # original value — even when an assertion inside the block fails.
  def wrap_dbname_configuration
    initial_dbname = Pgsnap.configuration.dbname
    yield
  ensure
    # Bug fix: restore in `ensure` so a failing test cannot leak configuration
    # into the rest of the suite.
    Pgsnap.set_configuration { |config| config.dbname = initial_dbname }
  end

  def test_that_initial_dbname_is_nil
    wrap_dbname_configuration do
      Pgsnap.reset_configuration
      assert_nil Pgsnap.configuration.dbname
    end
  end

  def test_that_user_can_configure_dbname
    wrap_dbname_configuration do
      Pgsnap.set_configuration do |config|
        config.dbname = 'yet_another_dbname'
      end
      assert_equal 'yet_another_dbname', Pgsnap.configuration.dbname
    end
  end
end
| 23.724138 | 72 | 0.761628 |
08ceef3f894af6f5be7c27fef0d660b12518e03a | 2,779 | # frozen_string_literal: true
require 'rails_helper'
# Unit coverage for HCA::SubmissionFailureEmailAnalyticsJob: configuration
# guards, GovDelivery paging behaviour, and the Google Analytics events sent.
RSpec.describe HCA::SubmissionFailureEmailAnalyticsJob, type: :job do
  subject do
    described_class.new
  end

  before do
    Settings.reports.token = 'asdf'
    Settings.reports.server = 'stage-tms.govdelivery.com'
    Settings.google_analytics_tracking_id = 'UA-XXXXXXXXX-1'
  end

  describe '#perform', run_at: '2018-05-30 18:18:56' do
    context 'GovDelivery token is missing from settings' do
      it 'should raise an error' do
        allow(FeatureFlipper).to receive(:send_email?).and_return(false)
        expect { subject.perform }.to raise_error(Common::Exceptions::ParameterMissing)
      end
    end

    context 'Google Analytics tracking ID is missing from settings' do
      it 'should raise an error' do
        Settings.google_analytics_tracking_id = nil
        expect { subject.perform }.to raise_error(Common::Exceptions::ParameterMissing)
      end
    end

    it 'should retrieve messages at least once, and stop when loop-break conditions are met' do
      VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
        expect(subject).to receive(:hca_emails).twice.and_call_original
        subject.perform
      end
    end

    it 'should process HCA failure emails for Google Analytics evaluation' do
      VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
        expect(subject).to receive(:eval_email).twice
        subject.perform
      end
    end

    it 'should send events to Google Analytics' do
      VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
        expect_any_instance_of(Staccato::Tracker).to receive(:event).exactly(3).times
        subject.perform
      end
    end
  end

  describe '#we_should_break?', run_at: '2018-05-30 18:27:56' do
    before do
      VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
        subject.send(:hca_emails, 1)
        @emails = subject.instance_variable_get(:@all_emails)
      end
    end

    context 'last email created_at > time-range start time and 50 emails in collection' do
      it 'should return false' do
        @emails.collection.last.attributes[:created_at] = 1440.minutes.ago.to_s
        expect(subject.send(:we_should_break?)).to be false
      end
    end

    context 'last email created_at < time-range start time' do
      it 'should return true' do
        @emails.collection.last.attributes[:created_at] = 25.hours.ago.to_s
        expect(subject.send(:we_should_break?)).to be true
      end
    end

    context 'less than 50 emails were returned by govdelivery' do
      it 'should return true' do
        @emails.collection.delete_at(0)
        expect(subject.send(:we_should_break?)).to be true
      end
    end
  end
end
| 34.7375 | 95 | 0.698813 |
263b0e9a4ab29a61fd1d14f96812992a3627a4b7 | 502 | class UsersController < ApplicationController
# Displays a user's profile page.
def show
  @user = User.find(params[:id])
end
# Renders the signup form.
def new
  @user = User.new
end
# Handles signup form submission: shows the profile with a welcome flash on
# success, re-renders the form otherwise.
def create
  @user = User.new(user_params) # Not the final implementation!
  if @user.save
    flash[:success] = "Welcome to the Sample App!"
    redirect_to @user
  else
    render 'new'
  end
end
private

# Strong parameters: whitelist the attributes a user may mass-assign.
# Marked private so it cannot be reached as a routable controller action.
def user_params
  params.require(:user).permit(:name, :email, :password,
                               :password_confirmation)
end
end | 19.307692 | 68 | 0.60757 |
4a7a517ef26f68bfaf3466024147c0cd6d3c81dd | 381 | name 'base-template'
maintainer 'Steven Gonzales'
maintainer_email '[email protected]'
license 'Apache 2.0'
description 'Installs/Configures base-template'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.2'

# Cookbook dependencies pulled in at resolution time
depends 'authorized_keys'
depends 'cloudpassage'
depends 'newrelic'
depends 'ntp'
| 29.307692 | 72 | 0.711286 |
e2061a17fa6c9d37d94f5137a87dab0cfd1cfd56 | 71 | module GovUKFrontendToolkit
# Empty engine subclass: registering it lets a host Rails app load this gem.
class Engine < ::Rails::Engine
end
end
| 14.2 | 32 | 0.760563 |
bf61c0b07cf35cca99294876e0a9ae78470074ba | 1,374 | require 'test_helper'
# Integration tests for login/logout, including the "remember me" checkbox.
class UsersLoginTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:michael)
  end

  test "login with invalid information" do
    get login_path
    assert_template 'sessions/new'
    post login_path, params: { session: { email: "", password: "" } }
    assert_template 'sessions/new'
    # The error flash is shown once, then must vanish on the next request
    assert_not flash.empty?
    get root_path
    assert flash.empty?
  end

  test "login with valid information followed by logout" do
    get login_path
    post login_path, params: { session: { email: @user.email, password: 'password' } }
    assert is_logged_in?
    assert_redirected_to @user
    follow_redirect!
    assert_template 'users/show'
    assert_select "a[href=?]", login_path, count: 0
    assert_select "a[href=?]", logout_path
    assert_select "a[href=?]", user_path(@user)
    delete logout_path
    assert_not is_logged_in?
    assert_redirected_to root_url
    # Simulate a user clicking logout in a second window
    delete logout_path
    follow_redirect!
    assert_select "a[href=?]", login_path
    assert_select "a[href=?]", logout_path, count: 0
    assert_select "a[href=?]", user_path(@user), count: 0
  end

  test "login with remembering" do
    log_in_as(@user, remember_me: '1')
    # Bug fix: the FILL_IN placeholders left this test unrunnable; compare the
    # persistent cookie with the remember token assigned to the user.
    assert_equal cookies['remember_token'], assigns(:user).remember_token
  end

  test "login without remembering" do
    log_in_as(@user, remember_me: '0')
    assert_nil cookies['remember_token']
  end
end
| 28.040816 | 86 | 0.697234 |
e9a917fa1b3f4920a56738eae46a2316617cab70 | 303 | if Rails.env.development? || ENV['FORCE_DB_SEED'].present?
# Bootstrap a development admin and print its one-time setup URL.
admin = Administrator.create(
  name: 'Dev Admin',
  email: '[email protected]',
  one_time_setup_token: SecureRandom.hex
)

puts "Admin created:\nhttp://localhost:3000/admin/sessions/new?token=#{admin.one_time_setup_token}"
end
| 33.666667 | 101 | 0.732673 |
ab0228b347ebc06b2c58182e5b02195761b16bc0 | 133,161 | # +----------------------------------------------------------------------+
# | Licensed Materials - Property of IBM |
# | |
# | (C) Copyright IBM Corporation 2006- 2018 |
# +----------------------------------------------------------------------+
# | Authors: Antonio Cangiano <[email protected]> |
# | : Mario Ds Briggs <[email protected]> |
# | : Praveen Devarao <[email protected]> |
# | : Arvind Gupta <[email protected]> |
# +----------------------------------------------------------------------+
require 'active_record/connection_adapters/abstract_adapter'
require 'arel/visitors/visitor'
require 'active_support/core_ext/string/strip'
require 'active_record/type'
require 'active_record/connection_adapters/sql_type_metadata'
# Helper for discovering which method invoked the current one, based on
# Kernel#caller backtrace strings.
module CallChain
  # Returns the method name +depth+ frames above the caller, or nil when the
  # backtrace entry cannot be parsed (robustness fix: previously this raised
  # NoMethodError on `.last` for unparsable frames).
  def self.caller_method(depth=1)
    parsed = parse_caller(caller(depth+1).first)
    parsed && parsed.last
  end

  # Copied from ActionMailer
  # Parses one backtrace entry ("file:line:in `method'") into
  # [file, line, method]; returns nil when the entry does not match.
  def self.parse_caller(at)
    if /^(.+?):(\d+)(?::in `(.*)')?/ =~ at
      file = Regexp.last_match[1]
      line = Regexp.last_match[2].to_i
      method = Regexp.last_match[3]
      [file, line, method]
    end
  end
  # Bug fix: a bare `private` has no effect on `self.` methods, so hide the
  # parser explicitly.
  private_class_method :parse_caller
end
module ActiveRecord
# Monkey-patch: create the schema_migrations table without a surrogate
# primary key; the version string column is the only column.
class SchemaMigration < ActiveRecord::Base
  class << self
    def create_table
      #puts "Calling method : " << CallChain.caller_method << "\n"
      #puts "Calling method for create_table(): " << String(caller(start=1, length=nil) )
      unless table_exists?
        version_options = connection.internal_string_options_for_primary_key
        # id:false — no auto-increment id; the version column identifies rows
        connection.create_table(table_name,id:false) do |t|
          t.string :version, version_options
        end
      end
    end
  end
end
# Monkey-patch of ActiveRecord::Relation#insert: when no primary key value
# is supplied and the adapter prefetches keys, the next sequence value is
# fetched up-front, placed into the insert, and returned to the caller.
class Relation
  def insert(values)
    primary_key_value = nil

    if primary_key && Hash === values
      # Look up an explicitly supplied primary key value, if any
      primary_key_value = values[values.keys.find { |k|
        k.name == primary_key
      }]

      if !primary_key_value && connection.prefetch_primary_key?(klass.table_name)
        primary_key_value = connection.next_sequence_value(klass.sequence_name)
        values[klass.arel_table[klass.primary_key]] = primary_key_value
      end
    end

    im = arel.create_insert
    im.into @table

    conn = @klass.connection

    substitutes = values.sort_by { |arel_attr,_| arel_attr.name }
    binds = substitutes.map do |arel_attr, value|
      [@klass.columns_hash[arel_attr.name], value]
    end

    #substitutes.each_with_index do |tuple, i|
    #  tuple[1] = conn.substitute_at(binds[i][0], i)
    #end
    # NOTE(review): `substitutes`/`binds` computed above are immediately
    # overwritten here — presumably substitute_values is the newer Rails
    # API; confirm the earlier computation can be removed.
    substitutes, binds = substitute_values values

    if values.empty? # empty insert
      im.values = Arel.sql(connection.empty_insert_statement_value(klass.primary_key))
    else
      im.insert substitutes
    end

    conn.insert(
      im,
      'SQL',
      primary_key,
      primary_key_value,
      nil,
      binds)
  end
end
class Base
# Method required to handle LOBs and XML fields.
# An after save callback checks if a marker has been inserted through
# the insert or update, and then proceeds to update that record with
# the actual large object through a prepared statement (param binding).
after_save :handle_lobs
# After-save hook: when running on the IBM_DB adapter, re-issues an
# UPDATE with bound parameters for every BLOB/CLOB/XML column whose
# value was written as a placeholder marker, because large objects
# cannot be inlined into the original INSERT/UPDATE statement.
def handle_lobs()
  if self.class.connection.kind_of?(ConnectionAdapters::IBM_DBAdapter)
    # Checks that the insert or update had at least a BLOB, CLOB or XML field
    self.class.connection.sql.each do |clob_sql|
      if clob_sql =~ /BLOB\('(.*)'\)/i ||
        clob_sql =~ /@@@IBMTEXT@@@/i ||
        clob_sql =~ /@@@IBMXML@@@/i ||
        clob_sql =~ /@@@IBMBINARY@@@/i
        update_query = "UPDATE #{self.class.table_name} SET ("
        counter = 0
        values = []
        params = []
        # Selects only binary, text and xml columns
        self.class.columns.select{|col| col.sql_type.to_s =~ /blob|binary|clob|text|xml/i }.each do |col|
          if counter == 0
            update_query << "#{col.name}"
          else
            update_query << ",#{col.name}"
          end
          # Add a '?' for the parameter or a NULL if the value is nil or empty
          # (except for a CLOB field where '' can be a value)
          if self[col.name].nil? ||
            self[col.name] == {} ||
            self[col.name] == [] ||
            (self[col.name] == '' && !(col.sql_type.to_s =~ /text|clob/i))
            params << 'NULL'
          else
            # Serialized attributes are persisted as YAML text.
            if (col.cast_type.is_a?(::ActiveRecord::Type::Serialized))
              values << YAML.dump(self[col.name])
            else
              values << self[col.name]
            end
            params << '?'
          end
          counter += 1
        end
        # no subsequent update is required if no relevant columns are found
        next if counter == 0
        update_query << ") = "
        # IBM_DB accepts 'SET (column) = NULL' but not (NULL),
        # therefore the sql needs to be changed for a single NULL field.
        if params.size==1 && params[0] == 'NULL'
          update_query << "NULL"
        else
          update_query << "(" + params.join(',') + ")"
        end
        update_query << " WHERE #{self.class.primary_key} = ?"
        values << self[self.class.primary_key.downcase]
        begin
          unless stmt = IBM_DB.prepare(self.class.connection.connection, update_query)
            error_msg = IBM_DB.getErrormsg( self.class.connection.connection, IBM_DB::DB_CONN )
            if error_msg && !error_msg.empty?
              raise "Statement prepare for updating LOB/XML column failed : #{error_msg}"
            else
              raise StandardError.new('An unexpected error occurred during update of LOB/XML column')
            end
          end
          self.class.connection.log_query(update_query,'update of LOB/XML field(s)in handle_lobs')
          # rollback any failed LOB/XML field updates (and remove associated marker)
          unless IBM_DB.execute(stmt, values)
            error_msg = "Failed to insert/update LOB/XML field(s) due to: #{IBM_DB.getErrormsg( stmt, IBM_DB::DB_STMT )}"
            self.class.connection.execute("ROLLBACK")
            raise error_msg
          end
        rescue StandardError => error
          raise error
        ensure
          # Always release the driver statement handle.
          IBM_DB.free_stmt(stmt) if stmt
        end
      end # if clob_sql
    end #connection.sql.each
    self.class.connection.handle_lobs_triggered = true
  end # if connection.kind_of?
end # handle_lobs
private :handle_lobs
# Establishes a connection to a specified database using the credentials provided
# with the +config+ argument. All the ActiveRecord objects will use this connection
# Establishes a connection to the database described by +config+ and
# returns a ConnectionAdapters::IBM_DBAdapter wrapping it.
#
# Required keys: :username, :password, :database.
# Optional keys: :schema, :host, :port, :security, :authentication,
# :timeout, :parameterized, :app_user, :account, :application,
# :workstation, :dbops, :start_id.
#
# Raises LoadError when the ibm_db driver cannot be loaded and
# ArgumentError when mandatory credentials are missing or nil.
def self.ibm_db_connection(config)
  # Attempts to load the Ruby driver IBM databases
  # while not already loaded or raises LoadError in case of failure.
  begin
    require 'ibm_db' unless defined? IBM_DB
  rescue LoadError
    raise LoadError, "Failed to load IBM_DB Ruby driver."
  end
  #if( config.has_key?(:parameterized) && config[:parameterized] == true )
  #  require 'active_record/connection_adapters/ibm_db_pstmt'
  # end
  # Check if class TableDefinition responds to indexes method to determine if we are on AR 3 or AR 4.
  # This is a interim hack ti ensure backward compatibility. To remove as we move out of AR 3 support or have a better way to determine which version of AR being run against.
  checkClass = ActiveRecord::ConnectionAdapters::TableDefinition.new(self,nil)
  if(checkClass.respond_to?(:indexes))
    isAr3 = false
  else
    isAr3 = true
  end
  # Converts all +config+ keys to symbols
  config = config.symbolize_keys
  # Flag to decide if quoted literal replcement should take place. By default it is ON. Set it to OFF if using Pstmt
  set_quoted_literal_replacement = IBM_DB::QUOTED_LITERAL_REPLACEMENT_ON
  # Retrieves database user credentials from the +config+ hash
  # or raises ArgumentError in case of failure.
  if !config.has_key?(:username) || !config.has_key?(:password)
    raise ArgumentError, "Missing argument(s): Username/Password for #{config[:database]} is not specified"
  else
    # BUGFIX: the original tested config[:username].to_s.nil?, which can
    # never be true (to_s never returns nil), so nil credentials slipped
    # through as empty strings. Check for nil before coercion, matching
    # the check already performed by #connect.
    if config[:username].nil? || config[:password].nil?
      raise ArgumentError, "Username/Password cannot be nil"
    end
    username = config[:username].to_s
    password = config[:password].to_s
  end
  # Pure "database operations" mode: return an adapter with no live connection.
  if(config.has_key?(:dbops) && config[:dbops] == true)
    return ConnectionAdapters::IBM_DBAdapter.new(nil, isAr3, logger, config, {})
  end
  # Retrieves the database alias (local catalog name) or remote name
  # (for remote TCP/IP connections) from the +config+ hash
  # or raises ArgumentError in case of failure.
  if config.has_key?(:database)
    database = config[:database].to_s
  else
    raise ArgumentError, "Missing argument: a database name needs to be specified."
  end
  # Providing default schema (username) when not specified
  config[:schema] = config.has_key?(:schema) ? config[:schema].to_s : config[:username].to_s
  # Prepared-statement mode disables quoted-literal replacement in the driver.
  if(config.has_key?(:parameterized) && config[:parameterized] == true )
    set_quoted_literal_replacement = IBM_DB::QUOTED_LITERAL_REPLACEMENT_OFF
  end
  # Extract connection options from the database configuration
  # (in support to formatting, audit and billing purposes):
  # Retrieve database objects fields in lowercase
  conn_options = {IBM_DB::ATTR_CASE => IBM_DB::CASE_LOWER}
  config.each do |key, value|
    if !value.nil?
      case key
      when :app_user        # Set connection's user info
        conn_options[IBM_DB::SQL_ATTR_INFO_USERID] = value
      when :account         # Set connection's account info
        conn_options[IBM_DB::SQL_ATTR_INFO_ACCTSTR] = value
      when :application     # Set connection's application info
        conn_options[IBM_DB::SQL_ATTR_INFO_APPLNAME] = value
      when :workstation     # Set connection's workstation info
        conn_options[IBM_DB::SQL_ATTR_INFO_WRKSTNNAME] = value
      end
    end
  end
  begin
    # Checks if a host name or address has been specified. If so, this implies a TCP/IP connection
    # Returns IBM_DB.Connection object upon succesful DB connection to the database
    # If otherwise the connection fails, +false+ is returned
    if config.has_key?(:host)
      # Retrieves the host address/name
      host = config[:host]
      # A net address connection requires a port. If no port has been specified, 50000 is used by default
      port = config[:port] || 50000
      # Connects to the database specified using the hostname, port, authentication type, username and password info
      # Starting with DB2 9.1FP5 secure connections using SSL are supported.
      # On the client side using CLI this is supported from CLI version V95FP2 and onwards.
      # This feature is set by specifying SECURITY=SSL in the connection string.
      # Below connection string is constructed and SECURITY parameter is appended if the user has specified the :security option
      conn_string = "DRIVER={IBM DB2 ODBC DRIVER};\
DATABASE=#{database};\
HOSTNAME=#{host};\
PORT=#{port};\
PROTOCOL=TCPIP;\
UID=#{username};\
PWD=#{password};"
      conn_string << "SECURITY=#{config[:security]};" if config.has_key?(:security)
      conn_string << "AUTHENTICATION=#{config[:authentication]};" if config.has_key?(:authentication)
      conn_string << "CONNECTTIMEOUT=#{config[:timeout]};" if config.has_key?(:timeout)
      connection = IBM_DB.connect( conn_string, '', '', conn_options, set_quoted_literal_replacement )
    else
      # No host implies a local catalog-based connection: +database+ represents catalog alias
      connection = IBM_DB.connect( database, username, password, conn_options, set_quoted_literal_replacement )
    end
  rescue StandardError => connect_err
    raise "Failed to connect to [#{database}] due to: #{connect_err}"
  end
  # Verifies that the connection was successful
  if connection
    # Creates an instance of *IBM_DBAdapter* based on the +connection+
    # and credentials provided in +config+
    ConnectionAdapters::IBM_DBAdapter.new(connection, isAr3, logger, config, conn_options)
  else
    # If the connection failure was not caught previoulsy, it raises a Runtime error
    raise "An unexpected error occured during connect attempt to [#{database}]"
  end
end # method self.ibm_db_connection
# Alias entry point so the adapter can also be configured as "ibmdb"
# (the adapter name without the underscore). Delegates to ibm_db_connection.
def self.ibmdb_connection(config)
  ibm_db_connection(config)
end
end # class Base
module ConnectionAdapters
class Column
  # Strips an eventual "SYSIBM"."BLOB"('...') scalar-function wrapper
  # from +value+, returning the payload between the quotes; any other
  # input is returned unchanged (after string coercion).
  def self.binary_to_string(value)
    text = value.to_s
    text.gsub(/"SYSIBM"."BLOB"\('(.*)'\)/i, '\1')
  end
end
module Quoting
  # Resolves a column's cast type via its SQL type metadata. # :nodoc:
  def lookup_cast_type_from_column(column)
    metadata = column.sql_type_metadata
    lookup_cast_type(metadata)
  end
end
module Savepoints
  # DB2 requires the ON ROLLBACK RETAIN CURSORS clause when creating a
  # savepoint, so override the generic implementation with the DB2 form.
  def create_savepoint(name = current_savepoint_name)
    savepoint_sql = "SAVEPOINT #{name} ON ROLLBACK RETAIN CURSORS"
    execute(savepoint_sql)
  end
end
module ColumnDumper
  # Builds the options hash used by the schema dumper for +column+:
  # limit, precision, scale, default, null, collation and comment,
  # each included only when present/meaningful.
  def prepare_column_options(column)
    spec = {}
    if limit = schema_limit(column)
      spec[:limit] = limit
    end
    if precision = schema_precision(column)
      spec[:precision] = precision
    end
    if scale = schema_scale(column)
      spec[:scale] = scale
    end
    default = schema_default(column) if column.has_default?
    spec[:default] = default unless default.nil?
    # Dumper expects the literal string 'false' here, not the boolean.
    spec[:null] = 'false' unless column.null
    if collation = schema_collation(column)
      spec[:collation] = collation
    end
    spec[:comment] = column.comment.inspect if column.comment.present?
    spec
  end
  # Returns the dump-ready limit for +column+, skipping bigints and
  # limits that match the native default for the column's type.
  def schema_limit(column)
    limit = column.limit unless column.bigint?
    #limit.inspect if limit && limit != native_database_types[column.type][:limit]
    limit.inspect if limit && limit != native_database_types[column.type.to_sym][:limit]
  end
=begin
  def column_spec_for_primary_key(column)
    if column.bigint?
      spec = { id: :bigint.inspect }
      spec[:default] = schema_default(column) || 'nil' unless column.auto_increment?
    else
      #spec = super
    end
    #spec[:unsigned] = 'true' if column.unsigned?
    #spec
    ""
  end
=end
end
module SchemaStatements
  # Options applied to the schema_migrations version column. # :nodoc:
  # NOTE(review): the first hash literal is dead code — only the second
  # hash is returned; confirm which of the two was intended.
  def internal_string_options_for_primary_key # :nodoc:
    { primary_key: true}
    { version_options: "PRIMARY KEY NOT NULL"}
  end
  # Drops +table_name+. NOTE(review): +options+ is passed to execute as
  # its name argument; the IF EXISTS variant is left commented out.
  def drop_table(table_name,options={})
    execute("DROP TABLE #{quote_table_name(table_name)}", options)
    #execute("DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}"
  end
=begin
  def create_table_definition(name, temporary, options,as = nil)
    TableDefinition.new self, name, temporary, options
  end
=end
  # Builds the adapter-specific TableDefinition, forwarding all arguments.
  def create_table_definition(*args, **options)
    TableDefinition.new(self, *args, **options)
  end
  # Removes a foreign key from +from_table+, resolving the constraint
  # name either from options[:name] or by matching the (case-adjusted)
  # column name against the table's existing foreign keys.
  def remove_foreign_key(from_table, options_or_to_table = {})
    return unless supports_foreign_keys?
    if options_or_to_table.is_a?(Hash)
      options = options_or_to_table
    else
      options = { column: foreign_key_column_for(options_or_to_table) }
    end
    fk_name_to_delete = options.fetch(:name) do
      # NOTE(review): bare `servertype` (no @) relies on the adapter's
      # attr_reader being in scope where this module is mixed in — confirm.
      fk_to_delete = foreign_keys(@servertype.set_case(from_table)).detect {|fk| "#{@servertype.set_case(fk.column)}" == "#{servertype.set_case(options[:column])}"}
      if fk_to_delete
        fk_to_delete.name
      else
        raise ArgumentError, "Table '#{from_table}' has no foreign key on column '#{options[:column]}'"
      end
    end
    at = create_alter_table from_table
    at.drop_foreign_key fk_name_to_delete
    execute schema_creation.accept(at)
  end
end #end of Module SchemaStatements
#class IBM_DBColumn < Column
# Adapter-specific column class; behavior is inherited from the generic
# ConnectionAdapters::Column apart from BLOB unwrapping below.
class IBM_DBColumn < ConnectionAdapters::Column # :nodoc:
  # delegate :precision, :scale, :limit, :type, :sql_type, to: :sql_type_metadata, allow_nil: true
  def initialize(*)
    super
  end
  #def initialize(column_name, column_default_value, sqltype_metadata, column_nullable, table_name, default_function, collation, comment)
  #super(column_name, column_default_value, sqltype_metadata, column_nullable, table_name)
  #end
  # Casts value (which is a String) to an appropriate instance
=begin
  def type_cast(value)
    # Casts the database NULL value to nil
    return nil if value == 'NULL'
    # Invokes parent's method for default casts
    super
  end
=end
  # Used to convert from BLOBs to Strings
  def self.binary_to_string(value)
    # Returns a string removing the eventual BLOB scalar function
    value.to_s.gsub(/"SYSIBM"."BLOB"\('(.*)'\)/i,'\1')
  end
end #class IBM_DBColumn
module ColumnMethods
# Declares +name+ as the table's primary-key column. Extra options are
# forwarded to +column+ with primary_key forced on.
def primary_key(name, type = :primary_key, **options)
  merged = options.merge(primary_key: true)
  column(name, type, merged)
end
##class Table
##class Table
# Adapter-specific Table for change_table blocks; adds shorthand column
# declarations for DB2/Informix-native types.
class Table < ActiveRecord::ConnectionAdapters::Table
  include ColumnMethods
  #Method to parse the passed arguments and create the ColumnDefinition object of the specified type
  def ibm_parse_column_attributes_args(type, *args)
    options = {}
    # Trailing hash, when present, carries the column options.
    if args.last.is_a?(Hash)
      options = args.delete_at(args.length-1)
    end
    args.each do | name |
      column name,type.to_sym,options
    end # end args.each
  end
  private :ibm_parse_column_attributes_args
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type xml
  #This method is different as compared to def char (sql is being issued explicitly
  #as compared to def char where method column(which will generate the sql is being called)
  #in order to handle the DEFAULT and NULL option for the native XML datatype
  def xml(*args )
    options = {}
    if args.last.is_a?(Hash)
      options = args.delete_at(args.length-1)
    end
    sql_segment = "ALTER TABLE #{@base.quote_table_name(@table_name)} ADD COLUMN "
    args.each do | name |
      sql = sql_segment + " #{@base.quote_column_name(name)} xml"
      @base.execute(sql,"add_xml_column")
    end
    return self
  end
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type double
  def double(*args)
    ibm_parse_column_attributes_args('double',*args)
    return self
  end
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type decfloat
  def decfloat(*args)
    ibm_parse_column_attributes_args('decfloat',*args)
    return self
  end
  # Shorthand for DB2 graphic (fixed-length double-byte) columns.
  def graphic(*args)
    ibm_parse_column_attributes_args('graphic',*args)
    return self
  end
  # Shorthand for DB2 vargraphic (variable-length double-byte) columns.
  def vargraphic(*args)
    ibm_parse_column_attributes_args('vargraphic',*args)
    return self
  end
  # Shorthand for 64-bit integer columns.
  def bigint(*args)
    ibm_parse_column_attributes_args('bigint',*args)
    return self
  end
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type char [character]
  def char(*args)
    ibm_parse_column_attributes_args('char',*args)
    return self
  end
  alias_method :character, :char
end
#class TableDefinition
# Adapter-specific TableDefinition used by create_table blocks; adds
# DB2/Informix-native column shorthands and XML-aware column handling.
class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
  include ColumnMethods
=begin
  def initialize(base, name=nil, temporary=nil, options=nil)
    if(self.respond_to?(:indexes))
      @ar3 = false
    else
      @ar3 = true
    end
    @columns = []
    @columns_hash = {}
    @indexes = {}
    @base = base
    @temporary = temporary
    @options = options
    @name = name
    @foreign_keys = {}
  end
=end
  # Initializes the definition state for table +name+ on connection +conn+.
  # NOTE(review): @base and @columns (used by #column below) are not set
  # here — @base is only referenced in the commented-out legacy version;
  # confirm how they are expected to be populated.
  def initialize(conn, name, temporary = false, options = nil, as = nil, comment: nil)
    @connection = conn
    @columns_hash = {}
    @indexes = []
    @foreign_keys = []
    @primary_keys = nil
    @temporary = temporary
    @options = options
    @as = as
    @name = name
    @comment = comment
    ##
    #@base = base
  end
  # Records (and returns) the primary-key definition for the table.
  def primary_keys(name = nil) # :nodoc:
    @primary_keys = PrimaryKeyDefinition.new(name) if name
    @primary_keys
  end
  # Native type map of the underlying adapter.
  def native
    @base.native_database_types
  end
  #Method to parse the passed arguments and create the ColumnDefinition object of the specified type
  def ibm_parse_column_attributes_args(type, *args)
    options = {}
    if args.last.is_a?(Hash)
      options = args.delete_at(args.length-1)
    end
    args.each do | name |
      column(name,type,options)
    end
  end
  private :ibm_parse_column_attributes_args
  #Method to support the new syntax of rails 2.0 migrations for columns of type xml
  def xml(*args )
    ibm_parse_column_attributes_args('xml', *args)
    return self
  end
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type double
  def double(*args)
    ibm_parse_column_attributes_args('double',*args)
    return self
  end
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type decfloat
  def decfloat(*args)
    ibm_parse_column_attributes_args('decfloat',*args)
    return self
  end
  # Shorthand for DB2 graphic (fixed-length double-byte) columns.
  def graphic(*args)
    ibm_parse_column_attributes_args('graphic',*args)
    return self
  end
  # Shorthand for DB2 vargraphic (variable-length double-byte) columns.
  def vargraphic(*args)
    ibm_parse_column_attributes_args('vargraphic',*args)
    return self
  end
  # Shorthand for 64-bit integer columns.
  def bigint(*args)
    ibm_parse_column_attributes_args('bigint',*args)
    return self
  end
  #Method to support the new syntax of rails 2.0 migrations (short-hand definitions) for columns of type char [character]
  def char(*args)
    ibm_parse_column_attributes_args('char',*args)
    return self
  end
  alias_method :character, :char
  # Overrides the abstract adapter in order to handle
  # the DEFAULT option for the native XML datatype
  def column(name, type, options ={})
    # construct a column definition where @base is adaptor instance
    column = ColumnDefinition.new(name, type)
    # DB2 does not accept DEFAULT NULL option for XML
    # for table create, but does accept nullable option
    unless type.to_s == 'xml'
      column.null    = options[:null]
      column.default = options[:default]
    else
      column.null    = options[:null]
      # Override column object's (instance of ColumnDefinition structure)
      # to_s which is expected to return the create_table SQL fragment
      # and bypass DEFAULT NULL option while still appending NOT NULL
      def column.to_s
        sql = "#{base.quote_column_name(name)} #{type}"
        unless self.null == nil
          sql << " NOT NULL" if (self.null == false)
        end
        return sql
      end
    end
    column.scale = options[:scale] if options[:scale]
    column.precision = options[:precision] if options[:precision]
    # append column's limit option and yield native limits
    if options[:limit]
      column.limit = options[:limit]
    elsif @base.native_database_types[type.to_sym]
      column.limit = @base.native_database_types[type.to_sym][:limit] if @base.native_database_types[type.to_sym].has_key? :limit
    end
    unless @columns.nil? or @columns.include? column
      @columns << column
    end
    @columns_hash[name] = column
    return self
  end
end
end
# The IBM_DB Adapter requires the native Ruby driver (ibm_db)
# for IBM data servers (ibm_db.so).
# +config+ the hash passed as an initializer argument content:
# == mandatory parameters
# adapter: 'ibm_db' // IBM_DB Adapter name
# username: 'db2user' // data server (database) user
# password: 'secret' // data server (database) password
# database: 'ARUNIT' // remote database name (or catalog entry alias)
# == optional (highly recommended for data server auditing and monitoring purposes)
# schema: 'rails123' // name space qualifier
# account: 'tester' // OS account (client workstation)
# app_user: 'test11' // authenticated application user
# application: 'rtests' // application name
# workstation: 'plato' // client workstation name
# == remote TCP/IP connection (required when no local database catalog entry available)
# host: 'socrates' // fully qualified hostname or IP address
# port: '50000' // data server TCP/IP port number
# security: 'SSL' // optional parameter enabling SSL encryption -
# // - Available only from CLI version V95fp2 and above
# authentication: 'SERVER' // AUTHENTICATION type which the client uses -
# // - to connect to the database server. By default value is SERVER
# timeout: 10 // Specifies the time in seconds (0 - 32767) to wait for a reply from server -
# //- when trying to establish a connection before generating a timeout
# == Parameterized Queries Support
# parameterized: false // Specifies if the prepared statement support of
# //- the IBM_DB Adapter is to be turned on or off
#
# When schema is not specified, the username value is used instead.
# The default setting of parameterized is false.
#
class IBM_DBAdapter < AbstractAdapter
# Raw driver connection and the server-type strategy object.
attr_reader :connection, :servertype
# SQL issued so far (scanned by handle_lobs) and its bind values.
attr_accessor :sql,:handle_lobs_triggered, :sql_parameter_values
# Connection attributes cached for auditing/billing support.
attr_reader :schema, :app_user, :account, :application, :workstation
# Prepared-statement mode flags.
attr_reader :pstmt_support_on, :set_quoted_literal_replacement
# Canonical name under which this adapter identifies itself.
def adapter_name
  'IBM_DB'
end
# Arel visitor used for bind-parameter substitution.
class BindSubstitution < Arel::Visitors::IBM_DB # :nodoc:
  include Arel::Visitors
end
# Caches connection configuration, detects the backing data server
# (LUW / z/OS / i5 / Informix) from the driver's server_info and picks
# the matching servertype strategy, then applies schema and
# prepared-statement settings.
def initialize(connection, ar3, logger, config, conn_options)
  # Caching database connection configuration (+connect+ or +reconnect+ support)\
  @config = config
  @connection = connection
  @isAr3 = ar3
  @conn_options = conn_options
  @database = config[:database]
  @username = config[:username]
  @password = config[:password]
  if config.has_key?(:host)
    @host = config[:host]
    @port = config[:port] || 50000 # default port
  end
  @schema = config[:schema]
  @security = config[:security] || nil
  @authentication = config[:authentication] || nil
  @timeout = config[:timeout] || 0 # default timeout value is 0
  @app_user = @account = @application = @workstation = nil
  # Caching database connection options (auditing and billing support)
  @app_user = conn_options[:app_user] if conn_options.has_key?(:app_user)
  @account = conn_options[:account] if conn_options.has_key?(:account)
  @application = conn_options[:application] if conn_options.has_key?(:application)
  @workstation = conn_options[:workstation] if conn_options.has_key?(:workstation)
  @sql = []
  @sql_parameter_values = [] #Used only if pstmt support is turned on
  @handle_lobs_triggered = false
  # Calls the parent class +ConnectionAdapters+' initializer
  # which sets @connection, @logger, @runtime and @last_verification
  super(@connection, logger, @config)
  if @connection
    server_info = IBM_DB.server_info( @connection )
    if( server_info )
      # Dispatch on the reported DBMS name/version to pick a strategy.
      case server_info.DBMS_NAME
        when /DB2\//i             # DB2 for Linux, Unix and Windows (LUW)
          case server_info.DBMS_VER
            when /09.07/i          # DB2 Version 9.7 (Cobra)
              @servertype = IBM_DB2_LUW_COBRA.new(self, @isAr3)
            when /10./i            #DB2 version 10.1 and above
              @servertype = IBM_DB2_LUW_COBRA.new(self, @isAr3)
            else                   # DB2 Version 9.5 or below
              @servertype = IBM_DB2_LUW.new(self, @isAr3)
          end
        when /DB2/i               # DB2 for zOS
          case server_info.DBMS_VER
            when /09/              # DB2 for zOS version 9 and version 10
              @servertype = IBM_DB2_ZOS.new(self, @isAr3)
            when /10/
              @servertype = IBM_DB2_ZOS.new(self, @isAr3)
            when /11/
              @servertype = IBM_DB2_ZOS.new(self, @isAr3)
            when /12/
              @servertype = IBM_DB2_ZOS.new(self, @isAr3)
            when /08/              # DB2 for zOS version 8
              @servertype = IBM_DB2_ZOS_8.new(self, @isAr3)
            else                   # DB2 for zOS version 7
              raise "Only DB2 z/OS version 8 and above are currently supported"
          end
        when /AS/i                # DB2 for i5 (iSeries)
          @servertype = IBM_DB2_I5.new(self, @isAr3)
        when /IDS/i               # Informix Dynamic Server
          @servertype = IBM_IDS.new(self, @isAr3)
        else
          log( "server_info", "Forcing servertype to LUW: DBMS name could not be retrieved. Check if your client version is of the right level")
          warn "Forcing servertype to LUW: DBMS name could not be retrieved. Check if your client version is of the right level"
          @servertype = IBM_DB2_LUW.new(self, @isAr3)
      end
    else
      error_msg = IBM_DB.getErrormsg( @connection, IBM_DB::DB_CONN )
      IBM_DB.close( @connection )
      raise "Cannot retrieve server information: #{error_msg}"
    end
  end
  # Executes the +set schema+ statement using the schema identifier provided
  @servertype.set_schema(@schema) if @schema && @schema != @username
  # Check for the start value for id (primary key column). By default it is 1
  if config.has_key?(:start_id)
    @start_id = config[:start_id]
  else
    @start_id = 1
  end
  #Check Arel version
  begin
    @arelVersion = Arel::VERSION.to_i
  rescue
    @arelVersion = 0
  end
  if(@arelVersion >= 3 )
    @visitor = Arel::Visitors::IBM_DB.new self
  end
  # Prepared-statement mode disables quoted-literal replacement.
  if(config.has_key?(:parameterized) && config[:parameterized] == true)
    @pstmt_support_on = true
    @prepared_statements = true
    @set_quoted_literal_replacement = IBM_DB::QUOTED_LITERAL_REPLACEMENT_OFF
  else
    @pstmt_support_on = false
    @prepared_statements = false
    @set_quoted_literal_replacement = IBM_DB::QUOTED_LITERAL_REPLACEMENT_ON
  end
end
# Optional connection attribute: database name space qualifier
# Optional connection attribute: database name space qualifier.
# Issues SET SCHEMA only when the value actually changes.
def schema=(name)
  return if name == @schema
  @schema = name
  @servertype.set_schema(@schema)
end
# Optional connection attribute: authenticated application user.
# Pushes the value to the driver; on success caches what the driver reports.
def app_user=(name)
  return if name == @app_user
  if IBM_DB.set_option(@connection, { IBM_DB::SQL_ATTR_INFO_USERID => "#{name}" }, 1)
    @app_user = IBM_DB.get_option(@connection, IBM_DB::SQL_ATTR_INFO_USERID, 1)
  end
end

# Optional connection attribute: OS account (client workstation).
def account=(name)
  return if name == @account
  if IBM_DB.set_option(@connection, { IBM_DB::SQL_ATTR_INFO_ACCTSTR => "#{name}" }, 1)
    @account = IBM_DB.get_option(@connection, IBM_DB::SQL_ATTR_INFO_ACCTSTR, 1)
  end
end

# Optional connection attribute: application name.
def application=(name)
  return if name == @application
  if IBM_DB.set_option(@connection, { IBM_DB::SQL_ATTR_INFO_APPLNAME => "#{name}" }, 1)
    @application = IBM_DB.get_option(@connection, IBM_DB::SQL_ATTR_INFO_APPLNAME, 1)
  end
end

# Optional connection attribute: client workstation name.
def workstation=(name)
  return if name == @workstation
  if IBM_DB.set_option(@connection, { IBM_DB::SQL_ATTR_INFO_WRKSTNNAME => "#{name}" }, 1)
    @workstation = IBM_DB.get_option(@connection, IBM_DB::SQL_ATTR_INFO_WRKSTNNAME, 1)
  end
end
# Builds the Arel visitor used for the given connection pool.
def self.visitor_for(pool)
  Arel::Visitors::IBM_DB.new(pool)
end
#Check Arel version
# NOTE: this runs at class-definition time; @arelVersion here is a class
# instance variable, distinct from the instance variable set in #initialize.
begin
  @arelVersion = Arel::VERSION.to_i
rescue
  @arelVersion = 0
end
# Pre-Arel-6 needs a to_sql that substitutes binds while visiting the AST.
if(@arelVersion < 6 )
  def to_sql(arel, binds = [])
    if arel.respond_to?(:ast)
      visitor.accept(arel.ast) do
        quote(*binds.shift.reverse)
      end
    else
      arel
    end
  end
end
# This adapter supports migrations.
# Current limitations:
# +rename_column+ is not currently supported by the IBM data servers
# +remove_column+ is not currently supported by the DB2 for zOS data server
# Tables containing columns of XML data type do not support +remove_column+
def supports_migrations?
  true
end
# Foreign-key constraints are supported by all targeted IBM data servers.
def supports_foreign_keys?
  true
end
# TIMESTAMP columns accept fractional-second precision.
def supports_datetime_with_precision?
  true
end
# This Adapter supports DDL transactions.
# This means CREATE TABLE and other DDL statements can be carried out as a transaction.
# That is the statements executed can be ROLLED BACK in case of any error during the process.
def supports_ddl_transactions?
  true
end
# Logs +sql+ under +name+ without executing it (used by handle_lobs).
def log_query(sql, name) #:nodoc:
  # Used by handle_lobs
  log(sql,name){}
end
#==============================================
# CONNECTION MANAGEMENT
#==============================================
# Tests whether the underlying driver connection is still alive;
# any driver error is treated as "not active".
def active?
  IBM_DB.active(@connection)
rescue StandardError
  false
end
# Private method used by +reconnect!+.
# It connects to the database with the initially provided credentials
def connect
  # If the type of connection is net based
  if(@username.nil? || @password.nil?)
    raise ArgumentError, "Username/Password cannot be nil"
  end
  begin
    if @host
      # TCP/IP connection: rebuild the full ODBC connection string from
      # the cached configuration.
      @conn_string = "DRIVER={IBM DB2 ODBC DRIVER};\
DATABASE=#{@database};\
HOSTNAME=#{@host};\
PORT=#{@port};\
PROTOCOL=TCPIP;\
UID=#{@username};\
PWD=#{@password};"
      @conn_string << "SECURITY=#{@security};" if @security
      @conn_string << "AUTHENTICATION=#{@authentication};" if @authentication
      @conn_string << "CONNECTTIMEOUT=#{@timeout};"
      # Connects and assigns the resulting IBM_DB.Connection to the +@connection+ instance variable
      @connection = IBM_DB.connect(@conn_string, '', '', @conn_options, @set_quoted_literal_replacement)
    else
      # Connects to the database using the local alias (@database)
      # and assigns the connection object (IBM_DB.Connection) to @connection
      @connection = IBM_DB.connect(@database, @username, @password, @conn_options, @set_quoted_literal_replacement)
    end
  rescue StandardError => connect_err
    # Best-effort: a failed reconnect leaves @connection false rather than raising.
    warn "Connection to database #{@database} failed: #{connect_err}"
    @connection = false
  end
  # Sets the schema if different from default (username)
  if @schema && @schema != @username
    @servertype.set_schema(@schema)
  end
end
private :connect
# Closes the current connection and opens a new one with the cached credentials.
def reconnect!
  disconnect!
  connect
end

# Closes the current connection.
# Returns true on success, false if the connection is already closed,
# and nil when there is no connection or the driver raises.
def disconnect!
  conn = @connection
  return nil if conn.nil? || conn == false
  begin
    IBM_DB.close(conn)
  rescue StandardError
    nil
  end
end
#==============================================
# DATABASE STATEMENTS
#==============================================
# Creates a table, first letting the servertype prepare LOB handling and
# pre-registering the unique index required on the primary-key column
# (needed for DB2 V8 on z/OS before the table definition is complete).
def create_table(name, options = {})
  @servertype.setup_for_lob_table
  #Table definition is complete only when a unique index is created on the primarykey column for DB2 V8 on zOS
  #create index on id column if options[:id] is nil or id ==true
  #else check if options[:primary_key]is not nil then create an unique index on that column
  if !options[:id].nil? || !options[:primary_key].nil?
    if (!options[:id].nil? && options[:id] == true)
      @servertype.create_index_after_table(name,"id")
    elsif !options[:primary_key].nil?
      @servertype.create_index_after_table(name,options[:primary_key].to_s)
    end
  else
    # Default primary key column is "id".
    @servertype.create_index_after_table(name,"id")
  end
  super(name, options)
end
# Returns an array of hashes with the column names as keys and
# column values as values. +sql+ is the select query,
# and +name+ is an optional description for logging
# Executes a prepared SELECT described by +sql_param_hash+
# ("sqlSegment" => SQL text, "paramArray" => bind values) and returns
# an array of row hashes keyed by column name.
def prepared_select(sql_param_hash, name = nil)
  # Replaces {"= NULL" with " IS NULL"} OR {"IN (NULL)" with " IS NULL"}
  results = []
  # Invokes the method +prepare+ in order prepare the SQL
  # IBM_DB.Statement is returned from which the statement is executed and results fetched
  pstmt = prepare(sql_param_hash["sqlSegment"], name)
  if(execute_prepared_stmt(pstmt, sql_param_hash["paramArray"]))
    begin
      results = @servertype.select(pstmt)
    rescue StandardError => fetch_error # Handle driver fetch errors
      error_msg = IBM_DB.getErrormsg(pstmt, IBM_DB::DB_STMT )
      if error_msg && !error_msg.empty?
        raise StatementInvalid,"Failed to retrieve data: #{error_msg}"
      else
        error_msg = "An unexpected error occurred during data retrieval"
        error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
        raise error_msg
      end
    ensure
      # Ensures to free the resources associated with the statement
      IBM_DB.free_stmt(pstmt) if pstmt
    end
  end
  # The array of record hashes is returned
  results
end
# Returns an array of hashes with the column names as keys and
# column values as values. +sql+ is the select query,
# and +name+ is an optional description for logging
# Executes a prepared SELECT described by +sql_param_hash+ and returns
# an array holding the first column value of each row (nil when the
# driver yields no result set).
def prepared_select_values(sql_param_hash, name = nil)
  # Replaces {"= NULL" with " IS NULL"} OR {"IN (NULL)" with " IS NULL"}
  results = []
  # Invokes the method +prepare+ in order prepare the SQL
  # IBM_DB.Statement is returned from which the statement is executed and results fetched
  pstmt = prepare(sql_param_hash["sqlSegment"], name)
  if(execute_prepared_stmt(pstmt, sql_param_hash["paramArray"]))
    begin
      results = @servertype.select_rows(sql_param_hash["sqlSegment"], name, pstmt, results)
      if results
        # Project the first column of every row.
        return results.map { |v| v[0] }
      else
        nil
      end
    rescue StandardError => fetch_error # Handle driver fetch errors
      error_msg = IBM_DB.getErrormsg(pstmt, IBM_DB::DB_STMT )
      if error_msg && !error_msg.empty?
        raise StatementInvalid,"Failed to retrieve data: #{error_msg}"
      else
        error_msg = "An unexpected error occurred during data retrieval"
        error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
        raise error_msg
      end
    ensure
      # Ensures to free the resources associated with the statement
      IBM_DB.free_stmt(pstmt) if pstmt
    end
  end
  # The array of record hashes is returned
  results
end
#Calls the servertype select method to fetch the data
def fetch_data(stmt)
if(stmt)
begin
return @servertype.select(stmt)
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise StatementInvalid,"Failed to retrieve data: #{error_msg}"
else
error_msg = "An unexpected error occurred during data retrieval"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure
# Ensures to free the resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
end
end
def select(sql, name = nil, binds = [])
# Replaces {"= NULL" with " IS NULL"} OR {"IN (NULL)" with " IS NULL"
sql.gsub( /(=\s*NULL|IN\s*\(NULL\))/i, " IS NULL" )
results = []
if(binds.nil? || binds.empty?)
stmt = execute(sql, name)
else
stmt = exec_query(sql, name, binds)
end
cols = IBM_DB.resultCols(stmt)
if( stmt )
results = fetch_data(stmt)
end
if(@isAr3)
return results
else
return ActiveRecord::Result.new(cols, results)
end
end
#Returns an array of arrays containing the field values.
#This is an implementation for the abstract method
#+sql+ is the select query and +name+ is an optional description for logging
def select_rows(sql, name = nil,binds = [])
# Replaces {"= NULL" with " IS NULL"} OR {"IN (NULL)" with " IS NULL"}
sql.gsub( /(=\s*NULL|IN\s*\(NULL\))/i, " IS NULL" )
results = []
# Invokes the method +execute+ in order to log and execute the SQL
# IBM_DB.Statement is returned from which results can be fetched
if !binds.nil? && !binds.empty?
param_array = binds.map do |column,value|
quote_value_for_pstmt(value, column)
end
return prepared_select({"sqlSegment" => sql, "paramArray" => param_array})
end
stmt = execute(sql, name)
if(stmt)
begin
results = @servertype.select_rows(sql, name, stmt, results)
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise StatementInvalid,"Failed to retrieve data: #{error_msg}"
else
error_msg = "An unexpected error occurred during data retrieval"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure
# Ensures to free the resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
end
# The array of record hashes is returned
results
end
# Returns a record hash with the column names as keys and column values
# as values.
#def select_one(sql, name = nil)
# Gets the first hash from the array of hashes returned by
# select_all
# select_all(sql,name).first
#end
#inserts values from fixtures
#overridden to handle LOB's fixture insertion, as, in normal inserts callbacks are triggered but during fixture insertion callbacks are not triggered
#hence only markers like @@@IBMBINARY@@@ will be inserted and are not updated to actual data
def insert_fixture(fixture, table_name)
if(fixture.respond_to?(:keys))
insert_query = "INSERT INTO #{quote_table_name(table_name)} ( #{fixture.keys.join(', ')})"
else
insert_query = "INSERT INTO #{quote_table_name(table_name)} ( #{fixture.key_list})"
end
insert_values = []
params = []
if @servertype.instance_of? IBM_IDS
super
return
end
column_list = columns(table_name)
fixture.each do |item|
col = nil
column_list.each do |column|
if column.name.downcase == item.at(0).downcase
col= column
break
end
end
if item.at(1).nil? ||
item.at(1) == {} ||
(item.at(1) == '' && !(col.sql_type.to_s =~ /text|clob/i))
params << 'NULL'
elsif (!col.nil? && (col.sql_type.to_s =~ /blob|binary|clob|text|xml/i) )
# Add a '?' for the parameter or a NULL if the value is nil or empty
# (except for a CLOB field where '' can be a value)
insert_values << quote_value_for_pstmt(item.at(1))
params << '?'
else
insert_values << quote_value_for_pstmt(item.at(1),col)
params << '?'
end
end
insert_query << " VALUES ("+ params.join(',') + ")"
unless stmt = IBM_DB.prepare(@connection, insert_query)
error_msg = IBM_DB.getErrormsg( @connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to prepare statement for fixtures insert due to : #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during preparing SQL for fixture insert')
end
end
#log_query(insert_query,'fixture insert')
log(insert_query,'fixture insert') do
unless IBM_DB.execute(stmt, insert_values)
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
IBM_DB.free_stmt(stmt) if stmt
raise "Failed to insert due to: #{error_msg}"
else
IBM_DB.free_stmt(stmt) if stmt
end
end
end
def empty_insert_statement_value(pkey)
"(#{pkey}) VALUES (DEFAULT)"
end
# Perform an insert and returns the last ID generated.
# This can be the ID passed to the method or the one auto-generated by the database,
# and retrieved by the +last_generated_id+ method.
def insert_direct(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
if @handle_lobs_triggered #Ensure the array of sql is cleared if they have been handled in the callback
@sql = []
@handle_lobs_triggered = false
end
clear_query_cache if defined? clear_query_cache
if stmt = execute(sql, name)
begin
@sql << sql
return id_value || @servertype.last_generated_id(stmt)
# Ensures to free the resources associated with the statement
ensure
IBM_DB.free_stmt(stmt) if stmt
end
end
end
def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds=[])
if(@arelVersion < 6)
sql, binds = [to_sql(arel), binds]
else
sql, binds = [to_sql(arel),binds] #sql_for_insert(to_sql(arel, binds), binds) #[to_sql(arel),binds]
end
#unless IBM_DBAdapter.respond_to?(:exec_insert)
if binds.nil? || binds.empty?
return insert_direct(sql, name, pk, id_value, sequence_name)
end
clear_query_cache if defined? clear_query_cache
if stmt = exec_insert(sql, name, binds)
begin
@sql << sql
return id_value || @servertype.last_generated_id(stmt)
ensure
IBM_DB.free_stmt(stmt) if stmt
end
end
end
# Praveen
# Performs an insert using the prepared statement and returns the last ID generated.
# This can be the ID passed to the method or the one auto-generated by the database,
# and retrieved by the +last_generated_id+ method.
def prepared_insert(pstmt, param_array = nil, id_value = nil)
if @handle_lobs_triggered #Ensure the array of sql is cleared if they have been handled in the callback
@sql = []
@sql_parameter_values = []
@handle_lobs_triggered = false
end
clear_query_cache if defined? clear_query_cache
begin
if execute_prepared_stmt(pstmt, param_array)
@sql << @prepared_sql
@sql_parameter_values << param_array
return id_value || @servertype.last_generated_id(pstmt)
end
rescue StandardError => insert_err
raise insert_err
ensure
IBM_DB.free_stmt(pstmt) if pstmt
end
end
# Praveen
# Prepares and logs +sql+ commands and
# returns a +IBM_DB.Statement+ object.
def prepare(sql,name = nil)
# The +log+ method is defined in the parent class +AbstractAdapter+
@prepared_sql = sql
log(sql,name) do
@servertype.prepare(sql, name)
end
end
# Praveen
#Executes the prepared statement
#ReturnsTrue on success and False on Failure
def execute_prepared_stmt(pstmt, param_array = nil)
if !param_array.nil? && param_array.size < 1
param_array = nil
end
if( !IBM_DB.execute(pstmt, param_array) )
error_msg = IBM_DB.getErrormsg(pstmt, IBM_DB::DB_STMT)
if !error_msg.empty?
error_msg = "Statement execution failed: " + error_msg
else
error_msg = "Statement execution failed"
end
IBM_DB.free_stmt(pstmt) if pstmt
raise StatementInvalid, error_msg
else
return true
end
end
# Executes +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_query(sql, name = 'SQL', binds = [])
begin
param_array = binds.map do |column,value|
quote_value_for_pstmt(value, column)
end
stmt = prepare(sql, name)
if( stmt )
if(execute_prepared_stmt(stmt, param_array))
return stmt
end
else
return false
end
ensure
@offset = @limit = nil
end
end
# Executes and logs +sql+ commands and
# returns a +IBM_DB.Statement+ object.
def execute(sql, name=nil)
# Logs and execute the sql instructions.
# The +log+ method is defined in the parent class +AbstractAdapter+
#sql='INSERT INTO ar_internal_metadata (key, value, created_at, updated_at) VALUES ('10', '10', '10', '10')
log(sql , name) do
@servertype.execute(sql, name)
end
end
def exec_insert(sql,name,binds,pk,sequence_name)
end
# Executes an "UPDATE" SQL statement
def update_direct(sql, name = nil)
if @handle_lobs_triggered #Ensure the array of sql is cleared if they have been handled in the callback
@sql = []
@handle_lobs_triggered = false
end
# Logs and execute the given sql query.
if stmt = execute(sql, name)
begin
@sql << sql
# Retrieves the number of affected rows
IBM_DB.num_rows(stmt)
# Ensures to free the resources associated with the statement
ensure
IBM_DB.free_stmt(stmt) if stmt
end
end
end
#Praveen
def prepared_update(pstmt, param_array = nil )
if @handle_lobs_triggered #Ensure the array of sql is cleared if they have been handled in the callback
@sql = []
@sql_parameter_values = []
@handle_lobs_triggered = false
end
clear_query_cache if defined? clear_query_cache
begin
if execute_prepared_stmt(pstmt, param_array)
@sql << @prepared_sql
@sql_parameter_values << param_array
# Retrieves the number of affected rows
IBM_DB.num_rows(pstmt)
# Ensures to free the resources associated with the statement
end
rescue StandardError => updt_err
raise updt_err
ensure
IBM_DB.free_stmt(pstmt) if pstmt
end
end
# The delete method executes the delete
# statement and returns the number of affected rows.
# The method is an alias for +update+
alias_method :prepared_delete, :prepared_update
def update(arel, name = nil, binds = [])
if(@arelVersion < 6 )
sql = to_sql(arel)
else
sql = to_sql(arel,binds)
end
# Make sure the WHERE clause handles NULL's correctly
sqlarray = sql.split(/\s*WHERE\s*/)
size = sqlarray.size
if size > 1
sql = sqlarray[0] + " WHERE "
if size > 2
1.upto size-2 do |index|
sqlarray[index].gsub!( /(=\s*NULL|IN\s*\(NULL\))/i, " IS NULL" ) unless sqlarray[index].nil?
sql = sql + sqlarray[index] + " WHERE "
end
end
sqlarray[size-1].gsub!( /(=\s*NULL|IN\s*\(NULL\))/i, " IS NULL" ) unless sqlarray[size-1].nil?
sql = sql + sqlarray[size-1]
end
clear_query_cache if defined? clear_query_cache
if binds.nil? || binds.empty?
update_direct(sql, name)
else
begin
if stmt = exec_query(sql,name,binds)
IBM_DB.num_rows(stmt)
end
ensure
IBM_DB.free_stmt(stmt) if(stmt)
end
end
end
alias_method :delete, :update
# Begins the transaction (and turns off auto-committing)
def begin_db_transaction
# Turns off the auto-commit
IBM_DB.autocommit(@connection, IBM_DB::SQL_AUTOCOMMIT_OFF)
end
# Commits the transaction and turns on auto-committing
def commit_db_transaction
# Commits the transaction
IBM_DB.commit @connection rescue nil
# Turns on auto-committing
IBM_DB.autocommit @connection, IBM_DB::SQL_AUTOCOMMIT_ON
end
# Rolls back the transaction and turns on auto-committing. Must be
# done if the transaction block raises an exception or returns false
def rollback_db_transaction
# ROLLBACK the transaction
IBM_DB.rollback(@connection) rescue nil
# Turns on auto-committing
IBM_DB.autocommit @connection, IBM_DB::SQL_AUTOCOMMIT_ON
end
def get_limit_offset_clauses(limit,offset)
if limit && limit == 0
clauses = @servertype.get_limit_offset_clauses(limit,0)
else
clauses = @servertype.get_limit_offset_clauses(limit, offset)
end
end
# Modifies a sql statement in order to implement a LIMIT and an OFFSET.
# A LIMIT defines the number of rows that should be fetched, while
# an OFFSET defines from what row the records must be fetched.
# IBM data servers implement a LIMIT in SQL statements through:
# FETCH FIRST n ROWS ONLY, where n is the number of rows required.
# The implementation of OFFSET is more elaborate, and requires the usage of
# subqueries and the ROW_NUMBER() command in order to add row numbering
# as an additional column to a copy of the existing table.
# ==== Examples
# add_limit_offset!('SELECT * FROM staff', {:limit => 10})
# generates: "SELECT * FROM staff FETCH FIRST 10 ROWS ONLY"
#
# add_limit_offset!('SELECT * FROM staff', {:limit => 10, :offset => 30})
# generates "SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_rownum
# FROM (SELECT * FROM staff) AS I) AS O WHERE sys_row_num BETWEEN 31 AND 40"
def add_limit_offset!(sql, options)
limit = options[:limit]
offset = options[:offset]
# if the limit is zero
if limit && limit == 0
# Returns a query that will always generate zero records
# (e.g. WHERE sys_row_num BETWEEN 1 and 0)
if( @pstmt_support_on )
sql = @servertype.query_offset_limit!(sql, 0, limit, options)
else
sql = @servertype.query_offset_limit(sql, 0, limit)
end
# If there is a non-zero limit
else
# If an offset is specified builds the query with offset and limit,
# otherwise retrieves only the first +limit+ rows
if( @pstmt_support_on )
sql = @servertype.query_offset_limit!(sql, offset, limit, options)
else
sql = @servertype.query_offset_limit(sql, offset, limit)
end
end
# Returns the sql query in any case
sql
end # method add_limit_offset!
def default_sequence_name(table, column) # :nodoc:
"#{table}_#{column}_seq"
end
#==============================================
# QUOTING
#==============================================
# Quote date/time values for use in SQL input.
# Includes microseconds, if the value is a Time responding to usec.
=begin
def quoted_date(value) #:nodoc:
if value.respond_to?(:usec)
"#{super}.#{sprintf("%06d", value.usec)}"
else
super
end
end
=end
def quote_value_for_pstmt(value, column=nil)
return value.quoted_id if value.respond_to?(:quoted_id)
case value
when String, ActiveSupport::Multibyte::Chars then
value = value.to_s
if column && column.sql_type.to_s =~ /int|serial|float/i
value = column.sql_type.to_s =~ /int|serial/i ? value.to_i : value.to_f
value
else
value
end
when NilClass then nil
when TrueClass then 1
when FalseClass then 0
when Float, Fixnum, Bignum then value
# BigDecimals need to be output in a non-normalized form and quoted.
when BigDecimal then value.to_s('F')
when Numeric, Symbol then value.to_s
else
if value.acts_like?(:date) || value.acts_like?(:time)
quoted_date(value)
else
value.to_yaml
end
end
end
# Properly quotes the various data types.
# +value+ contains the data, +column+ is optional and contains info on the field
# def quote(value, column=nil)
# return value.quoted_id if value.respond_to?(:quoted_id)
# case value
# # If it's a numeric value and the column sql_type is not a string, it shouldn't be quoted
# # (IBM_DB doesn't accept quotes on numeric types)
# when Numeric
# # If the column sql_type is text or string, return the quote value
# if (column && ( column.sql_type.to_s =~ /text|char/i ))
# unless caller[0] =~ /insert_fixture/i
# "'#{value}'"
# else
# "#{value}"
# end
# else
# # value is Numeric, column.sql_type is not a string,
# # therefore it converts the number to string without quoting it
# value.to_s
# end
# when String, ActiveSupport::Multibyte::Chars
# if column && column.sql_type.to_s =~ /binary|blob/i && !(column.sql_type.to_s =~ /for bit data/i)
# # If quoting is required for the insert/update of a BLOB
# unless caller[0] =~ /add_column_options/i
# # Invokes a convertion from string to binary
# @servertype.set_binary_value
# else
# # Quoting required for the default value of a column
# @servertype.set_binary_default(value)
# end
# elsif column && column.sql_type.to_s =~ /text|clob/i
# unless caller[0] =~ /add_column_options/i
# @servertype.set_text_default(quote_string(value))
# else
# @servertype.set_text_default(quote_string(value))
# end
# elsif column && column.sql_type.to_s =~ /xml/i
# unless caller[0] =~ /add_column_options/i
# "#{value}"
# else
# "#{value}"
# end
# else
# unless caller[0] =~ /insert_fixture/i
# super(value)
# else
# "#{value}"
# end
# end
# #when TrueClass then quoted_true # return '1' for true
# when TrueClass
# quoted_true
# #when FalseClass then quoted_false # return '0' for false
# when FalseClass
# quoted_false
# when nil
# "NULL"
# when Date
# "'#{quoted_date(value)}'"
# when Time
# "'#{quoted_date(value)}'"
# when Symbol
# "'#{quote_string(value)}'"
# else
# unless caller[0] =~ /insert_fixture/i
# "'#{quote_string(YAML.dump(value))}'"
# else
# "#{quote_string(YAML.dump(value))}"
# end
# end
# end
# # Quotes a given string, escaping single quote (') characters.
# def quote_string(string)
# #string.gsub(/'/, "''")
# string.gsub('\\', '\&\&').gsub("'", "''")
# end
# *true* is represented by a smallint 1, *false*
# by 0, as no native boolean type exists in DB2.
# Numerics are not quoted in DB2.
def quoted_true
"1"
end
def quoted_false
"0"
end
def quote_column_name(name)
@servertype.check_reserved_words(name)
end
#==============================================
# SCHEMA STATEMENTS
#==============================================
# Returns a Hash of mappings from the abstract data types to the native
# database types
def native_database_types
{
:primary_key => { :name => @servertype.primary_key_definition(@start_id)},
:string => { :name => "varchar", :limit => 255 },
:text => { :name => "clob" },
:integer => { :name => "integer" },
:float => { :name => "float" },
:datetime => { :name => "timestamp" },
:timestamp => { :name => "timestamp" },
:time => { :name => "time" },
:date => { :name => "date" },
:binary => { :name => "blob" },
# IBM data servers don't have a native boolean type.
# A boolean can be represented by a smallint,
# adopting the convention that False is 0 and True is 1
:boolean => { :name => "smallint"},
:xml => { :name => "xml"},
:decimal => { :name => "decimal" },
:rowid => { :name => "rowid" }, # rowid is a supported datatype on z/OS and i/5
:serial => { :name => "serial" }, # rowid is a supported datatype on Informix Dynamic Server
:char => { :name => "char" },
:double => { :name => @servertype.get_double_mapping },
:decfloat => { :name => "decfloat"},
:graphic => { :name => "graphic"},
:vargraphic => { :name => "vargraphic"},
:bigint => { :name => "bigint"}
}
end
def build_conn_str_for_dbops()
connect_str = "DRIVER={IBM DB2 ODBC DRIVER};ATTACH=true;"
if([email protected]?)
connect_str << "HOSTNAME=#{@host};"
connect_str << "PORT=#{@port};"
connect_str << "PROTOCOL=TCPIP;"
end
connect_str << "UID=#{@username};PWD=#{@password};"
return connect_str
end
def drop_database(dbName)
connect_str = build_conn_str_for_dbops()
#Ensure connection is closed before trying to drop a database.
#As a connect call would have been made by call seeing connection in active
disconnect!
begin
dropConn = IBM_DB.connect(connect_str, '', '')
rescue StandardError => connect_err
raise "Failed to connect to server due to: #{connect_err}"
end
if(IBM_DB.dropDB(dropConn,dbName))
IBM_DB.close(dropConn)
return true
else
error = IBM_DB.getErrormsg(dropConn, IBM_DB::DB_CONN)
IBM_DB.close(dropConn)
raise "Could not drop Database due to: #{error}"
end
end
def create_database(dbName, codeSet=nil, mode=nil)
connect_str = build_conn_str_for_dbops()
#Ensure connection is closed before trying to drop a database.
#As a connect call would have been made by call seeing connection in active
disconnect!
begin
createConn = IBM_DB.connect(connect_str, '', '')
rescue StandardError => connect_err
raise "Failed to connect to server due to: #{connect_err}"
end
if(IBM_DB.createDB(createConn,dbName,codeSet,mode))
IBM_DB.close(createConn)
return true
else
error = IBM_DB.getErrormsg(createConn, IBM_DB::DB_CONN)
IBM_DB.close(createConn)
raise "Could not create Database due to: #{error}"
end
end
def valid_type?(type)
#!native_database_types[type].nil?
native_database_types[type].nil?
end
# IBM data servers do not support limits on certain data types (unlike MySQL)
# Limit is supported for the {float, decimal, numeric, varchar, clob, blob, graphic, vargraphic} data types.
def type_to_sql(type, limit=nil, precision=nil, scale=nil )
if type.to_sym == :decimal
if limit.class == Hash
if limit.has_key?("precision".to_sym)
precision = limit[:precision]
end
end
if limit.class == Hash
if limit.has_key?("scale".to_sym)
scale = limit[:scale]
end
end
sql_segment = native_database_types[type.to_sym][:name].to_s
if !precision.nil? && !scale.nil?
sql_segment << "(#{precision},#{scale})"
return sql_segment
elsif scale.nil? && !precision.nil?
sql_segment << "(#{precision})"
else
return sql_segment
end
end
if type.to_sym == :decfloat
sql_segment = native_database_types[type.to_sym][:name].to_s
sql_segment << "(#{precision})" if !precision.nil?
return sql_segment
end
if type.to_sym == :vargraphic
sql_segment = native_database_types[type.to_sym][:name].to_s
if limit.class == Hash
if limit.has_key?("limit".to_sym)
limit1 = limit[:limit]
sql_segment << "(#{limit1})"
else
return "vargraphic(1)"
end
else
if limit != nil
sql_segment << "(#{limit})"
else
return "vargraphic(1)"
end
end
return sql_segment
end
if type.to_sym == :graphic
sql_segment = native_database_types[type.to_sym][:name].to_s
if limit.class == Hash
if limit.has_key?("limit".to_sym)
limit1 = limit[:limit]
sql_segment << "(#{limit1})"
else
return "graphic(1)"
end
else
if limit != nil
sql_segment << "(#{limit})"
else
return "graphic(1)"
end
end
return sql_segment
end
if limit.class == Hash
return super if limit.has_key?("limit".to_sym).nil?
else
return super if limit.nil?
end
# strip off limits on data types not supporting them
if @servertype.limit_not_supported_types.include? type.to_sym
return native_database_types[type.to_sym][:name].to_s
elsif type.to_sym == :boolean
return "smallint"
else
return super(type)
end
end
# Returns the maximum length a table alias identifier can be.
# IBM data servers (cross-platform) table limit is 128 characters
def table_alias_length
128
end
# Retrieves table's metadata for a specified shema name
def tables(name = nil)
# Initializes the tables array
tables = []
# Retrieve table's metadata through IBM_DB driver
stmt = IBM_DB.tables(@connection, nil,
@servertype.set_case(@schema))
if(stmt)
begin
# Fetches all the records available
while tab = IBM_DB.fetch_assoc(stmt)
# Adds the lowercase table name to the array
if(tab["table_type"]== 'TABLE') #check, so that only tables are dumped,IBM_DB.tables also returns views,alias etc in the schema
tables << tab["table_name"].downcase
end
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve table metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of table metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure
IBM_DB.free_stmt(stmt) if stmt # Free resources associated with the statement
end
else # Handle driver execution errors
error_msg = IBM_DB.getErrormsg(@connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve tables metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during retrieval of table metadata')
end
end
# Returns the tables array
return tables
end
###################################
# Retrieves views's metadata for a specified shema name
def views
# Initializes the tables array
tables = []
# Retrieve view's metadata through IBM_DB driver
stmt = IBM_DB.tables(@connection, nil,
@servertype.set_case(@schema))
if(stmt)
begin
# Fetches all the records available
while tab = IBM_DB.fetch_assoc(stmt)
# Adds the lowercase view's name to the array
if(tab["table_type"]== 'V') #check, so that only views are dumped,IBM_DB.tables also returns tables,alias etc in the schema
tables << tab["table_name"].downcase
end
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve views metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of views metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure
IBM_DB.free_stmt(stmt) if stmt # Free resources associated with the statement
end
else # Handle driver execution errors
error_msg = IBM_DB.getErrormsg(@connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve tables metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during retrieval of views metadata')
end
end
# Returns the tables array
return tables
end
# Returns the primary key of the mentioned table
def primary_key(table_name)
pk_name = nil
stmt = IBM_DB.primary_keys( @connection, nil,
@servertype.set_case(@schema),
@servertype.set_case(table_name))
if(stmt)
begin
if ( pk_index_row = IBM_DB.fetch_array(stmt) )
pk_name = pk_index_row[3].downcase
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg( stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve primarykey metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of primary key metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure # Free resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
else
error_msg = IBM_DB.getErrormsg( @connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve primary key metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during primary key retrieval')
end
end
return pk_name
end
# Returns an array of non-primary key indexes for a specified table name
def indexes(table_name, name = nil)
# to_s required because +table_name+ may be a symbol.
table_name = table_name.to_s
# Checks if a blank table name has been given.
# If so it returns an empty array of columns.
return [] if table_name.strip.empty?
indexes = []
pk_index = nil
index_schema = []
#fetch the primary keys of the table using function primary_keys
#TABLE_SCHEM:: pk_index[1]
#TABLE_NAME:: pk_index[2]
#COLUMN_NAME:: pk_index[3]
#PK_NAME:: pk_index[5]
stmt = IBM_DB.primary_keys( @connection, nil,
@servertype.set_case(@schema),
@servertype.set_case(table_name))
if(stmt)
begin
while ( pk_index_row = IBM_DB.fetch_array(stmt) )
if pk_index_row[5]
pk_index_name = pk_index_row[5].downcase
pk_index_columns = [pk_index_row[3].downcase] # COLUMN_NAME
if pk_index
pk_index.columns = pk_index.columns + pk_index_columns
else
pk_index = IndexDefinition.new(table_name, pk_index_name, true, pk_index_columns)
end
end
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve primarykey metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of primary key metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure # Free resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
else # Handle driver execution errors
error_msg = IBM_DB.getErrormsg(@connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve primary key metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during primary key retrieval')
end
end
# Query table statistics for all indexes on the table
# "TABLE_NAME: #{index_stats[2]}"
# "NON_UNIQUE: #{index_stats[3]}"
# "INDEX_NAME: #{index_stats[5]}"
# "COLUMN_NAME: #{index_stats[8]}"
stmt = IBM_DB.statistics( @connection, nil,
@servertype.set_case(@schema),
@servertype.set_case(table_name), 1 )
if(stmt)
begin
while ( index_stats = IBM_DB.fetch_array(stmt) )
is_composite = false
if index_stats[5] # INDEX_NAME
index_name = index_stats[5].downcase
index_unique = (index_stats[3] == 0)
index_columns = [index_stats[8].downcase] # COLUMN_NAME
index_qualifier = index_stats[4].downcase #Index_Qualifier
# Create an IndexDefinition object and add to the indexes array
i = 0;
indexes.each do |index|
if index.name == index_name && index_schema[i] == index_qualifier
#index.columns = index.columns + index_columns
index.columns.concat index_columns
is_composite = true
end
i = i+1
end
unless is_composite
indexes << IndexDefinition.new(table_name, index_name, index_unique, index_columns)
index_schema << index_qualifier
end
end
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve index metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of index metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure # Free resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
else # Handle driver execution errors
error_msg = IBM_DB.getErrormsg(@connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve index metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during index retrieval')
end
end
# remove the primary key index entry.... should not be dumped by the dumper
i = 0
indexes.each do |index|
if pk_index && index.columns == pk_index.columns
indexes.delete_at(i)
end
i = i+1
end
# Returns the indexes array
return indexes
end
# Mapping IBM data servers SQL datatypes to Ruby data types
def simplified_type2(field_type)
case field_type
# if +field_type+ contains 'for bit data' handle it as a binary
when /for bit data/i
"binary"
when /smallint/i
"boolean"
when /int|serial/i
"integer"
when /decimal|numeric|decfloat/i
"decimal"
when /float|double|real/i
"float"
when /timestamp|datetime/i
"timestamp"
when /time/i
"time"
when /date/i
"date"
when /vargraphic/i
"vargraphic"
when /graphic/i
"graphic"
when /clob|text/i
"text"
when /xml/i
"xml"
when /blob|binary/i
"binary"
when /char/i
"string"
when /boolean/i
"boolean"
when /rowid/i # rowid is a supported datatype on z/OS and i/5
"rowid"
end
end # method simplified_type
# Mapping IBM data servers SQL datatypes to Ruby data types
def simplified_type(field_type)
case field_type
# if +field_type+ contains 'for bit data' handle it as a binary
when /for bit data/i
:binary
when /smallint/i
:boolean
when /int|serial/i
:integer
when /decimal|numeric|decfloat/i
:decimal
when /float|double|real/i
:float
when /timestamp|datetime/i
:timestamp
when /time/i
:time
when /date/i
:date
when /vargraphic/i
:vargraphic
when /graphic/i
:graphic
when /clob|text/i
:text
when /xml/i
:xml
when /blob|binary/i
:binary
when /char/i
:string
when /boolean/i
:boolean
when /rowid/i # rowid is a supported datatype on z/OS and i/5
:rowid
end
end # method simplified_type
# Returns an array of Column objects for the table specified by +table_name+
def columns(table_name)
# to_s required because it may be a symbol.
table_name = @servertype.set_case(table_name.to_s)
# Checks if a blank table name has been given.
# If so it returns an empty array
return [] if table_name.strip.empty?
# +columns+ will contain the resulting array
columns = []
# Statement required to access all the columns information
stmt = IBM_DB.columns( @connection, nil,
@servertype.set_case(@schema),
@servertype.set_case(table_name) )
if(stmt)
begin
# Fetches all the columns and assigns them to col.
# +col+ is an hash with keys/value pairs for a column
while col = IBM_DB.fetch_assoc(stmt)
column_name = col["column_name"].downcase
# Assigns the column default value.
column_default_value = col["column_def"]
# If there is no default value, it assigns NIL
column_default_value = nil if (column_default_value && column_default_value.upcase == 'NULL')
# If default value is IDENTITY GENERATED BY DEFAULT (this value is retrieved in case of id columns)
column_default_value = nil if (column_default_value && column_default_value.upcase =~ /IDENTITY GENERATED BY DEFAULT/i)
# Removes single quotes from the default value
column_default_value.gsub!(/^'(.*)'$/, '\1') unless column_default_value.nil?
# Assigns the column type
column_type = col["type_name"].downcase
# Assigns the field length (size) for the column
original_column_type = "#{column_type}"
column_length = col["column_size"]
column_scale = col["decimal_digits"]
# The initializer of the class Column, requires the +column_length+ to be declared
# between brackets after the datatype(e.g VARCHAR(50)) for :string and :text types.
# If it's a "for bit data" field it does a subsitution in place, if not
# it appends the (column_length) string on the supported data types
unless column_length.nil? ||
column_length == '' ||
column_type.sub!(/ \(\) for bit data/i,"(#{column_length}) FOR BIT DATA") ||
!column_type =~ /char|lob|graphic/i
if column_type =~ /decimal|numeric/i
column_type << "(#{column_length},#{column_scale})"
elsif column_type =~ /smallint|integer|double|date|time|timestamp|xml|bigint/i
column_type << "" # override native limits incompatible with table create
else
column_type << "(#{column_length})"
end
end
# col["NULLABLE"] is 1 if the field is nullable, 0 if not.
column_nullable = (col["nullable"] == 1) ? true : false
# Make sure the hidden column (db2_generated_rowid_for_lobs) in DB2 z/OS isn't added to the list
if !(column_name =~ /db2_generated_rowid_for_lobs/i)
# Pushes into the array the *IBM_DBColumn* object, created by passing to the initializer
# +column_name+, +default_value+, +column_type+ and +column_nullable+.
#if(@arelVersion >= 6 )
#cast_type = lookup_cast_type(column_type)
ruby_type = simplified_type2(column_type)
precision = extract_precision(ruby_type)
#type = type_map.lookup(column_type)
sql_type = type_to_sql(column_type, column_length, precision, column_scale)
sqltype_metadata = SqlTypeMetadata.new(
#sql_type: sql_type,
sql_type: original_column_type,
type: ruby_type,
limit: column_length,
precision: precision,
scale: column_scale,
)
columns << Column.new(column_name, column_default_value, sqltype_metadata, column_nullable, table_name)
#else
# columns << IBM_DBColumn.new(column_name, column_default_value, column_type, column_nullable)
#end
end
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve column metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of column metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure # Free resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
else # Handle driver execution errors
error_msg = IBM_DB.getErrormsg(@connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve column metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during retrieval of columns metadata')
end
end
# Returns the columns array
return columns
end
def foreign_keys(table_name)
#fetch the foreign keys of the table using function foreign_keys
#PKTABLE_NAME:: fk_row[2] Name of the table containing the primary key.
#PKCOLUMN_NAME:: fk_row[3] Name of the column containing the primary key.
#FKTABLE_NAME:: fk_row[6] Name of the table containing the foreign key.
#FKCOLUMN_NAME:: fk_row[7] Name of the column containing the foreign key.
#FK_NAME:: fk_row[11] The name of the foreign key.
table_name = @servertype.set_case(table_name.to_s)
foreignKeys = []
stmt = IBM_DB.foreignkeys( @connection, nil,
@servertype.set_case(@schema),
@servertype.set_case(table_name), "FK_TABLE")
if(stmt)
begin
while ( fk_row = IBM_DB.fetch_array(stmt) )
options = {
column: fk_row[7],
name: fk_row[11],
primary_key: fk_row[3],
}
options[:on_update] = extract_foreign_key_action(fk_row[9])
options[:on_delete] = extract_foreign_key_action(fk_row[10])
foreignKeys << ForeignKeyDefinition.new(fk_row[6], table_name, options)
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve foreign key metadata during fetch: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of foreign key metadata"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure # Free resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
else # Handle driver execution errors
error_msg = IBM_DB.getErrormsg(@connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve foreign key metadata due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during foreign key retrieval')
end
end
#Returns the foreignKeys array
return foreignKeys
end
def extract_foreign_key_action(specifier) # :nodoc:
case specifier
when 0; :cascade
when 1; :restrict
when 2; :nullify
when 3; :noaction
end
end
def supports_disable_referential_integrity? #:nodoc:
true
end
def disable_referential_integrity #:nodoc:
if supports_disable_referential_integrity?
alter_foreign_keys(tables, true)
end
yield
ensure
if supports_disable_referential_integrity?
alter_foreign_keys(tables, false)
end
end
def alter_foreign_keys(tables, not_enforced)
enforced = not_enforced ? 'NOT ENFORCED' : 'ENFORCED'
tables.each do |table|
foreign_keys(table).each do |fk|
execute("ALTER TABLE #{@servertype.set_case(fk.from_table)} ALTER FOREIGN KEY #{@servertype.set_case(fk.name)} #{enforced}")
end
end
end
# Renames a table.
# ==== Example
# rename_table('octopuses', 'octopi')
# Overriden to satisfy IBM data servers syntax
def rename_table(name, new_name)
# SQL rename table statement
rename_table_sql = "RENAME TABLE #{name} TO #{new_name}"
stmt = execute(rename_table_sql)
# Ensures to free the resources associated with the statement
ensure
IBM_DB.free_stmt(stmt) if stmt
end
# Renames a column.
# ===== Example
# rename_column(:suppliers, :description, :name)
def rename_column(table_name, column_name, new_column_name)
@servertype.rename_column(table_name, column_name, new_column_name)
end
# Removes the column from the table definition.
# ===== Examples
# remove_column(:suppliers, :qualification)
def remove_column(table_name, column_name)
@servertype.remove_column(table_name, column_name)
end
# Changes the column's definition according to the new options.
# See TableDefinition#column for details of the options you can use.
# ===== Examples
# change_column(:suppliers, :name, :string, :limit => 80)
# change_column(:accounts, :description, :text)
def change_column(table_name, column_name, type, options = {})
@servertype.change_column(table_name, column_name, type, options)
end
#Add distinct clause to the sql if there is no order by specified
def distinct(columns, order_by)
if order_by.nil?
"DISTINCT #{columns}"
else
"#{columns}"
end
end
def columns_for_distinct(columns, orders) #:nodoc:
order_columns = orders.reject(&:blank?).map{ |s|
# Convert Arel node to string
s = s.to_sql unless s.is_a?(String)
# Remove any ASC/DESC modifiers
s.gsub(/\s+(?:ASC|DESC)\b/i, '')
.gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, '')
}.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
[super, *order_columns].join(', ')
end
# Sets a new default value for a column. This does not set the default
# value to +NULL+, instead, it needs DatabaseStatements#execute which
# can execute the appropriate SQL statement for setting the value.
# ==== Examples
# change_column_default(:suppliers, :qualification, 'new')
# change_column_default(:accounts, :authorized, 1)
# Method overriden to satisfy IBM data servers syntax.
def change_column_default(table_name, column_name, default)
@servertype.change_column_default(table_name, column_name, default)
end
#Changes the nullability value of a column
def change_column_null(table_name, column_name, null, default = nil)
@servertype.change_column_null(table_name, column_name, null, default)
end
# Remove the given index from the table.
#
# Remove the suppliers_name_index in the suppliers table (legacy support, use the second or third forms).
# remove_index :suppliers, :name
# Remove the index named accounts_branch_id in the accounts table.
# remove_index :accounts, :column => :branch_id
# Remove the index named by_branch_party in the accounts table.
# remove_index :accounts, :name => :by_branch_party
#
# You can remove an index on multiple columns by specifying the first column.
# add_index :accounts, [:username, :password]
# remove_index :accounts, :username
# Overriden to use the IBM data servers SQL syntax.
def remove_index(table_name, options = {})
execute("DROP INDEX #{index_name(table_name, options)}")
end
protected
def initialize_type_map(m) # :nodoc:
register_class_with_limit m, %r(boolean)i, Type::Boolean
register_class_with_limit m, %r(char)i, Type::String
register_class_with_limit m, %r(binary)i, Type::Binary
register_class_with_limit m, %r(text)i, Type::Text
register_class_with_limit m, %r(date)i, Type::Date
register_class_with_limit m, %r(time)i, Type::Time
register_class_with_limit m, %r(datetime)i, Type::DateTime
register_class_with_limit m, %r(float)i, Type::Float
register_class_with_limit m, %r(int)i, Type::Integer
m.alias_type %r(blob)i, 'binary'
m.alias_type %r(clob)i, 'text'
m.alias_type %r(timestamp)i, 'datetime'
m.alias_type %r(numeric)i, 'decimal'
m.alias_type %r(number)i, 'decimal'
m.alias_type %r(double)i, 'float'
m.register_type(%r(decimal)i) do |sql_type|
scale = extract_scale(sql_type)
precision = extract_precision(sql_type)
if scale == 0
# FIXME: Remove this class as well
Type::DecimalWithoutScale.new(precision: precision)
else
Type::Decimal.new(precision: precision, scale: scale)
end
end
m.alias_type %r(xml)i, 'text'
m.alias_type %r(for bit data)i, 'binary'
m.alias_type %r(smallint)i, 'boolean'
m.alias_type %r(serial)i, 'int'
m.alias_type %r(decfloat)i, 'decimal'
m.alias_type %r(real)i, 'decimal'
m.alias_type %r(graphic)i, 'binary'
m.alias_type %r(rowid)i, 'int'
end
end # class IBM_DBAdapter
# This class contains common code across DB's (DB2 LUW, zOS, i5 and IDS)
class IBM_DataServer
def initialize(adapter, ar3)
@adapter = adapter
@isAr3 = ar3
end
def last_generated_id(stmt)
end
def create_index_after_table (table_name,cloumn_name)
end
def setup_for_lob_table ()
end
def reorg_table(table_name)
end
def check_reserved_words(col_name)
col_name.to_s
end
# This is supported by the DB2 for Linux, UNIX, Windows data servers
# and by the DB2 for i5 data servers
def remove_column(table_name, column_name)
begin
@adapter.execute "ALTER TABLE #{table_name} DROP #{column_name}"
reorg_table(table_name)
rescue StandardError => exec_err
# Provide details on the current XML columns support
if exec_err.message.include?('SQLCODE=-1242') && exec_err.message.include?('42997')
raise StatementInvalid,
"A column that is part of a table containing an XML column cannot be dropped. \
To remove the column, the table must be dropped and recreated without the #{column_name} column: #{exec_err}"
else
raise "#{exec_err}"
end
end
end
def select(stmt)
results = []
# Fetches all the results available. IBM_DB.fetch_assoc(stmt) returns
# an hash for each single record.
# The loop stops when there aren't any more valid records to fetch
begin
if(@isAr3)
while single_hash = IBM_DB.fetch_assoc(stmt)
# Add the record to the +results+ array
results << single_hash
end
else
while single_hash = IBM_DB.fetch_array(stmt)
# Add the record to the +results+ array
results << single_hash
end
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise StatementInvalid,"Failed to retrieve data: #{error_msg}"
else
error_msg = "An unexpected error occurred during data retrieval"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
end
return results
end
def select_rows(sql, name, stmt, results)
# Fetches all the results available. IBM_DB.fetch_array(stmt) returns
# an array representing a row in a result set.
# The loop stops when there aren't any more valid records to fetch
begin
while single_array = IBM_DB.fetch_array(stmt)
#Add the array to results array
results << single_array
end
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise StatementInvalid,"Failed to retrieve data: #{error_msg}"
else
error_msg = "An unexpected error occurred during data retrieval"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
end
return results
end
# Praveen
def prepare(sql,name = nil)
begin
stmt = IBM_DB.prepare(@adapter.connection, sql)
if( stmt )
stmt
else
raise StatementInvalid, IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
end
rescue StandardError => prep_err
if prep_err && !prep_err.message.empty?
raise "Failed to prepare sql #{sql} due to: #{prep_err}"
else
raise
end
end
end
# Akhil Tcheck for if_exits added so that it will try to drop even if the table does not exit.
def execute(sql, name = nil)
if name == nil || name.class == String
begin
if stmt = IBM_DB.exec(@adapter.connection, sql)
stmt # Return the statement object
else
raise StatementInvalid, IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
end
rescue StandardError => exec_err
if exec_err && !exec_err.message.empty?
raise "Failed to execute statement due to: #{exec_err}"
else
raise
end
end
else
if name[:if_exists]
IBM_DB.exec(@adapter.connection, sql)
else
begin
if stmt = IBM_DB.exec(@adapter.connection, sql)
stmt # Return the statement object
else
raise StatementInvalid, IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
end
rescue StandardError => exec_err
if exec_err && !exec_err.message.empty?
raise "Failed to execute statement due to: #{exec_err}"
else
raise
end
end
end
end
end
def set_schema(schema)
@adapter.execute("SET SCHEMA #{schema}")
end
def query_offset_limit(sql, offset, limit)
end
def get_limit_offset_clauses(limit, offset)
end
def query_offset_limit!(sql, offset, limit, options)
end
def get_datetime_mapping
end
def get_time_mapping
end
def get_double_mapping
end
def change_column_default(table_name, column_name, default)
end
def change_column_null(table_name, column_name, null, default)
end
def set_binary_default(value)
end
def set_binary_value
end
def set_text_default
end
def set_case(value)
end
def limit_not_supported_types
[:integer, :double, :date, :time, :timestamp, :xml, :bigint]
end
end # class IBM_DataServer
class IBM_DB2 < IBM_DataServer
def initialize(adapter, ar3)
super(adapter,ar3)
@limit = @offset = nil
end
def rename_column(table_name, column_name, new_column_name)
raise NotImplementedError, "rename_column is not implemented yet in the IBM_DB Adapter"
end
def primary_key_definition(start_id)
return "INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH #{start_id}) PRIMARY KEY NOT NULL"
end
# Returns the last automatically generated ID.
# This method is required by the +insert+ method
# The "stmt" parameter is ignored for DB2 but used for IDS
def last_generated_id(stmt)
# Queries the db to obtain the last ID that was automatically generated
sql = "SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1"
stmt = IBM_DB.prepare(@adapter.connection, sql)
if(stmt)
if(IBM_DB.execute(stmt, nil))
begin
# Fetches the only record available (containing the last id)
IBM_DB.fetch_row(stmt)
# Retrieves and returns the result of the query with the last id.
IBM_DB.result(stmt,0)
rescue StandardError => fetch_error # Handle driver fetch errors
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
if error_msg && !error_msg.empty?
raise "Failed to retrieve last generated id: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of last generated id"
error_msg = error_msg + ": #{fetch_error.message}" if !fetch_error.message.empty?
raise error_msg
end
ensure # Free resources associated with the statement
IBM_DB.free_stmt(stmt) if stmt
end
else
error_msg = IBM_DB.getErrormsg(stmt, IBM_DB::DB_STMT )
IBM_DB.free_stmt(stmt) if stmt
if error_msg && !error_msg.empty?
raise "Failed to retrieve last generated id: #{error_msg}"
else
error_msg = "An unexpected error occurred during retrieval of last generated id"
raise error_msg
end
end
else
error_msg = IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Failed to retrieve last generated id due to error: #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during retrieval of last generated id')
end
end
end
def change_column(table_name, column_name, type, options)
if !options[:default].nil?
change_column_default(table_name, column_name, options[:default])
else
data_type = @adapter.type_to_sql(type, options[:limit], options[:precision], options[:scale])
begin
execute "ALTER TABLE #{table_name} ALTER #{column_name} SET DATA TYPE #{data_type}"
rescue StandardError => exec_err
if exec_err.message.include?('SQLCODE=-190')
raise StatementInvalid,
"Please consult documentation for compatible data types while changing column datatype. \
The column datatype change to [#{data_type}] is not supported by this data server: #{exec_err}"
else
raise "#{exec_err}"
end
end
reorg_table(table_name)
change_column_null(table_name,column_name,options[:null],nil)
change_column_default(table_name, column_name, options[:default])
reorg_table(table_name)
end
end
# DB2 specific ALTER TABLE statement to add a default clause
def change_column_default(table_name, column_name, default)
# SQL statement which alters column's default value
change_column_sql = "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} \
SET WITH DEFAULT #{@adapter.quote(default)}"
stmt = execute(change_column_sql)
reorg_table(table_name)
ensure
IBM_DB.free_stmt(stmt) if stmt
end
#DB2 specific ALTER TABLE statement to change the nullability of a column
def change_column_null(table_name, column_name, null, default)
if !default.nil?
change_column_default(table_name, column_name, default)
end
if !null.nil?
if null
change_column_sql = "ALTER TABLE #{table_name} ALTER #{column_name} DROP NOT NULL"
else
change_column_sql = "ALTER TABLE #{table_name} ALTER #{column_name} SET NOT NULL"
end
stmt = execute(change_column_sql)
reorg_table(table_name)
end
ensure
IBM_DB.free_stmt(stmt) if stmt
end
# This method returns the DB2 SQL type corresponding to the Rails
# datetime/timestamp type
def get_datetime_mapping
return "timestamp"
end
# This method returns the DB2 SQL type corresponding to the Rails
# time type
def get_time_mapping
return "time"
end
#This method returns the DB2 SQL type corresponding to Rails double type
def get_double_mapping
return "double"
end
def get_limit_offset_clauses(limit, offset)
retHash = {"endSegment"=> "", "startSegment" => ""}
if(offset.nil? && limit.nil?)
return retHash
end
if (offset.nil?)
retHash["endSegment"] = " FETCH FIRST #{limit} ROWS ONLY"
return retHash
end
#if(limit.nil?)
if(limit.nil?)
#retHash["startSegment"] = "SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM ( SELECT "
retHash["startSegment"] = "SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM ( "
retHash["endSegment"] = " ) AS I) AS O WHERE sys_row_num > #{offset}"
return retHash
end
# Defines what will be the last record
last_record = offset.to_i + limit.to_i
#retHash["startSegment"] = "SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM ( SELECT "
retHash["startSegment"] = "SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM ( "
if last_record < offset+1
retHash["endSegment"] = " ) AS I) AS O WHERE sys_row_num BETWEEN #{last_record} AND #{offset+1}"
else
retHash["endSegment"] = " ) AS I) AS O WHERE sys_row_num BETWEEN #{offset+1} AND #{last_record}"
end
return retHash
end
def query_offset_limit(sql, offset, limit)
if(offset.nil? && limit.nil?)
return sql
end
if (offset.nil?)
return sql << " FETCH FIRST #{limit} ROWS ONLY"
end
if(limit.nil?)
sql.sub!(/SELECT/i,"SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM (SELECT")
return sql << ") AS I) AS O WHERE sys_row_num > #{offset}"
end
# Defines what will be the last record
last_record = offset + limit
# Transforms the SELECT query in order to retrieve/fetch only
# a number of records after the specified offset.
# 'select' or 'SELECT' is replaced with the partial query below that adds the sys_row_num column
# to select with the condition of this column being between offset+1 and the offset+limit
sql.sub!(/SELECT/i,"SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM (SELECT")
# The final part of the query is appended to include a WHERE...BETWEEN...AND condition,
# and retrieve only a LIMIT number of records starting from the OFFSET+1
sql << ") AS I) AS O WHERE sys_row_num BETWEEN #{offset+1} AND #{last_record}"
end
def query_offset_limit!(sql, offset, limit, options)
if(offset.nil? && limit.nil?)
options[:paramArray] = []
return sql
end
if (offset.nil?)
options[:paramArray] = []
return sql << " FETCH FIRST #{limit} ROWS ONLY"
end
if(limit.nil?)
sql.sub!(/SELECT/i,"SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM (SELECT")
sql << ") AS I) AS O WHERE sys_row_num > ?"
options[:paramArray] = [offset]
return
end
# Defines what will be the last record
last_record = offset + limit
# Transforms the SELECT query in order to retrieve/fetch only
# a number of records after the specified offset.
# 'select' or 'SELECT' is replaced with the partial query below that adds the sys_row_num column
# to select with the condition of this column being between offset+1 and the offset+limit
sql.sub!(/SELECT/i,"SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM (SELECT")
# The final part of the query is appended to include a WHERE...BETWEEN...AND condition,
# and retrieve only a LIMIT number of records starting from the OFFSET+1
sql << ") AS I) AS O WHERE sys_row_num BETWEEN ? AND ?"
options[:paramArray] = [offset+1, last_record]
end
# This method generates the default blob value specified for
# DB2 Dataservers
def set_binary_default(value)
"BLOB('#{value}')"
end
# This method generates the blob value specified for DB2 Dataservers
def set_binary_value
"BLOB('?')"
end
# This method generates the default clob value specified for
# DB2 Dataservers
def set_text_default(value)
"'#{value}'"
end
# For DB2 Dataservers , the arguments to the meta-data functions
# need to be in upper-case
def set_case(value)
value.upcase
end
end # class IBM_DB2
class IBM_DB2_LUW < IBM_DB2
# Reorganizes the table for column changes
def reorg_table(table_name)
execute("CALL ADMIN_CMD('REORG TABLE #{table_name}')")
end
end # class IBM_DB2_LUW
class IBM_DB2_LUW_COBRA < IBM_DB2_LUW
# Cobra supports parameterised timestamp,
# hence overriding following method to allow timestamp datatype to be parameterised
def limit_not_supported_types
[:integer, :double, :date, :time, :xml, :bigint]
end
# Alter table column for renaming a column
# This feature is supported for against DB2 V97 and above only
def rename_column(table_name, column_name, new_column_name)
_table_name = table_name.to_s
_column_name = column_name.to_s
_new_column_name = new_column_name.to_s
nil_condition = _table_name.nil? || _column_name.nil? || _new_column_name.nil?
empty_condition = _table_name.empty? ||
_column_name.empty? ||
_new_column_name.empty? unless nil_condition
if nil_condition || empty_condition
raise ArgumentError,"One of the arguments passed to rename_column is empty or nil"
end
begin
rename_column_sql = "ALTER TABLE #{_table_name} RENAME COLUMN #{_column_name} \
TO #{_new_column_name}"
unless stmt = execute(rename_column_sql)
error_msg = IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Rename column failed : #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during renaming the column')
end
end
reorg_table(_table_name)
ensure
IBM_DB.free_stmt(stmt) if stmt
end #End of begin
end # End of rename_column
end #IBM_DB2_LUW_COBRA
module HostedDataServer
require 'pathname'
#find DB2-i5-zOS rezerved words file relative path
rfile = Pathname.new(File.dirname(__FILE__)).parent + 'vendor' + 'db2-i5-zOS.yaml'
if rfile
RESERVED_WORDS = open(rfile.to_s) {|f| YAML.load(f) }
def check_reserved_words(col_name)
if RESERVED_WORDS[col_name]
'"' + RESERVED_WORDS[col_name] + '"'
else
col_name.to_s
end
end
else
raise "Failed to locate IBM_DB Adapter dependency: #{rfile}"
end
end # module HostedDataServer
class IBM_DB2_ZOS < IBM_DB2
# since v9 doesn't need, suggest putting it in HostedDataServer?
def create_index_after_table(table_name,column_name)
@adapter.add_index(table_name, column_name, :unique => true)
end
def remove_column(table_name, column_name)
raise NotImplementedError,
"remove_column is not supported by the DB2 for zOS data server"
end
#Alter table column for renaming a column
def rename_column(table_name, column_name, new_column_name)
_table_name = table_name.to_s
_column_name = column_name.to_s
_new_column_name = new_column_name.to_s
nil_condition = _table_name.nil? || _column_name.nil? || _new_column_name.nil?
empty_condition = _table_name.empty? ||
_column_name.empty? ||
_new_column_name.empty? unless nil_condition
if nil_condition || empty_condition
raise ArgumentError,"One of the arguments passed to rename_column is empty or nil"
end
begin
rename_column_sql = "ALTER TABLE #{_table_name} RENAME COLUMN #{_column_name} \
TO #{_new_column_name}"
unless stmt = execute(rename_column_sql)
error_msg = IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Rename column failed : #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during renaming the column')
end
end
reorg_table(_table_name)
ensure
IBM_DB.free_stmt(stmt) if stmt
end #End of begin
end # End of rename_column
# DB2 z/OS only allows NULL or "" (empty) string as DEFAULT value for a BLOB column.
# For non-empty string and non-NULL values, the server returns error
def set_binary_default(value)
"#{value}"
end
def change_column_default(table_name, column_name, default)
unless default
raise NotImplementedError,
"DB2 for zOS data server version 9 does not support changing the column default to NULL"
else
super
end
end
def change_column_null(table_name, column_name, null, default)
raise NotImplementedError,
"DB2 for zOS data server does not support changing the column's nullability"
end
end # class IBM_DB2_ZOS
class IBM_DB2_ZOS_8 < IBM_DB2_ZOS
include HostedDataServer
def get_limit_offset_clauses(limit, offset)
retHash = {"startSegment" => "", "endSegment" => ""}
if (!limit.nil?)
retHash["endSegment"] = " FETCH FIRST #{limit} ROWS ONLY"
end
return retHash
end
def query_offset_limit(sql, offset, limit)
if (!limit.nil?)
sql << " FETCH FIRST #{limit} ROWS ONLY"
end
return sql
end
def query_offset_limit!(sql, offset, limit, options)
if (!limit.nil?)
sql << " FETCH FIRST #{limit} ROWS ONLY"
end
options[:paramArray] = []
end
# This call is needed on DB2 z/OS v8 for the creation of tables
# with LOBs. When issued, this call does the following:
# DB2 creates LOB table spaces, auxiliary tables, and indexes on auxiliary
# tables for LOB columns.
def setup_for_lob_table()
execute "SET CURRENT RULES = 'STD'"
end
def rename_column(table_name, column_name, new_column_name)
raise NotImplementedError, "rename_column is not implemented for DB2 on zOS 8"
end
def change_column_default(table_name, column_name, default)
raise NotImplementedError,
"DB2 for zOS data server version 8 does not support changing the column default"
end
end # class IBM_DB2_ZOS_8
class IBM_DB2_I5 < IBM_DB2
include HostedDataServer
end # class IBM_DB2_I5
class IBM_IDS < IBM_DataServer
# IDS does not support the SET SCHEMA syntax
def set_schema(schema)
end
# IDS specific ALTER TABLE statement to rename a column
def rename_column(table_name, column_name, new_column_name)
_table_name = table_name.to_s
_column_name = column_name.to_s
_new_column_name = new_column_name.to_s
nil_condition = _table_name.nil? || _column_name.nil? || _new_column_name.nil?
empty_condition = _table_name.empty? ||
_column_name.empty? ||
_new_column_name.empty? unless nil_condition
if nil_condition || empty_condition
raise ArgumentError,"One of the arguments passed to rename_column is empty or nil"
end
begin
rename_column_sql = "RENAME COLUMN #{table_name}.#{column_name} TO \
#{new_column_name}"
unless stmt = execute(rename_column_sql)
error_msg = IBM_DB.getErrormsg(@adapter.connection, IBM_DB::DB_CONN )
if error_msg && !error_msg.empty?
raise "Rename column failed : #{error_msg}"
else
raise StandardError.new('An unexpected error occurred during renaming the column')
end
end
reorg_table(_table_name)
ensure
IBM_DB.free_stmt(stmt) if stmt
end #End of begin
end # End of rename_column
def primary_key_definition(start_id)
return "SERIAL(#{start_id}) PRIMARY KEY"
end
def change_column(table_name, column_name, type, options)
if !options[:null].nil? && !options[:null]
execute "ALTER TABLE #{table_name} MODIFY #{column_name} #{@adapter.type_to_sql(type, options[:limit], options[:precision], options[:scale])} NOT NULL"
else
execute "ALTER TABLE #{table_name} MODIFY #{column_name} #{@adapter.type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
end
if !options[:default].nil?
change_column_default(table_name, column_name, options[:default])
end
reorg_table(table_name)
end
# IDS specific ALTER TABLE statement to add a default clause
# IDS requires the data type to be explicitly specified when adding the
# DEFAULT clause
def change_column_default(table_name, column_name, default)
sql_type = nil
is_nullable = true
@adapter.columns(table_name).select do |col|
if (col.name == column_name)
sql_type = @adapter.type_to_sql(col.sql_type, col.limit, col.precision, col.scale)
is_nullable = col.null
end
end
# SQL statement which alters column's default value
change_column_sql = "ALTER TABLE #{table_name} MODIFY #{column_name} #{sql_type} DEFAULT #{@adapter.quote(default)}"
change_column_sql << " NOT NULL" unless is_nullable
stmt = execute(change_column_sql)
reorg_table(table_name)
# Ensures to free the resources associated with the statement
ensure
IBM_DB.free_stmt(stmt) if stmt
end
# IDS specific ALTER TABLE statement to change the nullability of a column
def change_column_null(table_name,column_name,null,default)
if !default.nil?
change_column_default table_name, column_name, default
end
sql_type = nil
@adapter.columns(table_name).select do |col|
if (col.name == column_name)
sql_type = @adapter.type_to_sql(col.sql_type, col.limit, col.precision, col.scale)
end
end
if !null.nil?
if !null
change_column_sql = "ALTER TABLE #{table_name} MODIFY #{column_name} #{sql_type} NOT NULL"
else
change_column_sql = "ALTER TABLE #{table_name} MODIFY #{column_name} #{sql_type}"
end
stmt = execute(change_column_sql)
reorg_table(table_name)
end
ensure
IBM_DB.free_stmt(stmt) if stmt
end
# Reorganizes the table for column changes
def reorg_table(table_name)
execute("UPDATE STATISTICS FOR TABLE #{table_name}")
end
# Maps the Rails datetime/timestamp types to the IDS-specific SQL type.
def get_datetime_mapping
  'datetime year to fraction(5)'
end
# Maps the Rails time type to the IDS-specific SQL type.
def get_time_mapping
  'datetime hour to second'
end
# Maps the Rails double type to the IDS-specific SQL type.
def get_double_mapping
  'double precision'
end
# Builds the Informix-specific SQL fragments implementing limit/offset.
#
# Returns a Hash with "startSegment" (prepended before the projection) and
# "endSegment" (appended after the query). limit == 0 produces a wrapper
# that guarantees an empty result set.
#
# Fix: the original method never returned the hash — its last expression
# was a string assignment, so callers received a String and indexing it
# with "startSegment" performed substring matching instead of hash lookup.
def get_limit_offset_clauses(limit, offset)
  ret_hash = { "startSegment" => "", "endSegment" => "" }
  if limit != 0
    ret_hash["startSegment"] =
      if offset.nil?
        # Retrieve only the first #{limit} rows
        " SELECT FIRST #{limit} "
      else
        # Utilize both the skip and limit amounts
        " SELECT SKIP #{offset} LIMIT #{limit} "
      end
  else
    # limit == 0: wrap the query so that no rows can be returned
    ret_hash["startSegment"] = " SELECT * FROM (SELECT "
    ret_hash["endSegment"] = " ) WHERE 0 = 1 "
  end
  ret_hash
end
# Handling offset/limit as per Informix requirements.
# Mutates +sql+ in place and also returns it.
#
# Fixes: the FIRST-only branch matched "SELECT" case-sensitively while the
# other branches matched /SELECT/i, and `sql = sql.gsub!` could rebind the
# local to nil when no substitution occurred; all branches now use the
# case-insensitive regex and the method consistently returns +sql+.
def query_offset_limit(sql, offset, limit)
  if limit != 0
    if offset.nil?
      # Modifying the SQL to retrieve only the first #{limit} rows
      sql.gsub!(/SELECT/i, "SELECT FIRST #{limit}")
    else
      # Modifying the SQL to utilize the skip and limit amounts
      sql.gsub!(/SELECT/i, "SELECT SKIP #{offset} LIMIT #{limit}")
    end
  else
    # Modifying the SQL to ensure that no rows will be returned
    sql.gsub!(/SELECT/i, "SELECT * FROM (SELECT")
    sql << ") WHERE 0 = 1"
  end
  sql
end
# Handling offset/limit as per Informix requirements (bang variant that
# additionally accepts an +options+ hash; options are currently unused but
# kept for interface compatibility with other adapters).
# Mutates +sql+ in place and also returns it.
#
# Same fixes as query_offset_limit: consistent case-insensitive matching
# and no `sql = sql.gsub!` nil rebinding.
def query_offset_limit!(sql, offset, limit, options)
  if limit != 0
    if offset.nil?
      # Modifying the SQL to retrieve only the first #{limit} rows
      sql.gsub!(/SELECT/i, "SELECT FIRST #{limit}")
    else
      # Modifying the SQL to utilize the skip and limit amounts
      sql.gsub!(/SELECT/i, "SELECT SKIP #{offset} LIMIT #{limit}")
    end
  else
    # Modifying the SQL to ensure that no rows will be returned
    sql.gsub!(/SELECT/i, "SELECT * FROM (SELECT")
    sql << ") WHERE 0 = 1"
  end
  sql
end
# Method that returns the last automatically generated ID
# on the given +@connection+. This method is required by the +insert+
# method. IDS returns the last generated serial value in the SQLCA unlike
# DB2 where the generated value has to be retrieved using the
# IDENTITY_VAL_LOCAL function. We used the "stmt" parameter to identify
# the statement resource from which to get the last generated value
def last_generated_id(stmt)
  # +stmt+ is the IBM_DB driver statement resource of the insert just run.
  IBM_DB.get_last_serial_value(stmt)
end
# Rejects any non-NULL default on a BLOB column: per the IDS documentation,
# "if the column is a BLOB or CLOB datatype, NULL is the only valid default
# value."
def set_binary_default(value)
  return if value == 'NULL'
  raise "Informix Dynamic Server only allows NULL as a valid default value for a BLOB data type"
end
# Placeholder literal used for binary columns: Informix Dynamic Server
# treats binary like text here, so the insert first writes this marker and
# the real payload is written afterwards by the dummy-insert/update pass
# (see handle_lobs).
def set_binary_value
  "'@@@IBMBINARY@@@'"
end
# Rejects any non-NULL default on a CLOB column: per the IDS documentation,
# "if the column is a BLOB or CLOB datatype, NULL is the only valid default
# value."
def set_text_default(value)
  return if value == 'NULL'
  raise "Informix Dynamic Server only allows NULL as a valid default value for a CLOB data type"
end
# IDS meta-data functions expect lower-case identifiers, so normalize the
# given value before passing it along.
def set_case(value)
  value.downcase
end
end # class IBM_IDS
end # module ConnectionAdapters
end # module ActiveRecord
module Arel
  #Check Arel version
  # Any failure reading Arel::VERSION (e.g. Arel absent) is treated as
  # version 0, which disables the collector patch below.
  begin
    arelVersion = Arel::VERSION.to_i
  rescue
    arelVersion = 0
  end
  if(arelVersion >= 6)
    module Collectors
      # Monkey-patch of Arel's Bind collector so the adapter can rewrite
      # the first and last collected SQL fragments when applying
      # limit/offset pagination.
      class Bind
        # Overwrites the first collected SQL fragment with +segment+.
        def changeFirstSegment(segment)
          @parts[0] = segment
        end
        # Appends +segment+ after the last collected fragment
        # (assigning at index @parts.length appends to the array).
        def changeEndSegment(segment)
          len = @parts.length
          @parts[len] = segment
        end
      end
    end
  end
module Visitors
  class Visitor #opening and closing the class to ensure backward compatibility
  end
  #Check Arel version
  begin
    arelVersion = Arel::VERSION.to_i
  rescue
    arelVersion = 0
  end
  # Reopen ToSql against the correct superclass for the detected Arel
  # version (Reduce for Arel 6-9, Visitor otherwise) and, when Arel >= 3,
  # define an explicit constructor so the same code also runs on Rails
  # versions that ship without Arel.
  if(arelVersion >= 6 && arelVersion <= 9)
    class ToSql < Arel::Visitors::Reduce #opening and closing the class to ensure backward compatibility
      # In case when using Rails-2.3.x there is no arel used due to which the constructor has to be defined explicitly
      # to ensure the same code works on any version of Rails
      #Check Arel version
      # NOTE: @arelVersion is a class-level instance variable evaluated once
      # while the class body is loaded.
      begin
        @arelVersion = Arel::VERSION.to_i
      rescue
        @arelVersion = 0
      end
      if(@arelVersion >= 3)
        def initialize connection
          super()
          @connection = connection
          @schema_cache = connection.schema_cache if(connection.respond_to?(:schema_cache))
          @quoted_tables = {}
          @quoted_columns = {}
          @last_column = nil
        end
      end
    end
  else
    class ToSql < Arel::Visitors::Visitor #opening and closing the class to ensure backward compatibility
      # In case when using Rails-2.3.x there is no arel used due to which the constructor has to be defined explicitly
      # to ensure the same code works on any version of Rails
      #Check Arel version
      begin
        @arelVersion = Arel::VERSION.to_i
      rescue
        @arelVersion = 0
      end
      if(@arelVersion >= 3)
        def initialize connection
          super()
          @connection = connection
          @schema_cache = connection.schema_cache if(connection.respond_to?(:schema_cache))
          @quoted_tables = {}
          @quoted_columns = {}
          @last_column = nil
        end
      end
    end
  end
# DB2/IDS-specific SQL visitor. Limit is rendered as FETCH FIRST n ROWS
# ONLY; offset pagination wraps the query in a ROW_NUMBER() OVER () window.
class IBM_DB < Arel::Visitors::ToSql
  private

  # Renders LIMIT as DB2's FETCH FIRST <expr> ROWS ONLY clause.
  def visit_Arel_Nodes_Limit o,collector
    collector << " FETCH FIRST "
    visit o.expr, collector
    collector << " ROWS ONLY "
  end

  # Emits only the offset expression; the surrounding ROW_NUMBER() wrapper
  # is added in visit_Arel_Nodes_SelectStatement.
  def visit_Arel_Nodes_Offset o,collector
    visit o.expr,collector
  end

  # Renders a VALUES list: literals/bind params go through visit, anything
  # else is interpolated with to_s.
  def visit_Arel_Nodes_ValuesList(o, collector)
    collector << "VALUES "
    o.rows.each_with_index do |row, i|
      collector << ", " unless i == 0
      collector << "("
      row.each_with_index do |value, k|
        collector << ", " unless k == 0
        case value
        when Nodes::SqlLiteral, Nodes::BindParam
          collector = visit(value, collector)
          #collector << quote(value).to_s
        else
          # NOTE(review): plain values are appended unquoted via to_s —
          # presumably callers only supply literals/binds; confirm.
          collector << value.to_s
        end
      end
      collector << ")"
    end
    collector
  end

  # Renders a full SELECT: CTEs, cores, ORDER BY, then the DB2-specific
  # limit/offset handling and any lock clause.
  def visit_Arel_Nodes_SelectStatement o, collector
    if o.with
      collector = visit o.with, collector
      collector << " "
    end
    collector = o.cores.inject(collector) { |c,x|
      visit_Arel_Nodes_SelectCore(x, c)
    }
    unless o.orders.empty?
      collector << " ORDER BY "
      len = o.orders.length - 1
      o.orders.each_with_index { |x, i|
        collector = visit(x, collector)
        collector << "," unless len == i
      }
    end
    # Earlier adapter-level limit/offset rewriting, kept for reference:
    # if o.limit
    #   limcoll = Arel::Collectors::SQLString.new
    #   visit(o.limit,limcoll)
    #   limit = limcoll.value.to_i
    # else
    #   limit = nil
    # end
    #
    # if o.offset
    #   offcoll = Arel::Collectors::SQLString.new
    #   visit(o.offset,offcoll)
    #   offset = offcoll.value.to_i
    # else
    #   offset = nil
    # end
    #
    # limOffClause = @connection.get_limit_offset_clauses(limit,offset)
    #
    # if( !limOffClause["startSegment"].empty? )
    #   #collector.changeFirstSegment(limOffClause["startSegment"])
    #   collector.value.prepend(limOffClause["startSegment"])
    # end
    #
    # if( !limOffClause["endSegment"].empty? )
    #   #collector.changeEndSegment(limOffClause["endSegment"])
    #   collector << " "
    #   collector << limOffClause["endSegment"]
    # end
    #Initialize a new Collector and set its value to the sql string built so far with any limit and ofset modifications
    #collector.reset(sql)
    # Limit only: plain FETCH FIRST clause.
    if (o.limit && o.offset.nil?)
      visit(o.limit, collector)
    end
    # Offset only: wrap in a ROW_NUMBER() window and filter on the row number.
    if (o.offset && o.limit.nil?)
      collector.value.prepend(" SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM ( ")
      collector << (" ) AS I) AS O WHERE sys_row_num > ")
      visit(o.offset, collector)
    end
    # Both: ROW_NUMBER() wrapper plus FETCH FIRST on the outer query.
    if (o.offset && o.limit)
      collector.value.prepend(" SELECT O.* FROM (SELECT I.*, ROW_NUMBER() OVER () sys_row_num FROM ( ")
      collector << (" ) AS I) AS O WHERE sys_row_num > ")
      visit(o.offset, collector)
      visit(o.limit, collector)
    end
    collector = maybe_visit o.lock, collector
    return collector
  end
end
end
end
| 37.050918 | 175 | 0.597915 |
1db572f041c0bb2eecc17f978212f04da08d8a1d | 84 | # typed: true
# Sorbet dead-code fixture: `x` is always nil, so `return if !x` always
# returns and the trailing `x` can never execute. The inline comment below
# is the test assertion Sorbet checks — do not remove it.
def dead_code
  x = nil
  return if !x
  x # error: unreachable
end
| 9.333333 | 24 | 0.642857 |
7a065757d079906a6857878b1f11d8d5f9915242 | 3,940 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
# NOTE(review): :debug in production is very verbose and can leak request
# details into logs — consider :info unless debug output is required.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "helloworld_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
  logger = ActiveSupport::Logger.new(STDOUT)
  logger.formatter = config.log_formatter
  config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.473684 | 102 | 0.758376 |
d555d03ecb9d736db625e1a716a86e6117b68bfb | 2,266 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# Shared examples for Buildr packaging types. Including specs set
# @packaging (and optionally @package_type when the file extension differs
# from the packaging name) before running these examples.
shared_examples_for 'packaging' do
  it 'should create artifact of proper type' do
    packaging = @packaging
    package_type = @package_type || @packaging
    define 'foo', :version=>'1.0' do
      # NOTE(review): `rescue exit!` aborts the entire spec run on any
      # failure here and hides the real expectation error — looks like a
      # debugging leftover; confirm before relying on it.
      package(packaging).type.should eql(package_type) rescue exit!
    end
  end
  it 'should create file with proper extension' do
    packaging = @packaging
    package_type = @package_type || @packaging
    define 'foo', :version=>'1.0' do
      package(packaging).to_s.should match(/.#{package_type}$/)
    end
  end
  it 'should always return same task for the same package' do
    packaging = @packaging
    define 'foo', :version=>'1.0' do
      package(packaging)
      # A different :id yields a distinct package task.
      package(packaging, :id=>'other')
    end
    project('foo').packages.uniq.size.should eql(2)
  end
  it 'should complain if option not known' do
    packaging = @packaging
    define 'foo', :version=>'1.0' do
      lambda { package(packaging, :unknown_option=>true) }.should raise_error(ArgumentError, /no such option/)
    end
  end
  it 'should respond to with() and return self' do
    packaging = @packaging
    define 'foo', :version=>'1.0' do
      package(packaging).with({}).should be(package(packaging))
    end
  end
  it 'should respond to with() and complain if unknown option' do
    packaging = @packaging
    define 'foo', :version=>'1.0' do
      lambda { package(packaging).with(:unknown_option=>true) }.should raise_error(ArgumentError, /does not support the option/)
    end
  end
end
| 35.40625 | 129 | 0.707414 |
334cc23b3fc815e5dc0fcdfada2a3c88a0f656a3 | 2,222 | require './spec/support/sidekiq_middleware'
require './spec/support/helpers/test_env'
# Seeds burndown-chart data for a project: one active milestone with 20
# issues, some closed day by day (via Timecop time travel) and a few
# reopened afterwards.
class Gitlab::Seeder::Burndown
  # +perf+ is accepted for interface parity with other seeders but unused.
  def initialize(project, perf: false)
    @project = project
  end

  # Runs the full seeding sequence 10 days in the past so the milestone's
  # date range lies in the past; prints a '.' after each step for progress.
  def seed!
    Timecop.travel 10.days.ago
    Sidekiq::Worker.skipping_transaction_check do
      Sidekiq::Testing.inline! do
        create_milestone
        print '.'
        create_issues
        print '.'
        close_issues
        print '.'
        reopen_issues
        print '.'
      end
    end
    Timecop.return
    print '.'
  end

  private

  # Creates one active milestone starting today with a 5-10 day duration.
  def create_milestone
    milestone_params = {
      title: "Sprint - #{FFaker::Lorem.sentence}",
      description: FFaker::Lorem.sentence,
      state: 'active',
      start_date: Date.today,
      due_date: rand(5..10).days.from_now
    }
    @milestone = Milestones::CreateService.new(@project, @project.team.users.sample, milestone_params).execute
  end

  # Creates 20 open, weighted issues on the milestone, each assigned to a
  # random project member.
  def create_issues
    20.times do
      issue_params = {
        title: FFaker::Lorem.sentence(6),
        description: FFaker::Lorem.sentence,
        state: 'opened',
        milestone: @milestone,
        assignees: [@project.team.users.sample],
        weight: rand(1..9)
      }
      Issues::CreateService.new(@project, @project.team.users.sample, issue_params).execute
    end
  end

  # Walks each day of the milestone and closes 1-3 open issues per day,
  # so the burndown chart shows gradual progress.
  def close_issues
    @milestone.start_date.upto(@milestone.due_date) do |date|
      Timecop.travel(date)
      close_number = rand(1..3)
      open_issues = @milestone.issues.opened
      open_issues = open_issues.limit(close_number)
      open_issues.each do |issue|
        Issues::CloseService.new(@project, @project.team.users.sample, {}).execute(issue)
      end
    end
    Timecop.return
  end

  # Reopens up to a third of the closed issues.
  # NOTE(review): updates the state column directly rather than via a
  # service — presumably intentional for seeding speed; confirm.
  def reopen_issues
    count = @milestone.issues.closed.count / 3
    issues = @milestone.issues.closed.limit(rand(count) + 1)
    issues.each { |i| i.update(state: 'reopened') }
  end
end
# Entry point: seed a single project when PROJECT_ID is set in the
# environment, otherwise seed every non-mass-generated project.
Gitlab::Seeder.quiet do
  if project_id = ENV['PROJECT_ID']
    project = Project.find(project_id)
    seeder = Gitlab::Seeder::Burndown.new(project)
    seeder.seed!
  else
    Project.not_mass_generated.each do |project|
      seeder = Gitlab::Seeder::Burndown.new(project)
      seeder.seed!
    end
  end
end
| 22.907216 | 110 | 0.646715 |
7a6d93f22f0fd6fca3674af6630a2722c46dc2af | 144 | class AddPersonToBraintreeAccount < ActiveRecord::Migration[5.2]
# Adds a nullable string column linking a braintree_accounts row to a person.
def change
  add_column :braintree_accounts, :person_id, :string
end
end
| 24 | 64 | 0.784722 |
62edd348ad6537ebdba2220a9d250af114f4d9b7 | 671 | # require 'sidekiq'
if Sidekiq::VERSION.to_f >= 3
  # Sidekiq >= 3: register an error handler that reports the exception
  # together with the job context.
  Sidekiq.configure_server do |config|
    handler = ->(ex, context){
      # Fix: the lambda parameter was named `content` while the body
      # referenced `context`, raising NameError on every job failure.
      BetterExceptionNotifier.notify_exception(ex, data: { sidekiq: context })
    }
    config.error_handlers << handler
  end
else
  ### Sidekiq < v3
  module BetterExceptionNotifier
    # Server middleware that reports any exception raised by a job and
    # re-raises it so Sidekiq's retry machinery still runs.
    class Sidekiq
      def call(_worker, msg, _queue)
        yield
      rescue Exception => e
        BetterExceptionNotifier.notify_exception(e, data: { sidekiq: msg })
        raise e
      end
    end
  end

  Sidekiq.configure_server do |config|
    config.server_middleware do |chain|
      # Fix: previously referenced the non-existent constant
      # ::BetterExceptionNotifierSidekiq; the middleware defined above is
      # ::BetterExceptionNotifier::Sidekiq.
      chain.add ::BetterExceptionNotifier::Sidekiq
    end
  end
end
e8c65b4f4e02bf27f83c958049ba3e4ce63ae563 | 7,713 | =begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: UNLICENSED
https://help.mypurecloud.com/articles/terms-and-conditions/
Terms of Service: https://help.mypurecloud.com/articles/terms-and-conditions/
=end
require 'date'
module PureCloud
# Swagger-generated model for the call payload of a conversation.call
# notification topic: a plain attribute bag plus (de)serialization helpers.
class ConversationCallEventTopicCallConversation
  attr_accessor :id
  attr_accessor :name
  attr_accessor :participants
  attr_accessor :other_media_uris
  attr_accessor :recording_state
  attr_accessor :max_participants

  # Attribute mapping from ruby-style variable name to JSON key.
  def self.attribute_map
    {
      :'id' => :'id',
      :'name' => :'name',
      :'participants' => :'participants',
      :'other_media_uris' => :'otherMediaUris',
      :'recording_state' => :'recordingState',
      :'max_participants' => :'maxParticipants'
    }
  end

  # Attribute type mapping.
  def self.swagger_types
    {
      :'id' => :'String',
      :'name' => :'String',
      :'participants' => :'Array<ConversationCallEventTopicCallMediaParticipant>',
      :'other_media_uris' => :'Array<String>',
      :'recording_state' => :'String',
      :'max_participants' => :'Integer'
    }
  end

  # Initializes the object
  # @param [Hash] attributes Model attributes in the form of hash
  def initialize(attributes = {})
    return unless attributes.is_a?(Hash)

    # convert string to symbol for hash key
    attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}

    if attributes.has_key?(:'id')
      self.id = attributes[:'id']
    end

    if attributes.has_key?(:'name')
      self.name = attributes[:'name']
    end

    if attributes.has_key?(:'participants')
      if (value = attributes[:'participants']).is_a?(Array)
        self.participants = value
      end
    end

    if attributes.has_key?(:'otherMediaUris')
      if (value = attributes[:'otherMediaUris']).is_a?(Array)
        self.other_media_uris = value
      end
    end

    if attributes.has_key?(:'recordingState')
      self.recording_state = attributes[:'recordingState']
    end

    if attributes.has_key?(:'maxParticipants')
      self.max_participants = attributes[:'maxParticipants']
    end
  end

  # Show invalid properties with the reasons. Usually used together with valid?
  # @return Array for valid properties with the reasons
  def list_invalid_properties
    invalid_properties = Array.new
    return invalid_properties
  end

  # Check to see if the all the properties in the model are valid
  # @return true if the model is valid
  def valid?
    allowed_values = ["none", "active", "paused"]
    if @recording_state && !allowed_values.include?(@recording_state)
      return false
    end
    # Fix: without this the method fell through and returned nil (the value
    # of the failed `if`) for valid models, which is falsy in Ruby and made
    # every valid model look invalid.
    true
  end

  # Custom attribute writer method checking allowed values (enum).
  # @param [Object] recording_state Object to be assigned
  def recording_state=(recording_state)
    allowed_values = ["none", "active", "paused"]
    if recording_state && !allowed_values.include?(recording_state)
      fail ArgumentError, "invalid value for 'recording_state', must be one of #{allowed_values}."
    end
    @recording_state = recording_state
  end

  # Checks equality by comparing each attribute.
  # @param [Object] Object to be compared
  def ==(o)
    return true if self.equal?(o)
    self.class == o.class &&
      id == o.id &&
      name == o.name &&
      participants == o.participants &&
      other_media_uris == o.other_media_uris &&
      recording_state == o.recording_state &&
      max_participants == o.max_participants
  end

  # @see the `==` method
  # @param [Object] Object to be compared
  def eql?(o)
    self == o
  end

  # Calculates hash code according to all attributes.
  # @return [Fixnum] Hash code
  def hash
    [id, name, participants, other_media_uris, recording_state, max_participants].hash
  end

  # Builds the object from a hash keyed by the JSON attribute names.
  def build_from_hash(attributes)
    return nil unless attributes.is_a?(Hash)
    self.class.swagger_types.each_pair do |key, type|
      if type =~ /^Array<(.*)>/i
        if attributes[self.class.attribute_map[key]].is_a?(Array)
          self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
        else
          #TODO show warning in debug mode
        end
      elsif !attributes[self.class.attribute_map[key]].nil?
        self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
      else
        # data not found in attributes(hash), not an issue as the data can be optional
      end
    end

    self
  end

  # Deserializes +value+ into the declared swagger +type+.
  def _deserialize(type, value)
    case type.to_sym
    when :DateTime
      DateTime.parse(value)
    when :Date
      Date.parse(value)
    when :String
      value.to_s
    when :Integer
      value.to_i
    when :Float
      value.to_f
    when :BOOLEAN
      if value.to_s =~ /^(true|t|yes|y|1)$/i
        true
      else
        false
      end
    when :Object
      # generic object (usually a Hash), return directly
      value
    when /\AArray<(?<inner_type>.+)>\z/
      inner_type = Regexp.last_match[:inner_type]
      value.map { |v| _deserialize(inner_type, v) }
    when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
      k_type = Regexp.last_match[:k_type]
      v_type = Regexp.last_match[:v_type]
      {}.tap do |hash|
        value.each do |k, v|
          hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
        end
      end
    else # model
      _model = Object.const_get("PureCloud").const_get(type).new
      _model.build_from_hash(value)
    end
  end

  def to_s
    to_hash.to_s
  end

  # to_body is an alias to to_hash (backward compatibility)
  def to_body
    to_hash
  end

  # return the object in the form of hash
  def to_hash
    hash = {}
    self.class.attribute_map.each_pair do |attr, param|
      value = self.send(attr)
      next if value.nil?
      hash[param] = _to_hash(value)
    end
    hash
  end

  # Method to output non-array value in the form of hash
  # For object, use to_hash. Otherwise, just return the value
  def _to_hash(value)
    if value.is_a?(Array)
      value.compact.map{ |v| _to_hash(v) }
    elsif value.is_a?(Hash)
      {}.tap do |hash|
        value.each { |k, v| hash[k] = _to_hash(v) }
      end
    elsif value.respond_to? :to_hash
      value.to_hash
    else
      value
    end
  end
end
| 22.356522 | 177 | 0.562686 |
6ab7930627300963502d589cb0ec5882ade5431c | 936 | module Centurion
# A collection of VMs that can be enumerated and operated on as a unit.
class Pool
  include Enumerable

  attr_accessor :vms

  # Starts out empty; populate via #vms.
  def initialize
    @vms = []
  end

  # Yields every VM in the pool to the block.
  def each(&block)
    @vms.each { |machine| block.call(machine) }
  end

  # Collection-style forwards: each returns an array with one entry per VM.
  FORWARDED_COLLECTION_METHODS = [:name, :path, :ips, :status, :destroy]
  FORWARDED_COLLECTION_METHODS.each do |forwarded|
    define_method(forwarded) { @vms.map(&forwarded) }
  end

  # Status predicates: true only when the check holds for every VM.
  FORWARDED_STATUS_CHECKS = [:ready?, :destroyed?]
  FORWARDED_STATUS_CHECKS.each do |check|
    define_method(check) do
      @vms.map(&check).all? { |flag| true & flag }
    end
  end

  # Runs +command+ on every VM, then waits on all of the returned futures.
  def run(command)
    futures = @vms.map { |machine| machine.run(command) }
    futures.map(&:value)
  end

  # Uploads a local file to every VM, waiting for all transfers to finish.
  def upload(local_path, remote_path)
    futures = @vms.map { |machine| machine.upload(local_path, remote_path) }
    futures.map(&:value)
  end

  # Downloads a remote file from every VM, waiting for all transfers.
  def download(remote_path, local_path)
    futures = @vms.map { |machine| machine.download(remote_path, local_path) }
    futures.map(&:value)
  end
end
end
| 22.285714 | 74 | 0.621795 |
91f79e31d3496e98341d2e1d27fb4ceb36dd054f | 2,013 | WORKSPACE_DIR = File.expand_path(File.dirname(__FILE__) + '/..')
# Evaluates the block with the process working directory switched to +dir+,
# restoring the original directory afterwards even if the block raises.
def in_dir(dir)
  previous = Dir.pwd
  Dir.chdir(dir)
  yield
ensure
  Dir.chdir(previous)
end
# Destination directory for the assembled site.
SITE_DIR = "#{WORKSPACE_DIR}/reports/site"

desc 'Build the part of the website for this branch'
task 'site:build' do
  project = Buildr.project('react4j-heart-rate-monitor')
  rm_rf SITE_DIR
  mkdir_p SITE_DIR
  # Static docs form the base of the site.
  cp_r Dir["#{project._('docs')}/*"], SITE_DIR
  # Copy both GWT build variants, stripping dev-only/build artifacts.
  %w(react4j.hrm.HeartRateMonitorDev react4j.hrm.HeartRateMonitorProd).each do |dir|
    output_dir = project._(:target, :generated, :gwt, dir)
    file(output_dir).invoke
    cp_r Dir["#{output_dir}/*"], SITE_DIR
    rm_f Dir["#{SITE_DIR}/**/*.devmode.js"]
    rm_f Dir["#{SITE_DIR}/**/compilation-mappings.txt"]
    rm_rf "#{SITE_DIR}/WEB-INF"
  end
end
desc 'Build the website'
task 'site:deploy' => ['site:build'] do
  origin_url = 'https://github.com/react4j/react4j.github.io.git'
  travis_build_number = ENV['TRAVIS_BUILD_NUMBER']
  # On CI use the SSH remote so pushes can authenticate via deploy key.
  if travis_build_number
    origin_url = origin_url.gsub('https://github.com/', '[email protected]:')
  end
  local_dir = "#{WORKSPACE_DIR}/target/remote_site"
  rm_rf local_dir
  sh "git clone -b master --depth 1 #{origin_url} #{local_dir}"
  in_dir(local_dir) do
    message ="Update Heart Rate Monitor website#{travis_build_number.nil? ? '' : " - Travis build: #{travis_build_number}"}"
    rm_rf "#{local_dir}/heart-rate-monitor"
    cp_r "#{SITE_DIR}/hrm", "#{local_dir}/heart-rate-monitor"
    sh 'git add . -f'
    # Backticks (not sh) so a "nothing to commit" failure doesn't abort;
    # only push when the commit actually succeeded ($? from the backticks).
    puts `git commit -m "#{message}"`
    if 0 == $?.exitstatus
      sh 'git push -f origin master'
    end
  end
end
desc 'Publish the website if build is on candidate branch'
task 'site:deploy_if_candidate_branch' do
  branch = ENV['TRAVIS_BRANCH']
  # Deploy when not on CI (branch nil) or when on the master branch.
  if branch.nil? || %w(master).include?(branch)
    ENV['SITE_BRANCH'] = branch
    puts "Deploying site for branch '#{branch}'"
    task('site:deploy').invoke
  else
    puts "Site deploy skipped as branch '#{branch}' is not in the candidate set"
  end
end
| 28.352113 | 124 | 0.68157 |
08c690fb84b37265bef680414e6999bb630f3f59 | 2,815 | # frozen_string_literal: true
require 'rails_helper'
# Feature specs for the experience-points disbursement page, exercised as a
# course teaching assistant.
RSpec.feature 'Course: Experience Points: Disbursement' do
  let(:instance) { Instance.default }

  with_tenant(:instance) do
    let(:course) { create(:course) }
    let(:course_students) { create_list(:course_student, 4, course: course) }
    let(:course_teaching_assistant) { create(:course_teaching_assistant, course: course) }

    before { login_as(user, scope: :user) }

    context 'As a Course Teaching Assistant' do
      let(:user) { course_teaching_assistant.user }

      scenario 'I can filter students by group' do
        group1, group2 = create_list(:course_group, 2, course: course)
        group1_student, group2_student, ungrouped_student = course_students
        create(:course_group_user, group: group1, course_user: group1_student)
        create(:course_group_user, group: group2, course_user: group2_student)

        visit disburse_experience_points_course_users_path(course)
        # All students are listed before a group filter is applied.
        course_students.each do |student|
          expect(page).to have_content_tag_for(student)
        end

        click_link group1.name
        expect(page).to have_content_tag_for(group1_student)
        expect(page).to have_no_content_tag_for(group2_student)
        expect(page).to have_no_content_tag_for(ungrouped_student)
      end

      scenario 'I can copy points awarded for first student to all students', js: true do
        course_students
        visit disburse_experience_points_course_users_path(course)
        first('.course_user').find('input.points_awarded').set '100'
        click_button 'experience-points-disbursement-copy-button'
        course_students.each do |student|
          points_awarded = find(content_tag_selector(student)).find('input.points_awarded').value
          expect(points_awarded).to eq('100')
        end
      end

      scenario 'I can disburse experience points' do
        course_students
        visit disburse_experience_points_course_users_path(course)
        fill_in 'experience_points_disbursement_reason', with: 'a reason'

        student_to_award_points, student_to_set_zero, student_to_set_one,
        student_to_leave_blank = course_students
        expect(page).to have_content_tag_for(student_to_leave_blank)
        find(content_tag_selector(student_to_award_points)).find('input.points_awarded').set '100'
        find(content_tag_selector(student_to_set_one)).find('input.points_awarded').set '1'
        find(content_tag_selector(student_to_set_zero)).find('input.points_awarded').set '0'

        # Blank and zero entries create no records: only two disbursements.
        expect do
          click_button I18n.t('course.experience_points.disbursement.new.submit')
        end.to change(Course::ExperiencePointsRecord, :count).by(2)

        expect(current_path).to eq(disburse_experience_points_course_users_path(course))
      end
    end
  end
end
| 39.097222 | 98 | 0.722558 |
6137f043b3c312f99d2e62e48ff33928e4190c18 | 13,471 | # frozen_string_literal: true
require 'optparse'
require 'fileutils'
require_relative 'core_ext/regexp'
module Rack
class Server
using ::Rack::RegexpExtensions
# Parses rackup's command-line arguments into an options hash and renders
# server/handler specific help text.
class Options
  # Parses +args+ (destructively) and returns the resulting options hash.
  # A trailing non-empty argument is taken as the rackup config file path.
  def parse!(args)
    options = {}
    opt_parser = OptionParser.new("", 24, '  ') do |opts|
      opts.banner = "Usage: rackup [ruby options] [rack options] [rackup config]"

      opts.separator ""
      opts.separator "Ruby options:"

      lineno = 1
      opts.on("-e", "--eval LINE", "evaluate a LINE of code") { |line|
        eval line, TOPLEVEL_BINDING, "-e", lineno
        lineno += 1
      }

      opts.on("-d", "--debug", "set debugging flags (set $DEBUG to true)") {
        options[:debug] = true
      }
      opts.on("-w", "--warn", "turn warnings on for your script") {
        options[:warn] = true
      }
      opts.on("-q", "--quiet", "turn off logging") {
        options[:quiet] = true
      }
      opts.on("-I", "--include PATH",
              "specify $LOAD_PATH (may be used more than once)") { |path|
        (options[:include] ||= []).concat(path.split(":"))
      }
      opts.on("-r", "--require LIBRARY",
              "require the library, before executing your script") { |library|
        options[:require] = library
      }

      opts.separator ""
      opts.separator "Rack options:"
      opts.on("-b", "--builder BUILDER_LINE", "evaluate a BUILDER_LINE of code as a builder script") { |line|
        options[:builder] = line
      }

      opts.on("-s", "--server SERVER", "serve using SERVER (thin/puma/webrick)") { |s|
        options[:server] = s
      }

      opts.on("-o", "--host HOST", "listen on HOST (default: localhost)") { |host|
        options[:Host] = host
      }

      opts.on("-p", "--port PORT", "use PORT (default: 9292)") { |port|
        options[:Port] = port
      }

      opts.on("-O", "--option NAME[=VALUE]", "pass VALUE to the server as option NAME. If no VALUE, sets it to true. Run '#{$0} -s SERVER -h' to get a list of options for SERVER") { |name|
        name, value = name.split('=', 2)
        value = true if value.nil?
        options[name.to_sym] = value
      }

      opts.on("-E", "--env ENVIRONMENT", "use ENVIRONMENT for defaults (default: development)") { |e|
        options[:environment] = e
      }

      opts.on("-D", "--daemonize", "run daemonized in the background") { |d|
        options[:daemonize] = d ? true : false
      }

      opts.on("-P", "--pid FILE", "file to store PID") { |f|
        options[:pid] = ::File.expand_path(f)
      }

      opts.separator ""
      opts.separator "Profiling options:"

      opts.on("--heap HEAPFILE", "Build the application, then dump the heap to HEAPFILE") do |e|
        options[:heapfile] = e
      end

      opts.on("--profile PROFILE", "Dump CPU or Memory profile to PROFILE (defaults to a tempfile)") do |e|
        options[:profile_file] = e
      end

      opts.on("--profile-mode MODE", "Profile mode (cpu|wall|object)") do |e|
        # Validate the mode eagerly; fetch's block raises for unknown modes.
        { cpu: true, wall: true, object: true }.fetch(e.to_sym) do
          raise OptionParser::InvalidOption, "unknown profile mode: #{e}"
        end
        options[:profile_mode] = e.to_sym
      end

      opts.separator ""
      opts.separator "Common options:"

      opts.on_tail("-h", "-?", "--help", "Show this message") do
        puts opts
        puts handler_opts(options)

        exit
      end

      opts.on_tail("--version", "Show version") do
        puts "Rack #{Rack.version} (Release: #{Rack.release})"
        exit
      end
    end

    begin
      opt_parser.parse! args
    rescue OptionParser::InvalidOption => e
      warn e.message
      abort opt_parser.to_s
    end

    # Whatever remains after option parsing is the rackup config file.
    options[:config] = args.last if args.last && !args.last.empty?
    options
  end

  # Builds the "Server-specific options" help section by asking the chosen
  # handler (or the default handler) for its valid_options.
  def handler_opts(options)
    begin
      info = []
      server = Rack::Handler.get(options[:server]) || Rack::Handler.default
      if server && server.respond_to?(:valid_options)
        info << ""
        info << "Server-specific options for #{server.name}:"

        has_options = false
        server.valid_options.each do |name, description|
          next if /^(Host|Port)[^a-zA-Z]/.match?(name.to_s) # ignore handler's host and port options, we do our own.
          info << "  -O %-21s  %s" % [name, description]
          has_options = true
        end
        return "" if !has_options
      end
      info.join("\n")
    rescue NameError
      return "Warning: Could not find handler specified (#{options[:server] || 'default'}) to determine handler-specific options"
    end
  end
end
# Start a new rack server (like running rackup). This will parse ARGV and
# provide standard ARGV rackup options, defaulting to load 'config.ru'.
#
# Providing an options hash will prevent ARGV parsing and will not include
# any default options.
#
# This method can be used to very easily launch a CGI application, for
# example:
#
#  Rack::Server.start(
#    :app => lambda do |e|
#      [200, {'Content-Type' => 'text/html'}, ['hello world']]
#    end,
#    :server => 'cgi'
#  )
#
# Further options available here are documented on Rack::Server#initialize
def self.start(options = nil)
  # Construction decides between explicit options and ARGV parsing.
  new(options).start
end
attr_writer :options

# Options may include:
# * :app
#     a rack application to run (overrides :config and :builder)
# * :builder
#     a string to evaluate a Rack::Builder from
# * :config
#     a rackup configuration file path to load (.ru)
# * :environment
#     this selects the middleware that will be wrapped around
#     your application. Default options available are:
#       - development: CommonLogger, ShowExceptions, and Lint
#       - deployment: CommonLogger
#       - none: no extra middleware
#     note: when the server is a cgi server, CommonLogger is not included.
# * :server
#     choose a specific Rack::Handler, e.g. cgi, fcgi, webrick
# * :daemonize
#     if true, the server will daemonize itself (fork, detach, etc)
# * :pid
#     path to write a pid file after daemonize
# * :Host
#     the host address to bind to (used by supporting Rack::Handler)
# * :Port
#     the port to bind to (used by supporting Rack::Handler)
# * :AccessLog
#     webrick access log options (or supporting Rack::Handler)
# * :debug
#     turn on debug output ($DEBUG = true)
# * :warn
#     turn on warnings ($-w = true)
# * :include
#     add given paths to $LOAD_PATH
# * :require
#     require the given libraries
#
# Additional options for profiling app initialization include:
# * :heapfile
#     location for ObjectSpace.dump_all to write the output to
# * :profile_file
#     location for CPU/Memory (StackProf) profile output (defaults to a tempfile)
# * :profile_mode
#     StackProf profile mode (cpu|wall|object)
def initialize(options = nil)
  @ignore_options = []

  if options
    # Explicit options: skip ARGV parsing and do not merge defaults later.
    @use_default_options = false
    @options = options
    @app = options[:app] if options[:app]
  else
    # SPEC_ARGV lets tests inject a fake ARGV.
    argv = defined?(SPEC_ARGV) ? SPEC_ARGV : ARGV
    @use_default_options = true
    @options = parse_options(argv)
  end
end
# Effective options: defaults merged under the parsed options when we came
# from ARGV, minus any keys the chosen handler asked us to ignore.
def options
  base = @use_default_options ? default_options.merge(@options) : @options
  base.reject { |key, _value| @ignore_options.include?(key) }
end
# Baseline options used when parsing from ARGV. The environment comes from
# RACK_ENV (default "development").
def default_options
  env = ENV['RACK_ENV'] || 'development'
  {
    environment: env,
    pid: nil,
    Port: 9292,
    # Bind only to loopback in development; all interfaces otherwise.
    Host: env == 'development' ? 'localhost' : '0.0.0.0',
    AccessLog: [],
    config: "config.ru"
  }
end
# Lazily builds (and memoizes) the Rack application: from an inline builder
# string when given, otherwise from the rackup config file.
def app
  @app ||= if options[:builder]
    build_app_from_string
  else
    build_app_and_options_from_config
  end
end
class << self
  # Middleware factory: returns a lambda so the decision is made per server
  # instance. CGI-style handlers and :quiet get no CommonLogger.
  def logging_middleware
    lambda { |server|
      /CGI/.match?(server.server.name) || server.options[:quiet] ? nil : [Rack::CommonLogger, $stderr]
    }
  end

  # Default middleware stacks keyed by environment name. The Hash default
  # block means unknown environments transparently get an empty stack.
  def default_middleware_by_environment
    m = Hash.new {|h, k| h[k] = []}
    m["deployment"] = [
      [Rack::ContentLength],
      logging_middleware,
      [Rack::TempfileReaper]
    ]
    m["development"] = [
      [Rack::ContentLength],
      logging_middleware,
      [Rack::ShowExceptions],
      [Rack::Lint],
      [Rack::TempfileReaper]
    ]

    m
  end

  # Hook point: subclasses may override to customize the table.
  def middleware
    default_middleware_by_environment
  end
end
# Instance-level access to the class-level middleware table.
def middleware
  self.class.middleware
end
# Boots the server: applies warn/include/require/debug options, verifies
# the pidfile, loads the app (optionally under a heap dump or StackProf
# profiler), daemonizes, writes the pid, installs an INT handler and
# finally hands the wrapped app to the chosen handler.
def start &blk
  if options[:warn]
    $-w = true
  end

  if includes = options[:include]
    $LOAD_PATH.unshift(*includes)
  end

  if library = options[:require]
    require library
  end

  if options[:debug]
    $DEBUG = true
    require 'pp'
    p options[:server]
    pp wrapped_app
    pp app
  end

  # Refuse to start when another live process owns the pidfile.
  check_pid! if options[:pid]

  # Touch the wrapped app, so that the config.ru is loaded before
  # daemonization (i.e. before chdir, etc).
  handle_profiling(options[:heapfile], options[:profile_mode], options[:profile_file]) do
    wrapped_app
  end

  daemonize_app if options[:daemonize]

  write_pid if options[:pid]

  trap(:INT) do
    # Prefer a graceful shutdown when the handler supports it.
    if server.respond_to?(:shutdown)
      server.shutdown
    else
      exit
    end
  end

  server.run wrapped_app, options, &blk
end
# Resolves (and memoizes) the Rack::Handler to run under, falling back to
# the default handler when none was requested or found.
def server
  @_server ||= Rack::Handler.get(options[:server])

  unless @_server
    @_server = Rack::Handler.default

    # We already speak FastCGI: the web server owns the socket, so drop
    # the :File/:Port options before they reach the handler.
    @ignore_options = [:File, :Port] if @_server.to_s == 'Rack::Handler::FastCGI'
  end

  @_server
end
private

# Loads the rackup config file and merges any options it declares, without
# overriding options set explicitly (the merge block keeps `old`).
def build_app_and_options_from_config
  if !::File.exist? options[:config]
    abort "configuration #{options[:config]} not found"
  end

  # NOTE: the destructured local `options` shadows the #options reader for
  # the rest of this method; `self.options` is used to read the real one.
  app, options = Rack::Builder.parse_file(self.options[:config], opt_parser)
  @options.merge!(options) { |key, old, new| old }
  app
end
# Optionally profiles app initialization. With +heapfile+, dumps the full
# ObjectSpace heap after loading the app and exits. With +profile_mode+,
# runs the load under StackProf, writes the profile to +filename+ (or a
# generated tempfile name) and exits. Otherwise simply yields.
def handle_profiling(heapfile, profile_mode, filename)
  if heapfile
    require "objspace"
    ObjectSpace.trace_object_allocations_start
    yield
    GC.start
    ::File.open(heapfile, "w") { |f| ObjectSpace.dump_all(output: f) }
    exit
  end

  if profile_mode
    require "stackprof"
    require "tempfile"

    # Renamed the block variable so it no longer shadows +filename+.
    make_profile_name(filename) do |profile_path|
      ::File.open(profile_path, "w") do |f|
        StackProf.run(mode: profile_mode, out: f) do
          yield
        end
        # BUG FIX: this message was a corrupted literal ("#(unknown)");
        # it now interpolates the actual output path.
        puts "Profile written to: #{profile_path}"
      end
    end
    exit
  end

  yield
end
# Yields the profile output path: the caller-supplied name when given,
# otherwise a freshly generated tempfile-style name.
def make_profile_name(filename)
  return yield(filename) if filename

  ::Dir::Tmpname.create("profile.dump") do |tmpname, _, _|
    yield tmpname
  end
end
# Builds the app from an inline builder string via Rack::Builder.
def build_app_from_string
  Rack::Builder.new_from_string(self.options[:builder])
end
# Parses CLI arguments into the options hash, expanding the config path to
# an absolute one and exporting the chosen environment via RACK_ENV.
def parse_options(args)
  # Don't evaluate CGI ISINDEX parameters.
  # http://www.meb.uni-bonn.de/docs/cgi/cl.html
  args.clear if ENV.include?(REQUEST_METHOD)

  @options = opt_parser.parse!(args)
  @options[:config] = ::File.expand_path(options[:config])
  ENV["RACK_ENV"] = options[:environment]
  @options
end
# Factory for the CLI option parser (the Options class above).
def opt_parser
  Options.new
end
# Wraps +app+ in the default middleware stack for the current environment.
# reverse_each so the first entry in the table ends up outermost.
def build_app(app)
  middleware[options[:environment]].reverse_each do |spec|
    # Callable entries (e.g. logging_middleware) decide at runtime; nil
    # means "skip this middleware".
    spec = spec.call(self) if spec.respond_to?(:call)
    next unless spec

    klass, *args = spec
    app = klass.new(app, *args)
  end
  app
end
# Memoized application wrapped in the environment's middleware stack.
def wrapped_app
  @wrapped_app ||= build_app app
end
# Detaches the process from the controlling terminal (Process.daemon).
def daemonize_app
  Process.daemon
end
# Writes the current pid to the pidfile. O_EXCL makes the creation atomic;
# on collision we re-validate the existing pidfile (check_pid! deletes a
# stale one, or exits) and retry.
def write_pid
  ::File.open(options[:pid], ::File::CREAT | ::File::EXCL | ::File::WRONLY ){ |f| f.write("#{Process.pid}") }
  at_exit { ::FileUtils.rm_f(options[:pid]) }
rescue Errno::EEXIST
  check_pid!
  retry
end
# Exits if another live (or unreadable) process owns the pidfile; deletes
# a stale pidfile so startup can proceed.
def check_pid!
  status = pidfile_process_status
  if status == :running || status == :not_owned
    $stderr.puts "A server is already running. Check #{options[:pid]}."
    exit(1)
  elsif status == :dead
    ::File.delete(options[:pid])
  end
end
# Classifies the pidfile: :exited (no file), :dead (unparsable pid or no
# such process), :not_owned (process exists but we lack permission), or
# :running. Signal 0 probes existence without delivering a signal.
def pidfile_process_status
  pidfile = options[:pid]
  return :exited unless ::File.exist?(pidfile)

  pid = ::File.read(pidfile).to_i
  return :dead if pid == 0

  Process.kill(0, pid)
  :running
rescue Errno::ESRCH
  :dead
rescue Errno::EPERM
  :not_owned
end
end
end
| 28.907725 | 192 | 0.554821 |
abd1644141f1e3c75e64cc28a02c6ed5c34eab15 | 287 | FactoryGirl.define do
# Define your Spree extensions Factories within this file to enable applications, and other extensions to use and override them.
#
# Example adding this to your spec_helper will load these Factories for use:
# require 'spree_travel_adventure/factories'
end
| 41 | 130 | 0.794425 |
182ef224113dd993a07c8b77ea9c46a740a6a302 | 2,736 | require "rails_helper"
require "spec_helper"
require "test_data_helper"
# Model specs for Player: validation rules plus the dealt_train_cars
# association. Colour uniqueness is scoped per game (see the "existing
# Green player" contexts).
describe Player do
  let(:train_car_type) { TrainCarType.find_by(name: "Locomotive") }
  let(:name) { "Player 1" }
  let(:name_alt) { "Player 2" }
  let(:colour) { "Green" }
  let(:players) { test_players }
  let(:game) { Game.new(current_player: players.first) }
  let(:game_alt) { Game.new(current_player: players.second) }
  let(:player) { Player.new(parameters) }
  let(:train_pieces) { 45 }
  let(:score) { 0 }
  let(:parameters) { {name: name, colour: colour, game: game, train_pieces: train_pieces, score: score} }

  context "provided a valid name, colour and game" do
    it "is valid" do
      expect(player).to be_valid
    end
  end

  # Reused expectation for the invalid-attribute contexts below; each
  # context overrides a single `let` to break one validation.
  shared_examples "player is invalid" do
    it "is invalid" do
      expect(player).not_to be_valid
    end
  end

  describe "on initialize" do
    context "if the name is nil" do
      let(:name) { nil }
      include_examples "player is invalid"
    end

    context "when the score is nil" do
      let(:score) { nil }
      include_examples "player is invalid"
    end

    context "when the colour is nil" do
      let(:colour) { nil }
      include_examples "player is invalid"
    end

    context "when the game is nil" do
      let(:game) { nil }
      include_examples "player is invalid"
    end

    context "when the train_pieces are nil" do
      let(:train_pieces) { nil }
      include_examples "player is invalid"
    end

    context "when the train_pieces are negative" do
      let(:train_pieces) { -40 }
      include_examples "player is invalid"
    end

    context "when the score is negative" do
      let(:score) { -40 }
      include_examples "player is invalid"
    end

    context "when there is an existing Green player" do
      before do
        Player.create!(parameters)
      end

      it "creating another Green player for the same game is invalid" do
        expect(Player.new({name: name_alt, colour: colour, game: game, train_pieces: 20, score: 0})).not_to be_valid
      end

      it "creating another Green player for a different game is valid" do
        expect(Player.new({name: name_alt, colour: colour, game: game_alt, train_pieces: 20, score: 0})).to be_valid
      end
    end
  end

  describe "#dealt_train_cars" do
    context "when no dealt train cars" do
      it "should be empty" do
        expect(player.dealt_train_cars).to eq []
      end
    end

    context "when a player is dealt a train car" do
      before do
        @dealt_car = DealtTrainCar.create!(player: player, train_car_type: train_car_type)
      end

      it "should contain the dealt car" do
        expect(player.dealt_train_cars).to eq [@dealt_car]
      end
    end
  end
end
| 25.811321 | 116 | 0.649854 |
1a747a16cceb1e6f692d3cb1e2777cbc6ff3b5a3 | 5,282 | module CTA
class CustomerAlerts
  # Cache API responses by default; see .cache_responses= to disable.
  @cache_responses = true

  # Returns the connection object we use to talk to the CustomerAlerts API
  def self.connection
    @connection ||= Faraday.new do |faraday|
      faraday.url_prefix = 'http://www.transitchicago.com/api/1.0/'
      faraday.use CTA::CustomerAlerts::Parser, !!@debug
      if @cache_responses
        # Default store is a simple Hash-backed cache.
        faraday.response :caching, (@cache || SimpleCache.new(Hash.new))
      end
      faraday.adapter Faraday.default_adapter
    end
  end

  # Returns an overview of system status.
  # @param [Hash] options
  # @option options [Array<Integer> Array<String>, Integer, String] :routes Routes to query for status
  # @option options [String, Integer] :stations Station to query for status
  # @return [CTA::CustomerAlerts::RouteStatusResponse]
  # @example
  #   CTA::CustomerAlerts.status!(:routes => [8,22])
  def self.status!(options = {})
    allowed_keys = [:routes, :station]
    if options.keys.any? { |k| !allowed_keys.include?(k) }
      raise "Illegal argument!"
    end

    routes = Array.wrap(options[:routes]).flatten.compact.uniq.join(',')
    stations = Array.wrap(options[:station]).flatten.compact.uniq

    if stations.size > 1
      raise "Can only specify one station!"
    end

    # NOTE(review): :type is not in allowed_keys, so options[:type] is
    # always nil here — confirm whether a :type option was intended.
    connection.get('routes.aspx', { :type => options[:type], :routeid => routes, :stationid => stations.first })
  end

  # Returns alerts for given routes or stations
  # @param [Hash] options
  # @option options [Array<Integer> Array<String>, Integer, String] :routes Routes to query for alerts. Not available with :station
  # @option options [Integer, String] :station Station to query for alerts. Not available with :route
  # @option options [true, false] :active Only return active alerts
  # @option options [true, false] :accessibility Include alerts related to accessibility (elevators, etc)
  # @option options [true, false] :planned Only return planned alerts
  # @option options [Integer] :recent_days Only return alerts within the specified number of days
  # @option options [Integer] :before Only return alerts starting prior to the specified number of days
  # @return [CTA::CustomerAlerts::AlertsResponse]
  # @example
  #   CTA::CustomerAlerts.alerts!(:route => 8)
  def self.alerts!(options = {})
    allowed_keys = [:active, :accessibility, :planned, :routes, :station, :recent_days, :before]
    if options.keys.any? { |k| !allowed_keys.include?(k) }
      raise "Illegal argument!"
    end

    params = {}
    params.merge!({ :activeonly => options[:active] }) if options[:active]
    # BUG FIX: previously read options[:accessiblity] (misspelled), so the
    # accessibility flag was always forwarded as nil.
    params.merge!({ :accessibility => options[:accessibility] }) if options[:accessibility]
    params.merge!({ :planned => options[:planned] }) if options[:planned]

    routes = Array.wrap(options[:routes]).flatten.compact.uniq
    stations = Array.wrap(options[:station]).flatten.compact.uniq

    if stations.size > 1
      raise "Can only specify one station!"
    end

    if routes.any? && stations.any?
      raise "Cannot use route and station together!"
    end

    if options[:recent_days] && options[:before]
      raise "Cannot use recent_days and before together!"
    end

    params.merge!({ :stationid => stations.first }) if stations.any?
    params.merge!({ :routeid => routes.join(',') }) if routes.any?
    params.merge!({ :recentdays => options[:recent_days] }) if options[:recent_days]
    params.merge!({ :bystartdate => options[:before] }) if options[:before]

    connection.get('alerts.aspx', params)
  end

  # Returns the debug status of the API. When in debug mode, all API responses will additionally return
  # the parsed XML tree, and the original XML for inspection
  def self.debug
    !!@debug
  end

  # Sets the debug status of the API. When in debug mode, all API responses will additionally return
  # the parsed XML tree, and the original XML for inspection
  # @param debug [true, false]
  def self.debug=(debug)
    @debug = debug
    # Drop the memoized connection so the new flag takes effect.
    @connection = nil
  end

  # Returns whether or not cta_redux is caching responses
  # @return [true, false]
  def self.cache_responses
    @cache_responses
  end

  # Sets whether or not cta_redux is caching responses
  # @param [true, false] should_cache
  def self.cache_responses=(should_cache)
    @cache_responses = should_cache
    @connection = nil
  end

  # Returns the underlying cache object caching responses (if we're actually caching responses)
  # @return [Object]
  def self.cache
    if self.cache_responses
      # This is ugly: digs the cache argument back out of the Faraday
      # middleware stack when no explicit cache object was assigned.
      @cache || self.connection.builder.handlers.find { |x| x == FaradayMiddleware::Caching }.instance_variable_get(:@args).first
    else
      nil
    end
  end

  # Sets the underlying cache object caching responses. Any object can be used that responds to #read, #write, and #fetch
  # @note Setting the cache object resets the connection. If you're using the default SimpleCache strategy (built-in 60
  #   second caching), then it will also *clear* the cache.
  # @param [Object] cache
  def self.cache=(cache)
    @cache = cache
    @connection = nil
  end
end
end
| 39.41791 | 133 | 0.665468 |
6114f42a4ff039cdd3680b779052b09e252d4947 | 2,755 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Security::Mgmt::V2018_06_01
module Models
#
# Pricing tier will be applied for the scope based on the resource ID
#
class Pricing < Resource
include MsRestAzure
# @return [PricingTier] The pricing tier value. Azure Security Center is
# provided in two pricing tiers: free and standard, with the standard
# tier available with a trial period. The standard tier offers advanced
# security capabilities, while the free tier offers basic security
# features. Possible values include: 'Free', 'Standard'
attr_accessor :pricing_tier
# @return [Duration] The duration left for the subscriptions free trial
# period - in ISO 8601 format (e.g. P3Y6M4DT12H30M5S).
attr_accessor :free_trial_remaining_time
#
# Mapper for Pricing class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Pricing',
type: {
name: 'Composite',
class_name: 'Pricing',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
pricing_tier: {
client_side_validation: true,
required: true,
serialized_name: 'properties.pricingTier',
type: {
name: 'String'
}
},
free_trial_remaining_time: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.freeTrialRemainingTime',
type: {
name: 'TimeSpan'
}
}
}
}
}
end
end
end
end
| 30.611111 | 78 | 0.505263 |
18154a27e9d0b8f298e2d64086b5762bc54658df | 674 | describe "Convert Background Color" do
before do
@object = Object.new
@object.extend Stalactoast::Conversions
end
it "sets the appropriate CRToast key" do
d = {}
@object.convert_background_color({}, d)
d[KCRToastBackgroundColorKey].should != nil
end
it "has red as default" do
@object.convert_background_color({}, {}).should == UIColor.redColor
end
it "sets the value correctly" do
c = UIColor.blueColor
@object.convert_background_color({background_color: c}, {}).should == c
end
it "supports bg_color as well" do
c = UIColor.blueColor
@object.convert_background_color({bg_color: c}, {}).should == c
end
end
| 23.241379 | 75 | 0.68546 |
ab2190a6f78d71ea76a7265566a59567ba6e51e5 | 6,110 | require "autoscaling/models/AlarmDiff"
module Cumulus
module AutoScaling
# Public: A class that encapsulates data about configuration for an autoscaling
# Cloudwatch alarm.
#
# The action to be taken is inferred to be activating the policy that contains
# the alarm. As such, we don't keep arrays of actions to take when various alarm
# states are triggered. We just apply the action (activating the policy) to the
# states contained in the `action_states` array. Valid values are "alarm", "ok",
# and "insufficient-data".
class AlarmConfig

  attr_reader :action_states
  attr_reader :actions_enabled
  attr_reader :comparison
  attr_reader :description
  attr_reader :dimensions
  attr_reader :evaluation_periods
  attr_reader :metric
  attr_reader :name
  attr_reader :namespace
  attr_reader :period
  attr_reader :statistic
  attr_reader :threshold
  attr_reader :unit

  # Public: Constructor
  #
  # json - a hash containing the JSON configuration for the alarm
  def initialize(json = nil)
    if !json.nil?
      @action_states = json["action-states"]
      @actions_enabled = json["actions-enabled"]
      @comparison = json["comparison"]
      @description = json["description"]
      @dimensions = json["dimensions"]
      @evaluation_periods = json["evaluation-periods"]
      @metric = json["metric"]
      @name = json["name"]
      @namespace = json["namespace"]
      @period = json["period-seconds"]
      @statistic = json["statistic"]
      @threshold = json["threshold"]
      @unit = json["unit"]
    end
  end

  # Public: Get the configuration as a hash
  #
  # Returns the hash
  #
  # NOTE(review): this overrides Object#hash with a method returning a
  # Hash (not an Integer), so instances must not be used as Hash keys.
  def hash
    {
      "name" => @name,
      "action-states" => @action_states,
      "actions-enabled" => @actions_enabled,
      "comparison" => @comparison,
      "description" => @description,
      "dimensions" => @dimensions,
      "evaluation-periods" => @evaluation_periods,
      "metric" => @metric,
      "namespace" => @namespace,
      "period-seconds" => @period,
      "statistic" => @statistic,
      "threshold" => @threshold,
      "unit" => @unit
    }.reject { |k, v| v.nil? }
  end

  # Public: Populate the AlarmConfig from an existing AWS cloudwatch alarm
  #
  # policy_arn - the arn of the policy the alarm should be attached to
  # resource - the aws resource to populate from
  def populate(policy_arn, resource)
    # A state is "active" when the policy appears among that state's
    # actions on the AWS alarm.
    action_states = []
    if resource.ok_actions.include?(policy_arn) then action_states << "ok" end
    if resource.alarm_actions.include?(policy_arn) then action_states << "alarm" end
    if resource.insufficient_data_actions.include?(policy_arn) then action_states << "insufficient-data" end

    @action_states = action_states
    @actions_enabled = resource.actions_enabled
    @comparison = resource.comparison_operator
    @description = resource.alarm_description
    @dimensions = Hash[resource.dimensions.map { |d| [d.name, d.value] }]
    @evaluation_periods = resource.evaluation_periods
    @metric = resource.metric_name
    @name = resource.alarm_name
    @namespace = resource.namespace
    @period = resource.period
    @statistic = resource.statistic
    @threshold = resource.threshold
    @unit = resource.unit
  end

  # Public: Produce the differences between this local configuration and the
  # configuration in AWS
  #
  # aws - the alarm in AWS
  # policy_arn - the policy arn is the action this alarm should take
  #
  # Returns an array of AlarmDiff objects representing the differences
  def diff(aws, policy_arn)
    diffs = []

    if @description != aws.alarm_description
      diffs << AlarmDiff.new(AlarmChange::DESCRIPTION, aws, self)
    end
    if @actions_enabled != aws.actions_enabled
      diffs << AlarmDiff.new(AlarmChange::ENABLED, aws, self)
    end
    if @comparison != aws.comparison_operator
      diffs << AlarmDiff.new(AlarmChange::COMPARISON, aws, self)
    end
    if @evaluation_periods != aws.evaluation_periods
      diffs << AlarmDiff.new(AlarmChange::EVALUATION, aws, self)
    end
    if @metric != aws.metric_name
      diffs << AlarmDiff.new(AlarmChange::METRIC, aws, self)
    end
    if @namespace != aws.namespace
      diffs << AlarmDiff.new(AlarmChange::NAMESPACE, aws, self)
    end
    if @period != aws.period
      diffs << AlarmDiff.new(AlarmChange::PERIOD, aws, self)
    end
    if @statistic != aws.statistic
      diffs << AlarmDiff.new(AlarmChange::STATISTIC, aws, self)
    end
    if @threshold != aws.threshold
      diffs << AlarmDiff.new(AlarmChange::THRESHOLD, aws, self)
    end
    if @unit != aws.unit
      diffs << AlarmDiff.new(AlarmChange::UNIT, aws, self)
    end

    aws_dimensions = Hash[aws.dimensions.map { |d| [d.name, d.value] }]
    if @dimensions != aws_dimensions
      diffs << AlarmDiff.new(AlarmChange::DIMENSIONS, aws, self)
    end

    # For each state, the only expected action is exactly [policy_arn]
    # when the state is configured locally, and [] otherwise; anything
    # else is a diff.
    ["ok", "alarm", "insufficient-data"].each do |state|
      case state
      when "ok"
        actions = aws.ok_actions
        change_type = AlarmChange::OK
      when "alarm"
        actions = aws.alarm_actions
        change_type = AlarmChange::ALARM
      when "insufficient-data"
        actions = aws.insufficient_data_actions
        change_type = AlarmChange::INSUFFICIENT
      end

      if (!@action_states.include?(state) and actions.size != 0) or
        (@action_states.include?(state) and (actions.size != 1 or actions[0] != policy_arn))
        diff = AlarmDiff.new(change_type, aws, self)
        diff.policy_arn = policy_arn
        diffs << diff
      end
    end

    diffs
  end
end
end
end
| 36.807229 | 112 | 0.60982 |
# Unicorn configuration for the docd app (Capistrano-style shared/current
# directory layout).
listen '/home/docd/docd/shared/tmp/sockets/unicorn.sock'
pid '/home/docd/docd/shared/tmp/pids/unicorn.pid'
# Kill workers stuck on a single request for more than 60 seconds.
timeout 60
worker_processes 4
working_directory '/home/docd/docd/current'
| 30.166667 | 56 | 0.801105 |
d5021a1fd10c5222cd352daf6d1f64cdd89abc5a | 7,834 | # Copyright (c) 2012-2019 Snowplow Analytics Ltd. All rights reserved.
#
# This program is licensed to you under the Apache License Version 2.0,
# and you may not use this file except in compliance with the Apache License Version 2.0.
# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the Apache License Version 2.0 is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
# Author:: Ben Fradet (mailto:[email protected])
# Copyright:: Copyright (c) 2012-2019 Snowplow Analytics Ltd
# License:: Apache License Version 2.0
require 'contracts'
require 'iglu-client'
# Implementation of Generator for emr clusters
module Snowplow
module EmrEtlRunner
class EmrConfigGenerator
include Snowplow::EmrEtlRunner::Generator
include Snowplow::EmrEtlRunner::Utils
include Contracts
STANDARD_HOSTED_ASSETS = "s3://snowplow-hosted-assets"
AMI_4 = Gem::Version.new("4.0.0")
AMI_5 = Gem::Version.new("5.0.0")
Contract String => Iglu::SchemaKey
def get_schema_key(version)
Iglu::SchemaKey.parse_key("iglu:com.snowplowanalytics.dataflowrunner/ClusterConfig/avro/#{version}")
end
Contract ConfigHash, Bool, Maybe[String], String, ArrayOf[String] => Hash
def create_datum(config, debug=false, resume_from=nil, resolver='', enrichments=[])
ami_version = Gem::Version.new(config[:aws][:emr][:ami_version])
region = config[:aws][:emr][:region]
{
"name" => config[:aws][:emr][:jobflow][:job_name],
"logUri" => config[:aws][:s3][:buckets][:log],
"region" => region,
"credentials" => {
"accessKeyId" => config[:aws][:access_key_id],
"secretAccessKey" => config[:aws][:secret_access_key]
},
"roles" => {
"jobflow" => config[:aws][:emr][:jobflow_role],
"service" => config[:aws][:emr][:service_role]
},
"ec2" => {
"amiVersion" => config[:aws][:emr][:ami_version],
"keyName" => config[:aws][:emr][:ec2_key_name],
"location" => get_location_hash(
config[:aws][:emr][:ec2_subnet_id], config[:aws][:emr][:placement]),
"instances" => {
"master" => { "type" => config[:aws][:emr][:jobflow][:master_instance_type] },
"core" => {
"type" => config[:aws][:emr][:jobflow][:core_instance_type],
"count" => config[:aws][:emr][:jobflow][:core_instance_count],
"ebsConfiguration" =>
get_ebs_configuration(config[:aws][:emr][:jobflow][:core_instance_ebs])
},
"task" => {
"type" => config[:aws][:emr][:jobflow][:task_instance_type],
"count" => config[:aws][:emr][:jobflow][:task_instance_count],
"bid" => config[:aws][:emr][:jobflow][:task_instance_bid].to_s
}
}
},
"tags" => get_tags(config[:monitoring][:tags]),
"bootstrapActionConfigs" => get_bootstrap_actions(
config[:aws][:emr][:bootstrap],
config.dig(:collectors, :format),
ami_version,
region,
config[:enrich][:versions][:spark_enrich]
),
"configurations" => get_configurations(ami_version),
"applications" => ["Hadoop", "Spark"]
}
end
private
Contract Hash => ArrayOf[Hash]
def get_tags(tags)
ts = tags.map do |k, v|
{ "key" => k.to_s, "value" => v.to_s }
end
ts.to_a
end
Contract Maybe[String], Maybe[String] => Hash
def get_location_hash(subnet, placement)
if subnet.nil?
{ "classic" => { "availabilityZone" => placement } }
else
{ "vpc" => { "subnetId" => subnet } }
end
end
Contract Gem::Version => ArrayOf[Hash]
def get_configurations(ami_version)
if ami_version < AMI_4
[]
else
[
{
"classification" => "core-site",
"properties" => { "io.file.buffer.size" => "65536" }
},
{
"classification" => "mapred-site",
"properties" => { "mapreduce.user.classpath.first" => "true" }
}
]
end
end
Contract Maybe[Hash] => Hash
def get_ebs_configuration(ebs_config)
if ebs_config.nil?
{}
else
{
"ebsOptimized" => ebs_config[:ebs_optimized].nil? ? true : ebs_config[:ebs_optimized],
"ebsBlockDeviceConfigs" => [
{
"volumesPerInstance" => 1,
"volumeSpecification" => {
"iops" => ebs_config[:volume_type] == "io1" ? ebs_config[:volume_iops] : 1,
"sizeInGB" => ebs_config[:volume_size],
"volumeType" => ebs_config[:volume_type]
}
}
]
}
end
end
Contract ArrayOf[Hash], Maybe[String], Gem::Version, String, String => ArrayOf[Hash]
def get_bootstrap_actions(actions, collector_format, ami_version, region, enrich_version)
bs_actions = []
bs_actions += actions
if collector_format == 'thrift' && ami_version < AMI_4
bs_actions += [
get_action("Hadoop bootstrap action (buffer size)",
"s3n://elasticmapreduce/bootstrap-actions/configure-hadoop",
[ "-c", "io.file.buffer.size=65536" ]
),
get_action("Hadoop bootstrap action (user cp first)",
"s3n://elasticmapreduce/bootstrap-actions/configure-hadoop",
[ "-m", "mapreduce.user.classpath.first=true" ]
)
]
else
end
bs_actions << get_ami_action(ami_version, region, enrich_version)
bs_actions
end
Contract String => Hash
def get_lingual_action(lingual_version)
get_action("Bootstrap action (installing Lingual)",
"s3://files.concurrentinc.com/lingual/#{lingual_version}/lingual-client/install-lingual-client.sh")
end
Contract String => Hash
def get_hbase_action(region)
get_action("Bootstrap action (installing HBase)",
"s3://#{region}.elasticmapreduce/bootstrap-actions/setup-hbase")
end
Contract Gem::Version, String, String => Hash
def get_ami_action(ami_version, region, enrich_version)
standard_assets_bucket =
get_hosted_assets_bucket(STANDARD_HOSTED_ASSETS, STANDARD_HOSTED_ASSETS, region)
bootstrap_script_location = if ami_version < AMI_4
"#{standard_assets_bucket}common/emr/snowplow-ami3-bootstrap-0.1.0.sh"
elsif ami_version >= AMI_4 && ami_version < AMI_5
"#{standard_assets_bucket}common/emr/snowplow-ami4-bootstrap-0.2.0.sh"
else
"#{standard_assets_bucket}common/emr/snowplow-ami5-bootstrap-0.1.0.sh"
end
cc_version = get_cc_version(enrich_version)
unless cc_version.nil?
get_action("Bootstrap action (ami bootstrap script)",
bootstrap_script_location, [ cc_version ])
end
end
Contract String, String, ArrayOf[String] => Hash
def get_action(name, path, args=[])
{
"name" => name,
"scriptBootstrapAction" => {
"path" => path,
"args" => args
}
}
end
end
end
end
| 37.304762 | 113 | 0.57761 |
# Load the rails application (config/application.rb).
require File.expand_path('../application', __FILE__)

# Initialize the rails application.
ProviderStore::Application.initialize!
| 26.166667 | 52 | 0.808917 |
ff7a871c0eb94bab7ae2769f13ccd34f55179083 | 1,716 | require 'pry'
# Scrapes bookdaily.com for a random, not-yet-shown book sample in a genre.
class BookBytes::Scraper
  # NOTE(review): unused instance accessor; the class stores the genre in
  # a class-level @genre read via self.genre below.
  attr_accessor :genre

  # Genre selected by the most recent get_genre_page call.
  def self.genre
    @genre
  end

  # Finds the browse-page entry for +genre_name+ and returns the URL of
  # that genre's listing page (60 books per page).
  def self.get_genre_page(genre_name)
    @genre = BookBytes::Genre.find_genre(genre_name)
    genre_html = Nokogiri::HTML(open("http://www.bookdaily.com/browse")).css("section.genre-categories li > h3").detect do |genre|
      genre.css("a").attribute("title").value == genre_name
    end
    extension = genre_html.css("a").attribute("href").value
    "http://www.bookdaily.com#{extension}?perpage=60"
  end

  # From the genre listing page, returns the URL of one random book page.
  def self.get_random_book_page(genre_name)
    # BUG FIX: rand(59) + 1 yields 1..59 and could never select the 60th
    # book on a perpage=60 listing; rand(60) + 1 covers 1..60.
    url = Nokogiri::HTML(open(get_genre_page(genre_name))).css("#nodeGrid article:nth-child(#{rand(60) + 1}) a").attribute("href").value
    "http://www.bookdaily.com#{url}"
  end

  # Scrapes a random book's metadata and sample text, retrying with a new
  # random pick when the book has already been shown.
  def self.get_random_book(genre_name)
    book_info = Nokogiri::HTML(open(get_random_book_page(genre_name))).css("section.book")
    title = book_info.css("section.book-intro h1.booktitle").text
    author = book_info.css("p:nth-child(2) a").text
    genre = self.genre
    if check_for_duplicate?(title, author)
      text = ""
      i = 0
      # Append one paragraph at a time until the sample exceeds 1000 chars.
      until text.length > 1000
        i += 1
        text << book_info.css("article.book-details p:nth-child(#{i})").text
      end
      BookBytes::Book.create_new_book(title, author, genre, text)
    else
      # Duplicate found — pick another random book.
      get_random_book(genre_name)
    end
  end

  # Returns true when no already-shown book matches this title/author pair.
  def self.check_for_duplicate?(title, author)
    BookBytes::Book.shown.all? do |book|
      # BUG FIX: a duplicate is the SAME title AND the SAME author, so a
      # non-duplicate differs in title OR author. The previous `&&` also
      # rejected books that merely shared a title (or an author) with a
      # shown book.
      book.title != title || book.author != author
    end
  end
end
| 29.084746 | 136 | 0.668998 |
# Sidekiq worker that re-kicks security scans: prefers queued/in-progress
# scans, falling back to all team scans when none are pending.
class StartScanWorker
  include Sidekiq::Worker
  include Sidekiq::Status::Worker
  include Base

  sidekiq_options({ :queue => :default, :retry => true })

  def perform
    scans = Scaner.where("team_id is not null and (status='Queued' or status='Scanning')").order("created_at DESC")
    unless scans.present?
      scans = Scaner.where("team_id is not null").order("created_at DESC")
    end
    scans.each do |scan|
      begin
        scaner_instance = scan.scaner_instances.build
        scaner_instance.save
        if scan.source != 'gitlab' && scan.source != 'github'
          # Source-specific worker, e.g. "foo" -> FooScanWorker.
          klass = (scan.source.camelize + 'ScanWorker').constantize
          Timeout::timeout(2.hours) { klass.new.perform(scan.id, scaner_instance.id) }
        else
          Timeout::timeout(2.hours) { GitScanWorker.new.perform(scan.id, scaner_instance.id) }
        end
      rescue StandardError => e
        # BUG FIX: previously `rescue Exception` swallowed every error
        # silently (including SignalException/SystemExit). Log the failure
        # and continue with the next scan (best-effort semantics kept).
        logger.error("StartScanWorker: scan #{scan.id} failed: #{e.class}: #{e.message}")
      end
    end
  end
end
| 31.148148 | 114 | 0.705113 |
38c65c480dc858e2c02a83c01acf9e0efd59bce3 | 352 | # frozen_string_literal: true
module Api
  module V3
    # Incremental sync endpoint for a project's subdomains.
    class SubdomainsController < Api::V1::ApiApplicationController
      include Syncable
      respond_to :json

      # GET index: returns the project's subdomains filtered through
      # Syncable#to_sync using params[:last_sync_time].
      def index
        project = Project.find params[:project_id]
        @subdomains = to_sync(project.subdomains, 'subdomains', params[:last_sync_time])
      end
    end
  end
end
| 22 | 88 | 0.690341 |
33131297482ce0201416c9f01e23fad4a4bc2054 | 8,486 | require "test_helper"
require "gds_api/test_helpers/publishing_api"
# Tests for the publishing-api presenter of DetailedGuide editions: schema
# validity, full payload contents, links (topics/parent/related mainstream),
# political flags, national applicability, images and attachments.
class PublishingApi::DetailedGuidePresenterTest < ActiveSupport::TestCase
  include GdsApi::TestHelpers::PublishingApi

  # Wraps an edition in the presenter under test.
  def present(edition)
    PublishingApi::DetailedGuidePresenter.new(edition)
  end

  test "DetailedGuide presenter passes schema tests" do
    create(:government)
    detailed_guide = create(
      :detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
    )
    presented_item = present(detailed_guide)
    assert_valid_against_schema(presented_item.content, "detailed_guide")
    assert_valid_against_links_schema({ links: presented_item.links }, "detailed_guide")
  end

  test "DetailedGuide presents correct information" do
    government = create(:government)
    detailed_guide = create(
      :detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
    )
    EditionPolicy.create!(edition_id: detailed_guide.id, policy_content_id: "dc6d2e0e-8f5d-4c3f-aaea-c890e07d0cf8")
    public_path = Whitehall.url_maker.public_document_path(detailed_guide)

    # Full expected publishing-api payload for this edition.
    expected_content = {
      base_path: public_path,
      title: "Some detailed guide",
      description: "Some summary",
      public_updated_at: detailed_guide.updated_at,
      schema_name: "detailed_guide",
      document_type: "detailed_guide",
      locale: "en",
      publishing_app: "whitehall",
      rendering_app: "government-frontend",
      routes: [
        { path: public_path, type: "exact" },
      ],
      redirects: [],
      update_type: "major",
      details: {
        body: "<div class=\"govspeak\"><p>Some content</p></div>",
        change_history: [],
        tags: {
          browse_pages: [],
          topics: [],
        },
        political: false,
        related_mainstream_content: [],
        emphasised_organisations: detailed_guide.lead_organisations.map(&:content_id),
        attachments: [],
      },
      links: {
        organisations: detailed_guide.organisations.map(&:content_id),
        primary_publishing_organisation: [
          detailed_guide.lead_organisations.first.content_id,
        ],
        original_primary_publishing_organisation: [
          detailed_guide.document.editions.first.lead_organisations.first.content_id,
        ],
        parent: [],
        related_guides: [],
        related_mainstream_content: [],
        government: [government.content_id],
      },
    }
    expected_links = {
      topics: [],
    }
    presented_item = present(detailed_guide)

    # The rendered govspeak body is compared structurally, not byte-for-byte,
    # so it is deleted from both hashes and asserted separately.
    assert_equal expected_content.except(:details), presented_item.content.except(:details)
    assert_equivalent_html expected_content[:details].delete(:body), presented_item.content[:details].delete(:body)
    assert_equal expected_content[:details], presented_item.content[:details].except(:body)
    assert_hash_includes presented_item.links, expected_links
    assert_equal detailed_guide.document.content_id, presented_item.content_id
  end

  test "links hash includes topics and parent if set" do
    edition = create(:detailed_guide)
    create(:specialist_sector, topic_content_id: "content_id_1", edition: edition, primary: true)
    create(:specialist_sector, topic_content_id: "content_id_2", edition: edition, primary: false)
    links = present(edition).links
    edition_links = present(edition).edition_links

    # All sectors appear as topics; only the primary one becomes the parent.
    assert_equal links[:topics], %w[content_id_1 content_id_2]
    assert_equal edition_links[:parent], %w[content_id_1]
  end

  test "DetailedGuide presents related mainstream in links and details" do
    lookup_hash = {
      "/mainstream-content" => "9dd9e077-ae45-45f6-ad9d-2a484e5ff312",
      "/another-mainstream-content" => "9af50189-de1c-49af-a334-6b1d87b593a6",
    }
    stub_publishing_api_has_lookups(lookup_hash)

    create(:government)
    detailed_guide = create(
      :detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
      related_mainstream_content_url: "http://www.gov.uk/mainstream-content",
      additional_related_mainstream_content_url: "http://www.gov.uk/another-mainstream-content",
    )
    presented_item = present(detailed_guide)
    edition_links = presented_item.edition_links
    details = presented_item.content[:details]
    assert_equal %w[9dd9e077-ae45-45f6-ad9d-2a484e5ff312 9af50189-de1c-49af-a334-6b1d87b593a6], details[:related_mainstream_content]
    assert_equal %w[9dd9e077-ae45-45f6-ad9d-2a484e5ff312 9af50189-de1c-49af-a334-6b1d87b593a6].sort!, edition_links[:related_mainstream_content].sort!
  end

  test "DetailedGuide presents related_mainstream with dodgy data" do
    lookup_hash = {
      "/guidance/lorem" => "cd7fde45-5f79-4982-8939-cedc4bed161c",
    }
    stub_publishing_api_has_lookups(lookup_hash)

    create(:government)
    # URL carries a query string; the lookup is expected to match on path only.
    detailed_guide = create(
      :detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
      related_mainstream_content_title: "Lorem",
      related_mainstream_content_url: "http://www.gov.uk/guidance/lorem?query=string",
    )

    presented_item = present(detailed_guide)
    edition_links = presented_item.edition_links
    expected_ids = %w[cd7fde45-5f79-4982-8939-cedc4bed161c]

    assert_equal expected_ids.sort, edition_links[:related_mainstream_content].sort
  end

  test "DetailedGuide presents political information correctly" do
    government = create(:government)
    detailed_guide = create(
      :published_detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
      political: true,
    )

    presented_item = present(detailed_guide)
    details = presented_item.content[:details]

    assert_equal details[:political], true
    assert_equal presented_item.edition_links[:government][0], government.content_id
  end

  test "DetailedGuide presents related_guides correctly" do
    create(:government)
    some_detailed_guide = create(:published_detailed_guide)
    detailed_guide = create(
      :published_detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
      related_editions: [some_detailed_guide],
    )

    presented_item = present(detailed_guide)
    related_guides = presented_item.edition_links[:related_guides]

    expected_related_guides = [
      some_detailed_guide.content_id,
    ]

    assert_equal related_guides, expected_related_guides
  end

  test "DetailedGuide presents national_applicability correctly when some are specified" do
    scotland_nation_inapplicability = create(
      :nation_inapplicability,
      nation: Nation.scotland,
      alternative_url: "http://scotland.com",
    )
    create(:government)
    detailed_guide = create(
      :published_detailed_guide_with_excluded_nations,
      nation_inapplicabilities: [
        scotland_nation_inapplicability,
      ],
    )

    presented_item = present(detailed_guide)
    details = presented_item.content[:details]

    # Every nation is listed; excluded ones carry applicable: false plus the
    # alternative URL users should visit instead.
    expected_national_applicability = {
      england: {
        label: "England",
        applicable: true,
      },
      northern_ireland: {
        label: "Northern Ireland",
        applicable: true,
      },
      scotland: {
        label: "Scotland",
        applicable: false,
        alternative_url: "http://scotland.com",
      },
      wales: {
        label: "Wales",
        applicable: true,
      },
    }

    assert_valid_against_schema(presented_item.content, "detailed_guide")
    assert_equal expected_national_applicability, details[:national_applicability]
  end

  test "DetailedGuide presents an image correctly" do
    detailed_guide = create(
      :published_detailed_guide,
      title: "Some detailed guide",
      summary: "Some summary",
      body: "Some content",
      logo_url: "http://www.example.com/foo.jpg",
    )
    presented_item = present(detailed_guide)
    assert_equal "http://www.example.com/foo.jpg", presented_item.content[:details][:image][:url]
  end

  test "DetailedGuide presents attachments" do
    detailed_guide = create(:published_detailed_guide, :with_file_attachment)
    presented_item = present(detailed_guide)
    assert_valid_against_schema(presented_item.content, "detailed_guide")
    assert_equal presented_item.content.dig(:details, :attachments, 0, :id),
                 detailed_guide.attachments.first.id.to_s
  end
end
| 33.808765 | 150 | 0.70363 |
114f508878aa7b327e16d1708a9134294e41e2b9 | 1,711 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper' )
module IntegrationTests
# Integration coverage for browsing blog categories: a nested (uk > london)
# hierarchy and permalinks containing non-ASCII characters.
class BlogCategoriesTest < ActionController::IntegrationTest
  def setup
    super
    @section = Blog.first
    @site = @section.site
    use_site! @site

    # Fixture categories with awkward permalinks.
    @special_characters_category = @section.categories.find_by_title('$%&')
    @non_ascii_category = @section.categories.find_by_title('öäü')

    # Build a two-level hierarchy: uk > london, then refresh stored paths.
    @section.categories.build(:title => 'uk').save
    @section.categories.build(:title => 'london').save
    @london = @section.categories.find_by_title('london')
    @uk = @section.categories.find_by_title('uk')
    @london.move_to_child_of(@uk)
    @section.categories.update_paths!
  end

  test "user views categories of a blog that has nested categories" do
    login_as_user
    visit_blog_index
    # NOTE(review): category navigation seems only exercised under the
    # default theme — confirm against default_theme? helper.
    if default_theme?
      visit_category(@uk)
      visit_category(@london)
    end
  end

  test "category with special characters permalink is accessible" do
    login_as_user
    visit_blog_index
    visit_category(@non_ascii_category) if default_theme?
  end

  # FIXME categories does not work with characters like $%&
  # test "category with special characters permalink is accessible" do
  #   login_as_user
  #   visit_blog_index
  #   visit_category(@special_characters_category)
  # end

  # Opens the blog's article index page.
  def visit_blog_index
    visit blog_path(@section)
    assert_template 'blogs/articles/index'
  end

  # Follows a category link and asserts the filtered index renders.
  def visit_category(category)
    click_link category.title
    assert_template 'blogs/articles/index'
    assert_select 'h2.list_header', "Articles about #{category.title}"
  end
end
end | 31.109091 | 77 | 0.678551 |
87e1c54fe2db8f57c120f8e39a006d0109a2213d | 558 | # frozen_string_literal: true
module Vedeu

  module Interfaces

    # Provides a non-existent model to swallow messages.
    #
    # Null-object stand-in for an interface: callers can send presentation
    # messages without nil checks.
    #
    # @api private
    #
    class Null < Vedeu::Null::Generic

      # NOTE: the order of these includes is significant for method lookup.
      include Vedeu::Presentation
      include Vedeu::Presentation::Colour
      include Vedeu::Presentation::Position
      include Vedeu::Presentation::Styles

      # @!attribute [r] attributes
      # @return [String]
      attr_reader :attributes

      # The null interface belongs to no group.
      #
      # @return [String]
      def group
        ''
      end

    end # Null

  end # Interfaces

end # Vedeu
| 17.4375 | 56 | 0.625448 |
1c238ea71ae2d4a399c4103cb2e22d32ac8d6f75 | 600 | class CreatePromptReports < ActiveRecord::Migration[5.2]
# Creates the prompt_reports table: one row per report message sent to a user.
def change
  create_table :prompt_reports do |t|
    t.integer :user_id, null: false
    # Set when the user reads the report; nil until then.
    t.datetime :read_at, null: true, default: nil
    # NOTE(review): presumably the uid removed in this report — confirm with callers.
    t.bigint :removed_uid, null: true, default: nil
    # Serialized change-set payload (JSON stored as text).
    t.text :changes_json, null: false
    # Opaque identifier; uniqueness enforced by the index below.
    t.string :token, null: false
    t.string :message_id, null: false
    t.string :message, null: false, default: ''
    t.timestamps null: false

    t.index :user_id
    t.index :token, unique: true
    t.index :created_at
  end
end
end
| 30 | 58 | 0.601667 |
269f283f5325c2f6f2a29760b18c3bddc9c5c299 | 369 | # frozen_string_literal: true
# Routes for the Deposit engine: the collection index plus two custom
# collection actions (recording payments, listing account invoices).
Deposit::Engine.routes.draw do
  root to: 'collection#index'

  resources :collection, only: [:index]

  scope '/collection' do
    post '/record', to: 'collection#record_payments', as: 'do_record_payments'
    get '/account_invoices', to: 'collection#account_invoices', as: 'account_invoices'
  end
end
| 28.384615 | 103 | 0.693767 |
e20015b72019c8bf991e4f341f693c8b0c2de08c | 38 | module Dacker
# Current release of the gem (Semantic Versioning).
VERSION = "0.0.1"
end
| 9.5 | 19 | 0.657895 |
21e921aa9a576434a67bbfe81922621ac6951107 | 3,668 | FactoryBot.define do
# Base EmsFolder factory; the remaining factories refine it per provider.
factory :ems_folder do
  sequence(:name) { |n| "Test Folder #{seq_padded_for_sorting(n)}" }
end

factory :datacenter, :parent => :ems_folder, :class => "Datacenter"
factory :storage_cluster, :parent => :ems_folder, :class => "StorageCluster"

factory :inventory_group,
        :class => "ManageIQ::Providers::AutomationManager::InventoryGroup",
        :parent => :ems_folder

factory :inventory_root_group,
        :class => "ManageIQ::Providers::AutomationManager::InventoryRootGroup",
        :parent => :ems_folder

#
# VMware specific folders
#
# Each variant uses the vCenter managed-object-reference prefix for its
# folder kind: group-d (datacenter), group-v (vm), group-h (host),
# group-s (datastore), group-n (network).
#
factory :vmware_folder, :parent => :ems_folder do
  sequence(:ems_ref) { |n| "group-d#{n}" }
  sequence(:ems_ref_obj) { |n| VimString.new("group-d#{n}", "Folder", "ManagedObjectReference") }
end

factory :vmware_folder_vm, :parent => :ems_folder do
  sequence(:ems_ref) { |n| "group-v#{n}" }
  sequence(:ems_ref_obj) { |n| VimString.new("group-v#{n}", "Folder", "ManagedObjectReference") }
end

factory :vmware_folder_host, :parent => :ems_folder do
  sequence(:ems_ref) { |n| "group-h#{n}" }
  sequence(:ems_ref_obj) { |n| VimString.new("group-h#{n}", "Folder", "ManagedObjectReference") }
end

factory :vmware_folder_datastore, :parent => :ems_folder do
  sequence(:ems_ref) { |n| "group-s#{n}" }
  sequence(:ems_ref_obj) { |n| VimString.new("group-s#{n}", "Folder", "ManagedObjectReference") }
end

factory :vmware_folder_network, :parent => :ems_folder do
  sequence(:ems_ref) { |n| "group-n#{n}" }
  sequence(:ems_ref_obj) { |n| VimString.new("group-n#{n}", "Folder", "ManagedObjectReference") }
end

# Hidden "root" folders mirroring the fixed names vCenter uses.
factory :vmware_folder_root, :parent => :vmware_folder do
  name { "Datacenters" }
  hidden { true }
end

factory :vmware_folder_vm_root, :parent => :vmware_folder_vm do
  name { "vm" }
  hidden { true }
end

factory :vmware_folder_host_root, :parent => :vmware_folder_host do
  name { "host" }
  hidden { true }
end

factory :vmware_folder_datastore_root, :parent => :vmware_folder_datastore do
  name { "datastore" }
  hidden { true }
end

factory :vmware_folder_network_root, :parent => :vmware_folder_network do
  name { "network" }
  hidden { true }
end

factory :vmware_datacenter, :parent => :vmware_folder, :class => "Datacenter" do
  sequence(:name) { |n| "Test Datacenter #{seq_padded_for_sorting(n)}" }
  sequence(:ems_ref) { |n| "datacenter-#{n}" }
  sequence(:ems_ref_obj) { |n| VimString.new("datacenter-#{n}", "Datacenter", "ManagedObjectReference") }
end
end
# Builds a canonical VMware folder tree for specs, all tied to +ems+:
#
#   root "Datacenters" (hidden)
#     "yellow1"
#       Test Datacenter
#         "vm" (hidden)        -> "blue1", "blue2"
#         "host" (hidden)
#         "datastore" (hidden)
#         "network" (hidden)
def build_vmware_folder_structure!(ems)
  ems.add_child(
    FactoryBot.create(:vmware_folder_root, :ems_id => ems.id).tap do |root|
      root.add_child(
        FactoryBot.create(:vmware_folder, :name => "yellow1", :ems_id => ems.id).tap do |f|
          f.add_child(
            FactoryBot.create(:vmware_datacenter, :ems_id => ems.id).tap do |dc|
              dc.add_children(
                # NOTE: this do/end block binds to FactoryBot.create, which
                # yields the created record as +vm+.
                FactoryBot.create(:vmware_folder_vm_root, :ems_id => ems.id) do |vm|
                  vm.add_children(
                    FactoryBot.create(:vmware_folder_vm, :name => "blue1", :ems_id => ems.id),
                    FactoryBot.create(:vmware_folder_vm, :name => "blue2", :ems_id => ems.id)
                  )
                end,
                FactoryBot.create(:vmware_folder_host_root, :ems_id => ems.id),
                FactoryBot.create(:vmware_folder_datastore_root, :ems_id => ems.id),
                FactoryBot.create(:vmware_folder_network_root, :ems_id => ems.id)
              )
            end
          )
        end
      )
    end
  )
end
| 35.269231 | 107 | 0.624591 |
6123e81ba0a955fb13367f954738e8022befbbbd | 4,561 | class Vim < Formula
# Formula metadata: where to fetch vim and which checksum to verify.
desc "Vi \"workalike\" with many additional features"
homepage "http://www.vim.org/"
# *** Vim should be updated no more than once every 7 days ***
url "https://github.com/vim/vim/archive/v7.4.979.tar.gz"
sha256 "b91bd610a07d6d72cb113fe06f2db722a130f8218568e0958a84c266758bd75b"
head "https://github.com/vim/vim.git"

bottle :disable, "To use the user's Python."

# We only have special support for finding depends_on :python, but not yet for
# :ruby, :perl etc., so we use the standard environment that leaves the
# PATH as the user has set it right now.
env :std

# User-facing build switches; per-language switches are generated below.
option "override-system-vi", "Override system vi"
option "disable-nls", "Build vim without National Language Support (translated messages, keymaps)"
option "with-client-server", "Enable client/server mode"

# Interpreter bindings that are opt-in vs. on-by-default.
LANGUAGES_OPTIONAL = %w[lua mzscheme python3 tcl]
LANGUAGES_DEFAULT = %w[perl python ruby]

option "with-python3", "Build vim with python3 instead of python[2] support"
LANGUAGES_OPTIONAL.each do |language|
  option "with-#{language}", "Build vim with #{language} support"
end
LANGUAGES_DEFAULT.each do |language|
  option "without-#{language}", "Build vim without #{language} support"
end

depends_on :python => :recommended
depends_on :python3 => :optional
depends_on "lua" => :optional
depends_on "luajit" => :optional
depends_on :x11 if build.with? "client-server"

conflicts_with "ex-vi",
               :because => "vim and ex-vi both install bin/ex and bin/view"
# Configures and builds vim with the interpreter/GUI options selected at
# install time, then installs into the formula prefix.
def install
  ENV["LUA_PREFIX"] = HOMEBREW_PREFIX if build.with?("lua") || build.with?("luajit")

  # vim doesn't require any Python package, unset PYTHONPATH.
  ENV.delete("PYTHONPATH")

  if build.with?("python") && which("python").to_s == "/usr/bin/python" && !MacOS.clt_installed?
    # break -syslibpath jail
    ln_s "/System/Library/Frameworks", buildpath
    ENV.append "LDFLAGS", "-F#{buildpath}/Frameworks"
  end

  # Translate each "--with-<language>" option into a configure flag.
  opts = []

  (LANGUAGES_OPTIONAL + LANGUAGES_DEFAULT).each do |language|
    opts << "--enable-#{language}interp" if build.with? language
  end

  if opts.include?("--enable-pythoninterp") && opts.include?("--enable-python3interp")
    # only compile with either python or python3 support, but not both
    # (if vim74 is compiled with +python3/dyn, the Python[3] library lookup segfaults
    # in other words, a command like ":py3 import sys" leads to a SEGV)
    opts -= %W[--enable-pythoninterp]
  end

  opts << "--disable-nls" if build.include? "disable-nls"
  opts << "--enable-gui=no"

  if build.with? "client-server"
    opts << "--with-x"
  else
    opts << "--without-x"
  end

  if build.with? "luajit"
    opts << "--with-luajit"
    opts << "--enable-luainterp"
  end

  # XXX: Please do not submit a pull request that hardcodes the path
  # to ruby: vim can be compiled against 1.8.x or 1.9.3-p385 and up.
  # If you have problems with vim because of ruby, ensure a compatible
  # version is first in your PATH when building vim.

  # We specify HOMEBREW_PREFIX as the prefix to make vim look in the
  # the right place (HOMEBREW_PREFIX/share/vim/{vimrc,vimfiles}) for
  # system vimscript files. We specify the normal installation prefix
  # when calling "make install".
  system "./configure", "--prefix=#{HOMEBREW_PREFIX}",
                        "--mandir=#{man}",
                        "--enable-multibyte",
                        "--with-tlib=ncurses",
                        "--enable-cscope",
                        "--with-features=huge",
                        "--with-compiledby=Homebrew",
                        *opts
  system "make"
  # If stripping the binaries is enabled, vim will segfault with
  # statically-linked interpreters like ruby
  # https://github.com/vim/vim/issues/114
  system "make", "install", "prefix=#{prefix}", "STRIP=true"
  bin.install_symlink "vim" => "vi" if build.include? "override-system-vi"
end
test do
  # Simple test to check if Vim was linked to Python version in $PATH
  if build.with? "python"
    vim_path = bin/"vim"

    # Get linked framework using otool
    otool_output = `otool -L #{vim_path} | grep -m 1 Python`.gsub(/\(.*\)/, "").strip.chomp

    # Expand the link and get the python exec path
    vim_framework_path = Pathname.new(otool_output).realpath.dirname.to_s.chomp
    system_framework_path = `python-config --exec-prefix`.chomp

    # Both paths must point at the same Python framework.
    assert_equal system_framework_path, vim_framework_path
  end
end
end
| 38.008333 | 100 | 0.654681 |
01dacac666cee80493b033d9bfe52a54ffc78c92 | 13,219 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
# Reviewed: stock Devise generator defaults, customised only for Mongoid ORM,
# the mailer sender address and a 6-hour password-reset window.
Devise.setup do |config|
  # The secret key used by Devise. Devise uses this key to generate
  # random tokens. Changing this key will render invalid all existing
  # confirmation, reset password and unlock tokens in the database.
  # Devise will use the `secret_key_base` as its `secret_key`
  # by default. You can change it below and use your own secret key.
  # config.secret_key = '831c3cf29e8b414533028ed43a6165b5909c3bb1796cb5848f29d2ff91bde86b406223d17c1d089c6090a40527d15954b05124b4927e94a753632cd710676d86'

  # ==> Mailer Configuration
  # Configure the e-mail address which will be shown in Devise::Mailer,
  # note that it will be overwritten if you use your own mailer class
  # with default "from" parameter.
  config.mailer_sender = '[email protected]'

  # Configure the class responsible to send e-mails.
  # config.mailer = 'Devise::Mailer'

  # Configure the parent class responsible to send e-mails.
  # config.parent_mailer = 'ActionMailer::Base'

  # ==> ORM configuration
  # Load and configure the ORM. Supports :active_record (default) and
  # :mongoid (bson_ext recommended) by default. Other ORMs may be
  # available as additional gems.
  require 'devise/orm/mongoid'

  # ==> Configuration for any authentication mechanism
  # Configure which keys are used when authenticating a user. The default is
  # just :email. You can configure it to use [:username, :subdomain], so for
  # authenticating a user, both parameters are required. Remember that those
  # parameters are used only when authenticating and not when retrieving from
  # session. If you need permissions, you should implement that in a before filter.
  # You can also supply a hash where the value is a boolean determining whether
  # or not authentication should be aborted when the value is not present.
  # config.authentication_keys = [:email]

  # Configure parameters from the request object used for authentication. Each entry
  # given should be a request method and it will automatically be passed to the
  # find_for_authentication method and considered in your model lookup. For instance,
  # if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
  # The same considerations mentioned for authentication_keys also apply to request_keys.
  # config.request_keys = []

  # Configure which authentication keys should be case-insensitive.
  # These keys will be downcased upon creating or modifying a user and when used
  # to authenticate or find a user. Default is :email.
  config.case_insensitive_keys = [:email]

  # Configure which authentication keys should have whitespace stripped.
  # These keys will have whitespace before and after removed upon creating or
  # modifying a user and when used to authenticate or find a user. Default is :email.
  config.strip_whitespace_keys = [:email]

  # Tell if authentication through request.params is enabled. True by default.
  # It can be set to an array that will enable params authentication only for the
  # given strategies, for example, `config.params_authenticatable = [:database]` will
  # enable it only for database (email + password) authentication.
  # config.params_authenticatable = true

  # Tell if authentication through HTTP Auth is enabled. False by default.
  # It can be set to an array that will enable http authentication only for the
  # given strategies, for example, `config.http_authenticatable = [:database]` will
  # enable it only for database authentication. The supported strategies are:
  # :database = Support basic authentication with authentication key + password
  # config.http_authenticatable = false

  # If 401 status code should be returned for AJAX requests. True by default.
  # config.http_authenticatable_on_xhr = true

  # The realm used in Http Basic Authentication. 'Application' by default.
  # config.http_authentication_realm = 'Application'

  # It will change confirmation, password recovery and other workflows
  # to behave the same regardless if the e-mail provided was right or wrong.
  # Does not affect registerable.
  # config.paranoid = true

  # By default Devise will store the user in session. You can skip storage for
  # particular strategies by setting this option.
  # Notice that if you are skipping storage for all authentication paths, you
  # may want to disable generating routes to Devise's sessions controller by
  # passing skip: :sessions to `devise_for` in your config/routes.rb
  config.skip_session_storage = [:http_auth]

  # By default, Devise cleans up the CSRF token on authentication to
  # avoid CSRF token fixation attacks. This means that, when using AJAX
  # requests for sign in and sign up, you need to get a new CSRF token
  # from the server. You can disable this option at your own risk.
  # config.clean_up_csrf_token_on_authentication = true

  # ==> Configuration for :database_authenticatable
  # For bcrypt, this is the cost for hashing the password and defaults to 11. If
  # using other algorithms, it sets how many times you want the password to be hashed.
  #
  # Limiting the stretches to just one in testing will increase the performance of
  # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
  # a value less than 10 in other environments. Note that, for bcrypt (the default
  # algorithm), the cost increases exponentially with the number of stretches (e.g.
  # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
  config.stretches = Rails.env.test? ? 1 : 11

  # Set up a pepper to generate the hashed password.
  # config.pepper = '17df78d42a843e14d46b8163c609375ae63628a6de4f2667a2d25caa61be617d3124d0817e29f07c17e03bf22504b6969d969ef9553d6d7a3ffdebb672e37710'

  # Send a notification email when the user's password is changed
  # config.send_password_change_notification = false

  # ==> Configuration for :confirmable
  # A period that the user is allowed to access the website even without
  # confirming their account. For instance, if set to 2.days, the user will be
  # able to access the website for two days without confirming their account,
  # access will be blocked just in the third day. Default is 0.days, meaning
  # the user cannot access the website without confirming their account.
  # config.allow_unconfirmed_access_for = 2.days

  # A period that the user is allowed to confirm their account before their
  # token becomes invalid. For example, if set to 3.days, the user can confirm
  # their account within 3 days after the mail was sent, but on the fourth day
  # their account can't be confirmed with the token any more.
  # Default is nil, meaning there is no restriction on how long a user can take
  # before confirming their account.
  # config.confirm_within = 3.days

  # If true, requires any email changes to be confirmed (exactly the same way as
  # initial account confirmation) to be applied. Requires additional unconfirmed_email
  # db field (see migrations). Until confirmed, new email is stored in
  # unconfirmed_email column, and copied to email column on successful confirmation.
  config.reconfirmable = true

  # Defines which key will be used when confirming an account
  # config.confirmation_keys = [:email]

  # ==> Configuration for :rememberable
  # The time the user will be remembered without asking for credentials again.
  # config.remember_for = 2.weeks

  # Invalidates all the remember me tokens when the user signs out.
  config.expire_all_remember_me_on_sign_out = true

  # If true, extends the user's remember period when remembered via cookie.
  # config.extend_remember_period = false

  # Options to be passed to the created cookie. For instance, you can set
  # secure: true in order to force SSL only cookies.
  # config.rememberable_options = {}

  # ==> Configuration for :validatable
  # Range for password length.
  config.password_length = 6..128

  # Email regex used to validate email formats. It simply asserts that
  # one (and only one) @ exists in the given string. This is mainly
  # to give user feedback and not to assert the e-mail validity.
  config.email_regexp = /\A[^@\s]+@[^@\s]+\z/

  # ==> Configuration for :timeoutable
  # The time you want to timeout the user session without activity. After this
  # time the user will be asked for credentials again. Default is 30 minutes.
  # config.timeout_in = 30.minutes

  # ==> Configuration for :lockable
  # Defines which strategy will be used to lock an account.
  # :failed_attempts = Locks an account after a number of failed attempts to sign in.
  # :none            = No lock strategy. You should handle locking by yourself.
  # config.lock_strategy = :failed_attempts

  # Defines which key will be used when locking and unlocking an account
  # config.unlock_keys = [:email]

  # Defines which strategy will be used to unlock an account.
  # :email = Sends an unlock link to the user email
  # :time  = Re-enables login after a certain amount of time (see :unlock_in below)
  # :both  = Enables both strategies
  # :none  = No unlock strategy. You should handle unlocking by yourself.
  # config.unlock_strategy = :both

  # Number of authentication tries before locking an account if lock_strategy
  # is failed attempts.
  # config.maximum_attempts = 20

  # Time interval to unlock the account if :time is enabled as unlock_strategy.
  # config.unlock_in = 1.hour

  # Warn on the last attempt before the account is locked.
  # config.last_attempt_warning = true

  # ==> Configuration for :recoverable
  #
  # Defines which key will be used when recovering the password for an account
  # config.reset_password_keys = [:email]

  # Time interval you can reset your password with a reset password key.
  # Don't put a too small interval or your users won't have the time to
  # change their passwords.
  config.reset_password_within = 6.hours

  # When set to false, does not sign a user in automatically after their password is
  # reset. Defaults to true, so a user is signed in automatically after a reset.
  # config.sign_in_after_reset_password = true

  # ==> Configuration for :encryptable
  # Allow you to use another hashing or encryption algorithm besides bcrypt (default).
  # You can use :sha1, :sha512 or algorithms from others authentication tools as
  # :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
  # for default behavior) and :restful_authentication_sha1 (then you should set
  # stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
  #
  # Require the `devise-encryptable` gem when using anything other than bcrypt
  # config.encryptor = :sha512

  # ==> Scopes configuration
  # Turn scoped views on. Before rendering "sessions/new", it will first check for
  # "users/sessions/new". It's turned off by default because it's slower if you
  # are using only default views.
  # config.scoped_views = false

  # Configure the default scope given to Warden. By default it's the first
  # devise role declared in your routes (usually :user).
  # config.default_scope = :user

  # Set this configuration to false if you want /users/sign_out to sign out
  # only the current scope. By default, Devise signs out all scopes.
  # config.sign_out_all_scopes = true

  # ==> Navigation configuration
  # Lists the formats that should be treated as navigational. Formats like
  # :html, should redirect to the sign in page when the user does not have
  # access, but formats like :xml or :json, should return 401.
  #
  # If you have any extra navigational formats, like :iphone or :mobile, you
  # should add them to the navigational formats lists.
  #
  # The "*/*" below is required to match Internet Explorer requests.
  # config.navigational_formats = ['*/*', :html]

  # The default HTTP method used to sign out a resource. Default is :delete.
  config.sign_out_via = :delete

  # ==> OmniAuth
  # Add a new OmniAuth provider. Check the wiki for more information on setting
  # up on your models and hooks.
  # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'

  # ==> Warden configuration
  # If you want to use other strategies, that are not supported by Devise, or
  # change the failure app, you can configure them inside the config.warden block.
  #
  # config.warden do |manager|
  #   manager.intercept_401 = false
  #   manager.default_strategies(scope: :user).unshift :some_external_strategy
  # end

  # ==> Mountable engine configurations
  # When using Devise inside an engine, let's call it `MyEngine`, and this engine
  # is mountable, there are some extra configurations to be taken into account.
  # The following options are available, assuming the engine is mounted as:
  #
  #     mount MyEngine, at: '/my_engine'
  #
  # The router that invoked `devise_for`, in the example above, would be:
  # config.router_name = :my_engine
  #
  # When using OmniAuth, Devise cannot automatically set OmniAuth path,
  # so you need to do it manually. For the users scope, it would be:
  # config.omniauth_path_prefix = '/my_engine/users/auth'
end
| 49.141264 | 154 | 0.75104 |
1c04921bbda93307f5ba26ffe2047141e5b8e068 | 1,376 | # frozen_string_literal: true
# Copyright 2015 Australian National Botanic Gardens
#
# This file is part of the NSL Editor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "test_helper"
require "models/name/as_typeahead/name_parent/name_parent_test_helper"
# Single Name typeahead test.
# Verifies parent-name typeahead suggestions for a Subspecies-ranked name
# when the shard-level parent-rank restriction is switched off.
class ForSubspeciesPartiallyRestrictedTest < ActiveSupport::TestCase
  def setup
    # Helper from name_parent_test_helper: disables the rank restriction.
    set_name_parent_rank_restrictions_off
  end

  test "name parent suggestion for subspecies" do
    assert !ShardConfig.name_parent_rank_restriction?,
           "Name parent rank restriction should be off for this test."
    typeahead = Name::AsTypeahead::ForParent.new(
      term: "%",
      avoid_id: 1,
      rank_id: NameRank.find_by(name: "Subspecies").id
    )
    # Helper presumably asserts every suggestion is Species-ranked — see
    # name_parent_test_helper for exact semantics.
    suggestions_should_only_include(
      typeahead.suggestions, "Subspecies", %w(Species)
    )
  end
end
| 33.560976 | 76 | 0.742006 |
d52217869a3e826b9e6ecb32da26cf2a4e565e84 | 456 | class User < ApplicationRecord
# Format check: one local part, one @, dotted lower-case domain labels.
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i

before_save :downcase_email

validates :name, presence: true, length: { maximum: 50 }
validates :email, presence: true,
                  length: { maximum: 255 },
                  format: { with: VALID_EMAIL_REGEX },
                  uniqueness: { case_sensitive: false }
validates :password, presence: true, length: { minimum: 6 }

has_secure_password

private

# Normalizes the stored address so email lookups are case-insensitive.
def downcase_email
  email.downcase!
end
end
| 41.454545 | 72 | 0.618421 |
6201bda125ad9693beb6e0564a095eadca5257f2 | 4,610 | # helper class from https://gist.github.com/defunkt/278994
# Benchmarks every call to Kernel#require by monkey-patching it, recording
# a tree of who required what and how long each require took, then renders
# that tree as an indented text report.
module RequiredTime
  class << self
    # Installs the hook: aliases the original Kernel#require and replaces
    # it with a wrapper that routes through benchmark_require. Safe to
    # call once; calling twice would alias the wrapper onto itself.
    def hook
      Kernel.class_eval do
        alias_method :__require_benchmarking_old_require, :require
        def require(path, *args)
          RequiredTime.benchmark_require(path, caller) { __require_benchmarking_old_require(path, *args) }
        end
      end
      @hooked = true
    end
    # True once hook has been installed.
    def hooked?
      @hooked
    end
    # Times one require. `path` is the argument given to require,
    # `full_backtrace` the caller stack at that point; the block performs
    # the actual require and its result is returned unchanged.
    def benchmark_require(path, full_backtrace, &block)
      output = nil
      # Drop frames from require machinery so backtrace[0] is the real caller.
      backtrace = full_backtrace.reject {|x| x =~ /require|dependencies/ }
      # NOTE: local `caller` shadows Kernel#caller for the rest of this method.
      caller = File.expand_path(backtrace[0].split(":")[0])
      parent = required_files.find {|f| f[:fullpath] == caller }
      # First time we see this caller file: register it as a root entry.
      unless parent
        parent = {
          :index => required_files.size,
          :fullpath => caller,
          :parent => nil,
          :is_root => true
        }
        required_files << parent
      end
      fullpath = find_file(path)
      expanded_path = path; expanded_path = File.expand_path(path) if path =~ /^\//
      new_file = {
        :index => required_files.size,
        :xpath => path,
        :path => expanded_path,
        :fullpath => fullpath,
        :backtrace => full_backtrace,
        :parent => parent,
        :is_root => false
      }
      # add this before the required is required so that anything that is required
      # within the required that's about to be required already has a parent present
      required_files << new_file
      start = Time.now
      output = yield # do the require here
      new_file[:time] = Time.now.to_f - start.to_f
      output
    end
    # Builds the full report (array of strings), starting from root
    # entries (files that initiated requires rather than being required).
    def generate_benchmark_report
      @printed_files = []
      generate_benchmark_report_level(@required_files.select {|file| file[:is_root] })
    end
    private
    # Flat registry of every file seen, in discovery order.
    def required_files
      @required_files ||= []
    end
    # Fullpaths already emitted into the report (avoids duplicates).
    def printed_files
      @printed_files ||= []
    end
    # Project root, assumed to be one directory above this file.
    def proj_dir
      @proj_dir ||= File.expand_path(File.dirname(__FILE__) + "/..")
    end
    # Resolves a require path against $LOAD_PATH, trying the rb/bundle/so
    # extensions; returns the expanded path or nil when not found.
    def find_file(path)
      return File.expand_path(path) if path =~ /^\//
      expanded_path = nil
      # Try to find the path in the ActiveSupport load paths and then the built-in load paths
      catch :found_path do
        %w(rb bundle so).each do |ext|
          path_suffix = path; path_suffix = "#{path}.#{ext}" unless path_suffix =~ /\.#{ext}$/
          ($:).each do |path_prefix|
            possible_path = File.join(path_prefix, path_suffix)
            if File.file? possible_path
              expanded_path = File.expand_path(possible_path)
              throw :found_path
            end
          end
          # NOTE(review): this bare `expanded_path` is a no-op statement.
          expanded_path
        end
      end
      expanded_path
    end
    # Renders one level of the require tree; recurses into children and
    # indents them. With printing_all, sorts purely by elapsed time.
    def generate_benchmark_report_level(files, printing_all=false)
      report = []
      if printing_all
        files = files.sort {|a,b| b[:time] <=> a[:time] }
      else
        # Roots first, then slowest first, ties broken by discovery order.
        files = files.sort_by {|f| [(f[:parent] ? 1 : 0), -(f[:time] || 0), f[:index]] }
      end
      for required in files
        already_printed = printed_files.include?(required[:fullpath])
        # don't print this required if it's already been printed,
        # or it will have been printed
        next if already_printed
        if required[:parent] && !printing_all
          next if required[:index] < required[:parent][:index]
        end
        path = required[:fullpath] ? format_path(required[:fullpath]) : required[:path]
        out = "#{required[:index]+1}) "
        if required[:time] && !already_printed
          #if required[:time] >= 0.5
          #  out << "%s: %.4f s" % [path, required[:time]]
          #else
            ms = required[:time].to_f * 1000
            out << "%s: %.1f ms" % [path, ms]
          #end
        else
          out << path
        end
        if required[:is_root] && required[:parent]
          out << " (required by #{required[:parent][:fullpath]})"
        end
        unless required[:parent]
          out << " (already loaded)"
        end
        if already_printed
          out << " (already printed)"
        end
        report << out
        unless already_printed
          printed_files << required[:fullpath]
          unless printing_all
            children = @required_files.select {|f| !f[:is_root] && f[:parent] && f[:parent][:fullpath] == required[:fullpath] }
            if children.any?
              report.concat(generate_benchmark_report_level(children).map{|s| '  ' + s })
            end
          end
        end
      end
      report
    end
    # Abbreviates paths inside the project with a leading "*".
    def format_path(path)
      path.sub(proj_dir, "*")
    end
  end
end
b9136fe5fd9f719a227bfdff11ad792aee475771 | 607 | class PagesController < ApplicationController
skip_before_action :require_login
def index
@random_feedbacks = Feedback.published.random
render template: "pages/home"
end
def show
if valid_page?
@random_feedbacks = Feedback.published.random
@collaborators = Collaborator.all
@categories = Category.all
render template: "pages/#{params[:page]}"
else
render file: "public/error-page.html", status: :not_found
end
end
private
def valid_page?
File.exist?(Pathname.new(Rails.root + "app/views/pages/#{params[:page]}.html.erb"))
end
end
| 23.346154 | 88 | 0.691928 |
bf654cc7a7d2fed344a2052e467cfff0fcecdf5e | 1,704 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/ads/google_ads/v1/services/campaign_label_service.proto for package 'Google::Ads::GoogleAds::V1::Services'
# Original file comments:
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
require 'grpc'
require 'google/ads/google_ads/v1/services/campaign_label_service_pb'
# NOTE: generated gRPC client stub (see the header comment above) —
# regenerating from the .proto file will overwrite hand edits here.
module Google::Ads::GoogleAds::V1::Services
  module CampaignLabelService
    # Proto file describing the Campaign Label service.
    #
    # Service to manage labels on campaigns.
    class Service
      include GRPC::GenericService
      # Request/response (de)serialisation via the protobuf encode/decode pair.
      self.marshal_class_method = :encode
      self.unmarshal_class_method = :decode
      self.service_name = 'google.ads.googleads.v1.services.CampaignLabelService'
      # Returns the requested campaign-label relationship in full detail.
      rpc :GetCampaignLabel, GetCampaignLabelRequest, Google::Ads::GoogleAds::V1::Resources::CampaignLabel
      # Creates and removes campaign-label relationships.
      # Operation statuses are returned.
      rpc :MutateCampaignLabels, MutateCampaignLabelsRequest, MutateCampaignLabelsResponse
    end
    # Concrete client class generated from the service definition above.
    Stub = Service.rpc_stub_class
  end
end
| 37.043478 | 123 | 0.759977 |
3869582f5445ba2649806201d75e94da1d7a46ce | 509 | require 'spec_helper'
# Specs for Ashbe::Compression, a wrapper around HBase's
# Compression::Algorithm constants (runs under JRuby).
# Only the example descriptions were corrected ("lookup up" -> "look up",
# "algorithsm" -> "algorithms"); assertions are unchanged.
describe Ashbe::Compression do
  it "knows how many algorithms there are" do
    Ashbe::Compression.algorithms.size.must_equal 3
  end
  it "can look up an algorithm by symbol" do
    Ashbe::Compression.algorithm_for( :lzo ).name.must_equal "LZO"
    Ashbe::Compression.algorithm_for( :lzo ).must_equal org.apache.hadoop.hbase.io.hfile.Compression::Algorithm::LZO
  end
  it "knows compression algorithms by constant" do
    Ashbe::Compression::LZO.name.must_equal "LZO"
  end
end
| 28.277778 | 116 | 0.750491 |
7a51059a0d781957e214d2a5f883c451c221288f | 4,342 | # frozen_string_literal: true
module Rails # :nodoc:
  module GraphQL # :nodoc:
    class Type # :nodoc:
      # = GraphQL InputType
      #
      # Input defines a set of input fields; the input fields are either
      # scalars, enums, or other input objects.
      # See http://spec.graphql.org/June2018/#InputObjectTypeDefinition
      class Input < Type
        extend Helpers::WithAssignment
        extend Helpers::WithFields

        setup! kind: :input_object, input: true

        self.field_type = Field::InputField
        self.valid_field_types = [
          Type::Enum,
          Type::Input,
          Type::Scalar,
        ].freeze

        class << self
          # A little override on the name of the object due to the suffix
          # config (auto_suffix_input_objects appends e.g. "Input" once).
          def gql_name
            return @gql_name if defined?(@gql_name)

            suffix = GraphQL.config.auto_suffix_input_objects
            return super if suffix.blank?

            result = super
            result += suffix if result && !result.end_with?(suffix)
            @gql_name = result
          end

          # Check if a given value is a valid non-deserialized input
          def valid_input?(value)
            value = value.to_h if value.respond_to?(:to_h)
            return false unless value.is_a?(Hash)

            fields = enabled_fields
            value = value.transform_keys { |key| key.to_s.camelize(:lower) }
            value = build_defaults.merge(value)
            return false unless value.size.eql?(fields.size)

            fields.all? { |item| item.valid_input?(value[item.gql_name]) }
          end

          # Turn the given value into an instance of the input object
          def deserialize(value)
            value = value.to_h if value.respond_to?(:to_h)
            value = {} unless value.is_a?(Hash)

            value = enabled_fields.map do |field|
              # Look each field up by whichever key is actually present.
              # Using key? here (instead of `value[a] || value[b]`) keeps
              # legitimate falsey values such as false or an explicit nil
              # from being dropped or overridden by the other key.
              if value.key?(field.gql_name)
                [field.name, field.deserialize(value[field.gql_name])]
              elsif value.key?(field.name)
                [field.name, field.deserialize(value[field.name])]
              end
            end.compact.to_h

            new(OpenStruct.new(value))
          end

          # Build a hash with the default values for each of the given fields
          def build_defaults
            enabled_fields.map { |field| [field.gql_name, field.default] }.to_h
          end

          def inspect # :nodoc:
            args = fields.each_value.map(&:inspect)
            args = args.presence && "(#{args.join(', ')})"
            "#<GraphQL::Input #{gql_name}#{args}>"
          end
        end

        attr_reader :args
        attr_writer :resource

        delegate :fields, to: :class
        delegate :[], to: :args
        delegate_missing_to :resource

        # Accepts either a pre-built args object or keyword arguments
        # (keys are underscored). Validates immediately when built from
        # keywords.
        def initialize(args = nil, **xargs)
          @args = args || OpenStruct.new(xargs.transform_keys { |key| key.to_s.underscore })
          @args.freeze

          validate! if args.nil?
        end

        # If the input is assigned to a class, then initialize it with the
        # received arguments. It also accepts extra arguments for inheritance
        # purposes
        def resource(*args, **xargs, &block)
          @resource ||= (klass = safe_assigned_class).nil? ? nil : begin
            xargs = xargs.reverse_merge(params)
            klass.new(*args, **xargs, &block)
          end
        end

        # Just return the arguments as a hash
        def params
          parametrize(self)
        end

        # Checks if all the values provided to the input instance are valid
        def validate!(*)
          errors = []
          fields.each do |name, field|
            field.validate_output!(@args[name.to_s])
          rescue InvalidValueError => error
            errors << error.message
          end

          return if errors.empty?

          raise InvalidValueError, <<~MSG.squish
            Invalid value provided to #{gql_name} field: #{errors.to_sentence}.
          MSG
        end

        private

        # Make sure to turn inputs into params (recursively unwraps nested
        # inputs, arrays and hashes; leaves scalars untouched).
        def parametrize(input)
          case input
          when Type::Input then parametrize(input.args.to_h)
          when Array then input.map(&method(:parametrize))
          when Hash then input.transform_values(&method(:parametrize))
          else input
          end
        end
      end
    end
  end
end
end
| 32.162963 | 92 | 0.573468 |
1a559c4596479fb911123e8114cb1482296daff6 | 6,615 | #! /usr/bin/env ruby
require 'spec_helper'
require 'puppet_spec/compiler'
# End-to-end compiler specs: each example compiles a node against one of
# the fixture environments and inspects the Notify resources produced by
# hiera-backed lookups.
describe "when using a hiera data provider" do
  include PuppetSpec::Compiler
  # There is a fully configured 'sample' environment in fixtures at this location
  let(:environmentpath) { parent_fixture('environments') }
  let(:facts) { Puppet::Node::Facts.new("facts", {}) }
  around(:each) do |example|
    # Initialize settings to get a full compile as close as possible to a real
    # environment load
    Puppet.settings.initialize_global_settings
    # Initialize loaders based on the environmentpath. It does not work to
    # just set the setting environmentpath for some reason - this achieves the same:
    # - first a loader is created, loading directory environments from the fixture
    #   (the node references its environment by name),
    # - secondly, the created env loader is set as 'environments' in the puppet
    #   context.
    loader = Puppet::Environments::Directories.new(environmentpath, [])
    Puppet.override(:environments => loader) do
      example.run
    end
  end
  # Compiles `code` (or just the environment's own manifests when nil) for a
  # test node in `environment` and returns the titles of all Notify
  # resources in the resulting catalog ("Notify[title]" -> "title").
  def compile_and_get_notifications(environment, code = nil)
    Puppet[:code] = code if code
    node = Puppet::Node.new("testnode", :facts => facts, :environment => environment)
    compiler = Puppet::Parser::Compiler.new(node)
    compiler.compile().resources.map(&:ref).select { |r| r.start_with?('Notify[') }.map { |r| r[7..-2] }
  end
  it 'uses default configuration for environment and module data' do
    resources = compile_and_get_notifications('hiera_defaults')
    expect(resources).to include('module data param_a is 100, param default is 200, env data param_c is 300')
  end
  it 'reads hiera.yaml in environment root and configures multiple json and yaml providers' do
    resources = compile_and_get_notifications('hiera_env_config')
    expect(resources).to include('env data param_a is 10, env data param_b is 20, env data param_c is 30, env data param_d is 40, env data param_e is 50')
  end
  it 'reads hiera.yaml in module root and configures multiple json and yaml providers' do
    resources = compile_and_get_notifications('hiera_module_config')
    expect(resources).to include('module data param_a is 100, module data param_b is 200, module data param_c is 300, module data param_d is 400, module data param_e is 500')
  end
  it 'reads does not perform merge of values declared in environment and module when resolving parameters' do
    resources = compile_and_get_notifications('hiera_misc')
    expect(resources).to include('env 1, ')
  end
  it 'reads performs hash merge of values declared in environment and module' do
    resources = compile_and_get_notifications('hiera_misc', '$r = lookup(one::test::param, Hash[String,String], hash) notify{"${r[key1]}, ${r[key2]}":}')
    expect(resources).to include('env 1, module 2')
  end
  it 'reads performs unique merge of values declared in environment and module' do
    resources = compile_and_get_notifications('hiera_misc', '$r = lookup(one::array, Array[String], unique) notify{"${r}":}')
    expect(resources.size).to eq(1)
    # Strip the surrounding [ ] from the notify title before splitting.
    expect(resources[0][1..-2].split(', ')).to contain_exactly('first', 'second', 'third', 'fourth')
  end
  it 'does find unqualified keys in the environment' do
    resources = compile_and_get_notifications('hiera_misc', 'notify{lookup(ukey1):}')
    expect(resources).to include('Some value')
  end
  it 'does not find unqualified keys in the module' do
    expect do
      compile_and_get_notifications('hiera_misc', 'notify{lookup(ukey2):}')
    end.to raise_error(Puppet::ParseError, /did not find a value for the name 'ukey2'/)
  end
  # The km_* keys exercise each supported interpolation lookup method.
  it 'can use interpolation lookup method "alias"' do
    resources = compile_and_get_notifications('hiera_misc', 'notify{lookup(km_alias):}')
    expect(resources).to include('Value from interpolation with alias')
  end
  it 'can use interpolation lookup method "lookup"' do
    resources = compile_and_get_notifications('hiera_misc', 'notify{lookup(km_lookup):}')
    expect(resources).to include('Value from interpolation with lookup')
  end
  it 'can use interpolation lookup method "hiera"' do
    resources = compile_and_get_notifications('hiera_misc', 'notify{lookup(km_hiera):}')
    expect(resources).to include('Value from interpolation with hiera')
  end
  it 'can use interpolation lookup method "literal"' do
    resources = compile_and_get_notifications('hiera_misc', 'notify{lookup(km_literal):}')
    expect(resources).to include('Value from interpolation with literal')
  end
  it 'can use interpolation lookup method "scope"' do
    resources = compile_and_get_notifications('hiera_misc', '$target_scope = "with scope" notify{lookup(km_scope):}')
    expect(resources).to include('Value from interpolation with scope')
  end
  it 'can use interpolation using default lookup method (scope)' do
    resources = compile_and_get_notifications('hiera_misc', '$target_default = "with default" notify{lookup(km_default):}')
    expect(resources).to include('Value from interpolation with default')
  end
  it 'performs single quoted interpolation' do
    resources = compile_and_get_notifications('hiera_misc', 'notify{lookup(km_sqalias):}')
    expect(resources).to include('Value from interpolation with alias')
  end
  it 'traps endless interpolate recursion' do
    expect do
      compile_and_get_notifications('hiera_misc', '$r1 = "%{r2}" $r2 = "%{r1}" notify{lookup(recursive):}')
    end.to raise_error(Puppet::DataBinding::RecursiveLookupError, /detected in \[recursive, r1, r2\]/)
  end
  it 'traps bad alias declarations' do
    expect do
      compile_and_get_notifications('hiera_misc', "$r1 = 'Alias within string %{alias(\"r2\")}' $r2 = '%{r1}' notify{lookup(recursive):}")
    end.to raise_error(Puppet::DataBinding::LookupError, /'alias' interpolation is only permitted if the expression is equal to the entire string/)
  end
  it 'reports syntax errors for JSON files' do
    expect do
      compile_and_get_notifications('hiera_bad_syntax_json')
    end.to raise_error(Puppet::DataBinding::LookupError, /Unable to parse \(#{environmentpath}[^)]+\):/)
  end
  it 'reports syntax errors for YAML files' do
    expect do
      compile_and_get_notifications('hiera_bad_syntax_yaml')
    end.to raise_error(Puppet::DataBinding::LookupError, /Unable to parse \(#{environmentpath}[^)]+\):/)
  end
  # Resolves a fixture directory that is a sibling of this spec's fixtures.
  def parent_fixture(dir_name)
    File.absolute_path(File.join(my_fixture_dir(), "../#{dir_name}"))
  end
  # NOTE(review): appears unused by the examples above.
  def resources_in(catalog)
    catalog.resources.map(&:ref)
  end
end
| 45 | 174 | 0.728496 |
61b8c75eb61d89ee306851bd2321423df44e443c | 2,634 | #!/usr/bin/env ruby
# frozen_string_literal: true
# Hand-rolled re-implementations of core Enumerable methods, each built on
# top of #my_each and mirroring the behaviour of its standard-library
# counterpart (each, map, select, all?, any?, none?, count, inject, ...).
module Enumerable
  # Yields every element. Returns an Enumerator when called without a
  # block, otherwise returns the receiver (like #each).
  def my_each
    return to_enum unless block_given?

    items = to_a # materialise once instead of calling to_a per iteration
    items.length.times { |i| yield(items[i]) }
    self
  end

  # Yields every element together with its zero-based index.
  def my_each_with_index
    return to_enum unless block_given?

    items = to_a
    items.length.times { |i| yield(items[i], i) }
    self
  end

  # Returns an array of the elements for which the block is truthy.
  def my_select
    return to_enum unless block_given?

    selected = []
    my_each { |obj| selected << obj if yield(obj) }
    selected
  end

  # True when the block (or pattern, or the element itself) is truthy for
  # every element. `pattern` may be a value, a Class or a Regexp.
  def my_all?(pattern = nil)
    if block_given?
      my_each { |obj| return false unless yield(obj) }
    elsif pattern
      my_each { |obj| return false unless pattern_match?(obj, pattern) }
    else
      my_each { |obj| return false unless obj }
    end
    true
  end

  # True when at least one element satisfies the block/pattern (or is
  # itself truthy when neither is given).
  def my_any?(pattern = nil)
    if block_given?
      my_each { |obj| return true if yield(obj) }
    elsif pattern
      my_each { |obj| return true if pattern_match?(obj, pattern) }
    else
      my_each { |obj| return true if obj }
    end
    false
  end

  # True when no element satisfies the block/pattern (or none is truthy).
  def my_none?(pattern = nil)
    if block_given?
      my_each { |obj| return false if yield(obj) }
    elsif pattern
      my_each { |obj| return false if pattern_match?(obj, pattern) }
    else
      my_each { |obj| return false if obj }
    end
    true
  end

  # Shared matcher for the pattern forms above: equality, class
  # membership, or regexp match.
  def pattern_match?(obj, pattern)
    (obj.respond_to?(:eql?) && obj.eql?(pattern)) ||
      (pattern.is_a?(Class) && obj.is_a?(pattern)) ||
      (pattern.is_a?(Regexp) && pattern.match(obj))
  end

  # Counts elements. With a block, counts elements for which the block is
  # truthy; with an argument, counts elements equal to it; with neither,
  # returns the total number of elements.
  def my_count(item = nil)
    if block_given?
      total = 0
      my_each { |element| total += 1 if yield(element) }
      total
    elsif item
      total = 0
      my_each { |element| total += 1 if element == item }
      total
    else
      # FIX: use to_a so this also works for enumerables such as Range
      # that do not respond to #length (the old `length` call raised
      # NoMethodError there).
      to_a.length
    end
  end

  # Returns a new array with the results of running the block once for
  # every element.
  def my_map
    return to_enum unless block_given?

    mapped = []
    my_each { |item| mapped << yield(item) }
    mapped
  end

  # Combines all elements by applying the block (or the binary operation
  # named by a symbol). Accepts an optional initial value of ANY type now
  # (previously only Numeric worked), e.g.:
  #   [1, 2, 3].my_inject(:+)            #=> 6
  #   [1, 2, 3].my_inject(10) { |s, x| s + x }  #=> 16
  #   %w[a b].my_inject("", :+)          #=> "ab"
  # An empty collection without an initial value returns nil (like the
  # stdlib; previously this crashed on nil).
  def my_inject(*args)
    memo, operation = prepare_params(*args)
    items = memo.nil? ? (to_a[1..-1] || []) : to_a
    memo = to_a[0] if memo.nil?

    if block_given?
      items.my_each { |item| memo = yield(memo, item) }
    elsif operation
      items.my_each { |item| memo = memo.public_send(operation, item) }
    end
    memo
  end

  # Splits my_inject's arguments into [initial, operation]:
  #   ()        -> [nil, nil]   (block form, no initial)
  #   (:+)      -> [nil, :+]
  #   (10)      -> [10, nil]
  #   (10, :+)  -> [10, :+]
  # Any non-Symbol single argument is treated as the initial value; the
  # old leniency about argument order (symbol may come first) is kept.
  def prepare_params(*args)
    case args.length
    when 0 then [nil, nil]
    when 1 then args[0].is_a?(Symbol) ? [nil, args[0]] : [args[0], nil]
    else args[0].is_a?(Symbol) ? [args[1], args[0]] : args[0, 2]
    end
  end
end
| 21.241935 | 71 | 0.60858 |
080993f02d3ee1110741c890261108136c99dd58 | 1,908 | class LibpqxxAT6 < Formula
desc "C++ connector for PostgreSQL"
homepage "http://pqxx.org/development/libpqxx/"
url "https://github.com/jtv/libpqxx/archive/6.4.7.tar.gz"
sha256 "3fe9f38df1f0f9b72c8fe1b4bc0185cf14b4ed801a9c783189b735404361ce7f"
license "BSD-3-Clause"
revision 1
bottle do
sha256 cellar: :any, arm64_big_sur: "b6f56911155c390dfbe7351fda8334b1dd47d7fed3d7001e767228144e45cc67"
sha256 cellar: :any, big_sur: "e21e51c071cc9cb879d7ab688f3fba8cf8e32cf14f34779b04db95ec67d1289b"
sha256 cellar: :any, catalina: "29def17a973940490a25c20f5722f6ea4d0551e41cd7986b9025abef40b1534e"
sha256 cellar: :any, mojave: "4b544c65887866135d96226e2bf7c2b586664f8e1a049f6d3dbeca7195884a6f"
sha256 cellar: :any, high_sierra: "39aa6c090c8341c0e9be80d055345c8322ee6a9a908a0f7863479784cbd609f5"
sha256 cellar: :any_skip_relocation, x86_64_linux: "048e4249607f887c9787a40ca3b4b2092e5ffa8adc649f52940af42406fe5080"
end
keg_only :versioned_formula
deprecate! date: "2020-06-23", because: :versioned_formula
depends_on "pkg-config" => :build
depends_on "[email protected]" => :build
depends_on "xmlto" => :build
depends_on "libpq"
def install
ENV.prepend_path "PATH", Formula["[email protected]"].opt_libexec/"bin"
ENV["PG_CONFIG"] = Formula["libpq"].opt_bin/"pg_config"
system "./configure", "--prefix=#{prefix}", "--enable-shared"
system "make", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <pqxx/pqxx>
int main(int argc, char** argv) {
pqxx::connection con;
return 0;
}
EOS
system ENV.cxx, "-std=c++11", "test.cpp", "-L#{lib}", "-lpqxx",
"-I#{include}", "-o", "test"
# Running ./test will fail because there is no running postgresql server
# system "./test"
end
end
| 38.938776 | 122 | 0.683962 |
33532526f032871fee1ea908de259d23eb01c97c | 572 | # Copyright 2011-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
| 38.133333 | 78 | 0.75 |
ffc86864d2af3c3a44e1ea93bd078fddef60b1e8 | 971 | # frozen_string_literal: true
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Gapic
module Presenters
##
# A presenter for proto enum values.
#
class EnumValuePresenter
def initialize value
@value = value
end
def name
@value.name
end
def doc_description
@value.docs_leading_comments
end
def number
@value.number
end
end
end
end
| 23.682927 | 74 | 0.69104 |
269e1195d6124bb5b7548327f9dfd65260efa4ff | 724 | module NZMPsPopolo
module Serializers
class BaseSerializer
attr_reader :record
def initialize(record)
@record = record
end
class << self
attr_reader :defined_popolo_type
def popolo_type(type)
raise ArgumentError, 'Can only be :person, :organization, :membership ' +
'or :post' unless [:person, :organization,
:membership, :post].include? type
@defined_popolo_type = type
end
end
def serialize(options = {})
klass = Object.const_get(defined_popolo_type.to_s.capitalize + 'Serializer')
klass.to_popolo
end
end
end
end
| 24.965517 | 84 | 0.563536 |
795ea8b4de9e36b76c1e079da0f996560b3ddd20 | 2,734 | # -*- encoding: utf-8 -*-
# stub: jekyll 3.6.2 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll"
s.version = "3.6.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Tom Preston-Werner"]
s.bindir = "exe"
s.date = "2017-10-21"
s.description = "Jekyll is a simple, blog aware, static site generator."
s.email = "[email protected]"
s.executables = ["jekyll"]
s.extra_rdoc_files = ["README.markdown", "LICENSE"]
s.files = ["LICENSE", "README.markdown", "exe/jekyll"]
s.homepage = "https://github.com/jekyll/jekyll"
s.licenses = ["MIT"]
s.rdoc_options = ["--charset=UTF-8"]
s.required_ruby_version = Gem::Requirement.new(">= 2.1.0")
s.rubygems_version = "2.5.2.1"
s.summary = "A simple, blog aware, static site generator."
s.installed_by_version = "2.5.2.1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 2
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<addressable>, ["~> 2.4"])
s.add_runtime_dependency(%q<colorator>, ["~> 1.0"])
s.add_runtime_dependency(%q<jekyll-sass-converter>, ["~> 1.0"])
s.add_runtime_dependency(%q<jekyll-watch>, ["~> 1.1"])
s.add_runtime_dependency(%q<kramdown>, ["~> 1.14"])
s.add_runtime_dependency(%q<liquid>, ["~> 4.0"])
s.add_runtime_dependency(%q<mercenary>, ["~> 0.3.3"])
s.add_runtime_dependency(%q<pathutil>, ["~> 0.9"])
s.add_runtime_dependency(%q<rouge>, ["< 3", ">= 1.7"])
s.add_runtime_dependency(%q<safe_yaml>, ["~> 1.0"])
else
s.add_dependency(%q<addressable>, ["~> 2.4"])
s.add_dependency(%q<colorator>, ["~> 1.0"])
s.add_dependency(%q<jekyll-sass-converter>, ["~> 1.0"])
s.add_dependency(%q<jekyll-watch>, ["~> 1.1"])
s.add_dependency(%q<kramdown>, ["~> 1.14"])
s.add_dependency(%q<liquid>, ["~> 4.0"])
s.add_dependency(%q<mercenary>, ["~> 0.3.3"])
s.add_dependency(%q<pathutil>, ["~> 0.9"])
s.add_dependency(%q<rouge>, ["< 3", ">= 1.7"])
s.add_dependency(%q<safe_yaml>, ["~> 1.0"])
end
else
s.add_dependency(%q<addressable>, ["~> 2.4"])
s.add_dependency(%q<colorator>, ["~> 1.0"])
s.add_dependency(%q<jekyll-sass-converter>, ["~> 1.0"])
s.add_dependency(%q<jekyll-watch>, ["~> 1.1"])
s.add_dependency(%q<kramdown>, ["~> 1.14"])
s.add_dependency(%q<liquid>, ["~> 4.0"])
s.add_dependency(%q<mercenary>, ["~> 0.3.3"])
s.add_dependency(%q<pathutil>, ["~> 0.9"])
s.add_dependency(%q<rouge>, ["< 3", ">= 1.7"])
s.add_dependency(%q<safe_yaml>, ["~> 1.0"])
end
end
| 41.424242 | 105 | 0.607169 |
edadf4b5dc6d6fc221b10f96c6bdd4a5dc560d3e | 2,123 | require 'spec_helper'
# API documentation specs (rspec_api_documentation DSL) for the JobTask
# endpoints: create, delete, and the two update routes.
resource 'JobTask' do
  before do
    log_in users(:owner)
  end
  # Create a task inside a job; 201 on success.
  post "/workspaces/:workspace_id/jobs/:job_id/job_tasks" do
    parameter :job_id, "Job ID"
    parameter :workspace_id, "Workspace ID"
    parameter :action, "Task Type"
    parameter :source_id, "Source Table ID"
    parameter :destination_name, "Destination Table Name"
    parameter :truncate, "Truncate destination table?"
    parameter :row_limit, "Row limit"
    required_parameters :action, :job_id, :workspace_id
    scope_parameters :job_task, [:action, :source_id, :destination_name, :truncate, :row_limit]
    let(:workspace_id) { jobs(:default).workspace.id }
    let(:job_id) { jobs(:default).id }
    let(:action) { 'import_source_data' }
    let(:source_id) { datasets(:table).id }
    let(:destination_name) { 'create_me' }
    let(:truncate) { 'false' }
    let(:row_limit) { '1000' }
    example_request "Create a Job Task in a job in a workspace" do
      status.should == 201
    end
  end
  delete "/workspaces/:workspace_id/jobs/:job_id/job_tasks/:id" do
    parameter :name, "Name"
    parameter :job_id, "Job ID"
    parameter :id, "Job Task ID"
    let(:workspace_id) { jobs(:default).workspace.id }
    let(:job_id) { jobs(:default).id }
    let(:id) { jobs(:default).job_tasks.first.id }
    example_request "Delete a Job Task in a job in a workspace" do
      status.should == 200
    end
  end
  # Workspace-scoped update.
  put "/workspaces/:workspace_id/jobs/:job_id/job_tasks/:id" do
    parameter :job_id, "Job ID"
    parameter :workspace_id, "Workspace ID"
    parameter :id, "Task ID"
    required_parameters :id, :job_id, :workspace_id
    let(:workspace_id) { jobs(:default).workspace.id }
    let(:job_id) { jobs(:default).id }
    let(:id) { jobs(:default).job_tasks.first.id }
    example_request "Update an existing task in a workspace" do
      status.should == 200
    end
  end
  # Shallow update route (task id only).
  put "/job_tasks/:id" do
    parameter :id, "Task ID"
    required_parameters :id
    let(:id) { jobs(:default).job_tasks.first.id }
    example_request "Update an existing task" do
      status.should == 200
    end
  end
end
79a936c3c3aa597f8986b100b1283b70e709a156 | 1,031 |
# CocoaPods spec for the KCUserKit iOS library.
# NOTE(review): the "[email protected]" strings in author/source below look like
# email-redaction artifacts from the data pipeline — confirm the real
# values before publishing.
Pod::Spec.new do |s|
  s.name             = "KCUserKit"
  s.version          = "0.1.85"
  s.summary          = "A short description of KCUserKit."
  s.license          = 'MIT'
  s.author           = { "Emil Wojtaszek" => "[email protected]" }
  s.source           = { :git => "[email protected]:newmedia/kingschat-user-ios.git", :tag => s.version.to_s }
  s.platform         = :ios, '8.0'
  s.requires_arc     = true
  s.source_files     = 'KCUserKit/*.{h,m}'
  s.homepage         = 'https://www.appunite.com'
  # kingschat
  s.dependency 'KCEnvironmentKit'
  s.dependency 'KCSStorageClient/Core'
  s.dependency 'KCCountriesKit'
  # networking
  s.dependency 'AFNetworking', '< 3.0'
  s.dependency 'AFgzipRequestSerializer'
  # phone validation
  s.dependency 'APAddressBook', '~> 0.2.2'
  s.dependency 'libPhoneNumber-iOS', '~> 0.8'
  # storage
  s.dependency 'ObjectiveLevelDBappunite', '~> 2.1'
  s.dependency 'Mantle', '~> 2.0'
  s.dependency 'FastCoding', '~> 3.2'
  # others
  s.dependency 'Valet', '~> 2.0'
end
| 29.457143 | 113 | 0.600388 |
1d38c53b94c69bffa17093ae5ce95cd802617df4 | 606 | module Fog
module ContainerInfra
class Telefonica
class Real
def create_cluster(params)
request(
:expects => [202, 201, 200],
:method => 'POST',
:path => "clusters",
:body => Fog::JSON.encode(params)
)
end
end
class Mock
def create_cluster(_params)
response = Excon::Response.new
response.status = 202
response.body = {
"uuid" => "746e779a-751a-456b-a3e9-c883d734946f"
}
response
end
end
end
end
end
| 21.642857 | 60 | 0.486799 |
182f9096e17f038f0d283af4b34a3753e7b89429 | 140 | require 'test_helper'
class UserlistControllerTest < ActionDispatch::IntegrationTest
  # Placeholder: no integration tests for UserlistController yet; the
  # commented example below appears to be generator boilerplate.
  # test "the truth" do
  #   assert true
  # end
end
| 17.5 | 62 | 0.742857 |
91c5d880e87d60623d95e73b2f161f66da6e64f6 | 443 | # InSpec test for recipe linux_patching::default
# The InSpec reference, with examples and extensive documentation, can be
# found at https://docs.chef.io/inspec/resources/
# Both controls below are marked :skip, so they never actually run; they
# are scaffold examples meant to be replaced with real checks.
unless os.windows?
  # This is an example test, replace with your own test.
  describe user('root'), :skip do
    it { should exist }
  end
end
# This is an example test, replace it with your own test.
describe port(80), :skip do
  it { should_not be_listening }
end
| 26.058824 | 73 | 0.731377 |
e8cb5b63053ccc55350e1a505f4fdb6a48611259 | 1,910 | require 'spec_helper'
require 'vigilem/x11/display'
# Specs for Vigilem::X11::Display, an Xlib::Display subclass. Requires a
# reachable X server (ENV['DISPLAY']).
# NOTE(review): the pattern `expect { ... }.not_to raise_error and be_a X`
# used below discards the `be_a`/`eql` matcher (low-precedence `and` on an
# already-evaluated expectation), so only the no-raise part is asserted.
describe Vigilem::X11::Display do
  # Reset class-level state between examples, preserving the FFI @layout.
  after(:each) do
    [described_class].each do |klass|
      (klass.instance_variables - [:@layout]).each do |ivar|
        klass.send(:remove_instance_variable, ivar)
      end
    end
  end
  it 'is a subclass of Xlib::Display' do
    expect(described_class).to be < Xlib::Display
  end
  it 'has the same structure as Xlib::Display' do
    expect(described_class.layout).to eql(Xlib::Display.layout)
  end
  describe '#initialize' do
    it 'takes a pointer and creates a display from it' do
      expect do
        described_class.new(Xlib.XOpenDisplay(ENV['DISPLAY']))
      end.not_to raise_error and be_a described_class
    end
    it 'takes a display name and creates a Display' do
      expect do
        described_class.new(ENV['DISPLAY'])
      end.not_to raise_error and be_a described_class
    end
  end
  context 'post_init' do
    subject { described_class.new(ENV['DISPLAY']) }
    describe '#fileno' do
      it 'returns the fileno of the associated IO' do
        expect(subject.fileno).to be_an Integer
      end
    end
    describe '#to_io' do
      it 'returns the display as IO' do
        expect(subject.to_io).to be_a IO
      end
    end
  end
  describe '::wrap' do
    it 'returns a new display object' do
      expect { described_class.wrap(ENV['DISPLAY']) }.not_to raise_error and be_a Display
    end
    it 'takes a pointer and creates a display from it' do
      expect { described_class.wrap(Xlib.XOpenDisplay(ENV['DISPLAY'])) }.not_to raise_error and be_a Display
    end
    it 'takes a Display object and returns the same Display object' do
      dpy = described_class.wrap(ENV['DISPLAY'])
      expect { described_class.wrap(dpy) }.not_to raise_error and eql(dpy)
    end
  end
  #describe '::open' do
  #
  #end
end
| 26.164384 | 108 | 0.653927 |
e8841ed4a95e036d44b133c93666ef7f56fea47d | 1,360 | require 'rails_helper'
# Pundit policy specs: Computacenter users see nothing; school, local
# authority and support users can view assets; nobody can mutate them.
RSpec.describe AssetPolicy, type: :policy do
  let(:school_user) { create(:school_user) }
  let(:rb_user) { create(:local_authority_user) }
  let(:support_user) { create(:support_user) }
  let(:cc_user) { create(:computacenter_user) }
  subject(:policy) { described_class }
  describe 'Scope' do
    # An anonymous (nil) user must be rejected outright.
    specify { expect { Pundit.policy_scope!(nil, Asset) }.to raise_error /must be logged in/ }
    specify { expect(Pundit.policy_scope!(cc_user, Asset)).to eq(Asset.none) }
    specify { expect(Pundit.policy_scope!(school_user, Asset)).to eq(Asset.all) }
    specify { expect(Pundit.policy_scope!(rb_user, Asset)).to eq(Asset.all) }
    specify { expect(Pundit.policy_scope!(support_user, Asset)).to eq(Asset.all) }
  end
  permissions :show? do
    specify { expect(policy).not_to permit(cc_user, Asset.new) }
    specify { expect(policy).to permit(school_user, Asset.new) }
    specify { expect(policy).to permit(rb_user, Asset.new) }
    specify { expect(policy).to permit(support_user, Asset.new) }
  end
  # Assets are read-only through this policy for every role.
  permissions :create?, :update?, :destroy? do
    specify { expect(policy).not_to permit(cc_user, Asset.new) }
    specify { expect(policy).not_to permit(school_user, Asset.new) }
    specify { expect(policy).not_to permit(rb_user, Asset.new) }
    specify { expect(policy).not_to permit(support_user, Asset.new) }
  end
end
| 41.212121 | 94 | 0.708088 |
216052b8894d696776cafd7a314a354943dfe5a5 | 1,054 | class EmailPreference < ActiveRecord::Base
belongs_to :user
belongs_to :category, class_name: :EmailCategory, foreign_key: :email_category_id
validates :user_id, :email_category_id, presence: true
validates :subscribed, inclusion: { in: [true, false] }
include CustomTimestampAttributesForUpdate
before_save :track_subscription_change
after_commit :sync_mailchimp_subscription_status_async
def self.for_category category
where(email_category_id: EmailPreferencesService.category_id(category))
end
private
def track_subscription_change
if subscribed_changed? && !subscription_changed_at_changed?
@custom_timestamp_attributes_for_update = [:subscription_changed_at]
end
end
def sync_mailchimp_subscription_status_async
return unless EmailPreferencesService.enable_mailchimp_callback?
return unless previous_changes.keys.include? 'subscribed'
return unless category.name.in? ['newsletter']
AsyncService.new(EmailPreferencesService)
.sync_mailchimp_subscription_status(self)
end
end
| 31.939394 | 83 | 0.805503 |
3898dc0c7e388339ec77962dc1f390c038ebcb1a | 1,242 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v8/enums/search_term_targeting_status.proto
require 'google/api/annotations_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v8/enums/search_term_targeting_status.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v8.enums.SearchTermTargetingStatusEnum" do
end
add_enum "google.ads.googleads.v8.enums.SearchTermTargetingStatusEnum.SearchTermTargetingStatus" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :ADDED, 2
value :EXCLUDED, 3
value :ADDED_EXCLUDED, 4
value :NONE, 5
end
end
end
module Google
module Ads
module GoogleAds
module V8
module Enums
SearchTermTargetingStatusEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.enums.SearchTermTargetingStatusEnum").msgclass
SearchTermTargetingStatusEnum::SearchTermTargetingStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.enums.SearchTermTargetingStatusEnum.SearchTermTargetingStatus").enummodule
end
end
end
end
end
| 36.529412 | 225 | 0.758454 |
d5bdd94f543520ce55d454fea230555674f32eb8 | 1,251 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'amap_map_fluttify'
s.version = '0.0.1'
s.summary = 'An `Amap` map component, based on `fluttify` engine.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'https://github.com/fluttify-project/amap_map_fluttify'
s.license = { :file => '../LICENSE' }
s.author = { 'yohom' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.dependency 'foundation_fluttify'
# flutter插件依赖
s.dependency 'amap_core_fluttify'
s.ios.deployment_target = '8.0'
# 包含工程下的framework
s.vendored_frameworks = '**/*.framework'
# 包含工程下的.a
s.vendored_libraries = '**/*.a'
# 默认使用静态framework
# s.static_framework = true
# 系统framework
s.frameworks = [
"QuartzCore", "CoreLocation", "SystemConfiguration", "CoreTelephony", "Security", "OpenGLES", "CoreText", "CoreGraphics", "GLKit"
]
# 系统library
s.libraries = [
"z", "c++"
]
# 需要添加的资源文件
s.resources = '*.framework/*.bundle'
end
| 30.512195 | 137 | 0.603517 |
f83c9b8f5b224ed501ed3b3acaedf8359c38fede | 1,646 | module Dapp
module Deployment
module Config
module Directive
class Expose < Base
attr_reader :_port
attr_reader :_type
def initialize(dapp:)
@_port = []
@_type = 'ClusterIP'
super
end
def cluster_ip
sub_directive_eval { @_type = 'ClusterIP' }
end
def load_balancer
sub_directive_eval { @_type = 'LoadBalancer' }
end
def node_port
sub_directive_eval { @_type = 'NodePort' }
end
def port(number, &blk)
sub_directive_eval { @_port << Port.new(number, dapp: dapp, &blk) }
end
class Port < Base
attr_reader :_number, :_target, :_protocol
def initialize(number, dapp:, &blk)
self._number = number
@_protocol = 'TCP'
super(dapp: dapp, &blk)
end
def target(number)
@_target = define_number(number, :unsupported_target_number)
end
def tcp
@_protocol = 'TCP'
end
def udp
@_protocol = 'UDP'
end
def _number=(number)
@_number = define_number(number, :unsupported_port_number)
end
protected
def define_number(number, code)
number.to_i.tap do |n|
raise ::Dapp::Error::Config, code: code, data: { number: number } unless (0..65536).cover?(n)
end
end
end
end
end
end
end
end
| 23.855072 | 109 | 0.482989 |
e83d54890a342cca824c013165d203c4e6eed50f | 5,171 | #
# Tests, setup, and teardown common to the application and plugin generator suites.
#
# Mixin shared by the application- and plugin-generator test suites.
# Host suites are expected to provide `default_files`, `run_generator`,
# `generator`, `destination_root`, and the generator file assertions.
module SharedGeneratorTests
  def setup
    Rails.application = TestApp::Application
    super
    # Clear the memoized generator description so each test starts clean.
    Rails::Generators::AppGenerator.instance_variable_set('@desc', nil)
    # Force Thor's shell to overwrite colliding files without prompting.
    Kernel::silence_warnings do
      Thor::Base.shell.send(:attr_accessor, :always_force)
      @shell = Thor::Base.shell.new
      @shell.send(:always_force=, true)
    end
  end

  def teardown
    super
    Rails::Generators::AppGenerator.instance_variable_set('@desc', nil)
    Rails.application = TestApp::Application.instance
  end

  def test_skeleton_is_created
    run_generator
    default_files.each { |path| assert_file path }
  end

  # Runs the generator with bundle_command stubbed and asserts that
  # `bundle install` is invoked exactly once.
  def assert_generates_with_bundler(options = {})
    generator([destination_root], options)
    command_check = -> command do
      @install_called ||= 0
      case command
      when 'install'
        @install_called += 1
        assert_equal 1, @install_called, "install expected to be called once, but was called #{@install_called} times"
      when 'exec spring binstub --all'
        # Called when running tests with spring, let through unscathed.
      end
    end
    generator.stub :bundle_command, command_check do
      quietly { generator.invoke_all }
    end
  end

  def test_generation_runs_bundle_install
    assert_generates_with_bundler
  end

  # --pretend must not write anything to disk.
  def test_plugin_new_generate_pretend
    run_generator ["testapp", "--pretend"]
    default_files.each{ |path| assert_no_file File.join("testapp",path) }
  end

  def test_invalid_database_option_raises_an_error
    content = capture(:stderr){ run_generator([destination_root, "-d", "unknown"]) }
    assert_match(/Invalid value for \-\-database option/, content)
  end

  def test_test_files_are_skipped_if_required
    run_generator [destination_root, "--skip-test"]
    assert_no_file "test"
  end

  # App/plugin names must not shadow reserved rails words…
  def test_name_collision_raises_an_error
    reserved_words = %w[application destroy plugin runner test]
    reserved_words.each do |reserved|
      content = capture(:stderr){ run_generator [File.join(destination_root, reserved)] }
      assert_match(/Invalid \w+ name #{reserved}. Please give a name which does not match one of the reserved rails words: application, destroy, plugin, runner, test\n/, content)
    end
  end

  # …nor collide with existing Ruby constants.
  def test_name_raises_an_error_if_name_already_used_constant
    %w{ String Hash Class Module Set Symbol }.each do |ruby_class|
      content = capture(:stderr){ run_generator [File.join(destination_root, ruby_class)] }
      assert_match(/Invalid \w+ name #{ruby_class}, constant #{ruby_class} is already in use. Please choose another \w+ name.\n/, content)
    end
  end

  def test_shebang_is_added_to_rails_file
    run_generator [destination_root, "--ruby", "foo/bar/baz", "--full"]
    assert_file "bin/rails", /#!foo\/bar\/baz/
  end

  # When --ruby equals the running interpreter, the shebang falls back to env.
  def test_shebang_when_is_the_same_as_default_use_env
    run_generator [destination_root, "--ruby", Thor::Util.ruby_command, "--full"]
    assert_file "bin/rails", /#!\/usr\/bin\/env/
  end

  def test_template_raises_an_error_with_invalid_path
    quietly do
      content = capture(:stderr){ run_generator([destination_root, "-m", "non/existent/path"]) }
      assert_match(/The template \[.*\] could not be loaded/, content)
      assert_match(/non\/existent\/path/, content)
    end
  end

  # Stubs the open call so no real HTTP request is made for an https template.
  def test_template_is_executed_when_supplied_an_https_path
    path = "https://gist.github.com/josevalim/103208/raw/"
    template = %{ say "It works!" }
    template.instance_eval "def read; self; end" # Make the string respond to read
    check_open = -> *args do
      assert_equal [ path, 'Accept' => 'application/x-thor-template' ], args
      template
    end
    generator([destination_root], template: path).stub(:open, check_open, template) do
      quietly { assert_match(/It works!/, capture(:stdout) { generator.invoke_all }) }
    end
  end

  # --dev points the Gemfile at the local rails checkout.
  def test_dev_option
    assert_generates_with_bundler dev: true
    rails_path = File.expand_path('../../..', Rails.root)
    assert_file 'Gemfile', /^gem\s+["']rails["'],\s+path:\s+["']#{Regexp.escape(rails_path)}["']$/
  end

  # --edge points the Gemfile at rails/rails on GitHub.
  def test_edge_option
    assert_generates_with_bundler edge: true
    assert_file 'Gemfile', %r{^gem\s+["']rails["'],\s+github:\s+["']#{Regexp.escape("rails/rails")}["']$}
  end

  def test_skip_gemfile
    assert_not_called(generator([destination_root], skip_gemfile: true), :bundle_command) do
      quietly { generator.invoke_all }
      assert_no_file 'Gemfile'
    end
  end

  def test_skip_bundle
    assert_not_called(generator([destination_root], skip_bundle: true), :bundle_command) do
      quietly { generator.invoke_all }
      # skip_bundle is only about running bundle install, ensure the Gemfile is still
      # generated.
      assert_file 'Gemfile'
    end
  end

  def test_skip_git
    run_generator [destination_root, '--skip-git', '--full']
    assert_no_file('.gitignore')
  end

  def test_skip_keeps
    run_generator [destination_root, '--skip-keeps', '--full']
    assert_file '.gitignore' do |content|
      assert_no_match(/\.keep/, content)
    end
    assert_no_file('app/models/concerns/.keep')
  end
end
| 32.522013 | 178 | 0.704893 |
edda5e951421e86338682970559b721799f5c9d0 | 2,409 | module Enjoy::News
module Controllers
module Categories
extend ActiveSupport::Concern
def index
@categories = category_class.enabled.sorted.to_a
@root_catalog = category_class.enabled.roots.sorted.all.to_a
after_initialize
end
def show
@category = category_class.enabled.find(params[:id])
if [email protected]_slug.blank? and @category.text_slug != params[:id]
redirect_to @category, status_code: 301
return
end
@seo_parent_page = find_seo_page(url_for(action: :index))
@children = @category.children.enabled.sorted.all.to_a
@news = @category.news.enabled.sorted.all.to_a
after_initialize
end
def page_title
if @category
@category.page_title
else
super
end
end
private
def category_class
Enjoy::News::Category
end
def news_class
Enjoy::News::News
end
def after_initialize
end
# def index_crumbs
# if @seo_parent_page
# catalog_title = Settings.ns('breadcrumbs').catalog_title(default: "Каталог", label: "'Каталог' в breadcrumbs")
# _crumb = catalog_title
# _crumb = @seo_parent_page.name if _crumb.blank?
# _crumb = @seo_parent_page.title if _crumb.blank?
# _crumb = @seo_parent_page.h1 if _crumb.blank?
# add_crumb _crumb, @seo_parent_page.fullpath
# else
# catalog_title = Settings.ns('breadcrumbs').catalog_title(default: "Каталог", label: "'Каталог' в breadcrumbs")
# _crumb = catalog_title
# add_crumb _crumb, item_categories_path
# end
# end
#
# def category_crumbs
# if @item_category
# _parent = @item_category.parent
# if _parent
# _crumb = _parent.name if _crumb.blank?
# _crumb = _parent.title if _crumb.blank?
# _crumb = _parent.h1 if _crumb.blank?
# add_crumb _crumb, item_category_path(_parent)
# _crumb = nil
# end
# _crumb = @item_category.name if _crumb.blank?
# _crumb = @item_category.title if _crumb.blank?
# _crumb = @item_category.h1 if _crumb.blank?
# add_crumb _crumb, item_category_path(@item_category)
# end
# end
end
end
end
| 30.1125 | 124 | 0.599419 |
61dbcc92a1b6d1598d0efc8299f8af6d4eef46d0 | 4,120 | module Geokit
module Geocoders
# Open Street Map geocoder implementation.
class OSMGeocoder < Geocoder
private
# Template method which does the geocode lookup.
def self.do_geocode(address, options = {})
options_str = generate_bool_param_for_option(:polygon, options)
options_str << generate_param_for_option(:json_callback, options)
options_str << generate_param_for_option(:countrycodes, options)
options_str << generate_param_for_option(:viewbox, options)
address_str = address.is_a?(GeoLoc) ? address.to_geocodeable_s : address
url = "http://nominatim.openstreetmap.org/search?format=json#{options_str}&addressdetails=1&q=#{Geokit::Inflector::url_escape(address_str)}"
process :json, url
end
def self.do_reverse_geocode(latlng, options = {})
latlng = LatLng.normalize(latlng)
options_str = generate_param_for(:lat, latlng.lat)
options_str << generate_param_for(:lon, latlng.lng)
options_str << generate_param_for_option(:zoom, options)
options_str << generate_param_for_option(:osm_type, options)
options_str << generate_param_for_option(:osm_id, options)
options_str << generate_param_for_option(:json_callback, options)
url = "http://nominatim.openstreetmap.org/reverse?format=json&addressdetails=1#{options_str}"
process :json, url
end
def self.generate_param_for(param, value)
"&#{param}=#{Geokit::Inflector::url_escape(value.to_s)}"
end
def self.generate_param_for_option(param, options)
options[param] ? "&#{param}=#{Geokit::Inflector::url_escape(options[param])}" : ''
end
def self.generate_bool_param_for_option(param, options)
options[param] ? "&#{param}=1" : "&#{param}=0"
end
def self.parse_json(results)
if results.is_a?(Hash)
return GeoLoc.new if results['error']
results = [results]
end
return GeoLoc.new if results.empty?
loc = nil
results.each do |result|
extract_geoloc = extract_geoloc(result)
if loc.nil?
loc = extract_geoloc
else
loc.all.push(extract_geoloc)
end
end
loc
end
def self.extract_geoloc(result_json)
loc = new_loc
# basic
loc.lat = result_json['lat']
loc.lng = result_json['lon']
set_address_components(result_json['address'], loc)
set_precision(result_json, loc)
set_bounds(result_json['boundingbox'], loc)
loc.success = true
loc
end
def self.set_address_components(address_data, loc)
return unless address_data
loc.country = address_data['country']
loc.country_code = address_data['country_code'].upcase if address_data['country_code']
loc.state_name = address_data['state']
loc.city = address_data['city']
loc.city = address_data['county'] if loc.city.nil? && address_data['county']
loc.zip = address_data['postcode']
loc.district = address_data['city_district']
loc.district = address_data['state_district'] if loc.district.nil? && address_data['state_district']
loc.street_address = "#{address_data['road']} #{address_data['house_number']}".strip if address_data['road']
loc.street_name = address_data['road']
loc.street_number = address_data['house_number']
end
def self.set_precision(result_json, loc)
# Todo accuracy does not work as Yahoo and Google maps on OSM
#loc.accuracy = %w{unknown amenity building highway historic landuse leisure natural place railway shop tourism waterway man_made}.index(loc.precision)
loc.precision = result_json['class']
loc.accuracy = result_json['type']
end
def self.set_bounds(result_json, loc)
return unless result_json
loc.suggested_bounds = Bounds.normalize(
[result_json[0], result_json[1]],
[result_json[2], result_json[3]])
end
end
end
end
| 37.454545 | 159 | 0.654369 |
bb49e162918921e0b757efba54a03bb90824f3cf | 1,987 | #!/usr/bin/env ruby
require 'open-uri'
require 'tracker_api'
require 'pry'
require 'yaml'
# Labels and transitions Tracker security-notice stories depending on
# whether any package named in the story appears in the stack's receipt.
class CategorizeSecurityNotices
  attr_reader :stories

  # tracker_client - client used to label/point/transition stories
  # stories_file   - JSON resource file whose version ref holds the stories
  # stack_receipt  - path to the package receipt for the stack
  # stack          - stack name ('cflinuxfs2' or 'cflinuxfs3')
  def initialize(tracker_client, stories_file, stack_receipt, stack)
    ref = JSON.parse(File.read(stories_file))
    @tracker_client = tracker_client
    @stories = JSON.parse(ref['version']['ref'])
    @receipt = File.read(stack_receipt)
    @stack = stack
  end

  # Affected stories are zero-pointed and started; unaffected ones are
  # zero-pointed and delivered.
  def run
    stories.each do |story|
      packages = get_story_packages(story)
      if affected?(packages)
        label_story(story, "affected")
        zero_point_story(story['id'])
        start_story(story['id'])
      else
        label_story(story, "unaffected")
        zero_point_story(story['id'])
        deliver_story(story['id'])
      end
    end
  end

  private

  # Extracts this stack's package list from the story description.
  def get_story_packages(story)
    exp =
      case @stack
      when 'cflinuxfs2'
        Regexp.new('\*\*14.04 Packages:\*\*\n(.*?)((\n\*\*.*Packages)|\Z)', Regexp::MULTILINE)
      when 'cflinuxfs3'
        Regexp.new('\*\*18.04 Packages:\*\*\n(.*?)\Z', Regexp::MULTILINE)
      else
        # BUG FIX: this previously interpolated the undefined local `stack`,
        # which raised NameError instead of the intended error message.
        raise "Unsupported stack: #{@stack}"
      end
    package_list = exp.match(story['description'])[1].split("\n")
    package_list.map { |package| package.lstrip }
  end

  # True when any of the named packages appears in the receipt.
  def affected?(packages)
    packages.each do |package|
      package_name = package.split(" ").first
      exp = Regexp.new("^\\w+\\s+" + Regexp.escape(package_name) + ":?\\S*\\s+")
      return true if exp.match(@receipt)
    end
    false
  end

  def label_story(story, label)
    @tracker_client.add_label_to_story(story: story, label: label)
  end

  def zero_point_story(story_id)
    @tracker_client.point_story(story_id: story_id, estimate: 0)
  end

  def deliver_story(story_id)
    @tracker_client.change_story_state(story_id: story_id, current_state: "delivered")
  end

  def start_story(story_id)
    @tracker_client.change_story_state(story_id: story_id, current_state: "started")
  end
end
| 25.805195 | 96 | 0.650226 |
f7680daa65ebc6c0a8093f66aaf988ffb7a36e1b | 776 | require 'fog/core/model'
module Fog
module Compute
class XenServer
class PBD < Fog::Model
# API Reference here:
# http://docs.vmd.citrix.com/XenServer/6.2.0/1.0/en_gb/api/?c=PBD
identity :reference
attribute :uuid
attribute :__host, :aliases => :host
attribute :__sr, :aliases => :SR
attribute :currently_attached
attribute :device_config
attribute :other_config
def sr
service.storage_repositories.get __sr
end
def storage_repository
sr
end
def host
service.hosts.get __host
end
def unplug
service.unplug_pbd reference
end
end
end
end
end
| 18.926829 | 73 | 0.554124 |
33daa07dd8018a59cffd79b5ec2ba7d97009ac45 | 1,353 | #snippet-comment:[These are tags for the AWS doc team's sample catalog. Do not remove.]
#snippet-sourceauthor:[Doug-AWS]
#snippet-sourcedescription:[Encrypts a string.]
#snippet-keyword:[AWS Key Management Service]
#snippet-keyword:[encrypt method]
#snippet-keyword:[Ruby]
#snippet-service:[kms]
#snippet-sourcetype:[full-example]
#snippet-sourcedate:[2018-03-16]
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of the
# License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'aws-sdk-kms' # v2: require 'aws-sdk'
# ARN of the customer master key (CMK).
#
# Replace the fictitious key ARN with a valid key ID.
key_id = 'arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab'
plaintext = '1234567890'

client = Aws::KMS::Client.new(region: 'us-west-2')

# Encrypt the plaintext under the CMK and print the ciphertext as hex.
response = client.encrypt(key_id: key_id, plaintext: plaintext)

puts 'Blob:'
puts response.ciphertext_blob.unpack('H*')
| 33 | 88 | 0.724316 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.