hexsha stringlengths 40-40 | size int64 2-1.01M | content stringlengths 2-1.01M | avg_line_length float64 1.5-100 | max_line_length int64 2-1k | alphanum_fraction float64 0.25-1 |
---|---|---|---|---|---|
ed26a0f265357fc272559a689773968e3e4c1a4f | 324 | Whodat::Engine.routes.draw do
get 'users/index'
get 'users/new'
get 'users/create'
resources :users, only: [:new, :create]
resources :sessions, only: [:new, :create, :destroy]
root to: 'dashboard#index'
#root 'dashboard#index'
get 'dashboard/index' => 'dashboard#index', as: :dashboard
end
| 20.25 | 60 | 0.645062 |
91da7aed9934c07a1defe08f73da31eafe0bd759 | 2,846 | include System
include System::Windows
include System::Windows::Browser
include System::Windows::Controls
$DEBUG = false
class SilverlightApplication
def document
HtmlPage.document
end
def application
Application.current
end
def self.use_xaml(options = {})
options = {:type => UserControl, :name => "app"}.merge(options)
Application.current.load_root_visual(options[:type].new, "#{options[:name]}.xaml")
end
def root
application.root_visual
end
def puts(msg)
if document.debug_print.nil?
div = document.create_element('div')
div[:id] = "debug_print"
document.get_elements_by_tag_name("body").get_Item(0).append_child(div)
end
document.debug_print[:innerHTML] = "#{document.debug_print.innerHTML}<hr />#{msg}"
end
def debug_puts(msg)
puts(msg) if $DEBUG
end
def method_missing(m)
root.send(m)
end
end
class HtmlDocument
def method_missing(m)
get_element_by_id(m)
end
alias_method :orig_get_element_by_id, :get_element_by_id
def get_element_by_id(id)
orig_get_element_by_id(id.to_s.to_clr_string)
end
end
class HtmlElement
def [](index)
a = get_attribute(index)
return get_property(index) if a.nil?
return a
end
def []=(index, value)
set_property(index, value)
rescue
begin
set_attribute(index, value)
rescue => e
raise e
end
end
def method_missing(m, &block)
if(block.nil?)
self[m]
else
attach_event(m.to_s.to_clr_string, System::EventHandler.new(&block))
end
end
def style
HtmlStyle.new(self)
end
alias_method :orig_get_attribute, :get_attribute
def get_attribute(index)
orig_get_attribute(index.to_s.to_clr_string)
end
alias_method :orig_set_attribute, :set_attribute
def set_attribute(index, value)
orig_set_attribute(index.to_s.to_clr_string, value)
end
alias_method :orig_get_property, :get_property
def get_property(index)
orig_get_property(index.to_s.to_clr_string)
end
alias_method :orig_set_property, :set_property
def set_property(index, value)
orig_set_property(index.to_s.to_clr_string, value)
end
alias_method :orig_get_style_attribute, :get_style_attribute
def get_style_attribute(index)
orig_get_style_attribute(index.to_s.to_clr_string)
end
alias_method :orig_set_style_attribute, :set_style_attribute
def set_style_attribute(index, value)
orig_set_style_attribute(index.to_s.to_clr_string, value)
end
end
class HtmlStyle
def initialize(element)
@element = element
end
def [](index)
@element.get_style_attribute(index)
end
def []=(index, value)
@element.set_style_attribute(index, value)
end
def method_missing(m)
self[m]
end
end
class FrameworkElement
def method_missing(m)
find_name(m.to_s.to_clr_string)
end
end
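# A hypothetical application class built on the helpers above; the XAML file,
# element names and properties here are illustrative assumptions, not part of
# the original library:
#
#   class App < SilverlightApplication
#     use_xaml :name => "app"
#
#     def initialize
#       # unknown methods resolve XAML element names via root.find_name
#       message.text = "Hello from Ruby"
#       # HtmlElement#[]= tries set_property, then falls back to set_attribute
#       document.status[:innerHTML] = "ready"
#       # HtmlStyle#[]= forwards to set_style_attribute
#       document.status.style[:color] = "green"
#     end
#   end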
| 20.623188 | 86 | 0.723823 |
619b87edf1da15e9c5c2215e1b5e937d56832a1c | 1,005 | require "language/node"
class Truffle < Formula
desc "Development environment, testing framework and asset pipeline for Ethereum"
homepage "https://trufflesuite.com"
url "https://registry.npmjs.org/truffle/-/truffle-5.2.2.tgz"
sha256 "f3be4bf340d9eb2be6b02e1a24f4fb4b99a41e8ad453bd36aee9070a96ca3adb"
license "MIT"
bottle do
sha256 arm64_big_sur: "e8006dd9dd1ce5c17ad796d45c8ff1498bb180be16fcc72988deaebeb523173f"
sha256 big_sur: "682effbb45a19c3ba3a1e8ff6f58e87afa253ae31a1a476503f3a8c810deb111"
sha256 catalina: "60317fe8bca44644e33fbedc93aff6fc3a7aa47ef85f09b6a4c3167c4b8b145d"
sha256 mojave: "64d8c8e4fa60fb426c3147df5c9cda8a9d54a69caaa5a68b13c4deacb08d5fc2"
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
system bin/"truffle", "init"
system bin/"truffle", "compile"
system bin/"truffle", "test"
end
end
| 33.5 | 92 | 0.768159 |
61587d45a217d395f8a530906e01940b138afaf7 | 122 | MRuby::Build.new do |conf|
toolchain :gcc
conf.gembox 'default'
conf.gem '../haconiwa-stats'
conf.enable_test
end
| 17.428571 | 30 | 0.704918 |
03da29c4c686930c5020b6fac27400b204a485aa | 655 | Gitlab::Seeder.quiet do
20.times do |i|
begin
User.create!(
username: FFaker::Internet.user_name,
name: FFaker::Name.name,
email: FFaker::Internet.email,
confirmed_at: DateTime.now,
password: '12345678'
)
print '.'
rescue ActiveRecord::RecordInvalid
print 'F'
end
end
5.times do |i|
begin
User.create!(
username: "user#{i}",
name: "User #{i}",
email: "user#{i}@example.com",
confirmed_at: DateTime.now,
password: '12345678'
)
print '.'
rescue ActiveRecord::RecordInvalid
print 'F'
end
end
end
| 19.848485 | 45 | 0.549618 |
ab42bd88118a418195007186984b66d08cbbfcce | 729 | Pod::Spec.new do |s|
s.name = 'AWSSageMakerRuntime'
s.version = '2.12.8'
s.summary = 'Amazon Web Services SDK for iOS.'
s.description = 'The AWS SDK for iOS provides a library, code samples, and documentation for developers to build connected mobile applications using AWS.'
s.homepage = 'http://aws.amazon.com/mobile/sdk'
s.license = 'Apache License, Version 2.0'
s.author = { 'Amazon Web Services' => 'amazonwebservices' }
s.platform = :ios, '8.0'
s.source = { :git => 'https://github.com/aws/aws-sdk-ios.git',
:tag => s.version}
s.requires_arc = true
s.dependency 'AWSCore', '2.12.8'
s.source_files = 'AWSSageMakerRuntime/*.{h,m}'
end
| 40.5 | 157 | 0.621399 |
387a580230f21e51d578a94605ebc9c674e014cb | 645 | class Jmxterm < Formula
desc "Open source, command-line based interactive JMX client"
homepage "https://docs.cyclopsgroup.org/jmxterm"
url "https://github.com/jiaqi/jmxterm/releases/download/v1.0.1/jmxterm-1.0.1-uber.jar"
sha256 "76e0dae56b410c77724b561897e9073f088bd0b6158d668147d466debac6e9b0"
bottle :unneeded
depends_on :java => "1.8"
def install
libexec.install "jmxterm-#{version}-uber.jar"
bin.write_jar_script libexec/"jmxterm-#{version}-uber.jar", "jmxterm", "", :java_version => "1.8"
end
test do
assert_match(/"software\.name".=."jmxterm";/, shell_output("echo about | #{bin}/jmxterm -n"))
end
end
| 32.25 | 101 | 0.72093 |
ac9c1c933448ff2c8087fd4dd369bf25c45b2245 | 3,300 | class AddHistoryToNfsStoreFilters < ActiveRecord::Migration
def change
#
reversible do |dir|
dir.up do
execute <<EOF
BEGIN;
-- Command line:
-- table_generators/generate.sh admin_history_table create nfs_store_filters app_type_id role_name user_id resource_name filter description
CREATE OR REPLACE FUNCTION log_nfs_store_filter_update() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO nfs_store_filter_history
(
app_type_id,
role_name,
user_id,
resource_name,
filter,
description,
admin_id,
disabled,
created_at,
updated_at,
nfs_store_filter_id
)
SELECT
NEW.app_type_id,
NEW.role_name,
NEW.user_id,
NEW.resource_name,
NEW.filter,
NEW.description,
NEW.admin_id,
NEW.disabled,
NEW.created_at,
NEW.updated_at,
NEW.id
;
RETURN NEW;
END;
$$;
CREATE TABLE nfs_store_filter_history (
id integer NOT NULL,
app_type_id bigint,
role_name varchar,
user_id bigint,
resource_name varchar,
filter varchar,
description varchar,
admin_id integer,
disabled boolean,
created_at timestamp without time zone,
updated_at timestamp without time zone,
nfs_store_filter_id integer
);
CREATE SEQUENCE nfs_store_filter_history_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE nfs_store_filter_history_id_seq OWNED BY nfs_store_filter_history.id;
ALTER TABLE ONLY nfs_store_filter_history ALTER COLUMN id SET DEFAULT nextval('nfs_store_filter_history_id_seq'::regclass);
ALTER TABLE ONLY nfs_store_filter_history
ADD CONSTRAINT nfs_store_filter_history_pkey PRIMARY KEY (id);
CREATE INDEX index_nfs_store_filter_history_on_nfs_store_filter_id ON nfs_store_filter_history USING btree (nfs_store_filter_id);
CREATE INDEX index_nfs_store_filter_history_on_admin_id ON nfs_store_filter_history USING btree (admin_id);
CREATE TRIGGER nfs_store_filter_history_insert AFTER INSERT ON nfs_store_filters FOR EACH ROW EXECUTE PROCEDURE log_nfs_store_filter_update();
CREATE TRIGGER nfs_store_filter_history_update AFTER UPDATE ON nfs_store_filters FOR EACH ROW WHEN ((old.* IS DISTINCT FROM new.*)) EXECUTE PROCEDURE log_nfs_store_filter_update();
ALTER TABLE ONLY nfs_store_filter_history
ADD CONSTRAINT fk_nfs_store_filter_history_admins FOREIGN KEY (admin_id) REFERENCES admins(id);
ALTER TABLE ONLY nfs_store_filter_history
ADD CONSTRAINT fk_nfs_store_filter_history_nfs_store_filters FOREIGN KEY (nfs_store_filter_id) REFERENCES nfs_store_filters(id);
GRANT SELECT,INSERT,UPDATE,DELETE ON ALL TABLES IN SCHEMA ml_app TO fphs;
GRANT USAGE ON ALL SEQUENCES IN SCHEMA ml_app TO fphs;
GRANT SELECT ON ALL SEQUENCES IN SCHEMA ml_app TO fphs;
COMMIT;
EOF
end
dir.down do
execute <<EOF
DROP TABLE if exists nfs_store_filter_history CASCADE;
DROP FUNCTION if exists log_nfs_store_filter_update() CASCADE;
EOF
end
end
end
end
| 28.448276 | 180 | 0.70303 |
e89d06767562bce37c5fc045ce778bf5b7d21af5 | 211 | class CreateForemPosts < ActiveRecord::Migration[4.2]
def change
create_table :forem_posts do |t|
t.integer :topic_id
t.text :text
t.integer :user_id
t.timestamps
end
end
end
| 19.181818 | 53 | 0.658768 |
1cd70cdbcc3ec72aa402fa81b53b6b8c2981fd67 | 4,069 | # -*- coding: utf-8 -*-
=begin
Copyright (C) 2014-2015 Takashi SUGA
You may use and/or modify this file according to the license described in the LICENSE.txt file included in this archive.
=end
module When
class BasicTypes::M17n
Positivist = [self, [
"locale:[=en:, ja=ja:, alias]",
"names:[Positivist=]",
"[Positivist=en:Positivist_calendar, オーギュスト・コントの暦=ja:%%<13の月の暦>#%.<実証暦>]"
]]
end
#
# Positivist Week
#
class CalendarNote::PositivistWeek < CalendarNote::Week
#
# Positivist Note
#
Notes = [When::BasicTypes::M17n, [
"locale:[=en:, ja=ja:, alias]",
"names:[Positivist]",
# Notes for year ----------------------------
[When::BasicTypes::M17n,
"names:[note for year=, 年の暦注=, *year]"
],
# Notes for month ----------------------------
[When::BasicTypes::M17n,
"names:[note for month=, 月の暦注=, *month]",
[When::BasicTypes::M17n,
"names:[month name=en:Month, 月の名前=ja:%%<月_(暦)>, zh:該月的名稱=, *alias:Month=]",
"[Moses, モーセ ]",
"[Homer, ホメーロス ]",
"[Aristotle, アリストテレス ]",
"[Archimedes, アルキメデス ]",
"[Caesar=en:Julius_Caesar, カエサル=ja:%%<ガイウス・ユリウス・カエサル> ]",
"[Saint Paul=en:Paul_of_Tarsus, パウロ ]",
"[Charlemagne, シャルルマーニュ=ja:%%<カール大帝> ]",
"[Dante=en:Dante_Alighieri, ダンテ=ja:%%<ダンテ・アリギエーリ> ]",
"[Gutenberg=en:Johann_Gutenberg, グーテンベルク=ja:%%<ヨハネス・グーテンベルク> ]",
"[Shakespeare=en:William_Shakespeare, シェイクスピア=ja:%%<ウィリアム・シェイクスピア> ]",
"[Descartes=en:%%<René_Descartes>, デカルト=ja:%%<ルネ・デカルト> ]",
"[Frederick=en:Frederick_II_of_Prussia, フリードリヒ=ja:%%<フリードリヒ2世 (プロイセン王)> ]",
"[Bichat=en:%%<Marie_François_Xavier Bichat>, ビシャ=ja:%%<マリー・フランソワ・クサヴィエ・ビシャ>]"
]
],
# Notes for day ----------------------------
[When::BasicTypes::M17n,
"names:[note for day=, 日の暦注=, *day]",
[When::BasicTypes::M17n,
"names:[Week, 週, zh:星期]",
[DayOfWeek, "label:[Monday, 月曜日, /date/day_names/1]", {'delta'=> 7}],
[DayOfWeek, "label:[Tuesday, 火曜日, /date/day_names/2]", {'delta'=> 7}],
[DayOfWeek, "label:[Wednesday, 水曜日, /date/day_names/3]", {'delta'=> 7}],
[DayOfWeek, "label:[Thursday, 木曜日, /date/day_names/4]", {'delta'=> 7}],
[DayOfWeek, "label:[Friday, 金曜日, /date/day_names/5]", {'delta'=> 7}],
[DayOfWeek, "label:[Saturday, 土曜日, /date/day_names/6]", {'delta'=> 7}],
[DayOfWeek, "label:[Sunday, 日曜日, /date/day_names/0]", {'delta'=> 7}],
[DayOfWeek, "label:[Festival_of_the_Dead, 祖先の祭=]", {'delta'=> 366}],
[DayOfWeek, "label:[Festival_of_Holy_Women=, 聖女の祭=]", {'delta'=>1461}]
],
"[Common_Week=]"
]
]]
fixed_week_definitions
end
module CalendarTypes
#
# Positivist calendar based on Gregorian calendar
#
Positivist = [SolarYearTableBased, {
'label' => 'Positivist::Positivist',
'indices' => [
When.Index('PositivistWeekNotes::month::Month', {:unit =>13}),
When::Coordinates::DefaultDayIndex
],
'origin_of_MSC' => -1788,
'diff_to_CE' => 0,
'rule_table' => {
365 => {'Length'=>[28]*12 + [29]},
366 => {'Length'=>[28]*12 + [30]}
},
'note' => 'PositivistWeek'
}]
end
end
| 40.287129 | 122 | 0.44753 |
1ddf2294804cdd1bc4875c61579f68cff81e44a8 | 1,270 | begin
require "rspec/core/rake_task"
desc "Run RSpec code examples"
RSpec::Core::RakeTask.new(:regression_test => :integration_test) do |t|
# Glob pattern to match files.
t.pattern = "spec/regression/**/test_*.rb"
# Whether or not to fail Rake when an error occurs (typically when
# examples fail).
t.fail_on_error = true
# A message to print to stderr when there are failures.
t.failure_message = nil
# Use verbose output. If this is set to true, the task will print the
# executed spec command to stdout.
t.verbose = true
# Use rcov for code coverage?
# t.rcov = false
# Path to rcov.
# t.rcov_path = "rcov"
# Command line options to pass to rcov. See 'rcov --help' about this
# t.rcov_opts = []
# Command line options to pass to ruby. See 'ruby --help' about this
t.ruby_opts = []
# Path to rspec
# t.rspec_path = "rspec"
# Command line options to pass to rspec. See 'rspec --help' about this
t.rspec_opts = ["--color", "--backtrace"]
end
rescue LoadError => ex
task :regression_test do
abort 'rspec is not available. In order to run spec, you must: gem install rspec'
end
ensure
task :spec => [:regression_test]
task :test => [:regression_test]
end
| 28.222222 | 85 | 0.658268 |
012d6e6a991bf76e807100e6170d767c4c4f778a | 292 | class CreateDirectors < ActiveRecord::Migration[5.0]
def change
create_table :directors do |t|
t.references :account, foreign_key: true
t.references :camp, foreign_key: true
t.string :official_phone
t.string :official_email
t.timestamps
end
end
end
| 22.461538 | 52 | 0.688356 |
1d94dc39d64c57d14df4819c6dc79575290b1765 | 369 | class AddCountsToAgencies < ActiveRecord::Migration
def change
add_column :agencies, :draft_outlet_count, :integer, default: 0
add_column :agencies, :draft_mobile_app_count, :integer, default: 0
add_column :agencies, :published_outlet_count, :integer, default: 0
add_column :agencies, :published_mobile_app_count, :integer, default: 0
end
end
| 33.545455 | 75 | 0.758808 |
bb69eb53c958faaf83be14e4924d341d15344456 | 7,257 | # frozen_string_literal: true
RSpec.describe RuboCop::Cop::Style::ZeroLengthPredicate do
subject(:cop) { described_class.new }
let(:source) { '' }
before do
inspect_source(source)
end
shared_examples 'code with offense' do |code, message, expected|
context "when checking #{code}" do
let(:source) { code }
it 'registers an offense' do
expect(cop.offenses.size).to eq(1)
expect(cop.offenses.first.message).to eq(message)
expect(cop.highlights).to eq([code])
end
it 'auto-corrects' do
expect(autocorrect_source(code)).to eq expected
end
end
end
shared_examples 'code without offense' do |code|
let(:source) { code }
it 'does not register any offense' do
expect(cop.offenses.empty?).to be(true)
end
end
context 'with arrays' do
it_behaves_like 'code with offense', '[1, 2, 3].length == 0',
'Use `empty?` instead of `length == 0`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].size == 0',
'Use `empty?` instead of `size == 0`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '0 == [1, 2, 3].length',
'Use `empty?` instead of `0 == length`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '0 == [1, 2, 3].size',
'Use `empty?` instead of `0 == size`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].length < 1',
'Use `empty?` instead of `length < 1`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].size < 1',
'Use `empty?` instead of `size < 1`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '1 > [1, 2, 3].length',
'Use `empty?` instead of `1 > length`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '1 > [1, 2, 3].size',
'Use `empty?` instead of `1 > size`.',
'[1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].length > 0',
'Use `!empty?` instead of `length > 0`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].size > 0',
'Use `!empty?` instead of `size > 0`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].length != 0',
'Use `!empty?` instead of `length != 0`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '[1, 2, 3].size != 0',
'Use `!empty?` instead of `size != 0`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '0 < [1, 2, 3].length',
'Use `!empty?` instead of `0 < length`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '0 < [1, 2, 3].size',
'Use `!empty?` instead of `0 < size`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '0 != [1, 2, 3].length',
'Use `!empty?` instead of `0 != length`.',
'![1, 2, 3].empty?'
it_behaves_like 'code with offense', '0 != [1, 2, 3].size',
'Use `!empty?` instead of `0 != size`.',
'![1, 2, 3].empty?'
end
context 'with hashes' do
it_behaves_like 'code with offense', '{ a: 1, b: 2 }.size == 0',
'Use `empty?` instead of `size == 0`.',
'{ a: 1, b: 2 }.empty?'
it_behaves_like 'code with offense', '0 == { a: 1, b: 2 }.size',
'Use `empty?` instead of `0 == size`.',
'{ a: 1, b: 2 }.empty?'
it_behaves_like 'code with offense', '{ a: 1, b: 2 }.size != 0',
'Use `!empty?` instead of `size != 0`.',
'!{ a: 1, b: 2 }.empty?'
it_behaves_like 'code with offense', '0 != { a: 1, b: 2 }.size',
'Use `!empty?` instead of `0 != size`.',
'!{ a: 1, b: 2 }.empty?'
end
context 'with strings' do
it_behaves_like 'code with offense', '"string".size == 0',
'Use `empty?` instead of `size == 0`.',
'"string".empty?'
it_behaves_like 'code with offense', '0 == "string".size',
'Use `empty?` instead of `0 == size`.',
'"string".empty?'
it_behaves_like 'code with offense', '"string".size != 0',
'Use `!empty?` instead of `size != 0`.',
'!"string".empty?'
it_behaves_like 'code with offense', '0 != "string".size',
'Use `!empty?` instead of `0 != size`.',
'!"string".empty?'
end
context 'with collection variables' do
it_behaves_like 'code with offense', 'collection.size == 0',
'Use `empty?` instead of `size == 0`.',
'collection.empty?'
it_behaves_like 'code with offense', '0 == collection.size',
'Use `empty?` instead of `0 == size`.',
'collection.empty?'
it_behaves_like 'code with offense', 'collection.size != 0',
'Use `!empty?` instead of `size != 0`.',
'!collection.empty?'
it_behaves_like 'code with offense', '0 != collection.size',
'Use `!empty?` instead of `0 != size`.',
'!collection.empty?'
end
context 'when name of the variable is `size` or `length`' do
it_behaves_like 'code without offense', 'size == 0'
it_behaves_like 'code without offense', 'length == 0'
it_behaves_like 'code without offense', '0 == size'
it_behaves_like 'code without offense', '0 == length'
it_behaves_like 'code without offense', 'size <= 0'
it_behaves_like 'code without offense', 'length > 0'
it_behaves_like 'code without offense', '0 <= size'
it_behaves_like 'code without offense', '0 > length'
it_behaves_like 'code without offense', 'size != 0'
it_behaves_like 'code without offense', 'length != 0'
it_behaves_like 'code without offense', '0 != size'
it_behaves_like 'code without offense', '0 != length'
end
context 'when inspecting a File::Stat object' do
it 'does not register an offense' do
expect_no_offenses(<<-RUBY.strip_indent)
File.stat(foo).size == 0
RUBY
end
end
context 'when inspecting a StringIO object' do
context 'when initialized with a string' do
it 'does not register an offense' do
expect_no_offenses(<<-RUBY.strip_indent)
StringIO.new('foo').size == 0
RUBY
end
end
context 'when initialized without arguments' do
it 'does not register an offense' do
expect_no_offenses(<<-RUBY.strip_indent)
StringIO.new.size == 0
RUBY
end
end
end
context 'when inspecting a Tempfile object' do
it 'does not register an offense' do
expect_no_offenses(<<-RUBY.strip_indent)
Tempfile.new('foo').size == 0
RUBY
end
end
end
| 37.796875 | 68 | 0.513298 |
21a5c2cff2d437718bcb3cab489489e40dcf0e84 | 168 | module Adminly
module Serializer
def self.render(current_scope, includes: [])
current_scope.as_json(include: includes)
end
end
end | 18.666667 | 61 | 0.636905 |
e21063bd10d1e4b51d94f1c808c7294b2c917102 | 334 | cask 'zoc' do
version '7.24.0'
sha256 'db419b70189b2c80c7fcff44588b37a9ca61be97fc12959d3c6302269e2cd9bb'
url "https://www.emtec.com/downloads/zoc/zoc#{version.no_dots}.dmg"
appcast 'https://www.emtec.com/downloads/zoc/zoc_changes.txt'
name 'ZOC'
homepage 'https://www.emtec.com/zoc/'
app "zoc#{version.major}.app"
end
| 27.833333 | 75 | 0.742515 |
1c0d0e46bc2d47fd394d56bc2eab344db6f6f2f4 | 1,802 | module EvaluationGroupsHelper
def evaluation_group_element_id(evaluation_group)
"evaluation-group-#{evaluation_group.id}"
end
def evaluation_group_label(evaluation_group, css_class: nil)
if evaluation_group.needs_review?
icon = :alert
label_class = "alert"
else
icon = case evaluation_group.status.to_sym
when :pending then :clock
when :done then :check
end
label_class = case evaluation_group.status.to_sym
when :done then :success
end
end
label_class = "#{label_class} #{css_class}" unless css_class.nil?
content_tag(:span, foundation_icon(icon), class: "label radius #{label_class}")
end
def evaluation_group_should_collapse?(evaluation_group)
evaluation_group.done? && !evaluation_group.needs_review?
end
def evaluation_group_title(evaluation_group)
title = evaluation_group.rating_group.title
title += if evaluation_group.rating_group.points != 0
" (#{evaluation_group.points}/#{evaluation_group.rating_group.points})"
else
" (#{evaluation_group.points})"
end
title
end
def evaluation_group_bubble(evaluation_group)
link_to evaluation_group_label(evaluation_group, css_class: "round"), "##{evaluation_group_anchor_id(evaluation_group)}", title: evaluation_group_title(evaluation_group)
end
def evaluation_group_title_id(evaluation_group)
"evaluation-group-title-#{evaluation_group.id}"
end
def evaluation_group_label_id(evaluation_group)
"evaluation-group-label-#{evaluation_group.id}"
end
def evaluation_group_anchor_id(evaluation_group)
"eg-#{evaluation_group.title.parameterize}-#{evaluation_group.id}"
end
def evaluation_group_bubble_id(evaluation_group)
"evaluation-group-bubble-#{evaluation_group.id}"
end
end
| 30.033333 | 173 | 0.746393 |
1aa2eb741109f898d54f6516be69d59361386a5a | 1,569 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Turtello
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
config.middleware.use ActionDispatch::Cookies
config.middleware.use ActionDispatch::Session::CookieStore
# config.action_controller is an OrderedOptions, so calling session_store on it
# without '=' silently does nothing; use the application-level setting instead
config.session_store :cookie_store,
key: "_session_turtello_#{Rails.env}",
same_site: :none,
secure: true
end
end
| 34.108696 | 82 | 0.769917 |
7a6ed7368b09f3e59a9f216c9950932b62e3195b | 1,925 | #
# RIS format parser
#
# Parses a valid RIS text file into a Ruby Hash.
#
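# For orientation, a minimal RIS record looks roughly like this (tags and
# values below are illustrative, not drawn from real data):
#
#   TY  - JOUR
#   AU  - Doe, Jane
#   TI  - An example article title
#   PY  - 2014
#   ER  -
#
# Each line is a two-character tag, two spaces, a hyphen and a value; the
# parser below splits records on the ER tag and fields on that tag pattern.
#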
class RisParser < CitationParser
def logger
CitationParser.logger
end
#Determine if given data is RIS,
# and if so, parse it!
def parse_data(risdata)
# strip! and gsub! return nil when they make no change, so use the
# non-destructive forms to avoid breaking the method chain
risdata = risdata.strip.gsub("\r", "\n")
#determine if this is RIS data or not (looking for the 'ER' field)
unless risdata =~ /^ER \-/
return nil
end
logger.debug("\n\n* This file is RIS format.")
#Individual records are separated by 'ER' field
records = risdata.split(/^ER\s.*/i)
records.each_with_index do |rec, i|
errorCheck = 1
rec.strip!
cite = ParsedCitation.new(:ris)
# Save original data for inclusion in final hash
cite.properties[:original_data] = rec
# Use a lookahead -- if the regex consumes characters, split() will
# filter them out.
# Keys (or 'tags') are specified by the following regex.
# See spec at http://www.refman.com/support/risformat_fields_01.asp
logger.debug("\nParsing...")
rec.split(/(?=^[A-Z][A-Z0-9]\s{2}\-\s+)/).each do |component|
# Limit here in case we have a legit " - " in the string
key, val = component.split(/\s+\-\s+/, 2)
# Don't call to_sym on empty string!
key = key.downcase.strip.to_sym unless key.downcase.strip.empty?
# Skip components we can't parse
next unless key and val
errorCheck = 0
# Add all values as an Array
cite.properties[key] = Array.new if cite.properties[key].nil?
cite.properties[key] << val.strip
end
# The following error should only occur if no part of the citation
# is consistent with the RIS format.
if errorCheck == 1
logger.error("\n There was an error on the following citation:\n #{rec}\n\n")
else
@citations << cite
end
end
@citations
end
end | 27.898551 | 85 | 0.62026 |
117b4143e60e8a385deda092a8427014bc27de50 | 323 | class Nard::Rails::DictionaryMetaService
def initialize( ref, debug_mode )
@ref = ref
@ary = ref.split('.')
@debug_mode = debug_mode
if @debug_mode
puts ''
puts "ref: #{ @ref }"
puts "ary: #{ @ary.to_s }"
end
end
private
def h
ApplicationController.helpers
end
end
| 14.043478 | 40 | 0.585139 |
ff31906b71e20195991f396cbf4e41c0d561d0d0 | 2,299 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Ads
module GoogleAds
module V3
module Services
module CampaignCriterionService
# Path helper methods for the CampaignCriterionService API.
module Paths
##
# Create a fully-qualified Campaign resource string.
#
# The resource will be in the following format:
#
# `customers/{customer}/campaigns/{campaign}`
#
# @param customer [String]
# @param campaign [String]
#
# @return [::String]
def campaign_path customer:, campaign:
raise ::ArgumentError, "customer cannot contain /" if customer.to_s.include? "/"
"customers/#{customer}/campaigns/#{campaign}"
end
##
# Create a fully-qualified CampaignCriterion resource string.
#
# The resource will be in the following format:
#
# `customers/{customer}/campaignCriteria/{campaign_criterion}`
#
# @param customer [String]
# @param campaign_criterion [String]
#
# @return [::String]
def campaign_criterion_path customer:, campaign_criterion:
raise ::ArgumentError, "customer cannot contain /" if customer.to_s.include? "/"
"customers/#{customer}/campaignCriteria/#{campaign_criterion}"
end
extend self
end
end
end
end
end
end
end
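# Illustrative usage only: the helpers are plain string interpolation, and
# `extend self` makes them callable on the module itself (the IDs are made up):
#
#   paths = Google::Ads::GoogleAds::V3::Services::CampaignCriterionService::Paths
#   paths.campaign_path customer: "1234567890", campaign: "111"
#   # => "customers/1234567890/campaigns/111"
#   paths.campaign_criterion_path customer: "1234567890", campaign_criterion: "111~222"
#   # => "customers/1234567890/campaignCriteria/111~222"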
| 32.380282 | 96 | 0.585037 |
1a519b8396332f137bcece2f3623eb6642cd63a5 | 1,592 | require 'spec_helper'
describe 'simp::server::ldap' do
context 'supported operating systems' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) { os_facts }
if os_facts[:kernel] == 'windows'
it { expect{ is_expected.to compile.with_all_deps }.to raise_error(/'windows .+' is not supported/) }
else
context 'default parameters' do
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('simp_openldap') }
it { is_expected.to create_class('simp_openldap::server') }
it { is_expected.to create_class('simp_openldap::slapo::ppolicy') }
it { is_expected.to create_class('simp_openldap::slapo::syncprov') }
it { is_expected.to_not create_simp_openldap__server__syncrepl('111') }
it { is_expected.to create_simp_openldap__server__limits('Host_Bind_DN_Unlimited_Query') }
it { is_expected.to create_simp_openldap__server__limits('LDAP_Sync_DN_Unlimited_Query') }
end
context 'is_consumer' do
let(:params){{ :is_consumer => true }}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_simp_openldap__server__syncrepl('111') }
end
context 'use_lastbind' do
let(:params){{ :enable_lastbind => true }}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('simp_openldap::slapo::lastbind') }
end
end
end
end
end
end
| 38.829268 | 111 | 0.630653 |
1d909d3fe7231fae8824b036efc47ab6f5c54373 | 2,265 | # frozen_string_literal: true
require 'spec_helper_acceptance'
describe 'autofs::mount tests' do
context 'basic mount test' do
before(:context) do
pp = "file { '/etc/auto.data': ensure => 'absent' }"
apply_manifest(pp, catch_failures: true)
end
it 'applies' do
pp = <<-EOS
class { 'autofs': }
autofs::mount { '/mnt/data':
mapfile => '/etc/auto.data',
}
EOS
apply_manifest(pp, catch_failures: true)
apply_manifest(pp, catch_changes: true)
end
describe file('/etc/auto.master') do
it 'exists and have content' do
expect(subject).to exist
expect(subject).to be_owned_by 'root'
expect(subject).to be_grouped_into 'root'
expect(subject).to be_mode 644
end
its(:content) { is_expected.to match(%r{^\s*/mnt/data\s+/etc/auto.data\s*$}) }
end
describe file('/etc/auto.data') do
it { is_expected.not_to exist }
end
describe package('autofs') do
it { is_expected.to be_installed }
end
describe service('autofs') do
it { is_expected.to be_enabled }
it { is_expected.to be_running }
end
end
context 'remove mountpoint test' do
before(:context) do
apply_manifest("file { '/etc/auto.data': ensure => 'file', content => 'TEST CONTENT' }",
catch_failures: true)
end
it 'applies' do
pp = <<-MANIFEST
class { 'autofs': }
autofs::mount { 'data':
ensure => 'absent',
mount => '/mnt/data',
mapfile => '/etc/auto.data',
}
MANIFEST
apply_manifest(pp, catch_failures: true)
apply_manifest(pp, catch_changes: true)
end
describe file('/etc/auto.data') do
it 'is unaffected' do
expect(subject).to exist
expect(subject).to contain 'TEST CONTENT'
end
end
describe file('/etc/auto.master') do
it 'exists and have content' do
expect(subject).to exist
expect(subject).to be_owned_by 'root'
expect(subject).to be_grouped_into 'root'
expect(subject).to be_mode 644
end
its(:content) { is_expected.not_to match(%r{^\s*/mnt/data\s}) }
end
end
end
| 25.738636 | 94 | 0.584106 |
62cdb1ea87bb50b6425071c18d74ae7d789b25b6 | 4,490 | module ActiveRecord
module Associations
# Association proxies in Active Record are middlemen between the object that
# holds the association, known as the <tt>@owner</tt>, and the actual associated
# object, known as the <tt>@target</tt>. The kind of association any proxy is
# about is available in <tt>@reflection</tt>. That's an instance of the class
# ActiveRecord::Reflection::AssociationReflection.
#
# For example, given
#
# class Blog < ActiveRecord::Base
# has_many :posts
# end
#
# blog = Blog.first
#
# the association proxy in <tt>blog.posts</tt> has the object in +blog+ as
# <tt>@owner</tt>, the collection of its posts as <tt>@target</tt>, and
# the <tt>@reflection</tt> object represents a <tt>:has_many</tt> macro.
#
# This class has most of the basic instance methods removed, and delegates
# unknown methods to <tt>@target</tt> via <tt>method_missing</tt>. As a
# corner case, it even removes the +class+ method and that's why you get
#
# blog.posts.class # => Array
#
# though the object behind <tt>blog.posts</tt> is not an Array, but an
# ActiveRecord::Associations::HasManyAssociation.
#
# The <tt>@target</tt> object is not \loaded until needed. For example,
#
# blog.posts.count
#
# is computed directly through SQL and does not trigger by itself the
# instantiation of the actual post records.
class CollectionProxy # :nodoc:
alias :proxy_extend :extend
instance_methods.each { |m| undef_method m unless m.to_s =~ /^(?:nil\?|send|object_id|to_a)$|^__|^respond_to|proxy_/ }
delegate :group, :order, :limit, :joins, :where, :preload, :eager_load, :includes, :from,
:lock, :readonly, :having, :pluck, :to => :scoped
delegate :target, :load_target, :loaded?, :to => :@association
delegate :select, :find, :first, :last,
:build, :create, :create!,
:concat, :replace, :delete_all, :destroy_all, :delete, :destroy, :uniq,
:sum, :count, :size, :length, :empty?,
:any?, :many?, :include?,
:to => :@association
def initialize(association)
@association = association
Array.wrap(association.options[:extend]).each { |ext| proxy_extend(ext) }
end
alias_method :new, :build
def proxy_association
@association
end
def scoped
association = @association
association.scoped.extending do
define_method(:proxy_association) { association }
end
end
def respond_to?(name, include_private = false)
super ||
(load_target && target.respond_to?(name, include_private)) ||
proxy_association.klass.respond_to?(name, include_private)
end
def method_missing(method, *args, &block)
match = DynamicFinderMatch.match(method)
if match && match.instantiator?
send(:find_or_instantiator_by_attributes, match, match.attribute_names, *args) do |record|
proxy_association.send :set_owner_attributes, record
proxy_association.send :add_to_target, record
yield(record) if block_given?
end.tap do |record|
proxy_association.send :set_inverse_instance, record
end
elsif target.respond_to?(method) || (!proxy_association.klass.respond_to?(method) && Class.respond_to?(method))
if load_target
if target.respond_to?(method)
target.send(method, *args, &block)
else
begin
super
rescue NoMethodError => e
raise e, e.message.sub(/ for #<.*$/, " via proxy for #{target}")
end
end
end
else
scoped.readonly(nil).send(method, *args, &block)
end
end
# Forwards <tt>===</tt> explicitly to the \target because the instance method
# removal above doesn't catch it. Loads the \target if needed.
def ===(other)
other === load_target
end
def to_ary
load_target.dup
end
alias_method :to_a, :to_ary
def <<(*records)
proxy_association.concat(records) && self
end
alias_method :push, :<<
def clear
delete_all
self
end
def reload
proxy_association.reload
self
end
end
end
end
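# A brief sketch of the proxy in use, assuming the Blog/Post models from the
# comment above (return values are indicative, not actual console output):
#
#   blog  = Blog.first
#   posts = blog.posts              # CollectionProxy; @target not yet loaded
#   posts.count                     # delegated to the association -> SQL COUNT
#   posts.where(published: true)    # delegated to the scoped relation
#   posts << Post.new               # proxy_association.concat, returns the proxy
#   posts.to_a.class                # => Array (load_target.dup)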
| 33.507463 | 124 | 0.605122 |
7ad260a1a9cd59fb834ddc1dbc525875cb6b05d2 | 67,326 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/securitycenter/v1/securitycenter_service_pb"
require "google/cloud/securitycenter/v1/securitycenter_service_services_pb"
require "google/cloud/security_center/v1/security_center"
class ::Google::Cloud::SecurityCenter::V1::SecurityCenter::ClientTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_create_source
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::Source.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
source = {}
create_source_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_source, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::CreateSourceRequest, request
assert_equal "hello world", request["parent"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::Source), request["source"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_source_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_source({ parent: parent, source: source }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_source parent: parent, source: source do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_source ::Google::Cloud::SecurityCenter::V1::CreateSourceRequest.new(parent: parent, source: source) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_source({ parent: parent, source: source }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_source ::Google::Cloud::SecurityCenter::V1::CreateSourceRequest.new(parent: parent, source: source), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_source_client_stub.call_rpc_count
end
end
def test_create_finding
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::Finding.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
finding_id = "hello world"
finding = {}
create_finding_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_finding, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::CreateFindingRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["finding_id"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::Finding), request["finding"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_finding_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_finding({ parent: parent, finding_id: finding_id, finding: finding }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_finding parent: parent, finding_id: finding_id, finding: finding do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_finding ::Google::Cloud::SecurityCenter::V1::CreateFindingRequest.new(parent: parent, finding_id: finding_id, finding: finding) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_finding({ parent: parent, finding_id: finding_id, finding: finding }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_finding ::Google::Cloud::SecurityCenter::V1::CreateFindingRequest.new(parent: parent, finding_id: finding_id, finding: finding), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_finding_client_stub.call_rpc_count
end
end
def test_create_notification_config
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::NotificationConfig.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
config_id = "hello world"
notification_config = {}
create_notification_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_notification_config, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::CreateNotificationConfigRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["config_id"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::NotificationConfig), request["notification_config"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_notification_config_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_notification_config({ parent: parent, config_id: config_id, notification_config: notification_config }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_notification_config parent: parent, config_id: config_id, notification_config: notification_config do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_notification_config ::Google::Cloud::SecurityCenter::V1::CreateNotificationConfigRequest.new(parent: parent, config_id: config_id, notification_config: notification_config) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_notification_config({ parent: parent, config_id: config_id, notification_config: notification_config }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_notification_config ::Google::Cloud::SecurityCenter::V1::CreateNotificationConfigRequest.new(parent: parent, config_id: config_id, notification_config: notification_config), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_notification_config_client_stub.call_rpc_count
end
end
def test_delete_notification_config
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
delete_notification_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :delete_notification_config, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::DeleteNotificationConfigRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, delete_notification_config_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.delete_notification_config({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.delete_notification_config name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.delete_notification_config ::Google::Cloud::SecurityCenter::V1::DeleteNotificationConfigRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.delete_notification_config({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.delete_notification_config ::Google::Cloud::SecurityCenter::V1::DeleteNotificationConfigRequest.new(name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, delete_notification_config_client_stub.call_rpc_count
end
end
def test_get_iam_policy
# Create GRPC objects.
grpc_response = ::Google::Iam::V1::Policy.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
resource = "hello world"
options = {}
get_iam_policy_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_iam_policy, name
assert_kind_of ::Google::Iam::V1::GetIamPolicyRequest, request
assert_equal "hello world", request["resource"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Iam::V1::GetPolicyOptions), request["options"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_iam_policy_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_iam_policy({ resource: resource, options: options }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_iam_policy resource: resource, options: options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_iam_policy ::Google::Iam::V1::GetIamPolicyRequest.new(resource: resource, options: options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_iam_policy({ resource: resource, options: options }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_iam_policy ::Google::Iam::V1::GetIamPolicyRequest.new(resource: resource, options: options), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_iam_policy_client_stub.call_rpc_count
end
end
def test_get_notification_config
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::NotificationConfig.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_notification_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_notification_config, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::GetNotificationConfigRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_notification_config_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_notification_config({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_notification_config name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_notification_config ::Google::Cloud::SecurityCenter::V1::GetNotificationConfigRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_notification_config({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_notification_config ::Google::Cloud::SecurityCenter::V1::GetNotificationConfigRequest.new(name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_notification_config_client_stub.call_rpc_count
end
end
def test_get_organization_settings
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::OrganizationSettings.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_organization_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_organization_settings, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::GetOrganizationSettingsRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_organization_settings_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_organization_settings({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_organization_settings name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_organization_settings ::Google::Cloud::SecurityCenter::V1::GetOrganizationSettingsRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_organization_settings({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_organization_settings ::Google::Cloud::SecurityCenter::V1::GetOrganizationSettingsRequest.new(name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_organization_settings_client_stub.call_rpc_count
end
end
def test_get_source
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::Source.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_source_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_source, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::GetSourceRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_source_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_source({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_source name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_source ::Google::Cloud::SecurityCenter::V1::GetSourceRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_source({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_source ::Google::Cloud::SecurityCenter::V1::GetSourceRequest.new(name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_source_client_stub.call_rpc_count
end
end
def test_group_assets
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::GroupAssetsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
filter = "hello world"
group_by = "hello world"
compare_duration = {}
read_time = {}
page_token = "hello world"
page_size = 42
group_assets_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :group_assets, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::GroupAssetsRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["filter"]
assert_equal "hello world", request["group_by"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Duration), request["compare_duration"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["read_time"]
assert_equal "hello world", request["page_token"]
assert_equal 42, request["page_size"]
refute_nil options
end
Gapic::ServiceStub.stub :new, group_assets_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.group_assets({ parent: parent, filter: filter, group_by: group_by, compare_duration: compare_duration, read_time: read_time, page_token: page_token, page_size: page_size }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.group_assets parent: parent, filter: filter, group_by: group_by, compare_duration: compare_duration, read_time: read_time, page_token: page_token, page_size: page_size do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.group_assets ::Google::Cloud::SecurityCenter::V1::GroupAssetsRequest.new(parent: parent, filter: filter, group_by: group_by, compare_duration: compare_duration, read_time: read_time, page_token: page_token, page_size: page_size) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.group_assets({ parent: parent, filter: filter, group_by: group_by, compare_duration: compare_duration, read_time: read_time, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.group_assets ::Google::Cloud::SecurityCenter::V1::GroupAssetsRequest.new(parent: parent, filter: filter, group_by: group_by, compare_duration: compare_duration, read_time: read_time, page_token: page_token, page_size: page_size), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, group_assets_client_stub.call_rpc_count
end
end
def test_group_findings
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::GroupFindingsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
filter = "hello world"
group_by = "hello world"
read_time = {}
compare_duration = {}
page_token = "hello world"
page_size = 42
group_findings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :group_findings, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::GroupFindingsRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["filter"]
assert_equal "hello world", request["group_by"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["read_time"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Duration), request["compare_duration"]
assert_equal "hello world", request["page_token"]
assert_equal 42, request["page_size"]
refute_nil options
end
Gapic::ServiceStub.stub :new, group_findings_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.group_findings({ parent: parent, filter: filter, group_by: group_by, read_time: read_time, compare_duration: compare_duration, page_token: page_token, page_size: page_size }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.group_findings parent: parent, filter: filter, group_by: group_by, read_time: read_time, compare_duration: compare_duration, page_token: page_token, page_size: page_size do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.group_findings ::Google::Cloud::SecurityCenter::V1::GroupFindingsRequest.new(parent: parent, filter: filter, group_by: group_by, read_time: read_time, compare_duration: compare_duration, page_token: page_token, page_size: page_size) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.group_findings({ parent: parent, filter: filter, group_by: group_by, read_time: read_time, compare_duration: compare_duration, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.group_findings ::Google::Cloud::SecurityCenter::V1::GroupFindingsRequest.new(parent: parent, filter: filter, group_by: group_by, read_time: read_time, compare_duration: compare_duration, page_token: page_token, page_size: page_size), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, group_findings_client_stub.call_rpc_count
end
end
def test_list_assets
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::ListAssetsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
filter = "hello world"
order_by = "hello world"
read_time = {}
compare_duration = {}
field_mask = {}
page_token = "hello world"
page_size = 42
list_assets_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_assets, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::ListAssetsRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["filter"]
assert_equal "hello world", request["order_by"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["read_time"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Duration), request["compare_duration"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["field_mask"]
assert_equal "hello world", request["page_token"]
assert_equal 42, request["page_size"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_assets_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_assets({ parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_assets parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_assets ::Google::Cloud::SecurityCenter::V1::ListAssetsRequest.new(parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_assets({ parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_assets ::Google::Cloud::SecurityCenter::V1::ListAssetsRequest.new(parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_assets_client_stub.call_rpc_count
end
end
def test_list_findings
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::ListFindingsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
filter = "hello world"
order_by = "hello world"
read_time = {}
compare_duration = {}
field_mask = {}
page_token = "hello world"
page_size = 42
list_findings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_findings, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::ListFindingsRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["filter"]
assert_equal "hello world", request["order_by"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["read_time"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Duration), request["compare_duration"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["field_mask"]
assert_equal "hello world", request["page_token"]
assert_equal 42, request["page_size"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_findings_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_findings({ parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_findings parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_findings ::Google::Cloud::SecurityCenter::V1::ListFindingsRequest.new(parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_findings({ parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_findings ::Google::Cloud::SecurityCenter::V1::ListFindingsRequest.new(parent: parent, filter: filter, order_by: order_by, read_time: read_time, compare_duration: compare_duration, field_mask: field_mask, page_token: page_token, page_size: page_size), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_findings_client_stub.call_rpc_count
end
end
def test_list_notification_configs
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::ListNotificationConfigsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
page_token = "hello world"
page_size = 42
list_notification_configs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_notification_configs, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::ListNotificationConfigsRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["page_token"]
assert_equal 42, request["page_size"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_notification_configs_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_notification_configs({ parent: parent, page_token: page_token, page_size: page_size }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_notification_configs parent: parent, page_token: page_token, page_size: page_size do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_notification_configs ::Google::Cloud::SecurityCenter::V1::ListNotificationConfigsRequest.new(parent: parent, page_token: page_token, page_size: page_size) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_notification_configs({ parent: parent, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_notification_configs ::Google::Cloud::SecurityCenter::V1::ListNotificationConfigsRequest.new(parent: parent, page_token: page_token, page_size: page_size), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_notification_configs_client_stub.call_rpc_count
end
end
def test_list_sources
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::ListSourcesResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
page_token = "hello world"
page_size = 42
list_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_sources, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::ListSourcesRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["page_token"]
assert_equal 42, request["page_size"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_sources_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_sources({ parent: parent, page_token: page_token, page_size: page_size }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_sources parent: parent, page_token: page_token, page_size: page_size do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_sources ::Google::Cloud::SecurityCenter::V1::ListSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_sources({ parent: parent, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_sources ::Google::Cloud::SecurityCenter::V1::ListSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_sources_client_stub.call_rpc_count
end
end
def test_run_asset_discovery
# Create GRPC objects.
grpc_response = ::Google::Longrunning::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
run_asset_discovery_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :run_asset_discovery, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::RunAssetDiscoveryRequest, request
assert_equal "hello world", request["parent"]
refute_nil options
end
Gapic::ServiceStub.stub :new, run_asset_discovery_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.run_asset_discovery({ parent: parent }) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use named arguments
client.run_asset_discovery parent: parent do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object
client.run_asset_discovery ::Google::Cloud::SecurityCenter::V1::RunAssetDiscoveryRequest.new(parent: parent) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use hash object with options
client.run_asset_discovery({ parent: parent }, grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.run_asset_discovery ::Google::Cloud::SecurityCenter::V1::RunAssetDiscoveryRequest.new(parent: parent), grpc_options do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, run_asset_discovery_client_stub.call_rpc_count
end
end
def test_set_finding_state
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::Finding.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
state = :STATE_UNSPECIFIED
start_time = {}
set_finding_state_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_finding_state, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::SetFindingStateRequest, request
assert_equal "hello world", request["name"]
assert_equal :STATE_UNSPECIFIED, request["state"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["start_time"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_finding_state_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_finding_state({ name: name, state: state, start_time: start_time }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_finding_state name: name, state: state, start_time: start_time do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_finding_state ::Google::Cloud::SecurityCenter::V1::SetFindingStateRequest.new(name: name, state: state, start_time: start_time) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_finding_state({ name: name, state: state, start_time: start_time }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_finding_state ::Google::Cloud::SecurityCenter::V1::SetFindingStateRequest.new(name: name, state: state, start_time: start_time), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_finding_state_client_stub.call_rpc_count
end
end
def test_set_iam_policy
# Create GRPC objects.
grpc_response = ::Google::Iam::V1::Policy.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
resource = "hello world"
policy = {}
set_iam_policy_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_iam_policy, name
assert_kind_of ::Google::Iam::V1::SetIamPolicyRequest, request
assert_equal "hello world", request["resource"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Iam::V1::Policy), request["policy"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_iam_policy_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_iam_policy({ resource: resource, policy: policy }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_iam_policy resource: resource, policy: policy do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_iam_policy ::Google::Iam::V1::SetIamPolicyRequest.new(resource: resource, policy: policy) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_iam_policy({ resource: resource, policy: policy }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_iam_policy ::Google::Iam::V1::SetIamPolicyRequest.new(resource: resource, policy: policy), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_iam_policy_client_stub.call_rpc_count
end
end
def test_test_iam_permissions
# Create GRPC objects.
grpc_response = ::Google::Iam::V1::TestIamPermissionsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
resource = "hello world"
permissions = ["hello world"]
test_iam_permissions_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :test_iam_permissions, name
assert_kind_of ::Google::Iam::V1::TestIamPermissionsRequest, request
assert_equal "hello world", request["resource"]
assert_equal ["hello world"], request["permissions"]
refute_nil options
end
Gapic::ServiceStub.stub :new, test_iam_permissions_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.test_iam_permissions({ resource: resource, permissions: permissions }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.test_iam_permissions resource: resource, permissions: permissions do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.test_iam_permissions ::Google::Iam::V1::TestIamPermissionsRequest.new(resource: resource, permissions: permissions) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.test_iam_permissions({ resource: resource, permissions: permissions }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.test_iam_permissions ::Google::Iam::V1::TestIamPermissionsRequest.new(resource: resource, permissions: permissions), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, test_iam_permissions_client_stub.call_rpc_count
end
end
def test_update_finding
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::Finding.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
finding = {}
update_mask = {}
update_finding_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_finding, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::UpdateFindingRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::Finding), request["finding"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_finding_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_finding({ finding: finding, update_mask: update_mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_finding finding: finding, update_mask: update_mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_finding ::Google::Cloud::SecurityCenter::V1::UpdateFindingRequest.new(finding: finding, update_mask: update_mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_finding({ finding: finding, update_mask: update_mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_finding ::Google::Cloud::SecurityCenter::V1::UpdateFindingRequest.new(finding: finding, update_mask: update_mask), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_finding_client_stub.call_rpc_count
end
end
def test_update_notification_config
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::NotificationConfig.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
notification_config = {}
update_mask = {}
update_notification_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_notification_config, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::UpdateNotificationConfigRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::NotificationConfig), request["notification_config"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_notification_config_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_notification_config({ notification_config: notification_config, update_mask: update_mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_notification_config notification_config: notification_config, update_mask: update_mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_notification_config ::Google::Cloud::SecurityCenter::V1::UpdateNotificationConfigRequest.new(notification_config: notification_config, update_mask: update_mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_notification_config({ notification_config: notification_config, update_mask: update_mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_notification_config ::Google::Cloud::SecurityCenter::V1::UpdateNotificationConfigRequest.new(notification_config: notification_config, update_mask: update_mask), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_notification_config_client_stub.call_rpc_count
end
end
def test_update_organization_settings
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::OrganizationSettings.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
organization_settings = {}
update_mask = {}
update_organization_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_organization_settings, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::UpdateOrganizationSettingsRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::OrganizationSettings), request["organization_settings"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_organization_settings_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_organization_settings({ organization_settings: organization_settings, update_mask: update_mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_organization_settings organization_settings: organization_settings, update_mask: update_mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_organization_settings ::Google::Cloud::SecurityCenter::V1::UpdateOrganizationSettingsRequest.new(organization_settings: organization_settings, update_mask: update_mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_organization_settings({ organization_settings: organization_settings, update_mask: update_mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_organization_settings ::Google::Cloud::SecurityCenter::V1::UpdateOrganizationSettingsRequest.new(organization_settings: organization_settings, update_mask: update_mask), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_organization_settings_client_stub.call_rpc_count
end
end
def test_update_source
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::Source.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
source = {}
update_mask = {}
update_source_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_source, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::UpdateSourceRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::Source), request["source"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_source_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_source({ source: source, update_mask: update_mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_source source: source, update_mask: update_mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_source ::Google::Cloud::SecurityCenter::V1::UpdateSourceRequest.new(source: source, update_mask: update_mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_source({ source: source, update_mask: update_mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_source ::Google::Cloud::SecurityCenter::V1::UpdateSourceRequest.new(source: source, update_mask: update_mask), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_source_client_stub.call_rpc_count
end
end
def test_update_security_marks
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecurityCenter::V1::SecurityMarks.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
security_marks = {}
update_mask = {}
start_time = {}
update_security_marks_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_security_marks, name
assert_kind_of ::Google::Cloud::SecurityCenter::V1::UpdateSecurityMarksRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::V1::SecurityMarks), request["security_marks"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["start_time"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_security_marks_client_stub do
# Create client
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_security_marks({ security_marks: security_marks, update_mask: update_mask, start_time: start_time }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_security_marks security_marks: security_marks, update_mask: update_mask, start_time: start_time do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_security_marks ::Google::Cloud::SecurityCenter::V1::UpdateSecurityMarksRequest.new(security_marks: security_marks, update_mask: update_mask, start_time: start_time) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_security_marks({ security_marks: security_marks, update_mask: update_mask, start_time: start_time }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_security_marks ::Google::Cloud::SecurityCenter::V1::UpdateSecurityMarksRequest.new(security_marks: security_marks, update_mask: update_mask, start_time: start_time), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_security_marks_client_stub.call_rpc_count
end
end
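  # Verifies that Client#configure yields the client's Configuration object and returns that same instance.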
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client::Configuration, config
end
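  # Verifies that the client exposes a long-running Operations client via #operations_client.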
def test_operations_client
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Client.new do |config|
config.credentials = grpc_channel
end
end
assert_kind_of ::Google::Cloud::SecurityCenter::V1::SecurityCenter::Operations, client.operations_client
end
end
| 43.661479 | 306 | 0.727639 |
626b9fd7e2dd2083fbc6ee9f58b2b4f59d2b97b1 | 23,207 | # contains all functions related to management of the signup sheet for an assignment
# functions to add new topics to an assignment, edit properties of a particular topic, delete a topic, etc
# are included here
# A point to be taken into consideration is that :id (except when explicitly stated) here means topic id and not assignment id
# (this is referenced as :assignment_id in the params hash)
# The way it works is that assignments have their own ids, and so do topics. A topic has a foreign key dependency on the assignment_id
# Hence each topic has a field called assignment_id which can be used to identify the assignment that this topic belongs
# to
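# For example, SignUpTopic.find(topic_id).assignment_id gives the id of the assignment that owns that topic.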
class SignUpSheetController < ApplicationController
require 'rgl/adjacency'
require 'rgl/dot'
require 'rgl/topsort'
def action_allowed?
case params[:action]
when 'set_priority', 'sign_up', 'delete_signup', 'list', 'show_team', 'switch_original_topic_to_approved_suggested_topic', 'publish_approved_suggested_topic'
['Instructor',
'Teaching Assistant',
'Administrator',
'Super-Administrator',
'Student'].include? current_role_name and
((%w(list).include? action_name) ? are_needed_authorizations_present?(params[:id], "reader", "submitter", "reviewer") : true)
else
['Instructor',
'Teaching Assistant',
'Administrator',
'Super-Administrator'].include? current_role_name
end
end
# Includes functions for team management. Refer /app/helpers/ManageTeamHelper
include ManageTeamHelper
  # Includes functions for deadline management. Refer /app/helpers/DeadLineHelper
include DeadlineHelper
# GETs should be safe (see http://www.w3.org/2001/tag/doc/whenToUseGet.html)
verify method: :post, only: [:destroy, :create, :update],
redirect_to: {action: :list}
# Prepares the form for adding a new topic. Used in conjunction with create
def new
@id = params[:id]
@sign_up_topic = SignUpTopic.new
@sign_up_topic.assignment = Assignment.find(params[:id])
@topic = @sign_up_topic
end
# This method is used to create signup topics
  # In this code params[:id] is the assignment id and not the topic id. The intuition is
  # that the assignment id will virtually be the signup sheet id as well, since we have assumed
  # that every assignment will have only one signup sheet
def create
topic = SignUpTopic.where(topic_name: params[:topic][:topic_name], assignment_id: params[:id]).first
if topic.nil?
setup_new_topic
else
update_existing_topic topic
end
end
# This method is used to delete signup topics
  # Renaming the delete method to destroy to be Rails 4 compatible
def destroy
@topic = SignUpTopic.find(params[:id])
if @topic
@topic.destroy
undo_link("The topic: \"#{@topic.topic_name}\" has been successfully deleted. ")
else
flash[:error] = "The topic could not be deleted."
end
# changing the redirection url to topics tab in edit assignment view.
redirect_to edit_assignment_path(params[:assignment_id]) + "#tabs-5"
end
  # Prepares the page: shows the form which can be used to enter new values for the different properties of a topic
def edit
@topic = SignUpTopic.find(params[:id])
end
  # Updates the database tables to reflect the new values for the topic. Used in conjunction with edit
def update
@topic = SignUpTopic.find(params[:id])
if @topic
@topic.topic_identifier = params[:topic][:topic_identifier]
update_max_choosers @topic
@topic.category = params[:topic][:category]
@topic.topic_name = params[:topic][:topic_name]
@topic.micropayment = params[:topic][:micropayment]
@topic.description = params[:topic][:description]
@topic.link = params[:topic][:link]
@topic.save
undo_link("The topic: \"#{@topic.topic_name}\" has been successfully updated. ")
else
flash[:error] = "The topic could not be updated."
end
# changing the redirection url to topics tab in edit assignment view.
redirect_to edit_assignment_path(params[:assignment_id]) + "#tabs-5"
end
# This displays a page that lists all the available topics for an assignment.
# Contains links that let an admin or Instructor edit, delete, view enrolled/waitlisted members for each topic
# Also contains links to delete topics and modify the deadlines for individual topics. Staggered means that different topics can have different deadlines.
def add_signup_topics
load_add_signup_topics(params[:id])
SignUpSheet.add_signup_topic(params[:id])
end
def add_signup_topics_staggered
add_signup_topics
end
  # Retrieves all the data associated with the given assignment: all topics, the slots filled and waitlisted for each topic, the assignment itself, and the signed-up team records
def load_add_signup_topics(assignment_id)
@id = assignment_id
@sign_up_topics = SignUpTopic.where('assignment_id = ?', assignment_id)
@slots_filled = SignUpTopic.find_slots_filled(assignment_id)
@slots_waitlisted = SignUpTopic.find_slots_waitlisted(assignment_id)
@assignment = Assignment.find(assignment_id)
# ACS Removed the if condition (and corresponding else) which differentiate assignments as team and individual assignments
# to treat all assignments as team assignments
# Though called participants, @participants are actually records in signed_up_teams table, which
# is a mapping table between teams and topics (waitlisted recored are also counted)
@participants = SignedUpTeam.find_team_participants(assignment_id)
end
def set_values_for_new_topic
@sign_up_topic = SignUpTopic.new
@sign_up_topic.topic_identifier = params[:topic][:topic_identifier]
@sign_up_topic.topic_name = params[:topic][:topic_name]
@sign_up_topic.max_choosers = params[:topic][:max_choosers]
@sign_up_topic.category = params[:topic][:category]
@sign_up_topic.assignment_id = params[:id]
@assignment = Assignment.find(params[:id])
end
  # simple function that redirects to the /add_signup_topics or the /add_signup_topics_staggered page depending on assignment type
# staggered means that different topics can have different deadlines.
def redirect_to_sign_up(assignment_id)
assignment = Assignment.find(assignment_id)
(assignment.staggered_deadline == true) ? (redirect_to action: 'add_signup_topics_staggered', id: assignment_id) : (redirect_to action: 'add_signup_topics', id: assignment_id)
end
# simple function that redirects to assignment->edit->topic panel to display /add_signup_topics or the /add_signup_topics_staggered page
# staggered means that different topics can have different deadlines.
def redirect_to_assignment_edit(assignment_id)
assignment = Assignment.find(assignment_id)
redirect_to controller: 'assignments', action: 'edit', id: assignment_id
end
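  # Displays the signup sheet for a participant: loads the available topics, slot counts, deadlines, and the
  # team's current signups/bids, and renders the intelligent topic selection view for intelligent assignments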
def list
@participant = AssignmentParticipant.find(params[:id].to_i)
@assignment = @participant.assignment
@slots_filled = SignUpTopic.find_slots_filled(@assignment.id)
@slots_waitlisted = SignUpTopic.find_slots_waitlisted(@assignment.id)
@show_actions = true
@priority = 0
@sign_up_topics = SignUpTopic.where(assignment_id: @assignment.id, private_to: nil)
@max_team_size = @assignment.max_team_size
team_id = @participant.team.try(:id)
if @assignment.is_intelligent
@bids = team_id.nil? ? [] : Bid.where(team_id: team_id).order(:priority)
signed_up_topics = []
@bids.each do |bid|
sign_up_topic = SignUpTopic.find_by(id: bid.topic_id)
signed_up_topics << sign_up_topic if sign_up_topic
end
signed_up_topics &= @sign_up_topics
@sign_up_topics -= signed_up_topics
@bids = signed_up_topics
end
@num_of_topics = @sign_up_topics.size
@signup_topic_deadline = @assignment.due_dates.find_by_deadline_type_id(7)
@drop_topic_deadline = @assignment.due_dates.find_by_deadline_type_id(6)
@student_bids = team_id.nil? ? [] : Bid.where(team_id: team_id)
unless @assignment.due_dates.find_by_deadline_type_id(1).nil?
if [email protected]_deadline? and @assignment.due_dates.find_by_deadline_type_id(1).due_at < Time.now
@show_actions = false
end
# Find whether the user has signed up for any topics; if so the user won't be able to
# sign up again unless the former was a waitlisted topic
# if team assignment, then team id needs to be passed as parameter else the user's id
users_team = SignedUpTeam.find_team_users(@assignment.id, session[:user].id)
@selected_topics = if users_team.empty?
nil
else
# TODO: fix this; cant use 0
SignedUpTeam.find_user_signup_topics(@assignment.id, users_team[0].t_id)
end
end
if @assignment.is_intelligent
render 'sign_up_sheet/intelligent_topic_selection' and return
end
end
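  # Signs the current user's team up for the chosen topic (the team is lazily created by SignUpSheet.signup_team if needed)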
def sign_up
@assignment = AssignmentParticipant.find(params[:id]).assignment
@user_id = session[:user].id
# Always use team_id ACS
# s = Signupsheet.new
# Team lazy initialization: check whether the user already has a team for this assignment
unless SignUpSheet.signup_team(@assignment.id, @user_id, params[:topic_id])
flash[:error] = "You've already signed up for a topic!"
end
redirect_to action: 'list', id: params[:id]
end
  # routes to new page to specify student
def signup_as_instructor; end
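  # Signs up the named student for the given topic on the instructor's behalf, after checking that the user
  # exists and is a participant in the assignment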
def signup_as_instructor_action
user = User.find_by(name: params[:username])
if user.nil? # validate invalid user
flash[:error] = "That student does not exist!"
else
if AssignmentParticipant.exists? user_id: user.id, parent_id: params[:assignment_id]
if SignUpSheet.signup_team(params[:assignment_id], user.id, params[:topic_id])
flash[:success] = "You have successfully signed up the student for the topic!"
else
flash[:error] = "The student has already signed up for a topic!"
end
else
flash[:error] = "The student is not registered for the assignment!"
end
end
redirect_to controller: 'assignments', action: 'edit', id: params[:assignment_id]
end
# this function is used to delete a previous signup
def delete_signup
participant = AssignmentParticipant.find(params[:id])
assignment = participant.assignment
drop_topic_deadline = assignment.due_dates.find_by_deadline_type_id(6)
# A student who has already submitted work should not be allowed to drop his/her topic!
# (A student/team has submitted if participant directory_num is non-null or submitted_hyperlinks is non-null.)
# If there is no drop topic deadline, student can drop topic at any time (if all the submissions are deleted)
# If there is a drop topic deadline, student cannot drop topic after this deadline.
if !participant.team.submitted_files.empty? or !participant.team.hyperlinks.empty?
flash[:error] = "You have already submitted your work, so you are not allowed to drop your topic."
elsif !drop_topic_deadline.nil? and Time.now > drop_topic_deadline.due_at
flash[:error] = "You cannot drop your topic after the drop topic deadline!"
else
delete_signup_for_topic(assignment.id, params[:topic_id], session[:user].id)
flash[:success] = "You have successfully dropped your topic!"
end
redirect_to action: 'list', id: params[:id]
end
def delete_signup_as_instructor
    # find the participant: look up the team, its parent assignment, and the team's user
team = Team.find(params[:id])
assignment = Assignment.find(team.parent_id)
user = TeamsUser.find_by(team_id: team.id).user
participant = AssignmentParticipant.find_by(user_id: user.id, parent_id: assignment.id)
drop_topic_deadline = assignment.due_dates.find_by_deadline_type_id(6)
if !participant.team.submitted_files.empty? or !participant.team.hyperlinks.empty?
flash[:error] = "The student has already submitted their work, so you are not allowed to remove them."
elsif !drop_topic_deadline.nil? and Time.now > drop_topic_deadline.due_at
flash[:error] = "You cannot drop a student after the drop topic deadline!"
else
delete_signup_for_topic(assignment.id, params[:topic_id], participant.user_id)
flash[:success] = "You have successfully dropped the student from the topic!"
end
redirect_to controller: 'assignments', action: 'edit', id: assignment.id
end
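  # Saves the ordered topic bids submitted by the participant's team: bids for deselected topics are removed,
  # and Bid records are created or updated so that their priority matches the submitted order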
def set_priority
participant = AssignmentParticipant.find_by(id: params[:participant_id])
assignment_id = SignUpTopic.find(params[:topic].first).assignment.id
team_id = participant.team.try(:id)
unless team_id
# Zhewei: team lazy initialization
SignUpSheet.signup_team(assignment_id, participant.user.id)
team_id = participant.team.try(:id)
end
if params[:topic].nil?
# All topics are deselected by current team
Bid.where(team_id: team_id).destroy_all
else
@bids = Bid.where(team_id: team_id)
signed_up_topics = Bid.where(team_id: team_id).map(&:topic_id)
# Remove topics from bids table if the student moves data from Selection table to Topics table
# This step is necessary to avoid duplicate priorities in Bids table
signed_up_topics -= params[:topic].map(&:to_i)
signed_up_topics.each do |topic|
Bid.where(topic_id: topic, team_id: team_id).destroy_all
end
params[:topic].each_with_index do |topic_id, index|
bid_existence = Bid.where(topic_id: topic_id, team_id: team_id)
if bid_existence.empty?
Bid.create(topic_id: topic_id, team_id: team_id, priority: index + 1)
else
Bid.where(topic_id: topic_id, team_id: team_id).update_all(priority: index + 1)
end
end
end
redirect_to action: 'list', assignment_id: params[:assignment_id]
end
# If the instructor needs to explicitly change the start/due dates of the topics
# This is true in case of a staggered deadline type assignment. Individual deadlines can
# be set on a per topic and per round basis
def save_topic_deadlines
assignment = Assignment.find(params[:assignment_id])
@assignment_submission_due_dates = assignment.due_dates.select {|due_date| due_date.deadline_type_id == 1 }
@assignment_review_due_dates = assignment.due_dates.select {|due_date| due_date.deadline_type_id == 2 }
due_dates = params[:due_date]
topics = SignUpTopic.where(assignment_id: params[:assignment_id])
review_rounds = assignment.num_review_rounds
topics.each_with_index do |topic, index|
for i in 1..review_rounds
@topic_submission_due_date = due_dates[topics[index].id.to_s + '_submission_' + i.to_s + '_due_date']
@topic_review_due_date = due_dates[topics[index].id.to_s + '_review_' + i.to_s + '_due_date']
@assignment_submission_due_date = DateTime.parse(@assignment_submission_due_dates[i - 1].due_at.to_s).strftime("%Y-%m-%d %H:%M")
@assignment_review_due_date = DateTime.parse(@assignment_review_due_dates[i - 1].due_at.to_s).strftime("%Y-%m-%d %H:%M")
%w(submission review).each do |deadline_type|
deadline_type_id = DeadlineType.find_by_name(deadline_type).id
next if instance_variable_get('@topic_' + deadline_type + '_due_date') == instance_variable_get('@assignment_' + deadline_type + '_due_date')
topic_due_date = TopicDueDate.where(parent_id: topic.id, deadline_type_id: deadline_type_id, round: i).first rescue nil
if topic_due_date.nil? # create a new record
TopicDueDate.create(
due_at: instance_variable_get('@topic_' + deadline_type + '_due_date'),
deadline_type_id: deadline_type_id,
parent_id: topic.id,
submission_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].submission_allowed_id,
review_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].review_allowed_id,
review_of_review_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].review_of_review_allowed_id,
round: i,
flag: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].flag,
threshold: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].threshold,
delayed_job_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].delayed_job_id,
deadline_name: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].deadline_name,
description_url: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].description_url,
quiz_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].quiz_allowed_id,
teammate_review_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].teammate_review_allowed_id,
type: 'TopicDueDate'
)
else # update an existed record
topic_due_date.update_attributes(
due_at: instance_variable_get('@topic_' + deadline_type + '_due_date'),
submission_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].submission_allowed_id,
review_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].review_allowed_id,
review_of_review_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].review_of_review_allowed_id,
quiz_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].quiz_allowed_id,
teammate_review_allowed_id: instance_variable_get('@assignment_' + deadline_type + '_due_dates')[i - 1].teammate_review_allowed_id
)
end
end
end
end
redirect_to_assignment_edit(params[:assignment_id])
end
  # This method is called when a student clicks on the trumpet icon. So this is a bad method name. --Yang
def show_team
if !(assignment = Assignment.find(params[:assignment_id])).nil? and !(topic = SignUpTopic.find(params[:id])).nil?
@results = ad_info(assignment.id, topic.id)
@results.each do |result|
result.keys.each do |key|
@current_team_name = result[key] if key.equal? :name
end
end
@results.each do |result|
@team_members = ""
TeamsUser.where(team_id: result[:team_id]).each do |teamuser|
@team_members += User.find(teamuser.user_id).name + " "
end
end
# @team_members = find_team_members(topic)
end
end
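  # Moves the current team from its original topic to an approved suggested topic: the suggested topic is made
  # public, the team's signup is re-pointed to it, and the first waitlisted team (if any) is promoted on the
  # original topic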
def switch_original_topic_to_approved_suggested_topic
assignment = AssignmentParticipant.find(params[:id]).assignment
team_id = TeamsUser.team_id(assignment.id, session[:user].id)
original_topic_id = SignedUpTeam.topic_id(assignment.id.to_i, session[:user].id)
SignUpTopic.find(params[:topic_id]).update_attribute('private_to', nil) if SignUpTopic.exists?(params[:topic_id])
if SignedUpTeam.exists?(team_id: team_id, is_waitlisted: 0)
SignedUpTeam.where(team_id: team_id, is_waitlisted: 0).first.update_attribute('topic_id', params[:topic_id].to_i)
end
    # check the waitlist of the original topic. Let the first waitlisted team hold the topic, if one exists.
waitlisted_teams = SignedUpTeam.where(topic_id: original_topic_id, is_waitlisted: 1)
unless waitlisted_teams.blank?
waitlisted_first_team_first_user_id = TeamsUser.where(team_id: waitlisted_teams.first.team_id).first.user_id
SignUpSheet.signup_team(assignment.id, waitlisted_first_team_first_user_id, original_topic_id)
end
redirect_to action: 'list', id: params[:id]
end
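  # Makes an approved suggested topic visible to all teams by clearing its private_to field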
def publish_approved_suggested_topic
SignUpTopic.find(params[:topic_id]).update_attribute('private_to', nil) if SignUpTopic.exists?(params[:topic_id])
redirect_to action: 'list', id: params[:id]
end
private
def setup_new_topic
set_values_for_new_topic
if @assignment.is_microtask?
@sign_up_topic.micropayment = params[:topic][:micropayment]
end
if @assignment.staggered_deadline?
topic_set = []
topic = @sign_up_topic.id
end
if @sign_up_topic.save
undo_link "The topic: \"#{@sign_up_topic.topic_name}\" has been created successfully. "
redirect_to edit_assignment_path(@sign_up_topic.assignment_id) + "#tabs-5"
else
render action: 'new', id: params[:id]
end
end
def update_existing_topic(topic)
topic.topic_identifier = params[:topic][:topic_identifier]
update_max_choosers topic
topic.category = params[:topic][:category]
# topic.assignment_id = params[:id]
topic.save
redirect_to_sign_up params[:id]
end
def update_max_choosers(topic)
    # While saving the max choosers you should be careful: if there are users who have signed up for this particular
    # topic and are on the waitlist, then they have to be converted to confirmed choosers based on availability. But if
    # the topic already has choosers and there is an attempt to decrease the max choosers, that is currently not allowed.
if SignedUpTeam.find_by_topic_id(topic.id).nil? || topic.max_choosers == params[:topic][:max_choosers]
topic.max_choosers = params[:topic][:max_choosers]
else
if topic.max_choosers.to_i < params[:topic][:max_choosers].to_i
topic.update_waitlisted_users params[:topic][:max_choosers]
topic.max_choosers = params[:topic][:max_choosers]
else
flash[:error] = 'The value of the maximum number of choosers can only be increased! No change has been made to maximum choosers.'
end
end
end
# get info related to the ad for partners so that it can be displayed when an assignment_participant
# clicks to see ads related to a topic
def ad_info(_assignment_id, topic_id)
# List that contains individual result object
@result_list = []
# Get the results
@results = SignedUpTeam.where("topic_id = ?", topic_id.to_s)
# Iterate through the results of the query and get the required attributes
@results.each do |result|
team = result.team
topic = result.topic
      result_map = {}
      result_map[:team_id] = team.id
      result_map[:comments_for_advertisement] = team.comments_for_advertisement
      result_map[:name] = team.name
      result_map[:assignment_id] = topic.assignment_id
      result_map[:advertise_for_partner] = team.advertise_for_partner
      # Append to the list
      @result_list.append(result_map)
end
@result_list
end
def delete_signup_for_topic(assignment_id, topic_id, user_id)
SignUpTopic.reassign_topic(user_id, assignment_id, topic_id)
end
end | 49.063425 | 179 | 0.709657 |
1181c169f574de5169b9d062a4c29aad9432e5b4 | 1,394 | # frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190124200344_migrate_storage_migrator_sidekiq_queue.rb')
RSpec.describe MigrateStorageMigratorSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
context 'when there are jobs in the queues' do
it 'correctly migrates queue when migrating up' do
Sidekiq::Testing.disable! do
stub_worker(queue: :storage_migrator).perform_async(1, 5)
described_class.new.up
expect(sidekiq_queue_length('storage_migrator')).to eq 0
expect(sidekiq_queue_length('hashed_storage:hashed_storage_migrator')).to eq 1
end
end
it 'correctly migrates queue when migrating down' do
Sidekiq::Testing.disable! do
stub_worker(queue: :'hashed_storage:hashed_storage_migrator').perform_async(1, 5)
described_class.new.down
expect(sidekiq_queue_length('storage_migrator')).to eq 1
expect(sidekiq_queue_length('hashed_storage:hashed_storage_migrator')).to eq 0
end
end
end
context 'when there are no jobs in the queues' do
it 'does not raise error when migrating up' do
expect { described_class.new.up }.not_to raise_error
end
it 'does not raise error when migrating down' do
expect { described_class.new.down }.not_to raise_error
end
end
end
| 31.681818 | 105 | 0.731707 |
872856d0c88f693cf68b2370d9470e935b81a8e3 | 493 | module WeightedAverage
module ActiveRecordBaseClassMethods
# @see WeightedAverage::ActiveRecordRelationInstanceMethods#weighted_average
#
# @return [Float,nil]
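    #
    # Illustrative usage (model and column names are hypothetical, not part of this gem):
    #   Airline.weighted_average(:load_factor, weighted_by: :passengers)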
def weighted_average(*args)
scoped.weighted_average(*args)
end
# @see WeightedAverage::ActiveRecordRelationInstanceMethods#weighted_average_relation
#
# @return [Arel::SelectManager]
def weighted_average_relation(*args)
scoped.weighted_average_relation(*args)
end
end
end
| 27.388889 | 89 | 0.744422 |
e20bb8761eb4afa11a570ca8ec730ff8c3d3e36d | 84 | class FileAtt < ActiveRecord::Base
mount_uploader :file_info, FileUploader
end
| 16.8 | 43 | 0.785714 |
871d9ddc65f1e83b2d2a440d7ac0757776fbf3cc | 142 | class NotificationTemplate < ApplicationRecord
with_options(presence: true, uniqueness: true) do
validates :key, :template_id
end
end
| 23.666667 | 51 | 0.78169 |
4a484340226b2e648b036f25297372170017c6d4 | 2,013 | # frozen_string_literal: true
require 'krane/kubernetes_resource/pod'
module Krane
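  # Shared behaviour for resources that own a set of pods: aggregates pod failure and
  # timeout messages, merges in events from the most relevant pod, and collects container
  # logs for debugging. Subclasses must provide a `pods` accessor and `parent_of_pod?`.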
class PodSetBase < KubernetesResource
def failure_message
pods.map(&:failure_message).compact.uniq.join("\n")
end
def timeout_message
pods.map(&:timeout_message).compact.uniq.join("\n")
end
def fetch_events(kubectl)
own_events = super
return own_events unless pods.present?
most_useful_pod = pods.find(&:deploy_failed?) || pods.find(&:deploy_timed_out?) || pods.first
own_events.merge(most_useful_pod.fetch_events(kubectl))
end
def fetch_debug_logs
logs = Krane::RemoteLogs.new(
logger: @logger,
parent_id: id,
container_names: container_names,
namespace: @namespace,
context: @context
)
logs.sync
logs
end
def print_debug_logs?
pods.present? # the kubectl command times out if no pods exist
end
private
def pods
raise NotImplementedError, "Subclasses must define a `pods` accessor"
end
def parent_of_pod?(_)
raise NotImplementedError, "Subclasses must define a `parent_of_pod?` method"
end
def container_names
regular_containers = @definition["spec"]["template"]["spec"]["containers"].map { |c| c["name"] }
init_containers = @definition["spec"]["template"]["spec"].fetch("initContainers", {}).map { |c| c["name"] }
regular_containers + init_containers
end
def find_pods(cache)
all_pods = cache.get_all(Pod.kind, @instance_data["spec"]["selector"]["matchLabels"])
all_pods.each_with_object([]) do |pod_data, relevant_pods|
next unless parent_of_pod?(pod_data)
pod = Pod.new(
namespace: namespace,
context: context,
definition: pod_data,
logger: @logger,
parent: "#{name.capitalize} #{type}",
deploy_started_at: @deploy_started_at
)
pod.sync(cache)
relevant_pods << pod
end
end
end
end
| 27.958333 | 113 | 0.643318 |
18273573e0406bff7d91f5ff9f145db56fa86e00 | 8,359 | require 'active_support/inflector/methods'
require 'active_support/inflector/transliterate'
# String inflections define new methods on the String class to transform names for different purposes.
# For instance, you can figure out the name of a table from the name of a class.
#
# 'ScaleScore'.tableize # => "scale_scores"
#
class String
# Returns the plural form of the word in the string.
#
# If the optional parameter +count+ is specified,
# the singular form will be returned if <tt>count == 1</tt>.
# For any other value of +count+ the plural will be returned.
#
# If the optional parameter +locale+ is specified,
# the word will be pluralized as a word of that language.
# By default, this parameter is set to <tt>:en</tt>.
# You must define your own inflection rules for languages other than English.
#
# 'post'.pluralize # => "posts"
# 'octopus'.pluralize # => "octopi"
# 'sheep'.pluralize # => "sheep"
# 'words'.pluralize # => "words"
# 'the blue mailman'.pluralize # => "the blue mailmen"
# 'CamelOctopus'.pluralize # => "CamelOctopi"
# 'apple'.pluralize(1) # => "apple"
# 'apple'.pluralize(2) # => "apples"
# 'ley'.pluralize(:es) # => "leyes"
# 'ley'.pluralize(1, :es) # => "ley"
def pluralize(count = nil, locale = :en)
locale = count if count.is_a?(Symbol)
if count == 1
self
else
ActiveSupport::Inflector.pluralize(self, locale)
end
end
# The reverse of +pluralize+, returns the singular form of a word in a string.
#
# If the optional parameter +locale+ is specified,
# the word will be singularized as a word of that language.
# By default, this parameter is set to <tt>:en</tt>.
# You must define your own inflection rules for languages other than English.
#
# 'posts'.singularize # => "post"
# 'octopi'.singularize # => "octopus"
# 'sheep'.singularize # => "sheep"
# 'word'.singularize # => "word"
# 'the blue mailmen'.singularize # => "the blue mailman"
# 'CamelOctopi'.singularize # => "CamelOctopus"
# 'leyes'.singularize(:es) # => "ley"
def singularize(locale = :en)
ActiveSupport::Inflector.singularize(self, locale)
end
# +constantize+ tries to find a declared constant with the name specified
# in the string. It raises a NameError when the name is not in CamelCase
# or is not initialized. See ActiveSupport::Inflector.constantize
#
# 'Module'.constantize # => Module
# 'Class'.constantize # => Class
# 'blargle'.constantize # => NameError: wrong constant name blargle
def constantize
ActiveSupport::Inflector.constantize(self)
end
# +safe_constantize+ tries to find a declared constant with the name specified
# in the string. It returns nil when the name is not in CamelCase
# or is not initialized. See ActiveSupport::Inflector.safe_constantize
#
# 'Module'.safe_constantize # => Module
# 'Class'.safe_constantize # => Class
# 'blargle'.safe_constantize # => nil
def safe_constantize
ActiveSupport::Inflector.safe_constantize(self)
end
# By default, +camelize+ converts strings to UpperCamelCase. If the argument to camelize
# is set to <tt>:lower</tt> then camelize produces lowerCamelCase.
#
# +camelize+ will also convert '/' to '::' which is useful for converting paths to namespaces.
#
# 'active_record'.camelize # => "ActiveRecord"
# 'active_record'.camelize(:lower) # => "activeRecord"
# 'active_record/errors'.camelize # => "ActiveRecord::Errors"
# 'active_record/errors'.camelize(:lower) # => "activeRecord::Errors"
def camelize(first_letter = :upper)
case first_letter
when :upper
ActiveSupport::Inflector.camelize(self, true)
when :lower
ActiveSupport::Inflector.camelize(self, false)
end
end
alias_method :camelcase, :camelize
# Capitalizes all the words and replaces some characters in the string to create
# a nicer looking title. +titleize+ is meant for creating pretty output. It is not
# used in the Rails internals.
#
# +titleize+ is also aliased as +titlecase+.
#
# 'man from the boondocks'.titleize # => "Man From The Boondocks"
# 'x-men: the last stand'.titleize # => "X Men: The Last Stand"
def titleize
ActiveSupport::Inflector.titleize(self)
end
alias_method :titlecase, :titleize
# The reverse of +camelize+. Makes an underscored, lowercase form from the expression in the string.
#
# +underscore+ will also change '::' to '/' to convert namespaces to paths.
#
# 'ActiveModel'.underscore # => "active_model"
# 'ActiveModel::Errors'.underscore # => "active_model/errors"
def underscore
ActiveSupport::Inflector.underscore(self)
end
# Replaces underscores with dashes in the string.
#
# 'puni_puni'.dasherize # => "puni-puni"
def dasherize
ActiveSupport::Inflector.dasherize(self)
end
# Removes the module part from the constant expression in the string.
#
# 'ActiveRecord::CoreExtensions::String::Inflections'.demodulize # => "Inflections"
# 'Inflections'.demodulize # => "Inflections"
# '::Inflections'.demodulize # => "Inflections"
# ''.demodulize # => ''
#
# See also +deconstantize+.
def demodulize
ActiveSupport::Inflector.demodulize(self)
end
# Removes the rightmost segment from the constant expression in the string.
#
# 'Net::HTTP'.deconstantize # => "Net"
# '::Net::HTTP'.deconstantize # => "::Net"
# 'String'.deconstantize # => ""
# '::String'.deconstantize # => ""
# ''.deconstantize # => ""
#
# See also +demodulize+.
def deconstantize
ActiveSupport::Inflector.deconstantize(self)
end
# Replaces special characters in a string so that it may be used as part of a 'pretty' URL.
#
# class Person
# def to_param
# "#{id}-#{name.parameterize}"
# end
# end
#
# @person = Person.find(1)
# # => #<Person id: 1, name: "Donald E. Knuth">
#
# <%= link_to(@person.name, person_path) %>
# # => <a href="/person/1-donald-e-knuth">Donald E. Knuth</a>
def parameterize(sep = '-')
ActiveSupport::Inflector.parameterize(self, sep)
end
# Creates the name of a table like Rails does for models to table names. This method
# uses the +pluralize+ method on the last word in the string.
#
# 'RawScaledScorer'.tableize # => "raw_scaled_scorers"
# 'egg_and_ham'.tableize # => "egg_and_hams"
# 'fancyCategory'.tableize # => "fancy_categories"
def tableize
ActiveSupport::Inflector.tableize(self)
end
# Create a class name from a plural table name like Rails does for table names to models.
# Note that this returns a string and not a class. (To convert to an actual class
# follow +classify+ with +constantize+.)
#
# 'egg_and_hams'.classify # => "EggAndHam"
# 'posts'.classify # => "Post"
def classify
ActiveSupport::Inflector.classify(self)
end
# Capitalizes the first word, turns underscores into spaces, and strips a
# trailing '_id' if present.
# Like +titleize+, this is meant for creating pretty output.
#
# The capitalization of the first word can be turned off by setting the
# optional parameter +capitalize+ to false.
# By default, this parameter is true.
#
# 'employee_salary'.humanize # => "Employee salary"
# 'author_id'.humanize # => "Author"
# 'author_id'.humanize(capitalize: false) # => "author"
def humanize(options = {})
ActiveSupport::Inflector.humanize(self, options)
end
# Creates a foreign key name from a class name.
# +separate_class_name_and_id_with_underscore+ sets whether
# the method should put '_' between the name and 'id'.
#
# 'Message'.foreign_key # => "message_id"
# 'Message'.foreign_key(false) # => "messageid"
# 'Admin::Post'.foreign_key # => "post_id"
def foreign_key(separate_class_name_and_id_with_underscore = true)
ActiveSupport::Inflector.foreign_key(self, separate_class_name_and_id_with_underscore)
end
end
| 38.520737 | 102 | 0.646369 |
5de963058361f6a2ef143ef6da112b79ada85fed | 10,404 | # frozen_string_literal: true
RSpec.describe Twib::RssBuilder do
describe '#podcast_root' do
let(:result) do
described_class.new do
podcast_root do |rss|
rss.testFoo 'bar'
end
end
end
describe 'the rss element' do
let(:rss_node) { result.doc.xpath('rss').first }
it 'is the root of the document' do
expect(rss_node).to eq(result.doc.root)
end
it 'has the correct XML version' do
expect(rss_node.attributes['version'].value).to eq('2.0')
end
end
describe 'the channel element' do
let(:channel_element) { result.doc.xpath('rss/channel').first }
it 'is nested under the rss element' do
expect(channel_element).to eq(result.doc.root.children.first)
end
it 'has the custom element passed into the block' do
expect_child_element(channel_element, name: 'testFoo')
end
it 'has a title element' do
expect_child_element(channel_element, name: 'title')
end
it 'has a description element' do
expect_child_element(channel_element, name: 'description')
end
describe 'description element' do
let(:description_element) do
channel_element.xpath('description').first
end
it 'has CDATA' do
expect_child_element(
description_element,
name: '#cdata-section'
)
end
end
it 'has an itunes summary element' do
expect_child_element(
channel_element, name: 'summary', namespace: 'itunes'
)
end
it 'has a managingEditor element' do
expect_child_element(channel_element, name: 'managingEditor')
end
it 'has a copyright element' do
expect_child_element(channel_element, name: 'copyright')
end
it 'has a link element' do
expect_child_element(
channel_element,
name: 'link',
text: match(URI::DEFAULT_PARSER.make_regexp)
)
end
it 'has an itunes owner element' do
expect_child_element(
channel_element, name: 'owner', namespace: 'itunes'
)
end
describe 'owner element' do
let(:owner_element) do
channel_element.xpath('itunes:owner').first
end
it 'has an email child element' do
expect_child_element(
owner_element, name: 'email', namespace: 'itunes'
)
end
it 'has a name child element' do
expect_child_element(
owner_element, name: 'name', namespace: 'itunes'
)
end
end
it 'has an itunes author element' do
expect_child_element(
channel_element, name: 'author', namespace: 'itunes'
)
end
it 'has a language element' do
expect_child_element(channel_element, name: 'language')
end
it 'has two itunes category elements' do
expect_child_element(
channel_element,
name: 'category',
namespace: 'itunes',
text: '',
count: 2
)
end
it 'has an itunes image element' do
expect_child_element(
channel_element,
name: 'image',
namespace: 'itunes',
text: ''
)
end
describe 'itunes image element' do
let(:image_element) do
channel_element.xpath('itunes:image').first
end
it 'has an href attribute with a url' do
href = image_element.attributes['href'].value
expect(href).to match(URI::DEFAULT_PARSER.make_regexp)
end
end
it 'has an image element' do
expect_child_element(channel_element, name: 'image')
end
describe 'image element' do
let(:image_element) do
channel_element.xpath('image').first
end
it 'has a url child element' do
expect_child_element(
image_element,
name: 'url',
text: match(URI::DEFAULT_PARSER.make_regexp)
)
end
it 'has a link child element' do
expect_child_element(
image_element,
name: 'link',
text: match(URI::DEFAULT_PARSER.make_regexp)
)
end
it 'has a title child element' do
expect_child_element(image_element, name: 'title')
end
end
it 'has an itunes explicit element' do
expect_child_element(
channel_element,
name: 'explicit',
namespace: 'itunes',
text: 'no'
)
end
it 'has an atom link element' do
expect_child_element(
channel_element,
name: 'link',
namespace: 'atom',
text: ''
)
end
describe 'atom link element' do
let(:link_element) do
channel_element.xpath('atom:link').first
end
it 'has an href attribute with a url' do
href = link_element.attributes['href'].value
expect(href).to match(URI::DEFAULT_PARSER.make_regexp)
end
it 'has an rel attribute with self as value' do
rel = link_element.attributes['rel'].value
expect(rel).to eq('self')
end
it 'has an type attribute with rss+xml as value' do
type = link_element.attributes['type'].value
expect(type).to eq('application/rss+xml')
end
end
end
end
describe '#episode_item' do
let(:episode_data) do
{
number: 42,
title: 'title',
enclosure: {
url: 'audio url',
length: 100,
type: 'audio/mpeg'
},
pub_date: 'pub date',
duration: 100,
subtitle: 'sub title',
description: 'description',
language: 'en-us',
image_url: 'image url',
guid: 'guid',
author: 'author'
}
end
let(:result) do
described_class.new do
podcast_root do |rss|
rss.episode_item(episode_data)
end
end
end
let(:episode_element) do
result.doc.xpath('rss/channel/item').first
end
it 'produces an item element' do
expect(episode_element.name).to eq('item')
end
describe 'item element' do
it 'has a title element' do
expect_child_element(
episode_element,
name: 'title',
text: episode_data[:title]
)
end
it 'has a description element' do
expect_child_element(
episode_element,
name: 'description',
text: episode_data[:description]
)
end
describe 'description element' do
let(:description_element) do
episode_element.xpath('description').first
end
it 'has CDATA' do
expect_child_element(
description_element,
name: '#cdata-section',
text: episode_data[:description]
)
end
end
it 'has a content encoded element' do
expect_child_element(
episode_element,
name: 'encoded',
namespace: 'content'
)
end
describe 'content encoded element' do
let(:encoded_element) do
episode_element.xpath('content:encoded').first
end
it 'has CDATA' do
expect_child_element(
encoded_element,
name: '#cdata-section',
text: episode_data[:description]
)
end
end
it 'has an itunes episode element' do
expect_child_element(
episode_element,
name: 'episode',
text: episode_data[:number].to_s,
namespace: 'itunes'
)
end
it 'has a pubDate element' do
expect_child_element(
episode_element,
name: 'pubDate',
text: episode_data[:pub_date]
)
end
it 'has an itunes duration element' do
expect_child_element(
episode_element,
name: 'duration',
text: episode_data[:duration].to_s,
namespace: 'itunes'
)
end
it 'has an itunes author element' do
expect_child_element(
episode_element,
name: 'author',
text: episode_data[:author],
namespace: 'itunes'
)
end
it 'has an itunes image element' do
expect_child_element(
episode_element,
name: 'image',
text: '',
namespace: 'itunes'
)
end
describe 'itunes image element' do
let(:image_element) do
episode_element.xpath('itunes:image').first
end
it 'has an href attribute' do
href = image_element.attributes['href'].value
expect(href).to eq(episode_data[:image_url])
end
end
it 'has an enclosure element' do
expect_child_element(
episode_element,
name: 'enclosure',
text: ''
)
end
describe 'enclosure element' do
let(:enclosure_element) do
episode_element.xpath('enclosure').first
end
let(:enclosure_data) { episode_data[:enclosure] }
it 'has a url attribute' do
url = enclosure_element.attributes['url'].value
expect(url).to eq(enclosure_data[:url])
end
it 'has a length attribute' do
length = enclosure_element.attributes['length'].value
expect(length).to eq(enclosure_data[:length].to_s)
end
it 'has a type attribute' do
type = enclosure_element.attributes['type'].value
expect(type).to eq(enclosure_data[:type])
end
end
it 'has a guid element' do
expect_child_element(
episode_element,
name: 'guid',
text: episode_data[:guid]
)
end
end
end
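  # Asserts that `element` has exactly `count` child nodes matching the given name,
  # namespace prefix, and text (text defaults to any present value).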
def expect_child_element(element, **opts)
text = opts.key?(:text) ? opts[:text] : be_present
namespace = opts[:namespace] &&
having_attributes(prefix: opts[:namespace])
count = opts[:count] || 1
expect(element.children.to_a).to include(
an_object_having_attributes(
text:,
name: opts[:name],
namespace:
)
).exactly(count).times
end
end
| 25.06988 | 69 | 0.56113 |
b91b211f70b52bc0a017f432a24db0a28db886ff | 224 | #
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
require 'thrift'
require 'thrift_test_types'
module Thrift
module Test
MyNumberz = 1
end
end
| 14 | 66 | 0.705357 |
38746d98823674f535405bfe184f3bbe4db6d653 | 110 | class DropTableDelayedJobs < ActiveRecord::Migration[5.2]
def change
drop_table :delayed_jobs
end
end
| 18.333333 | 57 | 0.772727 |
ff793895f9c7e84a5b7dcfb793b947d51bbf2dab | 7,814 | class Rubygem < ActiveRecord::Base
include Patterns
include RubygemSearchable
has_many :owners, through: :ownerships, source: :user
has_many :ownerships, dependent: :destroy
has_many :subscribers, through: :subscriptions, source: :user
has_many :subscriptions, dependent: :destroy
has_many :versions, dependent: :destroy, validate: false
has_many :web_hooks, dependent: :destroy
has_one :linkset, dependent: :destroy
validate :ensure_name_format, if: :needs_name_validation?
validates :name, presence: true, uniqueness: true
after_create :update_unresolved
before_destroy :mark_unresolved
def self.with_versions
where("rubygems.id IN (SELECT rubygem_id FROM versions where versions.indexed IS true)")
end
def self.with_one_version
select('rubygems.*')
.joins(:versions)
.group(column_names.map { |name| "rubygems.#{name}" }.join(', '))
.having('COUNT(versions.id) = 1')
end
def self.name_is(name)
sensitive = where(name: name.strip).limit(1)
return sensitive unless sensitive.empty?
where("UPPER(name) = UPPER(?)", name.strip).limit(1)
end
def self.name_starts_with(letter)
where("UPPER(name) LIKE UPPER(?)", "#{letter}%")
end
def self.reverse_dependencies(name)
where(id: Version.reverse_dependencies(name).select(:rubygem_id))
end
def self.reverse_development_dependencies(name)
where(id: Version.reverse_development_dependencies(name).select(:rubygem_id))
end
def self.reverse_runtime_dependencies(name)
where(id: Version.reverse_runtime_dependencies(name).select(:rubygem_id))
end
def self.total_count
with_versions.count
end
def self.latest(limit = 5)
with_one_version.order(created_at: :desc).limit(limit)
end
def self.downloaded(limit = 5)
with_versions.by_downloads.limit(limit)
end
def self.letter(letter)
name_starts_with(letter).by_name.with_versions
end
def self.letterize(letter)
letter =~ /\A[A-Za-z]\z/ ? letter.upcase : 'A'
end
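  # The 30 calendar days from 31 days ago through 2 days ago, oldest first.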
def self.monthly_dates
(2..31).map { |n| n.days.ago.to_date }.reverse
end
def self.monthly_short_dates
monthly_dates.map { |date| date.strftime("%m/%d") }
end
def self.versions_key(name)
"r:#{name}"
end
def self.by_name
order(name: :asc)
end
def self.by_downloads
order(downloads: :desc)
end
def self.current_rubygems_release
rubygem = find_by(name: "rubygems-update")
rubygem && rubygem.versions.release.indexed.latest.first
end
def all_errors(version = nil)
[self, linkset, version].compact.map do |ar|
ar.errors.full_messages
end.flatten.join(", ")
end
def public_versions(limit = nil)
versions.by_position.published(limit)
end
def public_versions_with_extra_version(extra_version)
versions = public_versions(5)
versions << extra_version
versions.uniq.sort_by(&:position)
end
def hosted?
versions.count.nonzero?
end
def unowned?
ownerships.blank?
end
def indexed_versions?
versions.indexed.count > 0
end
def owned_by?(user)
return false unless user
ownerships.exists?(user_id: user.id)
end
def to_s
versions.most_recent.try(:to_title) || name
end
def downloads
Download.for(self)
end
def downloads_today
versions.to_a.sum { |v| Download.today(v) }
end
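  # Hash of this gem's public attributes for API responses; used by #as_json and #to_xml below.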
def payload(version = versions.most_recent, protocol = Gemcutter::PROTOCOL, host_with_port = Gemcutter::HOST)
deps = version.dependencies.to_a
{
'name' => name,
'downloads' => downloads,
'version' => version.number,
'version_downloads' => version.downloads_count,
'platform' => version.platform,
'authors' => version.authors,
'info' => version.info,
'licenses' => version.licenses,
'metadata' => version.metadata,
'sha' => version.sha256_hex,
'project_uri' => "#{protocol}://#{host_with_port}/gems/#{name}",
'gem_uri' => "#{protocol}://#{host_with_port}/gems/#{version.full_name}.gem",
'homepage_uri' => linkset.try(:home),
'wiki_uri' => linkset.try(:wiki),
'documentation_uri' => linkset.try(:docs).presence || version.documentation_path,
'mailing_list_uri' => linkset.try(:mail),
'source_code_uri' => linkset.try(:code),
'bug_tracker_uri' => linkset.try(:bugs),
'dependencies' => {
'development' => deps.select { |r| r.rubygem && 'development' == r.scope },
'runtime' => deps.select { |r| r.rubygem && 'runtime' == r.scope }
}
}
end
def as_json(*)
payload
end
def to_xml(options = {})
payload.to_xml(options.merge(root: 'rubygem'))
end
def to_param
name.remove(/[^#{Patterns::ALLOWED_CHARACTERS}]/)
end
def with_downloads
"#{name} (#{downloads})"
end
def pushable?
new_record? || versions.indexed.count.zero?
end
def create_ownership(user)
ownerships.create(user: user) if unowned?
end
def update_versions!(version, spec)
version.update_attributes_from_gem_specification!(spec)
end
def update_dependencies!(version, spec)
spec.dependencies.each do |dependency|
version.dependencies.create!(gem_dependency: dependency)
end
rescue ActiveRecord::RecordInvalid => ex
# ActiveRecord can't chain a nested error here, so we have to add and reraise
errors[:base] << ex.message
raise ex
end
def update_linkset!(spec)
self.linkset ||= Linkset.new
self.linkset.update_attributes_from_gem_specification!(spec)
self.linkset.save!
end
def update_attributes_from_gem_specification!(version, spec)
Rubygem.transaction do
save!
update_versions! version, spec
update_dependencies! version, spec
update_linkset! spec
end
end
delegate :count, to: :versions, prefix: true
def yanked_versions?
versions.yanked.exists?
end
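  # Recomputes each version's position (newest release first) and re-marks the latest
  # indexed release for every platform.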
def reorder_versions
numbers = reload.versions.sort.reverse.map(&:number).uniq
versions.each do |version|
Version.find(version.id).update_column(:position, numbers.index(version.number))
end
versions.update_all(latest: false)
versions_of_platforms = versions
.release
.indexed
.group_by(&:platform)
versions_of_platforms.each_value do |platforms|
Version.find(platforms.sort.last.id).update_column(:latest, true)
end
end
def disown
ownerships.each(&:delete)
ownerships.clear
end
def find_version_from_spec(spec)
versions.find_by_number_and_platform(spec.version.to_s, spec.original_platform.to_s)
end
def find_or_initialize_version_from_spec(spec)
version = versions.find_or_initialize_by(number: spec.version.to_s,
platform: spec.original_platform.to_s)
version.rubygem = self
version
end
def monthly_downloads
key_dates = self.class.monthly_dates.map(&:to_s)
Redis.current.hmget(Download.history_key(self), *key_dates).map(&:to_i)
end
def first_built_date
versions.by_earliest_built_at.limit(1).last.built_at
end
private
def ensure_name_format
if name.class != String
errors.add :name, "must be a String"
elsif name !~ /[a-zA-Z]+/
errors.add :name, "must include at least one letter"
elsif name !~ NAME_PATTERN
errors.add :name, "can only include letters, numbers, dashes, and underscores"
end
end
def needs_name_validation?
new_record? || name_changed?
end
def update_unresolved
Dependency.where(unresolved_name: name).find_each do |dependency|
dependency.update_resolved(self)
end
true
end
def mark_unresolved
Dependency.mark_unresolved_for(self)
true
end
end
| 25.788779 | 111 | 0.677758 |
ab7c25ee775569e8ff5a427c22cc1ae6de9e967d | 773 | cask 'omnigraffle' do
if MacOS.version <= :sierra
version '7.8.2'
sha256 'ab463ea6c12d49c4104d3814ac3280d0359072702d4751f5074f644fc79de0c6'
url "https://downloads.omnigroup.com/software/Archive/MacOSX/10.12/OmniGraffle-#{version}.dmg"
else
version '7.10.1'
sha256 '70a960eee199e089a34e26a94e2d48bfcda52231f9826e20fc85363fbaa5757c'
url "https://downloads.omnigroup.com/software/MacOSX/10.13/OmniGraffle-#{version}.dmg"
end
appcast "https://update.omnigroup.com/appcast/com.omnigroup.OmniGraffle#{version.major}"
name 'OmniGraffle'
homepage 'https://www.omnigroup.com/omnigraffle/'
auto_updates true
depends_on macos: '>= :sierra'
app 'OmniGraffle.app'
zap trash: '~/Library/Application Support/The Omni Group/OmniGraffle'
end
| 33.608696 | 98 | 0.759379 |
e20398c53300e213bd05fec517bc275fb5f93aed | 2,015 | class Gnupg < Formula
desc "GNU Pretty Good Privacy (PGP) package"
homepage "https://gnupg.org/"
url "https://gnupg.org/ftp/gcrypt/gnupg/gnupg-2.2.21.tar.bz2"
sha256 "61e83278fb5fa7336658a8b73ab26f379d41275bb1c7c6e694dd9f9a6e8e76ec"
license "GPL-3.0"
bottle do
sha256 "a42991eca1ff5cc8bd25bdd700c8104c50f09a207b1599c8552662b67a95e36a" => :catalina
sha256 "a6f32a714e777b8949f5197e2f70744352c1f3f6785e5813c390af1f45cb5e1c" => :mojave
sha256 "191e614ac845a540c5973f522e2edd7d5d48e2e3f3399cafaf95ef2754a987d3" => :high_sierra
end
depends_on "pkg-config" => :build
depends_on "adns"
depends_on "gettext"
depends_on "gnutls"
depends_on "libassuan"
depends_on "libgcrypt"
depends_on "libgpg-error"
depends_on "libksba"
depends_on "libusb"
depends_on "npth"
depends_on "pinentry"
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--sbindir=#{bin}",
"--sysconfdir=#{etc}",
"--enable-all-tests",
"--enable-symcryptrun",
"--with-pinentry-pgm=#{Formula["pinentry"].opt_bin}/pinentry"
system "make"
system "make", "check"
system "make", "install"
end
def post_install
(var/"run").mkpath
quiet_system "killall", "gpg-agent"
end
test do
(testpath/"batch.gpg").write <<~EOS
Key-Type: RSA
Key-Length: 2048
Subkey-Type: RSA
Subkey-Length: 2048
Name-Real: Testing
Name-Email: [email protected]
Expire-Date: 1d
%no-protection
%commit
EOS
begin
system bin/"gpg", "--batch", "--gen-key", "batch.gpg"
(testpath/"test.txt").write "Hello World!"
system bin/"gpg", "--detach-sign", "test.txt"
system bin/"gpg", "--verify", "test.txt.sig"
ensure
system bin/"gpgconf", "--kill", "gpg-agent"
end
end
end
| 30.074627 | 93 | 0.614888 |
f7de3dd7f80c3f59f0f1ea0bd0d8103b42761287 | 2,637 | require_relative "test_helper"
require "./app"
class TripValidatorTest < Minitest::Test
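  # Every test stubs the Mapbox Directions API with a canned response; individual tests
  # override the stub where a different response is needed.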
def setup
stub_request(:get, %r{https://api\.mapbox\.com/directions/v5/mapbox/driving/.*})
.to_return(status: 200,
body: File.read("#{__dir__}/doubles/responses/mapbox_example.json"))
end
def assert_raise_validation_error(trip)
assert_raises Hitchspots::ValidationError do
Hitchspots::Trip::Validator.new(trip).validate!
end
end
def test_too_many_places
max = Hitchspots::Trip::Validator::MAX_NUMBER_OF_PLACES
too_many_places = Array.new((max + 1)) do
Hitchspots::Place.new("Berlin", lat: "1.23", lon: "2.34")
end
invalid_trip = Hitchspots::Trip.new(*too_many_places)
error = assert_raise_validation_error(invalid_trip)
assert_match(/Too many destinations, maximum is #{max}/, error.message)
end
def test_not_enough_places
min = Hitchspots::Trip::Validator::MIN_NUMBER_OF_PLACES
invalid_trip = Hitchspots::Trip.new(
Hitchspots::Place.new("Berlin", lat: "1.23", lon: "2.34")
)
error = assert_raise_validation_error(invalid_trip)
assert_match(/At least #{min} destinations needed/, error.message)
end
def test_no_coordinates
stub_request(:get, %r{https://api\.mapbox\.com/directions/v5/mapbox/driving\.*})
.to_return(status: 200,
body: File.read("#{__dir__}/doubles/responses/mapbox_no_route_example.json"))
invalid_trip = Hitchspots::Trip.new(
Hitchspots::Place.new("List tiny village", lat: "1.23", lon: "2.34"),
Hitchspots::Place.new("Another lost place", lat: "4.56", lon: "5.67")
)
error = assert_raise_validation_error(invalid_trip)
assert_match(/No route found/, error.message)
end
def test_place_not_found
place = "Invalid Place"
stub_request(:get, "https://nominatim.openstreetmap.org/search?format=json&limit=1&q=#{place}")
.to_return(status: 200,
body: File.read("#{__dir__}/doubles/responses/osm_no_place_example.json"))
invalid_trip = Hitchspots::Trip.new(
Hitchspots::Place.new(place),
Hitchspots::Place.new("Valid Place", lat: "1.23", lon: "2.34")
)
error = assert_raise_validation_error(invalid_trip)
assert_match(/#{place} not found/, error.message)
end
def test_no_spots
invalid_trip = Hitchspots::Trip.new(
Hitchspots::Place.new("Paris", lat: "1.23", lon: "2.34"),
Hitchspots::Place.new("Berlin", lat: "4.56", lon: "5.67")
)
error = assert_raise_validation_error(invalid_trip)
assert_match(/No spots on this route/, error.message)
end
end
| 32.555556 | 99 | 0.681077 |
089ad0f27030551e9ecc037e66bf502ea63769e6 | 2,779 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/devtools/clouddebugger/v2/debugger.proto for package 'google.devtools.clouddebugger.v2'
# Original file comments:
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/devtools/clouddebugger/v2/debugger_pb'
module Google
module Devtools
module Clouddebugger
module V2
module Debugger2
# The Debugger service provides the API that allows users to collect run-time
# information from a running application, without stopping or slowing it down
# and without modifying its state. An application may include one or
# more replicated processes performing the same work.
#
# A debugged application is represented using the Debuggee concept. The
# Debugger service provides a way to query for available debuggees, but does
# not provide a way to create one. A debuggee is created using the Controller
# service, usually by running a debugger agent with the application.
#
# The Debugger service enables the client to set one or more Breakpoints on a
# Debuggee and collect the results of the set Breakpoints.
class Service
include GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.devtools.clouddebugger.v2.Debugger2'
# Sets the breakpoint to the debuggee.
rpc :SetBreakpoint, SetBreakpointRequest, SetBreakpointResponse
# Gets breakpoint information.
rpc :GetBreakpoint, GetBreakpointRequest, GetBreakpointResponse
# Deletes the breakpoint from the debuggee.
rpc :DeleteBreakpoint, DeleteBreakpointRequest, Google::Protobuf::Empty
# Lists all breakpoints for the debuggee.
rpc :ListBreakpoints, ListBreakpointsRequest, ListBreakpointsResponse
# Lists all the debuggees that the user has access to.
rpc :ListDebuggees, ListDebuggeesRequest, ListDebuggeesResponse
end
Stub = Service.rpc_stub_class
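          # Illustrative client construction (endpoint and credential choice are assumptions,
          # not part of this generated file):
          #   require 'grpc'
          #   stub = Google::Devtools::Clouddebugger::V2::Debugger2::Stub.new(
          #     'clouddebugger.googleapis.com:443', GRPC::Core::ChannelCredentials.new)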
end
end
end
end
end
| 42.106061 | 104 | 0.699532 |
216cef585664692933ea634eeadd8f905b0da4d3 | 36,680 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module AndroidpublisherV3
class Apk
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApkBinary
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApksAddExternallyHostedRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApksAddExternallyHostedResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApksListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AppDetails
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AppEdit
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Bundle
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BundlesListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Comment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CountryTargeting
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DeobfuscationFile
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DeobfuscationFilesUploadResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DeveloperComment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DeviceMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ExpansionFile
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ExpansionFilesUploadResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ExternallyHostedApk
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ExternallyHostedApkUsesPermission
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Image
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ImagesDeleteAllResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ImagesListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ImagesUploadResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class InAppProduct
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class InAppProductListing
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class InappproductsListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class InternalAppSharingArtifact
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Listing
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListingsListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LocalizedText
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class MonthDay
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class PageInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Price
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ProductPurchase
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ProductPurchasesAcknowledgeRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Prorate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Review
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReviewReplyResult
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReviewsListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReviewsReplyRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReviewsReplyResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Season
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionCancelSurveyResult
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionDeferralInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionPriceChange
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionPurchase
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionPurchasesAcknowledgeRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionPurchasesDeferRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SubscriptionPurchasesDeferResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Testers
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Timestamp
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TokenPagination
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Track
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TrackRelease
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TracksListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class UserComment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class VoidedPurchase
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class VoidedPurchasesListResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Apk
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :binary, as: 'binary', class: Google::Apis::AndroidpublisherV3::ApkBinary, decorator: Google::Apis::AndroidpublisherV3::ApkBinary::Representation
property :version_code, as: 'versionCode'
end
end
class ApkBinary
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :sha1, as: 'sha1'
property :sha256, as: 'sha256'
end
end
class ApksAddExternallyHostedRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :externally_hosted_apk, as: 'externallyHostedApk', class: Google::Apis::AndroidpublisherV3::ExternallyHostedApk, decorator: Google::Apis::AndroidpublisherV3::ExternallyHostedApk::Representation
end
end
class ApksAddExternallyHostedResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :externally_hosted_apk, as: 'externallyHostedApk', class: Google::Apis::AndroidpublisherV3::ExternallyHostedApk, decorator: Google::Apis::AndroidpublisherV3::ExternallyHostedApk::Representation
end
end
class ApksListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :apks, as: 'apks', class: Google::Apis::AndroidpublisherV3::Apk, decorator: Google::Apis::AndroidpublisherV3::Apk::Representation
property :kind, as: 'kind'
end
end
class AppDetails
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :contact_email, as: 'contactEmail'
property :contact_phone, as: 'contactPhone'
property :contact_website, as: 'contactWebsite'
property :default_language, as: 'defaultLanguage'
end
end
class AppEdit
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :expiry_time_seconds, as: 'expiryTimeSeconds'
property :id, as: 'id'
end
end
class Bundle
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :sha1, as: 'sha1'
property :sha256, as: 'sha256'
property :version_code, as: 'versionCode'
end
end
class BundlesListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :bundles, as: 'bundles', class: Google::Apis::AndroidpublisherV3::Bundle, decorator: Google::Apis::AndroidpublisherV3::Bundle::Representation
property :kind, as: 'kind'
end
end
class Comment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :developer_comment, as: 'developerComment', class: Google::Apis::AndroidpublisherV3::DeveloperComment, decorator: Google::Apis::AndroidpublisherV3::DeveloperComment::Representation
property :user_comment, as: 'userComment', class: Google::Apis::AndroidpublisherV3::UserComment, decorator: Google::Apis::AndroidpublisherV3::UserComment::Representation
end
end
class CountryTargeting
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :countries, as: 'countries'
property :include_rest_of_world, as: 'includeRestOfWorld'
end
end
class DeobfuscationFile
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :symbol_type, as: 'symbolType'
end
end
class DeobfuscationFilesUploadResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :deobfuscation_file, as: 'deobfuscationFile', class: Google::Apis::AndroidpublisherV3::DeobfuscationFile, decorator: Google::Apis::AndroidpublisherV3::DeobfuscationFile::Representation
end
end
class DeveloperComment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :last_modified, as: 'lastModified', class: Google::Apis::AndroidpublisherV3::Timestamp, decorator: Google::Apis::AndroidpublisherV3::Timestamp::Representation
property :text, as: 'text'
end
end
class DeviceMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cpu_make, as: 'cpuMake'
property :cpu_model, as: 'cpuModel'
property :device_class, as: 'deviceClass'
property :gl_es_version, as: 'glEsVersion'
property :manufacturer, as: 'manufacturer'
property :native_platform, as: 'nativePlatform'
property :product_name, as: 'productName'
property :ram_mb, as: 'ramMb'
property :screen_density_dpi, as: 'screenDensityDpi'
property :screen_height_px, as: 'screenHeightPx'
property :screen_width_px, as: 'screenWidthPx'
end
end
class ExpansionFile
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :file_size, :numeric_string => true, as: 'fileSize'
property :references_version, as: 'referencesVersion'
end
end
class ExpansionFilesUploadResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :expansion_file, as: 'expansionFile', class: Google::Apis::AndroidpublisherV3::ExpansionFile, decorator: Google::Apis::AndroidpublisherV3::ExpansionFile::Representation
end
end
class ExternallyHostedApk
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :application_label, as: 'applicationLabel'
collection :certificate_base64s, as: 'certificateBase64s'
property :externally_hosted_url, as: 'externallyHostedUrl'
property :file_sha1_base64, as: 'fileSha1Base64'
property :file_sha256_base64, as: 'fileSha256Base64'
property :file_size, :numeric_string => true, as: 'fileSize'
property :icon_base64, as: 'iconBase64'
property :maximum_sdk, as: 'maximumSdk'
property :minimum_sdk, as: 'minimumSdk'
collection :native_codes, as: 'nativeCodes'
property :package_name, as: 'packageName'
collection :uses_features, as: 'usesFeatures'
collection :uses_permissions, as: 'usesPermissions', class: Google::Apis::AndroidpublisherV3::ExternallyHostedApkUsesPermission, decorator: Google::Apis::AndroidpublisherV3::ExternallyHostedApkUsesPermission::Representation
property :version_code, as: 'versionCode'
property :version_name, as: 'versionName'
end
end
class ExternallyHostedApkUsesPermission
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :max_sdk_version, as: 'maxSdkVersion'
property :name, as: 'name'
end
end
class Image
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :sha1, as: 'sha1'
property :url, as: 'url'
end
end
class ImagesDeleteAllResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :deleted, as: 'deleted', class: Google::Apis::AndroidpublisherV3::Image, decorator: Google::Apis::AndroidpublisherV3::Image::Representation
end
end
class ImagesListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :images, as: 'images', class: Google::Apis::AndroidpublisherV3::Image, decorator: Google::Apis::AndroidpublisherV3::Image::Representation
end
end
class ImagesUploadResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :image, as: 'image', class: Google::Apis::AndroidpublisherV3::Image, decorator: Google::Apis::AndroidpublisherV3::Image::Representation
end
end
class InAppProduct
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :default_language, as: 'defaultLanguage'
property :default_price, as: 'defaultPrice', class: Google::Apis::AndroidpublisherV3::Price, decorator: Google::Apis::AndroidpublisherV3::Price::Representation
property :grace_period, as: 'gracePeriod'
hash :listings, as: 'listings', class: Google::Apis::AndroidpublisherV3::InAppProductListing, decorator: Google::Apis::AndroidpublisherV3::InAppProductListing::Representation
property :package_name, as: 'packageName'
hash :prices, as: 'prices', class: Google::Apis::AndroidpublisherV3::Price, decorator: Google::Apis::AndroidpublisherV3::Price::Representation
property :purchase_type, as: 'purchaseType'
property :season, as: 'season', class: Google::Apis::AndroidpublisherV3::Season, decorator: Google::Apis::AndroidpublisherV3::Season::Representation
property :sku, as: 'sku'
property :status, as: 'status'
property :subscription_period, as: 'subscriptionPeriod'
property :trial_period, as: 'trialPeriod'
end
end
class InAppProductListing
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :description, as: 'description'
property :title, as: 'title'
end
end
class InappproductsListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :inappproduct, as: 'inappproduct', class: Google::Apis::AndroidpublisherV3::InAppProduct, decorator: Google::Apis::AndroidpublisherV3::InAppProduct::Representation
property :kind, as: 'kind'
property :page_info, as: 'pageInfo', class: Google::Apis::AndroidpublisherV3::PageInfo, decorator: Google::Apis::AndroidpublisherV3::PageInfo::Representation
property :token_pagination, as: 'tokenPagination', class: Google::Apis::AndroidpublisherV3::TokenPagination, decorator: Google::Apis::AndroidpublisherV3::TokenPagination::Representation
end
end
class InternalAppSharingArtifact
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :certificate_fingerprint, as: 'certificateFingerprint'
property :download_url, as: 'downloadUrl'
property :sha256, as: 'sha256'
end
end
class Listing
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :full_description, as: 'fullDescription'
property :language, as: 'language'
property :short_description, as: 'shortDescription'
property :title, as: 'title'
property :video, as: 'video'
end
end
class ListingsListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :kind, as: 'kind'
collection :listings, as: 'listings', class: Google::Apis::AndroidpublisherV3::Listing, decorator: Google::Apis::AndroidpublisherV3::Listing::Representation
end
end
class LocalizedText
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :language, as: 'language'
property :text, as: 'text'
end
end
class MonthDay
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :day, as: 'day'
property :month, as: 'month'
end
end
class PageInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :result_per_page, as: 'resultPerPage'
property :start_index, as: 'startIndex'
property :total_results, as: 'totalResults'
end
end
class Price
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :currency, as: 'currency'
property :price_micros, as: 'priceMicros'
end
end
class ProductPurchase
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :acknowledgement_state, as: 'acknowledgementState'
property :consumption_state, as: 'consumptionState'
property :developer_payload, as: 'developerPayload'
property :kind, as: 'kind'
property :order_id, as: 'orderId'
property :purchase_state, as: 'purchaseState'
property :purchase_time_millis, :numeric_string => true, as: 'purchaseTimeMillis'
property :purchase_type, as: 'purchaseType'
end
end
class ProductPurchasesAcknowledgeRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :developer_payload, as: 'developerPayload'
end
end
class Prorate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :default_price, as: 'defaultPrice', class: Google::Apis::AndroidpublisherV3::Price, decorator: Google::Apis::AndroidpublisherV3::Price::Representation
property :start, as: 'start', class: Google::Apis::AndroidpublisherV3::MonthDay, decorator: Google::Apis::AndroidpublisherV3::MonthDay::Representation
end
end
class Review
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :author_name, as: 'authorName'
collection :comments, as: 'comments', class: Google::Apis::AndroidpublisherV3::Comment, decorator: Google::Apis::AndroidpublisherV3::Comment::Representation
property :review_id, as: 'reviewId'
end
end
class ReviewReplyResult
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :last_edited, as: 'lastEdited', class: Google::Apis::AndroidpublisherV3::Timestamp, decorator: Google::Apis::AndroidpublisherV3::Timestamp::Representation
property :reply_text, as: 'replyText'
end
end
class ReviewsListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :page_info, as: 'pageInfo', class: Google::Apis::AndroidpublisherV3::PageInfo, decorator: Google::Apis::AndroidpublisherV3::PageInfo::Representation
collection :reviews, as: 'reviews', class: Google::Apis::AndroidpublisherV3::Review, decorator: Google::Apis::AndroidpublisherV3::Review::Representation
property :token_pagination, as: 'tokenPagination', class: Google::Apis::AndroidpublisherV3::TokenPagination, decorator: Google::Apis::AndroidpublisherV3::TokenPagination::Representation
end
end
class ReviewsReplyRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :reply_text, as: 'replyText'
end
end
class ReviewsReplyResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :result, as: 'result', class: Google::Apis::AndroidpublisherV3::ReviewReplyResult, decorator: Google::Apis::AndroidpublisherV3::ReviewReplyResult::Representation
end
end
class Season
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :end, as: 'end', class: Google::Apis::AndroidpublisherV3::MonthDay, decorator: Google::Apis::AndroidpublisherV3::MonthDay::Representation
collection :prorations, as: 'prorations', class: Google::Apis::AndroidpublisherV3::Prorate, decorator: Google::Apis::AndroidpublisherV3::Prorate::Representation
property :start, as: 'start', class: Google::Apis::AndroidpublisherV3::MonthDay, decorator: Google::Apis::AndroidpublisherV3::MonthDay::Representation
end
end
class SubscriptionCancelSurveyResult
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cancel_survey_reason, as: 'cancelSurveyReason'
property :user_input_cancel_reason, as: 'userInputCancelReason'
end
end
class SubscriptionDeferralInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :desired_expiry_time_millis, :numeric_string => true, as: 'desiredExpiryTimeMillis'
property :expected_expiry_time_millis, :numeric_string => true, as: 'expectedExpiryTimeMillis'
end
end
class SubscriptionPriceChange
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :new_price, as: 'newPrice', class: Google::Apis::AndroidpublisherV3::Price, decorator: Google::Apis::AndroidpublisherV3::Price::Representation
property :state, as: 'state'
end
end
class SubscriptionPurchase
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :acknowledgement_state, as: 'acknowledgementState'
property :auto_renewing, as: 'autoRenewing'
property :auto_resume_time_millis, :numeric_string => true, as: 'autoResumeTimeMillis'
property :cancel_reason, as: 'cancelReason'
property :cancel_survey_result, as: 'cancelSurveyResult', class: Google::Apis::AndroidpublisherV3::SubscriptionCancelSurveyResult, decorator: Google::Apis::AndroidpublisherV3::SubscriptionCancelSurveyResult::Representation
property :country_code, as: 'countryCode'
property :developer_payload, as: 'developerPayload'
property :email_address, as: 'emailAddress'
property :expiry_time_millis, :numeric_string => true, as: 'expiryTimeMillis'
property :family_name, as: 'familyName'
property :given_name, as: 'givenName'
property :kind, as: 'kind'
property :linked_purchase_token, as: 'linkedPurchaseToken'
property :order_id, as: 'orderId'
property :payment_state, as: 'paymentState'
property :price_amount_micros, :numeric_string => true, as: 'priceAmountMicros'
property :price_change, as: 'priceChange', class: Google::Apis::AndroidpublisherV3::SubscriptionPriceChange, decorator: Google::Apis::AndroidpublisherV3::SubscriptionPriceChange::Representation
property :price_currency_code, as: 'priceCurrencyCode'
property :profile_id, as: 'profileId'
property :profile_name, as: 'profileName'
property :purchase_type, as: 'purchaseType'
property :start_time_millis, :numeric_string => true, as: 'startTimeMillis'
property :user_cancellation_time_millis, :numeric_string => true, as: 'userCancellationTimeMillis'
end
end
class SubscriptionPurchasesAcknowledgeRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :developer_payload, as: 'developerPayload'
end
end
class SubscriptionPurchasesDeferRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :deferral_info, as: 'deferralInfo', class: Google::Apis::AndroidpublisherV3::SubscriptionDeferralInfo, decorator: Google::Apis::AndroidpublisherV3::SubscriptionDeferralInfo::Representation
end
end
class SubscriptionPurchasesDeferResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :new_expiry_time_millis, :numeric_string => true, as: 'newExpiryTimeMillis'
end
end
class Testers
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :google_groups, as: 'googleGroups'
collection :google_plus_communities, as: 'googlePlusCommunities'
end
end
class Timestamp
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :nanos, as: 'nanos'
property :seconds, :numeric_string => true, as: 'seconds'
end
end
class TokenPagination
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
property :previous_page_token, as: 'previousPageToken'
end
end
class Track
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :releases, as: 'releases', class: Google::Apis::AndroidpublisherV3::TrackRelease, decorator: Google::Apis::AndroidpublisherV3::TrackRelease::Representation
property :track, as: 'track'
end
end
class TrackRelease
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :country_targeting, as: 'countryTargeting', class: Google::Apis::AndroidpublisherV3::CountryTargeting, decorator: Google::Apis::AndroidpublisherV3::CountryTargeting::Representation
property :name, as: 'name'
collection :release_notes, as: 'releaseNotes', class: Google::Apis::AndroidpublisherV3::LocalizedText, decorator: Google::Apis::AndroidpublisherV3::LocalizedText::Representation
property :status, as: 'status'
property :user_fraction, as: 'userFraction'
collection :version_codes, as: 'versionCodes'
end
end
class TracksListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :kind, as: 'kind'
collection :tracks, as: 'tracks', class: Google::Apis::AndroidpublisherV3::Track, decorator: Google::Apis::AndroidpublisherV3::Track::Representation
end
end
class UserComment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :android_os_version, as: 'androidOsVersion'
property :app_version_code, as: 'appVersionCode'
property :app_version_name, as: 'appVersionName'
property :device, as: 'device'
property :device_metadata, as: 'deviceMetadata', class: Google::Apis::AndroidpublisherV3::DeviceMetadata, decorator: Google::Apis::AndroidpublisherV3::DeviceMetadata::Representation
property :last_modified, as: 'lastModified', class: Google::Apis::AndroidpublisherV3::Timestamp, decorator: Google::Apis::AndroidpublisherV3::Timestamp::Representation
property :original_text, as: 'originalText'
property :reviewer_language, as: 'reviewerLanguage'
property :star_rating, as: 'starRating'
property :text, as: 'text'
property :thumbs_down_count, as: 'thumbsDownCount'
property :thumbs_up_count, as: 'thumbsUpCount'
end
end
class VoidedPurchase
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :kind, as: 'kind'
property :purchase_time_millis, :numeric_string => true, as: 'purchaseTimeMillis'
property :purchase_token, as: 'purchaseToken'
property :voided_time_millis, :numeric_string => true, as: 'voidedTimeMillis'
end
end
class VoidedPurchasesListResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :page_info, as: 'pageInfo', class: Google::Apis::AndroidpublisherV3::PageInfo, decorator: Google::Apis::AndroidpublisherV3::PageInfo::Representation
property :token_pagination, as: 'tokenPagination', class: Google::Apis::AndroidpublisherV3::TokenPagination, decorator: Google::Apis::AndroidpublisherV3::TokenPagination::Representation
collection :voided_purchases, as: 'voidedPurchases', class: Google::Apis::AndroidpublisherV3::VoidedPurchase, decorator: Google::Apis::AndroidpublisherV3::VoidedPurchase::Representation
end
end
end
end
end
| 38.168574 | 233 | 0.653871 |
91567caa2b129a331362d2813054d7cffb84a399 | 2,080 | class Strongswan < Formula
desc "VPN based on IPsec"
homepage "https://www.strongswan.org"
url "https://download.strongswan.org/strongswan-5.8.2.tar.bz2"
sha256 "86900ddbe7337c923dadf2c8339ae8ed2b9158e3691745884d08ae534677430e"
bottle do
sha256 "09147538543405ab5feb83c4aad866d147ed3acdc6d67e3fc423b7d1aa968663" => :catalina
sha256 "1ec03a199f80d10c7726ed50d61ea92a0f1b250f292ba44986086814b1f51b35" => :mojave
sha256 "507157d52fbbe0d7ba3194abfa56dfbdb7cd0b98dcfcf7b54af8d39e1c0ed595" => :high_sierra
end
head do
url "https://git.strongswan.org/strongswan.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "bison" => :build
depends_on "gettext" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
end
  depends_on "openssl@1.1"
def install
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--sbindir=#{bin}
--sysconfdir=#{etc}
--disable-defaults
--enable-charon
--enable-cmd
--enable-constraints
--enable-curve25519
--enable-eap-gtc
--enable-eap-identity
--enable-eap-md5
--enable-eap-mschapv2
--enable-ikev1
--enable-ikev2
--enable-kernel-pfkey
--enable-kernel-pfroute
--enable-nonce
--enable-openssl
--enable-osx-attr
--enable-pem
--enable-pgp
--enable-pkcs1
--enable-pkcs8
--enable-pki
--enable-pubkey
--enable-revocation
--enable-scepclient
--enable-socket-default
--enable-sshkey
--enable-stroke
--enable-swanctl
--enable-unity
--enable-updown
--enable-x509
--enable-xauth-generic
]
system "./autogen.sh" if build.head?
system "./configure", *args
system "make", "check"
system "make", "install"
end
def caveats; <<~EOS
You will have to run both "ipsec" and "charon-cmd" with "sudo".
EOS
end
test do
system "#{bin}/ipsec", "--version"
system "#{bin}/charon-cmd", "--version"
end
end
| 25.365854 | 93 | 0.640385 |
ffbe63516bb9a25803f167d4c0ed677180d3fc4c | 2,150 | class UsersController < ApplicationController
  # The logged_in_user method runs right before the edit and update actions (scoped with :only)
before_action :logged_in_user, only:[:edit, :update]
before_action :correct_user, only:[:edit, :update]
before_action :admin_user, only:[:index, :destroy]
def index
    # Passing per (provided by the kaminari gem) sets how many records are shown per page
@all_user = User.page(params[:page]).per(10)
end
def show
@user = User.find(params[:id])
end
  def signup # serves as the conventional "new" action
@user = User.new
end
  # Create an account
def create
@user = User.new(user_params)
if @user.save
@user.send_activation_email
flash[:info] = "メールをチェックしてアカウントを認証してください"
redirect_to root_url
# log_in @user
# flash[:success] = "アカウントを作成しました"
# redirect_to @user #@user == user_url(@user)
else
render 'signup'
end
end
def edit
@user = User.find(params[:id])
end
def update
@user = User.find(params[:id])
    # When the update succeeds
if @user.update_attributes(user_params)
flash[:success] = "プロフィールを更新しました"
redirect_to @user
else
render 'edit'
end
end
def destroy
User.find(params[:id]).destroy
flash[:success] = "ユーザーを削除しました"
redirect_to users_url
end
  # Not callable from outside the controller
private
  # Strong Parameters (specify the required and permitted parameters)
def user_params
params.require(:user).permit(:name, :email, :password,
:password_confirmation, :a_word, :introduction)
end
  # before_action filters
  # Confirm the user is logged in
def logged_in_user
    # If the given user is not logged in
unless logged_in?
store_location
flash[:danger] = "ログインしてください"
redirect_to login_url
end
end
  # Confirm this is the correct user
def correct_user
@user = User.find(params[:id])
    # Trailing unless: redirect_to fires when current_user? does not hold
unless current_user?(@user)
redirect_to(root_url)
flash[:danger] = "無効な操作です"
end
end
  # Confirm the user is an administrator
def admin_user
    # Does the logged-in user have the admin attribute? Redirect if not
unless current_user.admin?
redirect_to(root_url)
flash[:danger] = "無効な操作です"
end
end
end
| 21.717172 | 69 | 0.646977 |
3829c91bdf5dee6a5d44d73023a5f675d92f2650 | 669 | require 'spec_helper'
RSpec.describe "EipValidator::Loader" do
let(:category) { 'Core' }
let(:type) { 'Standards Track' }
let(:status) { 'Final' }
let(:eip){
{
"eip" => 145,
"title" => 'Bitwise shifting instructions in EVM',
"author" => 'Alex Beregszaszi, Paweł Bylica',
"type" => type,
"category" => category,
"status" => status,
"created" => Date.parse('2017-02-13')
}
}
let(:file_name) {
'spec/fixtures/valid/eip-145.md'
}
subject(:loader){ EipValidator::Loader.load(file_name)}
describe "valid" do
it "should have required fields" do
expect(loader).to eq(eip)
end
end
end | 23.068966 | 57 | 0.588939 |
08e8023f992a5907b39b77ee1d67e4f6cc885ff5 | 1,859 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
require 'social_stream-base'
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
end
end
| 41.311111 | 99 | 0.724045 |
bb5cf7758e672fa55f9929846e55a8bd4ea2fdb5 | 597 | require 'spec_helper_acceptance'
describe 'memcached' do
context 'with all defaults' do
let(:pp) do
'include memcached'
end
it 'works idempotently with no errors' do
apply_manifest(pp, catch_failures: true)
apply_manifest(pp, catch_changes: true)
end
describe service('memcached') do
it { is_expected.to be_enabled }
it { is_expected.to be_running }
end
describe port(11_211) do
it { is_expected.to be_listening.on('127.0.0.1').with('tcp') }
it { is_expected.to be_listening.on('127.0.0.1').with('udp') }
end
end
end
| 25.956522 | 68 | 0.658291 |
4a890b99d4a1eb5b28d05a499dfe0289695fe03a | 152 | class ServerTimestampController < ApplicationController
def show
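    # '%Q' renders the current time as milliseconds since the Unix epoch (as a string)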
render json: { server_timestamp: DateTime.now.strftime('%Q') }.as_json
end
end
| 25.333333 | 74 | 0.769737 |
083ffb3b47e46aefb50670596e68033e4ba1476c | 423 | class CreateUsers < ActiveRecord::Migration[5.2]
def change
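    # Note: the hstore column below assumes the PostgreSQL hstore extension is
    # already enabled (e.g. via enable_extension :hstore in an earlier migration).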
create_table :users do |t|
t.string :provider
t.string :uid
t.string :name
t.string :oauth_token
t.string :oauth_expires_at
t.string :email
t.string :sha
t.hstore :extra
t.boolean :admin, :default => false
t.timestamps :null => false
end
end
end
| 24.882353 | 48 | 0.550827 |
62dbc2da1e5352513d9b2cf13d5dbe8f4cf07dce | 2,694 | require 'test_helper'
describe Outpost::Expectations::ResponseBody do
class SubjectBody
class << self
attr_reader :expectation, :evaluation_method
def expect(expectation, evaluation_method)
@expectation = expectation
@evaluation_method = evaluation_method
end
end
extend Outpost::Expectations::ResponseBody
end
describe ".evaluate_response_body with match" do
it "should return true when it matches" do
assert SubjectBody.evaluate_response_body(scout_stub, :match => /ll/)
end
it "should return false when it doesn't" do
refute SubjectBody.evaluate_response_body(scout_stub, :match => /omg/)
end
end
describe ".evaluate_response_body with not_match" do
it "should return true when it matches" do
assert SubjectBody.evaluate_response_body(scout_stub, :not_match => /omg/)
end
it "should return false when it doesn't" do
refute SubjectBody.evaluate_response_body(scout_stub, :not_match => /Hello/)
end
end
describe ".evaluate_response_body with equals" do
it "should return true when it matches" do
assert SubjectBody.evaluate_response_body(scout_stub, :equals => "Hello!")
end
it "should return false when it doesn't" do
refute SubjectBody.evaluate_response_body(scout_stub, :equals => "Hell")
end
end
describe ".evaluate_response_body with differs" do
it "should return true when it matches" do
assert SubjectBody.evaluate_response_body(scout_stub, :differs => "Hell")
end
it "should return false when it doesn't" do
refute SubjectBody.evaluate_response_body(scout_stub, :differs => "Hello!")
end
end
describe ".evaluate_response_body with multiple rules" do
it "should return true when all rules matches" do
rules = {:differs => 'omg', :match => /ll/}
assert SubjectBody.evaluate_response_body(scout_stub, rules)
end
it "should return false when there are no matches" do
rules = {:equals => 'omg', :not_match => /ll/}
refute SubjectBody.evaluate_response_body(scout_stub, rules)
end
it "should return false when at least one rule doesn't match" do
rules = {:equals => 'Hello!', :match => /Hell/, :differs => 'Hello!'}
refute SubjectBody.evaluate_response_body(scout_stub, rules)
end
end
it "should set expectation correctly" do
assert_equal :response_body, SubjectBody.expectation
end
it "should set evaluation method correctly" do
assert_equal SubjectBody.method(:evaluate_response_body), \
SubjectBody.evaluation_method
end
private
def scout_stub
build_stub(:response_body => 'Hello!')
end
end
| 30.613636 | 82 | 0.707127 |
18541b0a685939e83e9f84273277220002196a5f | 210 | require "manageiq/providers/cloud_manager"
require "vm"
module ManageIQ::Providers
class CloudManager
class Vm < ::Vm
belongs_to :availability_zone
belongs_to :cloud_tenant
end
end
end
| 17.5 | 42 | 0.728571 |
28ab9fe6bf7a066b7f3d9cdad4d8157e0e8bc9c8 | 1,038 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'interest_graph/version'
Gem::Specification.new do |spec|
spec.name = "interest-graph-ruby"
spec.version = InterestGraph::VERSION
spec.authors = ["Marshall Shen"]
spec.email = ["[email protected]"]
  spec.summary       = %q{Ruby client for Prismatic Interest Graph API.}
  spec.description   = %q{Ruby client for Prismatic Interest Graph API.}
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "httparty"
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "webmock", "~> 1.20"
end
| 37.071429 | 74 | 0.660886 |
386156870850588c51391e37d7be64c23aa8fa9e | 23,993 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/cloud/errors"
require "google/cloud/pubsub/topic/list"
require "google/cloud/pubsub/async_publisher"
require "google/cloud/pubsub/batch_publisher"
require "google/cloud/pubsub/subscription"
require "google/cloud/pubsub/policy"
module Google
module Cloud
module PubSub
##
# # Topic
#
# A named resource to which messages are published.
#
# See {Project#create_topic} and {Project#topic}.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.publish "task completed"
#
class Topic
##
# @private The Service object.
attr_accessor :service
##
# @private The Google::Cloud::PubSub::V1::Topic object.
attr_accessor :grpc
##
# @private Create an empty {Topic} object.
def initialize
@service = nil
@grpc = nil
@resource_name = nil
@exists = nil
@async_opts = {}
end
##
# AsyncPublisher object used to publish multiple messages in batches.
#
# @return [AsyncPublisher] Returns publisher object if calls to
# {#publish_async} have been made, returns `nil` otherwise.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.publish_async "task completed" do |result|
# if result.succeeded?
# log_publish_success result.data
# else
# log_publish_failure result.data, result.error
# end
# end
#
# topic.async_publisher.stop.wait!
#
def async_publisher
@async_publisher
end
##
# The name of the topic in the form of
# "/projects/project-identifier/topics/topic-name".
#
# @return [String]
#
def name
return @resource_name if reference?
@grpc.name
end
##
# A hash of user-provided labels associated with this topic. Labels can
# be used to organize and group topics. See [Creating and Managing
# Labels](https://cloud.google.com/pubsub/docs/labels).
#
# The returned hash is frozen and changes are not allowed. Use
# {#labels=} to update the labels for this topic.
#
# Makes an API call to retrieve the labels values when called on a
# reference object. See {#reference?}.
#
# @return [Hash] The frozen labels hash.
#
def labels
ensure_grpc!
@grpc.labels.to_h.freeze
end
##
# Sets the hash of user-provided labels associated with this
# topic. Labels can be used to organize and group topics.
# Label keys and values can be no longer than 63 characters, can only
# contain lowercase letters, numeric characters, underscores and dashes.
# International characters are allowed. Label values are optional. Label
# keys must start with a letter and each label in the list must have a
# different key. See [Creating and Managing
# Labels](https://cloud.google.com/pubsub/docs/labels).
#
# @param [Hash] new_labels The new labels hash.
#
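        # @example A minimal sketch (the label key/value shown is illustrative):
        #   require "google/cloud/pubsub"
        #
        #   pubsub = Google::Cloud::PubSub.new
        #
        #   topic = pubsub.topic "my-topic"
        #   topic.labels = { "environment" => "production" }
        #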
def labels= new_labels
raise ArgumentError, "Value must be a Hash" if new_labels.nil?
update_grpc = Google::Cloud::PubSub::V1::Topic.new \
name: name, labels: new_labels
@grpc = service.update_topic update_grpc, :labels
@resource_name = nil
end
##
# Permanently deletes the topic.
#
# @return [Boolean] Returns `true` if the topic was deleted.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.delete
#
def delete
ensure_service!
service.delete_topic name
true
end
##
# Creates a new {Subscription} object on the current Topic.
#
# @param [String] subscription_name Name of the new subscription. Must
# start with a letter, and contain only letters ([A-Za-z]), numbers
        #   ([0-9]), dashes (-), underscores (_), periods (.), tildes (~), plus
# (+) or percent signs (%). It must be between 3 and 255 characters in
# length, and it must not start with "goog". Required.
# @param [Integer] deadline The maximum number of seconds after a
# subscriber receives a message before the subscriber should
# acknowledge the message.
# @param [Boolean] retain_acked Indicates whether to retain acknowledged
# messages. If `true`, then messages are not expunged from the
# subscription's backlog, even if they are acknowledged, until they
# fall out of the `retention` window. Default is `false`.
# @param [Numeric] retention How long to retain unacknowledged messages
# in the subscription's backlog, from the moment a message is
# published. If `retain_acked` is `true`, then this also configures
# the retention of acknowledged messages, and thus configures how far
# back in time a {Subscription#seek} can be done. Cannot be more than
# 604,800 seconds (7 days) or less than 600 seconds (10 minutes).
# Default is 604,800 seconds (7 days).
# @param [String] endpoint A URL locating the endpoint to which messages
# should be pushed.
# @param [Hash] labels A hash of user-provided labels associated with
# the subscription. You can use these to organize and group your
# subscriptions. Label keys and values can be no longer than 63
# characters, can only contain lowercase letters, numeric characters,
# underscores and dashes. International characters are allowed. Label
# values are optional. Label keys must start with a letter and each
# label in the list must have a different key. See [Creating and
# Managing Labels](https://cloud.google.com/pubsub/docs/labels).
#
# @return [Google::Cloud::PubSub::Subscription]
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# sub = topic.subscribe "my-topic-sub"
# sub.name # => "my-topic-sub"
#
# @example Wait 2 minutes for acknowledgement and push all to endpoint:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# sub = topic.subscribe "my-topic-sub",
# deadline: 120,
# endpoint: "https://example.com/push"
#
def subscribe subscription_name, deadline: nil, retain_acked: false,
retention: nil, endpoint: nil, labels: nil
ensure_service!
options = { deadline: deadline, retain_acked: retain_acked,
retention: retention, endpoint: endpoint, labels: labels }
grpc = service.create_subscription name, subscription_name, options
Subscription.from_grpc grpc, service
end
alias create_subscription subscribe
alias new_subscription subscribe
##
# Retrieves subscription by name.
#
# @param [String] subscription_name Name of a subscription.
# @param [Boolean] skip_lookup Optionally create a {Subscription} object
# without verifying the subscription resource exists on the Pub/Sub
# service. Calls made on this object will raise errors if the service
# resource does not exist. Default is `false`.
#
# @return [Google::Cloud::PubSub::Subscription, nil] Returns `nil` if
# the subscription does not exist.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
#
# sub = topic.subscription "my-topic-sub"
# sub.name #=> "projects/my-project/subscriptions/my-topic-sub"
#
# @example Skip the lookup against the service with `skip_lookup`:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
#
# # No API call is made to retrieve the subscription information.
# sub = topic.subscription "my-topic-sub", skip_lookup: true
# sub.name #=> "projects/my-project/subscriptions/my-topic-sub"
#
def subscription subscription_name, skip_lookup: nil
ensure_service!
if skip_lookup
return Subscription.from_name subscription_name, service
end
grpc = service.get_subscription subscription_name
Subscription.from_grpc grpc, service
rescue Google::Cloud::NotFoundError
nil
end
alias get_subscription subscription
alias find_subscription subscription
##
# Retrieves a list of subscription names for the given project.
#
# @param [String] token The `token` value returned by the last call to
# `subscriptions`; indicates that this is a continuation of a call,
# and that the system should return the next page of data.
# @param [Integer] max Maximum number of subscriptions to return.
#
# @return [Array<Subscription>] (See {Subscription::List})
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# subscriptions = topic.subscriptions
# subscriptions.each do |subscription|
# puts subscription.name
# end
#
# @example Retrieve all subscriptions: (See {Subscription::List#all})
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# subscriptions = topic.subscriptions
# subscriptions.all do |subscription|
# puts subscription.name
# end
#
def subscriptions token: nil, max: nil
ensure_service!
options = { token: token, max: max }
grpc = service.list_topics_subscriptions name, options
Subscription::List.from_topic_grpc grpc, service, name, max
end
alias find_subscriptions subscriptions
alias list_subscriptions subscriptions
##
# Publishes one or more messages to the topic.
#
# The message payload must not be empty; it must contain either a
# non-empty data field, or at least one attribute.
#
# @param [String, File] data The message payload. This will be converted
# to bytes encoded as ASCII-8BIT.
# @param [Hash] attributes Optional attributes for the message.
# @yield [batch] a block for publishing multiple messages in one
# request
# @yieldparam [BatchPublisher] batch the topic batch publisher
# object
#
# @return [Message, Array<Message>] Returns the published message when
# called without a block, or an array of messages when called with a
# block.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# msg = topic.publish "task completed"
#
# @example A message can be published using a File object:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# file = File.open "message.txt", mode: "rb"
# msg = topic.publish file
#
# @example Additionally, a message can be published with attributes:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# msg = topic.publish "task completed",
# foo: :bar,
# this: :that
#
# @example Multiple messages can be sent at the same time using a block:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# msgs = topic.publish do |t|
# t.publish "task 1 completed", foo: :bar
# t.publish "task 2 completed", foo: :baz
# t.publish "task 3 completed", foo: :bif
# end
#
def publish data = nil, attributes = {}
ensure_service!
batch = BatchPublisher.new data, attributes
yield batch if block_given?
return nil if batch.messages.count.zero?
publish_batch_messages batch
end
##
# Publishes a message asynchronously to the topic.
#
# The message payload must not be empty; it must contain either a
# non-empty data field, or at least one attribute.
#
# @param [String, File] data The message payload. This will be converted
# to bytes encoded as ASCII-8BIT.
# @param [Hash] attributes Optional attributes for the message.
# @yield [result] the callback for when the message has been published
# @yieldparam [PublishResult] result the result of the asynchronous
# publish
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.publish_async "task completed" do |result|
# if result.succeeded?
# log_publish_success result.data
# else
# log_publish_failure result.data, result.error
# end
# end
#
# topic.async_publisher.stop.wait!
#
# @example A message can be published using a File object:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# file = File.open "message.txt", mode: "rb"
# topic.publish_async file
#
# topic.async_publisher.stop.wait!
#
# @example Additionally, a message can be published with attributes:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.publish_async "task completed",
# foo: :bar, this: :that
#
# topic.async_publisher.stop.wait!
#
def publish_async data = nil, attributes = {}, &block
ensure_service!
@async_publisher ||= AsyncPublisher.new(name, service, @async_opts)
@async_publisher.publish data, attributes, &block
end
##
# Gets the [Cloud IAM](https://cloud.google.com/iam/) access control
# policy for this topic.
#
# @see https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#iampolicy
# google.iam.v1.IAMPolicy
#
# @yield [policy] A block for updating the policy. The latest policy
# will be read from the Pub/Sub service and passed to the block. After
# the block completes, the modified policy will be written to the
# service.
# @yieldparam [Policy] policy the current Cloud IAM Policy for this
# topic
#
# @return [Policy] the current Cloud IAM Policy for this topic
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# policy = topic.policy
#
# @example Update the policy by passing a block:
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# topic.policy do |p|
# p.add "roles/owner", "user:[email protected]"
# end
#
def policy
ensure_service!
grpc = service.get_topic_policy name
policy = Policy.from_grpc grpc
return policy unless block_given?
yield policy
update_policy policy
end
##
# Updates the [Cloud IAM](https://cloud.google.com/iam/) access control
# policy for this topic. The policy should be read from {#policy}. See
# {Google::Cloud::PubSub::Policy} for an explanation of the policy
# `etag` property and how to modify policies.
#
# You can also update the policy by passing a block to {#policy}, which
# will call this method internally after the block completes.
#
# @see https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#iampolicy
# google.iam.v1.IAMPolicy
#
# @param [Policy] new_policy a new or modified Cloud IAM Policy for this
# topic
#
# @return [Policy] the policy returned by the API update operation
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# policy = topic.policy # API call
#
# policy.add "roles/owner", "user:[email protected]"
#
# topic.update_policy policy # API call
#
def update_policy new_policy
ensure_service!
grpc = service.set_topic_policy name, new_policy.to_grpc
@policy = Policy.from_grpc grpc
end
alias policy= update_policy
##
# Tests the specified permissions against the [Cloud
# IAM](https://cloud.google.com/iam/) access control policy.
#
# @see https://cloud.google.com/iam/docs/managing-policies Managing
# Policies
#
# @param [String, Array<String>] permissions The set of permissions to
# check access for. Permissions with wildcards (such as `*` or
# `storage.*`) are not allowed.
#
# The permissions that can be checked on a topic are:
#
# * pubsub.topics.publish
# * pubsub.topics.attachSubscription
# * pubsub.topics.get
# * pubsub.topics.delete
# * pubsub.topics.update
# * pubsub.topics.getIamPolicy
# * pubsub.topics.setIamPolicy
#
        # @return [Array<String>] The permissions that have access.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
# perms = topic.test_permissions "pubsub.topics.get",
# "pubsub.topics.publish"
# perms.include? "pubsub.topics.get" #=> true
# perms.include? "pubsub.topics.publish" #=> false
#
def test_permissions *permissions
          permissions = Array(permissions).flatten
ensure_service!
grpc = service.test_topic_permissions name, permissions
grpc.permissions
end
##
# Determines whether the topic exists in the Pub/Sub service.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.exists? #=> true
#
def exists?
# Always true if the object is not set as reference
return true unless reference?
# If we have a value, return it
return @exists unless @exists.nil?
ensure_grpc!
@exists = true
rescue Google::Cloud::NotFoundError
@exists = false
end
##
# Determines whether the topic object was created without retrieving the
# resource representation from the Pub/Sub service.
#
# @return [Boolean] `true` when the topic was created without a resource
# representation, `false` otherwise.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic", skip_lookup: true
# topic.reference? #=> true
#
def reference?
@grpc.nil?
end
##
# Determines whether the topic object was created with a resource
# representation from the Pub/Sub service.
#
# @return [Boolean] `true` when the topic was created with a resource
# representation, `false` otherwise.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
#
# topic = pubsub.topic "my-topic"
# topic.resource? #=> true
#
def resource?
[email protected]?
end
##
# @private New Topic from a Google::Cloud::PubSub::V1::Topic object.
def self.from_grpc grpc, service, async: nil
new.tap do |t|
t.grpc = grpc
t.service = service
t.instance_variable_set :@async_opts, async if async
end
end
##
# @private New reference {Topic} object without making an HTTP request.
def self.from_name name, service, options = {}
name = service.topic_path name, options
from_grpc(nil, service).tap do |t|
t.instance_variable_set :@resource_name, name
end
end
protected
##
# @private Raise an error unless an active connection to the service is
# available.
def ensure_service!
raise "Must have active connection to service" unless service
end
##
# Ensures a Google::Cloud::PubSub::V1::Topic object exists.
def ensure_grpc!
ensure_service!
@grpc = service.get_topic name if reference?
@resource_name = nil
end
##
        # Call the publish API with arrays of data and attrs.
def publish_batch_messages batch
grpc = service.publish name, batch.messages
batch.to_gcloud_messages Array(grpc.message_ids)
end
end
end
Pubsub = PubSub unless const_defined? :Pubsub
end
end
| 36.519026 | 89 | 0.573292 |
e2ef528f03004fa203e44178e18b3ab60e892643 | 19,610 | # frozen_string_literal: true
module Faraday
# Connection objects manage the default properties and the middleware
# stack for fulfilling an HTTP request.
#
# @example
#
# conn = Faraday::Connection.new 'http://sushi.com'
#
# # GET http://sushi.com/nigiri
# conn.get 'nigiri'
# # => #<Faraday::Response>
#
class Connection
# A Set of allowed HTTP verbs.
METHODS = Set.new %i[get post put delete head patch options trace]
# @return [Hash] URI query unencoded key/value pairs.
attr_reader :params
# @return [Hash] unencoded HTTP header key/value pairs.
attr_reader :headers
# @return [String] a URI with the prefix used for all requests from this
# Connection. This includes a default host name, scheme, port, and path.
attr_reader :url_prefix
# @return [Faraday::Builder] Builder for this Connection.
attr_reader :builder
# @return [Hash] SSL options.
attr_reader :ssl
# @return [Object] the parallel manager for this Connection.
attr_reader :parallel_manager
# Sets the default parallel manager for this connection.
attr_writer :default_parallel_manager
# @return [Hash] proxy options.
attr_reader :proxy
# Initializes a new Faraday::Connection.
#
# @param url [URI, String] URI or String base URL to use as a prefix for all
# requests (optional).
# @param options [Hash, Faraday::ConnectionOptions]
# @option options [URI, String] :url ('http:/') URI or String base URL
# @option options [Hash<String => String>] :params URI query unencoded
# key/value pairs.
# @option options [Hash<String => String>] :headers Hash of unencoded HTTP
# header key/value pairs.
# @option options [Hash] :request Hash of request options.
# @option options [Hash] :ssl Hash of SSL options.
# @option options [Hash, URI, String] :proxy proxy options, either as a URL
# or as a Hash
# @option options [URI, String] :proxy[:uri]
# @option options [String] :proxy[:user]
# @option options [String] :proxy[:password]
# @yield [self] after all setup has been done
def initialize(url = nil, options = nil)
options = ConnectionOptions.from(options)
if url.is_a?(Hash) || url.is_a?(ConnectionOptions)
options = options.merge(url)
url = options.url
end
@parallel_manager = nil
@headers = Utils::Headers.new
@params = Utils::ParamsHash.new
@options = options.request
@ssl = options.ssl
@default_parallel_manager = options.parallel_manager
@builder = options.builder || begin
# pass an empty block to Builder so it doesn't assume default middleware
options.new_builder(block_given? ? proc { |b| } : nil)
end
self.url_prefix = url || 'http:/'
@params.update(options.params) if options.params
@headers.update(options.headers) if options.headers
initialize_proxy(url, options)
yield(self) if block_given?
@headers[:user_agent] ||= "Faraday v#{VERSION}"
end
def initialize_proxy(url, options)
@manual_proxy = !!options.proxy
@proxy =
if options.proxy
ProxyOptions.from(options.proxy)
else
proxy_from_env(url)
end
end
# Sets the Hash of URI query unencoded key/value pairs.
# @param hash [Hash]
def params=(hash)
@params.replace hash
end
# Sets the Hash of unencoded HTTP header key/value pairs.
# @param hash [Hash]
def headers=(hash)
@headers.replace hash
end
extend Forwardable
def_delegators :builder, :build, :use, :request, :response, :adapter, :app
# Closes the underlying resources and/or connections. In the case of
# persistent connections, this closes all currently open connections
# but does not prevent new connections from being made.
def close
app.close
end
# @!method get(url = nil, params = nil, headers = nil)
# Makes a GET HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.get '/items', { page: 1 }, :accept => 'application/json'
#
# # ElasticSearch example sending a body with GET.
# conn.get '/twitter/tweet/_search' do |req|
# req.headers[:content_type] = 'application/json'
# req.params[:routing] = 'kimchy'
# req.body = JSON.generate(query: {...})
# end
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method head(url = nil, params = nil, headers = nil)
# Makes a HEAD HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.head '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method delete(url = nil, params = nil, headers = nil)
# Makes a DELETE HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.delete '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method trace(url = nil, params = nil, headers = nil)
# Makes a TRACE HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
    #     conn.trace '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!visibility private
METHODS_WITH_QUERY.each do |method|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(url = nil, params = nil, headers = nil)
run_request(:#{method}, url, nil, headers) do |request|
request.params.update(params) if params
yield request if block_given?
end
end
RUBY
end
# @overload options()
# Returns current Connection options.
#
# @overload options(url, params = nil, headers = nil)
# Makes an OPTIONS HTTP request to the given URL.
    #   @param url [String] String base URL to use as a prefix for all requests.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.options '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
def options(*args)
return @options if args.size.zero?
url, params, headers = *args
run_request(:options, url, nil, headers) do |request|
request.params.update(params) if params
yield request if block_given?
end
end
# @!method post(url = nil, body = nil, headers = nil)
# Makes a POST HTTP request with a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param body [String] body for the request.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.post '/items', data, content_type: 'application/json'
#
# # Simple ElasticSearch indexing sample.
# conn.post '/twitter/tweet' do |req|
# req.headers[:content_type] = 'application/json'
# req.params[:routing] = 'kimchy'
# req.body = JSON.generate(user: 'kimchy', ...)
# end
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method put(url = nil, body = nil, headers = nil)
# Makes a PUT HTTP request with a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param body [String] body for the request.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# # TODO: Make it a PUT example
# conn.post '/items', data, content_type: 'application/json'
#
# # Simple ElasticSearch indexing sample.
# conn.post '/twitter/tweet' do |req|
# req.headers[:content_type] = 'application/json'
# req.params[:routing] = 'kimchy'
# req.body = JSON.generate(user: 'kimchy', ...)
# end
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!visibility private
METHODS_WITH_BODY.each do |method|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(url = nil, body = nil, headers = nil, &block)
run_request(:#{method}, url, body, headers, &block)
end
RUBY
end
# Sets up the Authorization header with these credentials, encoded
# with base64.
#
# @param login [String] The authentication login.
# @param pass [String] The authentication password.
#
# @example
#
# conn.basic_auth 'Aladdin', 'open sesame'
# conn.headers['Authorization']
# # => "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="
#
# @return [void]
def basic_auth(login, pass)
set_authorization_header(:basic_auth, login, pass)
end
# Sets up the Authorization header with the given token.
#
# @param token [String]
# @param options [Hash] extra token options.
#
# @example
#
# conn.token_auth 'abcdef', foo: 'bar'
# conn.headers['Authorization']
# # => "Token token=\"abcdef\",
# foo=\"bar\""
#
# @return [void]
def token_auth(token, options = nil)
set_authorization_header(:token_auth, token, options)
end
# Sets up a custom Authorization header.
#
# @param type [String] authorization type
# @param token [String, Hash] token. A String value is taken literally, and
# a Hash is encoded into comma-separated key/value pairs.
#
# @example
#
# conn.authorization :Bearer, 'mF_9.B5f-4.1JqM'
# conn.headers['Authorization']
# # => "Bearer mF_9.B5f-4.1JqM"
#
# conn.authorization :Token, token: 'abcdef', foo: 'bar'
# conn.headers['Authorization']
# # => "Token token=\"abcdef\",
# foo=\"bar\""
#
# @return [void]
def authorization(type, token)
set_authorization_header(:authorization, type, token)
end
# Check if the adapter is parallel-capable.
#
# @yield if the adapter isn't parallel-capable, or if no adapter is set yet.
#
# @return [Object, nil] a parallel manager or nil if yielded
# @api private
def default_parallel_manager
@default_parallel_manager ||= begin
adapter = @builder.adapter.klass if @builder.adapter
if support_parallel?(adapter)
adapter.setup_parallel_manager
elsif block_given?
yield
end
end
end
# Determine if this Faraday::Connection can make parallel requests.
#
# @return [Boolean]
def in_parallel?
!!@parallel_manager
end
# Sets up the parallel manager to make a set of requests.
#
# @param manager [Object] The parallel manager that this Connection's
# Adapter uses.
#
# @yield a block to execute multiple requests.
# @return [void]
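    #
    # @example A sketch of batching two requests; assumes a parallel-capable
    #   adapter (e.g. typhoeus) is configured on this connection:
    #
    #   conn.in_parallel do
    #     @resp1 = conn.get('/one')
    #     @resp2 = conn.get('/two')
    #   end
    #   # both responses are populated once the block returns
    #   @resp1.body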
def in_parallel(manager = nil)
@parallel_manager = manager || default_parallel_manager do
warn 'Warning: `in_parallel` called but no parallel-capable adapter ' \
'on Faraday stack'
warn caller[2, 10].join("\n")
nil
end
yield
@parallel_manager&.run
ensure
@parallel_manager = nil
end
# Sets the Hash proxy options.
#
# @param new_value [Object]
def proxy=(new_value)
@manual_proxy = true
@proxy = new_value ? ProxyOptions.from(new_value) : nil
end
def_delegators :url_prefix, :scheme, :scheme=, :host, :host=, :port, :port=
def_delegator :url_prefix, :path, :path_prefix
# Parses the given URL with URI and stores the individual
# components in this connection. These components serve as defaults for
# requests made by this connection.
#
# @param url [String, URI]
# @param encoder [Object]
#
# @example
#
# conn = Faraday::Connection.new { ... }
# conn.url_prefix = "https://sushi.com/api"
# conn.scheme # => https
# conn.path_prefix # => "/api"
#
# conn.get("nigiri?page=2") # accesses https://sushi.com/api/nigiri
def url_prefix=(url, encoder = nil)
uri = @url_prefix = Utils.URI(url)
self.path_prefix = uri.path
params.merge_query(uri.query, encoder)
uri.query = nil
with_uri_credentials(uri) do |user, password|
basic_auth user, password
uri.user = uri.password = nil
end
end
# Sets the path prefix and ensures that it always has a leading
# slash.
#
# @param value [String]
#
# @return [String] the new path prefix
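    #
    # @example (illustrative)
    #   conn.path_prefix = 'api'
    #   conn.path_prefix # => '/api' (a leading slash is added automatically)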
def path_prefix=(value)
url_prefix.path = if value
value = '/' + value unless value[0, 1] == '/'
value
end
end
# Takes a relative url for a request and combines it with the defaults
# set on the connection instance.
#
# @param url [String]
# @param extra_params [Hash]
#
# @example
# conn = Faraday::Connection.new { ... }
# conn.url_prefix = "https://sushi.com/api?token=abc"
# conn.scheme # => https
# conn.path_prefix # => "/api"
#
# conn.build_url("nigiri?page=2")
# # => https://sushi.com/api/nigiri?token=abc&page=2
#
# conn.build_url("nigiri", page: 2)
# # => https://sushi.com/api/nigiri?token=abc&page=2
#
def build_url(url = nil, extra_params = nil)
uri = build_exclusive_url(url)
query_values = params.dup.merge_query(uri.query, options.params_encoder)
query_values.update(extra_params) if extra_params
uri.query =
if query_values.empty?
nil
else
query_values.to_query(options.params_encoder)
end
uri
end
# Builds and runs the Faraday::Request.
#
# @param method [Symbol] HTTP method.
# @param url [String, URI] String or URI to access.
# @param body [Object] The request body that will eventually be converted to
# a string.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @return [Faraday::Response]
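    #
    # @example A minimal sketch (the path, header and param are placeholders):
    #
    #   conn.run_request(:get, '/nigiri', nil, 'Accept' => 'application/json') do |req|
    #     req.params[:page] = 1
    #   end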
def run_request(method, url, body, headers)
unless METHODS.include?(method)
raise ArgumentError, "unknown http method: #{method}"
end
request = build_request(method) do |req|
req.options.proxy = proxy_for_request(url)
req.url(url) if url
req.headers.update(headers) if headers
req.body = body if body
yield(req) if block_given?
end
builder.build_response(self, request)
end
# Creates and configures the request object.
#
# @param method [Symbol]
#
# @yield [Faraday::Request] if block given
# @return [Faraday::Request]
def build_request(method)
Request.create(method) do |req|
req.params = params.dup
req.headers = headers.dup
req.options = options.dup
yield(req) if block_given?
end
end
# Build an absolute URL based on url_prefix.
#
# @param url [String, URI]
# @param params [Faraday::Utils::ParamsHash] A Faraday::Utils::ParamsHash to
# replace the query values
# of the resulting url (default: nil).
#
# @return [URI]
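    #
    # @example (illustrative; assumes url_prefix was set to 'https://sushi.com/api')
    #
    #   conn.build_exclusive_url('nigiri')
    #   # => #<URI::HTTPS https://sushi.com/api/nigiri>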
def build_exclusive_url(url = nil, params = nil, params_encoder = nil)
url = nil if url.respond_to?(:empty?) && url.empty?
base = url_prefix
if url && base.path && base.path !~ %r{/$}
base = base.dup
base.path = base.path + '/' # ensure trailing slash
end
uri = url ? base + url : base
if params
uri.query = params.to_query(params_encoder || options.params_encoder)
end
# rubocop:disable Style/SafeNavigation
uri.query = nil if uri.query && uri.query.empty?
# rubocop:enable Style/SafeNavigation
uri
end
# Creates a duplicate of this Faraday::Connection.
#
# @api private
#
# @return [Faraday::Connection]
def dup
self.class.new(build_exclusive_url,
headers: headers.dup,
params: params.dup,
builder: builder.dup,
ssl: ssl.dup,
request: options.dup)
end
# Yields username and password extracted from a URI if they both exist.
#
# @param uri [URI]
# @yield [username, password] any username and password
# @yieldparam username [String] any username from URI
# @yieldparam password [String] any password from URI
# @return [void]
# @api private
def with_uri_credentials(uri)
return unless uri.user && uri.password
yield(Utils.unescape(uri.user), Utils.unescape(uri.password))
end
def set_authorization_header(header_type, *args)
header = Faraday::Request
.lookup_middleware(header_type)
.header(*args)
headers[Faraday::Request::Authorization::KEY] = header
end
def proxy_from_env(url)
return if Faraday.ignore_env_proxy
uri = nil
if URI.parse('').respond_to?(:find_proxy)
case url
when String
uri = Utils.URI(url)
uri = URI.parse("#{uri.scheme}://#{uri.hostname}").find_proxy
when URI
uri = url.find_proxy
when nil
uri = find_default_proxy
end
else
warn 'no_proxy is unsupported' if ENV['no_proxy'] || ENV['NO_PROXY']
uri = find_default_proxy
end
ProxyOptions.from(uri) if uri
end
def find_default_proxy
uri = ENV['http_proxy']
return unless uri && !uri.empty?
uri = 'http://' + uri unless uri.match?(/^http/i)
uri
end
def proxy_for_request(url)
return proxy if @manual_proxy
if url && Utils.URI(url).absolute?
proxy_from_env(url)
else
proxy
end
end
def support_parallel?(adapter)
adapter&.respond_to?(:supports_parallel?) && adapter&.supports_parallel?
end
end
end
| 31.886179 | 80 | 0.612035 |
ab92e08f423858f22de160a14f93a258f531acb7 | 10,386 | # deep_merge was written by Steve Midgley, and is now maintained by Daniel DeLeo.
# The official home of deep_merge on the internet is now
# https://github.com/danielsdeleo/deep_merge
#
# Copyright (c) 2008 Steve Midgley, released under the MIT license
module DeepMerge
class InvalidParameter < StandardError; end
DEFAULT_FIELD_KNOCKOUT_PREFIX = '--'
# Deep Merge core documentation.
# deep_merge! method permits merging of arbitrary child elements. The two top level
# elements must be hashes. These hashes can contain unlimited (to stack limit) levels
# of child elements. These child elements to not have to be of the same types.
# Where child elements are of the same type, deep_merge will attempt to merge them together.
# Where child elements are not of the same type, deep_merge will skip or optionally overwrite
# the destination element with the contents of the source element at that level.
# So if you have two hashes like this:
# source = {:x => [1,2,3], :y => 2}
# dest = {:x => [4,5,'6'], :y => [7,8,9]}
# dest.deep_merge!(source)
# Results: {:x => [1,2,3,4,5,'6'], :y => 2}
# By default, "deep_merge!" will overwrite any unmergeables and merge everything else.
# To avoid this, use "deep_merge" (no bang/exclamation mark)
#
# Options:
# Options are specified in the last parameter passed, which should be in hash format:
# hash.deep_merge!({:x => [1,2]}, {:knockout_prefix => '--'})
# :preserve_unmergeables DEFAULT: false
# Set to true to skip any unmergeable elements from source
# :knockout_prefix DEFAULT: nil
# Set to string value to signify prefix which deletes elements from existing element
# :sort_merged_arrays DEFAULT: false
# Set to true to sort all arrays that are merged together
# :unpack_arrays DEFAULT: nil
# Set to string value to run "Array::join" then "String::split" against all arrays
# :merge_hash_arrays DEFAULT: false
# Set to true to merge hashes within arrays
# :merge_debug DEFAULT: false
# Set to true to get console output of merge process for debugging
#
# Selected Options Details:
# :knockout_prefix => The purpose of this is to provide a way to remove elements
# from existing Hash by specifying them in a special way in incoming hash
# source = {:x => ['--1', '2']}
# dest = {:x => ['1', '3']}
# dest.ko_deep_merge!(source)
# Results: {:x => ['2','3']}
# Additionally, if the knockout_prefix is passed alone as a string, it will cause
# the entire element to be removed:
# source = {:x => '--'}
# dest = {:x => [1,2,3]}
# dest.ko_deep_merge!(source)
# Results: {:x => ""}
# :unpack_arrays => The purpose of this is to permit compound elements to be passed
# in as strings and to be converted into discrete array elements
  #     source = {:x => ['1,2,3', '4']}
# dest = {:x => ['5','6','7,8']}
# dest.deep_merge!(source, {:unpack_arrays => ','})
  #     Results: {:x => ['1','2','3','4','5','6','7','8']}
# Why: If receiving data from an HTML form, this makes it easy for a checkbox
# to pass multiple values from within a single HTML element
#
# :merge_hash_arrays => merge hashes within arrays
# source = {:x => [{:y => 1}]}
# dest = {:x => [{:z => 2}]}
# dest.deep_merge!(source, {:merge_hash_arrays => true})
# Results: {:x => [{:y => 1, :z => 2}]}
#
# There are many tests for this library - and you can learn more about the features
# and usages of deep_merge! by just browsing the test examples
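#
# A hedged illustration (not part of the original documentation) of :preserve_unmergeables,
# based on the implementation below: unmergeable (mismatched-type) values are left
# untouched, while same-type values still merge.
#      source = {:a => [1, 2], :b => 'new'}
#      dest   = {:a => [3],    :b => ['old']}
#      DeepMerge.deep_merge!(source, dest, {:preserve_unmergeables => true})
#      Results: {:a => [3, 1, 2], :b => ['old']}
#      (with the default options, :b would instead be overwritten with 'new')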
def self.deep_merge!(source, dest, options = {})
# turn on this line for stdout debugging text
merge_debug = options[:merge_debug] || false
overwrite_unmergeable = !options[:preserve_unmergeables]
knockout_prefix = options[:knockout_prefix] || nil
raise InvalidParameter, "knockout_prefix cannot be an empty string in deep_merge!" if knockout_prefix == ""
raise InvalidParameter, "overwrite_unmergeable must be true if knockout_prefix is specified in deep_merge!" if knockout_prefix && !overwrite_unmergeable
# if present: we will split and join arrays on this char before merging
array_split_char = options[:unpack_arrays] || false
# request that we sort together any arrays when they are merged
sort_merged_arrays = options[:sort_merged_arrays] || false
# request that arrays of hashes are merged together
merge_hash_arrays = options[:merge_hash_arrays] || false
di = options[:debug_indent] || ''
# do nothing if source is nil
return dest if source.nil?
# if dest doesn't exist, then simply copy source to it
if !(dest) && overwrite_unmergeable
dest = source; return dest
end
puts "#{di}Source class: #{source.class.inspect} :: Dest class: #{dest.class.inspect}" if merge_debug
if source.kind_of?(Hash)
puts "#{di}Hashes: #{source.inspect} :: #{dest.inspect}" if merge_debug
source.each do |src_key, src_value|
if dest.kind_of?(Hash)
puts "#{di} looping: #{src_key.inspect} => #{src_value.inspect} :: #{dest.inspect}" if merge_debug
if dest[src_key]
puts "#{di} ==>merging: #{src_key.inspect} => #{src_value.inspect} :: #{dest[src_key].inspect}" if merge_debug
dest[src_key] = deep_merge!(src_value, dest[src_key], options.merge(:debug_indent => di + ' '))
else # dest[src_key] doesn't exist so we want to create and overwrite it (but we do this via deep_merge!)
puts "#{di} ==>merging over: #{src_key.inspect} => #{src_value.inspect}" if merge_debug
# note: we rescue here b/c some classes respond to "dup" but don't implement it (Numeric, TrueClass, FalseClass, NilClass among maybe others)
begin
src_dup = src_value.dup # we dup src_value if possible because we're going to merge into it (since dest is empty)
rescue TypeError
src_dup = src_value
end
dest[src_key] = deep_merge!(src_value, src_dup, options.merge(:debug_indent => di + ' '))
end
else # dest isn't a hash, so we overwrite it completely (if permitted)
if overwrite_unmergeable
puts "#{di} overwriting dest: #{src_key.inspect} => #{src_value.inspect} -over-> #{dest.inspect}" if merge_debug
dest = overwrite_unmergeables(source, dest, options)
end
end
end
elsif source.kind_of?(Array)
puts "#{di}Arrays: #{source.inspect} :: #{dest.inspect}" if merge_debug
# if we are instructed, join/split any source arrays before processing
if array_split_char
puts "#{di} split/join on source: #{source.inspect}" if merge_debug
source = source.join(array_split_char).split(array_split_char)
if dest.kind_of?(Array)
dest = dest.join(array_split_char).split(array_split_char)
end
end
# if there's a naked knockout_prefix in source, that means we are to truncate dest
if source.index(knockout_prefix)
dest = clear_or_nil(dest); source.delete(knockout_prefix)
end
if dest.kind_of?(Array)
if knockout_prefix
print "#{di} knocking out: " if merge_debug
# remove knockout prefix items from both source and dest
source.delete_if do |ko_item|
retval = false
item = ko_item.respond_to?(:gsub) ? ko_item.gsub(%r{^#{knockout_prefix}}, "") : ko_item
if item != ko_item
print "#{ko_item} - " if merge_debug
dest.delete(item)
dest.delete(ko_item)
retval = true
end
retval
end
puts if merge_debug
end
puts "#{di} merging arrays: #{source.inspect} :: #{dest.inspect}" if merge_debug
source_all_hashes = source.all? { |i| i.kind_of?(Hash) }
dest_all_hashes = dest.all? { |i| i.kind_of?(Hash) }
if merge_hash_arrays && source_all_hashes && dest_all_hashes
# merge hashes in lists
list = []
dest.each_index do |i|
list[i] = deep_merge!(source[i] || {}, dest[i],
options.merge(:debug_indent => di + ' '))
end
list += source[dest.count..-1] if source.count > dest.count
dest = list
else
dest = dest | source
end
dest.sort! if sort_merged_arrays
elsif overwrite_unmergeable
puts "#{di} overwriting dest: #{source.inspect} -over-> #{dest.inspect}" if merge_debug
dest = overwrite_unmergeables(source, dest, options)
end
else # src_hash is not an array or hash, so we'll have to overwrite dest
puts "#{di}Others: #{source.inspect} :: #{dest.inspect}" if merge_debug
dest = overwrite_unmergeables(source, dest, options)
end
puts "#{di}Returning #{dest.inspect}" if merge_debug
dest
end # deep_merge!
# allows deep_merge! to uniformly handle overwriting of unmergeable entities
def self.overwrite_unmergeables(source, dest, options)
merge_debug = options[:merge_debug] || false
overwrite_unmergeable = !options[:preserve_unmergeables]
knockout_prefix = options[:knockout_prefix] || false
di = options[:debug_indent] || ''
if knockout_prefix && overwrite_unmergeable
if source.kind_of?(String) # remove knockout string from source before overwriting dest
src_tmp = source.gsub(%r{^#{knockout_prefix}},"")
elsif source.kind_of?(Array) # remove all knockout elements before overwriting dest
src_tmp = source.delete_if {|ko_item| ko_item.kind_of?(String) && ko_item.match(%r{^#{knockout_prefix}}) }
else
src_tmp = source
end
if src_tmp == source # if we didn't find a knockout_prefix then we just overwrite dest
puts "#{di}#{src_tmp.inspect} -over-> #{dest.inspect}" if merge_debug
dest = src_tmp
else # if we do find a knockout_prefix, then we just delete dest
puts "#{di}\"\" -over-> #{dest.inspect}" if merge_debug
dest = ""
end
elsif overwrite_unmergeable
dest = source
end
dest
end
def self.clear_or_nil(obj)
if obj.respond_to?(:clear)
obj.clear
else
obj = nil
end
obj
end
end # module DeepMerge
| 47.861751 | 156 | 0.641633 |
33ae16117e0c8ca336ed1bcd5fd527fe0553f07a | 1,352 | module SessionsHelper
# Logs in the given user
def log_in(user)
session[:user_id] = user.id
end
# Remembers a user in a persistent session
def remember(user)
user.remember
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
# Returns true if the given user is the current logged-in user
def current_user?(user)
user == current_user
end
# Returns the currently logged-in user (if any)
def current_user
if (user_id = session[:user_id])
@current_user ||= User.find_by(id: user_id)
elsif (user_id = cookies.signed[:user_id])
user = User.find_by(id: user_id)
if user && user.authenticated?(cookies[:remember_token])
log_in user
@current_user = user
end
end
end
# Returns true if the user is logged in, false otherwise
def logged_in?
!current_user.nil?
end
# Forgets a persistent session
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
# Logs out the current user
def log_out
forget(current_user)
session.delete(:user_id)
@current_user = nil
end
# Redirects to the stored URL (or the default)
def redirect_back_or(default)
redirect_to(session[:forwarding_url] || default)
session.delete(:forwarding_url)
end
# Stores the URL the user tried to access
def store_location
session[:forwarding_url] = request.original_url if request.get?
end
end
| 21.806452 | 67 | 0.691568 |
38d0e693d320d31e42c205b9e2807165d54b0be9 | 488 | # @note this file is loaded in env.rb to setup simplecov using RUBYOPTs for child processes
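# A hypothetical sketch (the path and command name below are assumptions, not taken
# from this repository) of how env.rb might arrange for child processes to load this file:
#   ENV['SIMPLECOV_COMMAND_NAME'] = 'cucumber'
#   ENV['RUBYOPT'] = "-r./features/support/simplecov_setup #{ENV['RUBYOPT']}"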
simplecov_command_name = ENV['SIMPLECOV_COMMAND_NAME']
# will not be set if hook does not run because `bundle install --without coverage`
if simplecov_command_name
require 'simplecov'
require 'pathname'
root = Pathname(__FILE__).expand_path.parent.parent.parent
SimpleCov.command_name(simplecov_command_name)
SimpleCov.root(root)
load root.join('.simplecov')
end
| 28.705882 | 92 | 0.760246 |
39779d429624fd1991f9601bf903c42af5826b57 | 159 | module DevcampNewViewTool
class Renderer
def self.copyright name, msg
"© #{Time.now.year} | <b>#{name}</b> #{msg}".html_safe
end
end
end | 22.714286 | 65 | 0.641509 |
0841c4835af081a02988b351c63dff092aab744e | 889 | {
name: 'Lee',
num_matrices: '2',
notes: 'FEM, Electromagnetics, Center for Computational Electromagnetics, UIUC
From the Univ of Illinois at Urbana-Champaign, Center for Computational
Electromagnetics (development and application of the finite element
method for analyzing antennas, high-frequency circuits, high-speed
circuits, and so on). The governing equations are Maxwell\'s equations.
The matrix results from the finite-element discretization of a bandpass
microwave filter at 500 MHz. The first-order vector element is employed.
The absorbing boundary condition is applied on the outer boundary of the
structure for emulating the open space. The port boundary condition is
applied on each port of the circuit for truncating the computational
domain and exciting the circuit. Due to these boundary conditions, the
finite-element system matrix is complex.
',
}
| 40.409091 | 82 | 0.79865 |
ac02a3dc45ba344ba1ef12d276c1e907d29dce5a | 221 | # frozen_string_literal: true
require 'nokogiri'
require 'content-style/parser'
require 'content-style/linter'
require 'content-style/hotcop_partner'
module ContentStyle
ROOT = File.expand_path('../..', __FILE__)
end
| 20.090909 | 44 | 0.773756 |
f77da0c7bf4edaa22c3b9011374c8c53623e413d | 11,285 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/security_center/v1/securitycenter_service.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
require 'google/cloud/security_center/v1/asset_pb'
require 'google/cloud/security_center/v1/finding_pb'
require 'google/cloud/security_center/v1/organization_settings_pb'
require 'google/cloud/security_center/v1/run_asset_discovery_response_pb'
require 'google/cloud/security_center/v1/security_marks_pb'
require 'google/cloud/security_center/v1/source_pb'
require 'google/iam/v1/iam_policy_pb'
require 'google/iam/v1/policy_pb'
require 'google/longrunning/operations_pb'
require 'google/protobuf/duration_pb'
require 'google/protobuf/empty_pb'
require 'google/protobuf/field_mask_pb'
require 'google/protobuf/struct_pb'
require 'google/protobuf/timestamp_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "google.cloud.securitycenter.v1.CreateFindingRequest" do
optional :parent, :string, 1
optional :finding_id, :string, 2
optional :finding, :message, 3, "google.cloud.securitycenter.v1.Finding"
end
add_message "google.cloud.securitycenter.v1.CreateSourceRequest" do
optional :parent, :string, 1
optional :source, :message, 2, "google.cloud.securitycenter.v1.Source"
end
add_message "google.cloud.securitycenter.v1.GetOrganizationSettingsRequest" do
optional :name, :string, 1
end
add_message "google.cloud.securitycenter.v1.GetSourceRequest" do
optional :name, :string, 1
end
add_message "google.cloud.securitycenter.v1.GroupAssetsRequest" do
optional :parent, :string, 1
optional :filter, :string, 2
optional :group_by, :string, 3
optional :compare_duration, :message, 4, "google.protobuf.Duration"
optional :read_time, :message, 5, "google.protobuf.Timestamp"
optional :page_token, :string, 7
optional :page_size, :int32, 8
end
add_message "google.cloud.securitycenter.v1.GroupAssetsResponse" do
repeated :group_by_results, :message, 1, "google.cloud.securitycenter.v1.GroupResult"
optional :read_time, :message, 2, "google.protobuf.Timestamp"
optional :next_page_token, :string, 3
optional :total_size, :int32, 4
end
add_message "google.cloud.securitycenter.v1.GroupFindingsRequest" do
optional :parent, :string, 1
optional :filter, :string, 2
optional :group_by, :string, 3
optional :read_time, :message, 4, "google.protobuf.Timestamp"
optional :compare_duration, :message, 5, "google.protobuf.Duration"
optional :page_token, :string, 7
optional :page_size, :int32, 8
end
add_message "google.cloud.securitycenter.v1.GroupFindingsResponse" do
repeated :group_by_results, :message, 1, "google.cloud.securitycenter.v1.GroupResult"
optional :read_time, :message, 2, "google.protobuf.Timestamp"
optional :next_page_token, :string, 3
optional :total_size, :int32, 4
end
add_message "google.cloud.securitycenter.v1.GroupResult" do
map :properties, :string, :message, 1, "google.protobuf.Value"
optional :count, :int64, 2
end
add_message "google.cloud.securitycenter.v1.ListSourcesRequest" do
optional :parent, :string, 1
optional :page_token, :string, 2
optional :page_size, :int32, 7
end
add_message "google.cloud.securitycenter.v1.ListSourcesResponse" do
repeated :sources, :message, 1, "google.cloud.securitycenter.v1.Source"
optional :next_page_token, :string, 2
end
add_message "google.cloud.securitycenter.v1.ListAssetsRequest" do
optional :parent, :string, 1
optional :filter, :string, 2
optional :order_by, :string, 3
optional :read_time, :message, 4, "google.protobuf.Timestamp"
optional :compare_duration, :message, 5, "google.protobuf.Duration"
optional :field_mask, :message, 7, "google.protobuf.FieldMask"
optional :page_token, :string, 8
optional :page_size, :int32, 9
end
add_message "google.cloud.securitycenter.v1.ListAssetsResponse" do
repeated :list_assets_results, :message, 1, "google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult"
optional :read_time, :message, 2, "google.protobuf.Timestamp"
optional :next_page_token, :string, 3
optional :total_size, :int32, 4
end
add_message "google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult" do
optional :asset, :message, 1, "google.cloud.securitycenter.v1.Asset"
optional :state_change, :enum, 2, "google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult.StateChange"
end
add_enum "google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult.StateChange" do
value :UNUSED, 0
value :ADDED, 1
value :REMOVED, 2
value :ACTIVE, 3
end
add_message "google.cloud.securitycenter.v1.ListFindingsRequest" do
optional :parent, :string, 1
optional :filter, :string, 2
optional :order_by, :string, 3
optional :read_time, :message, 4, "google.protobuf.Timestamp"
optional :compare_duration, :message, 5, "google.protobuf.Duration"
optional :field_mask, :message, 7, "google.protobuf.FieldMask"
optional :page_token, :string, 8
optional :page_size, :int32, 9
end
add_message "google.cloud.securitycenter.v1.ListFindingsResponse" do
repeated :list_findings_results, :message, 1, "google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult"
optional :read_time, :message, 2, "google.protobuf.Timestamp"
optional :next_page_token, :string, 3
optional :total_size, :int32, 4
end
add_message "google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult" do
optional :finding, :message, 1, "google.cloud.securitycenter.v1.Finding"
optional :state_change, :enum, 2, "google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult.StateChange"
end
add_enum "google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult.StateChange" do
value :UNUSED, 0
value :CHANGED, 1
value :UNCHANGED, 2
value :ADDED, 3
value :REMOVED, 4
end
add_message "google.cloud.securitycenter.v1.SetFindingStateRequest" do
optional :name, :string, 1
optional :state, :enum, 2, "google.cloud.securitycenter.v1.Finding.State"
optional :start_time, :message, 3, "google.protobuf.Timestamp"
end
add_message "google.cloud.securitycenter.v1.RunAssetDiscoveryRequest" do
optional :parent, :string, 1
end
add_message "google.cloud.securitycenter.v1.UpdateFindingRequest" do
optional :finding, :message, 1, "google.cloud.securitycenter.v1.Finding"
optional :update_mask, :message, 2, "google.protobuf.FieldMask"
end
add_message "google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest" do
optional :organization_settings, :message, 1, "google.cloud.securitycenter.v1.OrganizationSettings"
optional :update_mask, :message, 2, "google.protobuf.FieldMask"
end
add_message "google.cloud.securitycenter.v1.UpdateSourceRequest" do
optional :source, :message, 1, "google.cloud.securitycenter.v1.Source"
optional :update_mask, :message, 2, "google.protobuf.FieldMask"
end
add_message "google.cloud.securitycenter.v1.UpdateSecurityMarksRequest" do
optional :security_marks, :message, 1, "google.cloud.securitycenter.v1.SecurityMarks"
optional :update_mask, :message, 2, "google.protobuf.FieldMask"
optional :start_time, :message, 3, "google.protobuf.Timestamp"
end
end
module Google::Cloud::SecurityCenter::V1
CreateFindingRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.CreateFindingRequest").msgclass
CreateSourceRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.CreateSourceRequest").msgclass
GetOrganizationSettingsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GetOrganizationSettingsRequest").msgclass
GetSourceRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GetSourceRequest").msgclass
GroupAssetsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GroupAssetsRequest").msgclass
GroupAssetsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GroupAssetsResponse").msgclass
GroupFindingsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GroupFindingsRequest").msgclass
GroupFindingsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GroupFindingsResponse").msgclass
GroupResult = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.GroupResult").msgclass
ListSourcesRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListSourcesRequest").msgclass
ListSourcesResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListSourcesResponse").msgclass
ListAssetsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListAssetsRequest").msgclass
ListAssetsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListAssetsResponse").msgclass
ListAssetsResponse::ListAssetsResult = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult").msgclass
ListAssetsResponse::ListAssetsResult::StateChange = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult.StateChange").enummodule
ListFindingsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListFindingsRequest").msgclass
ListFindingsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListFindingsResponse").msgclass
ListFindingsResponse::ListFindingsResult = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult").msgclass
ListFindingsResponse::ListFindingsResult::StateChange = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult.StateChange").enummodule
SetFindingStateRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.SetFindingStateRequest").msgclass
RunAssetDiscoveryRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.RunAssetDiscoveryRequest").msgclass
UpdateFindingRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.UpdateFindingRequest").msgclass
UpdateOrganizationSettingsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest").msgclass
UpdateSourceRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.UpdateSourceRequest").msgclass
UpdateSecurityMarksRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.securitycenter.v1.UpdateSecurityMarksRequest").msgclass
end
| 60.026596 | 209 | 0.794152 |
1adcc23082dd04b3dae30ebc7796314ed64807b2 | 3,172 | require 'test_helper'
class Minitest::DataTest < Minitest::Test
attr_accessor :reporter
def run_test(test)
output = StringIO.new("")
self.reporter = Minitest::CompositeReporter.new
reporter << Minitest::SummaryReporter.new(output)
reporter << Minitest::ProgressReporter.new(output)
reporter.start
Minitest::Runnable.runnables.delete(test)
test.run(reporter)
reporter.report
end
def result_reporter
reporter.reporters.first
end
def test_data_with_success
test_case = Class.new(Minitest::Test) do
data("empty string" => [true, ""],
"plain string" => [false, "hello"])
def test_empty(data)
expected, target = data
assert_equal(expected, target.empty?)
end
end
run_test(test_case)
assert_empty(result_reporter.results.first.to_s)
end
def test_data_with_fail
test_case = Class.new(Minitest::Test) do
data("empty string" => [true, "1"],
"plain string" => [false, ""])
def test_empty(data)
expected, target = data
assert_equal(expected, target.empty?)
end
end
run_test(test_case)
assert_match(/test_empty\(empty string\)/, result_reporter.results[0].to_s)
assert_match(/test_empty\(plain string\)/, result_reporter.results[1].to_s)
end
def test_data_label_with_success
test_case = Class.new(Minitest::Test) do
data("empty string", [true, ""])
data("plain string", [false, "hello"])
def test_empty(data)
expected, target = data
assert_equal(expected, target.empty?)
end
end
run_test(test_case)
assert_empty(result_reporter.results.first.to_s)
end
def test_data_label_with_fail
test_case = Class.new(Minitest::Test) do
data("empty string", [true, "1"])
data("plain string", [false, ""])
def test_empty(data)
expected, target = data
assert_equal(expected, target.empty?)
end
end
run_test(test_case)
assert_match(/test_empty\(empty string\)/, result_reporter.results[0].to_s)
assert_match(/test_empty\(plain string\)/, result_reporter.results[1].to_s)
end
def test_data_block_with_success
test_case = Class.new(Minitest::Test) do
data do
data_set = {}
data_set["empty string"] = [true, ""]
data_set["plain string"] = [false, "hello"]
data_set
end
def test_empty(data)
expected, target = data
assert_equal(expected, target.empty?)
end
end
run_test(test_case)
assert_empty(result_reporter.results.first.to_s)
end
def test_data_block_with_fail
test_case = Class.new(Minitest::Test) do
data do
data_set = {}
data_set["empty string"] = [true, "1"]
data_set["plain string"] = [false, ""]
data_set
end
def test_empty(data)
expected, target = data
assert_equal(expected, target.empty?)
end
end
run_test(test_case)
assert_match(/test_empty\(empty string\)/, result_reporter.results[0].to_s)
assert_match(/test_empty\(plain string\)/, result_reporter.results[1].to_s)
end
end
| 25.174603 | 79 | 0.647226 |
e81c15ebb4b36fb135fc7942c481759b063cb073 | 99 | require 'spec_helper'
describe PageLink do
# "add some examples to (or delete) #{__FILE__}"
end
| 16.5 | 50 | 0.727273 |
26074c5ccbfedd7d37a0d96d3e0db10c1ba89e07 | 211 | user = User.create!(name: 'Admin', email: '[email protected]', password: 'iamRails2021',
password_confirmation: 'iamRails2021')
api_key = ApiKey.create!(user: user)
puts api_key.access_token
| 42.2 | 88 | 0.691943 |
ab41ecfcfb783cc3c23347a5c18decb0684391c9 | 2,052 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe "Reserving an instrument on a holiday" do
let(:user) { create(:user) }
let!(:instrument) { create(:setup_instrument, restrict_holiday_access: true) }
let(:facility) { instrument.facility }
let!(:account) { create(:nufs_account, :with_account_owner, owner: user) }
let!(:price_policy) { create(:instrument_price_policy, price_group: PriceGroup.base, product: instrument) }
let!(:holiday) { Holiday.create(date: 2.days.from_now) }
before do
login_as user
visit facility_path(facility)
end
context "as a member of an approved group" do
let!(:product_access_group) { create(:product_access_group, allow_holiday_access: true, product: instrument) }
let!(:product_user) { create(:product_user, user: user, product_access_group: product_access_group, product: instrument) }
it "allows making a reservation" do
click_link instrument.name
select user.accounts.first.description, from: "Payment Source"
fill_in "Reserve Start", with: 2.days.from_now
click_button "Create"
expect(page).to have_content("Reservation created successfully")
end
end
context "as a member of a restricted group" do
context "non-admin" do
it "does NOT allow making a reservation" do
click_link instrument.name
select user.accounts.first.description, from: "Payment Source"
fill_in "Reserve Start", with: 2.days.from_now
click_button "Create"
expect(page).to have_content("Reserve Start cannot be on a holiday because you do not have holiday access")
end
end
context "as an admin" do
let(:user) { create(:user, :administrator) }
it "allows making a reservation" do
click_link instrument.name
select user.accounts.first.description, from: "Payment Source"
fill_in "Reserve Start", with: 2.days.from_now
click_button "Create"
expect(page).to have_content("Reservation created successfully")
end
end
end
end
| 36 | 126 | 0.705166 |
5d14b2543adcdb3ececf1415905110ab254b480c | 1,508 | require 'spec_helper'
describe Bosh::AwsCloud::Cloud, "reboot_vm" do
let(:cloud) { described_class.new(options) }
let(:options) do
{
"aws" => {
"default_availability_zone" => "foo",
"region" => "bar",
"access_key_id" => "access",
"secret_access_key" => "secret",
"default_key_name" => "sesame"
},
"registry" => {
"endpoint" => "endpoint",
"user" => "user",
"password" => "password"
},
"agent" => {
"baz" => "qux"
}
}
end
it 'reboots an EC2 instance' do
registry = double("registry")
allow(Bosh::Registry::Client).to receive(:new).and_return(registry)
region = double("region", name: 'bar')
allow(AWS::EC2).to receive(:new).and_return(double("ec2", regions: [ region ]))
az_selector = double("availability zone selector")
allow(Bosh::AwsCloud::AvailabilityZoneSelector).to receive(:new).
with(region, "foo").
and_return(az_selector)
instance_manager = instance_double('Bosh::AwsCloud::InstanceManager')
allow(Bosh::AwsCloud::InstanceManager).to receive(:new).
with(region, registry, be_an_instance_of(AWS::ELB), az_selector, be_an_instance_of(Logger)).
and_return(instance_manager)
instance = instance_double('Bosh::AwsCloud::Instance')
allow(instance_manager).to receive(:find).with('fake-id').and_return(instance)
expect(instance).to receive(:reboot).with(no_args)
cloud.reboot_vm('fake-id')
end
end
| 29.568627 | 98 | 0.627321 |
e2f4e7f223f48f9c4fcf80b4a6335a20a5db20d8 | 77 | require_relative '../../Dispatcher'
class DispatcherServlet < Dispatcher
end | 19.25 | 36 | 0.792208 |
f8da234fbcfd2377a03b0518e56b9baa42dda72f | 3,745 | # frozen_string_literal: true
# This file was auto-generated by lib/tasks/web.rake
require_relative 'endpoints/admin_apps'
require_relative 'endpoints/admin_apps_requests'
require_relative 'endpoints/admin_inviteRequests'
require_relative 'endpoints/admin_inviteRequests_approved'
require_relative 'endpoints/admin_inviteRequests_denied'
require_relative 'endpoints/admin_teams'
require_relative 'endpoints/admin_teams_admins'
require_relative 'endpoints/admin_teams_owners'
require_relative 'endpoints/admin_teams_settings'
require_relative 'endpoints/admin_users'
require_relative 'endpoints/admin_users_session'
require_relative 'endpoints/api'
require_relative 'endpoints/apps'
require_relative 'endpoints/apps_permissions'
require_relative 'endpoints/apps_permissions_resources'
require_relative 'endpoints/apps_permissions_scopes'
require_relative 'endpoints/apps_permissions_users'
require_relative 'endpoints/auth'
require_relative 'endpoints/bots'
require_relative 'endpoints/channels'
require_relative 'endpoints/chat'
require_relative 'endpoints/chat_scheduledMessages'
require_relative 'endpoints/conversations'
require_relative 'endpoints/dialog'
require_relative 'endpoints/dnd'
require_relative 'endpoints/emoji'
require_relative 'endpoints/files'
require_relative 'endpoints/files_comments'
require_relative 'endpoints/files_remote'
require_relative 'endpoints/groups'
require_relative 'endpoints/im'
require_relative 'endpoints/migration'
require_relative 'endpoints/mpim'
require_relative 'endpoints/oauth'
require_relative 'endpoints/oauth_v2'
require_relative 'endpoints/pins'
require_relative 'endpoints/reactions'
require_relative 'endpoints/reminders'
require_relative 'endpoints/rtm'
require_relative 'endpoints/search'
require_relative 'endpoints/stars'
require_relative 'endpoints/team'
require_relative 'endpoints/team_profile'
require_relative 'endpoints/usergroups'
require_relative 'endpoints/usergroups_users'
require_relative 'endpoints/users'
require_relative 'endpoints/users_admin'
require_relative 'endpoints/users_prefs'
require_relative 'endpoints/users_profile'
require_relative 'endpoints/views'
module Slack
module Web
module Api
module Endpoints
include Slack::Web::Api::Mixins::Channels
include Slack::Web::Api::Mixins::Users
include Slack::Web::Api::Mixins::Groups
include AdminApps
include AdminAppsRequests
include AdminInviterequests
include AdminInviterequestsApproved
include AdminInviterequestsDenied
include AdminTeams
include AdminTeamsAdmins
include AdminTeamsOwners
include AdminTeamsSettings
include AdminUsers
include AdminUsersSession
include Api
include Apps
include AppsPermissions
include AppsPermissionsResources
include AppsPermissionsScopes
include AppsPermissionsUsers
include Auth
include Bots
include Channels
include Chat
include ChatScheduledmessages
include Conversations
include Dialog
include Dnd
include Emoji
include Files
include FilesComments
include FilesRemote
include Groups
include Im
include Migration
include Mpim
include Oauth
include OauthV2
include Pins
include Reactions
include Reminders
include Rtm
include Search
include Stars
include Team
include TeamProfile
include Usergroups
include UsergroupsUsers
include Users
include UsersAdmin
include UsersPrefs
include UsersProfile
include Views
end
end
end
end
| 32.008547 | 58 | 0.77036 |
ffea6880c49b41b86a3cbcfa5903183c756c6dfa | 1,067 | cask "unity" do
arch = Hardware::CPU.intel? ? "" : "Arm64"
version "2022.1.5f1,feea5ec8f162"
if Hardware::CPU.intel?
sha256 "5093df3663d7356e156ccc116072457625010e57107371c4fce5c0230009018f"
else
sha256 "6e75c0fda9d2b61d00458dafc62d887cc1368bbce666e384d1a05819dad4e48d"
end
url "https://download.unity3d.com/download_unity/#{version.csv.second}/MacEditorInstaller#{arch}/Unity-#{version.csv.first}.pkg",
verified: "download.unity3d.com/download_unity/"
name "Unity Editor"
desc "Platform for 3D content"
homepage "https://unity.com/products"
livecheck do
url "https://public-cdn.cloud.unity3d.com/hub/prod/releases-darwin.json"
strategy :page_match do |page|
page.scan(%r{/download_unity/(\h+)/MacEditorInstaller/Unity-(\d+(?:\.\d+)+[a-z]*\d*)\.pkg}i).map do |match|
"#{match[1]},#{match[0]}"
end
end
end
pkg "Unity-#{version.csv.first}.pkg"
uninstall quit: "com.unity3d.UnityEditor5.x",
pkgutil: "com.unity3d.UnityEditor5.x",
delete: "/Applications/Unity"
end
| 32.333333 | 131 | 0.68791 |
f8481846ab4b1974d65b40ee03c1823c0538770b | 9,778 | require 'spec_helper'
module Omnibus
describe Compressor::DMG do
let(:project) do
Project.new.tap do |project|
project.name('project')
project.homepage('https://example.com')
project.install_dir('/opt/project')
project.build_version('1.2.3')
project.build_iteration('2')
project.maintainer('Chef Software')
end
end
subject { described_class.new(project) }
let(:project_root) { "#{tmp_path}/project/root" }
let(:package_dir) { "#{tmp_path}/package/dir" }
let(:staging_dir) { "#{tmp_path}/staging/dir" }
before do
allow(project).to receive(:packager)
.and_return(Packager::PKG.new(project))
Config.project_root(project_root)
Config.package_dir(package_dir)
allow(subject).to receive(:staging_dir)
.and_return(staging_dir)
create_directory(staging_dir)
allow(subject).to receive(:shellout!)
end
describe '#window_bounds' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:window_bounds)
end
it 'has a default value' do
expect(subject.window_bounds).to eq('100, 100, 750, 600')
end
end
describe '#pkg_position' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:pkg_position)
end
it 'has a default value' do
expect(subject.pkg_position).to eq('535, 50')
end
end
describe '#id' do
it 'is :dmg' do
expect(subject.id).to eq(:dmg)
end
end
describe '#resources_dir' do
it 'is nested inside the staging_dir' do
expect(subject.resources_dir).to eq("#{staging_dir}/Resources")
end
end
describe '#clean_disks' do
it 'logs a message' do
allow(subject).to receive(:shellout!)
.and_return(double(Mixlib::ShellOut, stdout: ''))
output = capture_logging { subject.clean_disks }
expect(output).to include('Cleaning previously mounted disks')
end
end
describe '#create_writable_dmg' do
it 'logs a message' do
output = capture_logging { subject.create_writable_dmg }
expect(output).to include('Creating writable dmg')
end
it 'runs the hdiutil command' do
expect(subject).to receive(:shellout!)
.with <<-EOH.gsub(/^ {12}/, '')
hdiutil create \\
-srcfolder "#{staging_dir}/Resources" \\
-volname "Project" \\
-fs HFS+ \\
-fsargs "-c c=64,a=16,e=16" \\
-format UDRW \\
-size 512000k \\
"#{staging_dir}/project-writable.dmg"
EOH
subject.create_writable_dmg
end
end
describe '#attach_dmg' do
before do
allow(subject).to receive(:shellout!)
.and_return(shellout)
end
let(:shellout) { double(Mixlib::ShellOut, stdout: "hello\n") }
it 'logs a message' do
output = capture_logging { subject.attach_dmg }
expect(output).to include('Attaching dmg as disk')
end
it 'runs the hdiutil command' do
expect(subject).to receive(:shellout!)
.with <<-EOH.gsub(/^ {12}/, '')
hdiutil attach \\
-readwrite \\
-noverify \\
-noautoopen \\
"#{staging_dir}/project-writable.dmg" | egrep '^/dev/' | sed 1q | awk '{print $1}'
EOH
subject.attach_dmg
end
it 'returns the stripped stdout' do
expect(subject.attach_dmg).to eq('hello')
end
end
describe '#set_volume_icon' do
it 'logs a message' do
output = capture_logging { subject.set_volume_icon }
expect(output).to include('Setting volume icon')
end
it 'runs the sips commands' do
icon = subject.resource_path('icon.png')
expect(subject).to receive(:shellout!)
.with <<-EOH.gsub(/^ {12}/, '')
# Generate the icns
mkdir tmp.iconset
sips -z 16 16 #{icon} --out tmp.iconset/icon_16x16.png
sips -z 32 32 #{icon} --out tmp.iconset/[email protected]
sips -z 32 32 #{icon} --out tmp.iconset/icon_32x32.png
sips -z 64 64 #{icon} --out tmp.iconset/[email protected]
sips -z 128 128 #{icon} --out tmp.iconset/icon_128x128.png
sips -z 256 256 #{icon} --out tmp.iconset/[email protected]
sips -z 256 256 #{icon} --out tmp.iconset/icon_256x256.png
sips -z 512 512 #{icon} --out tmp.iconset/[email protected]
sips -z 512 512 #{icon} --out tmp.iconset/icon_512x512.png
sips -z 1024 1024 #{icon} --out tmp.iconset/[email protected]
iconutil -c icns tmp.iconset
# Copy it over
cp tmp.icns "/Volumes/Project/.VolumeIcon.icns"
# Source the icon
SetFile -a C "/Volumes/Project"
EOH
subject.set_volume_icon
end
end
describe '#prettify_dmg' do
it 'logs a message' do
output = capture_logging { subject.prettify_dmg }
expect(output).to include('Making the dmg all pretty and stuff')
end
it 'renders the apple script template' do
subject.prettify_dmg
expect("#{staging_dir}/create_dmg.osascript").to be_a_file
end
it 'has the correct content' do
subject.prettify_dmg
contents = File.read("#{staging_dir}/create_dmg.osascript")
expect(contents).to include('tell application "Finder"')
expect(contents).to include(' tell disk "Project"')
expect(contents).to include(' open')
expect(contents).to include(' set current view of container window to icon view')
expect(contents).to include(' set toolbar visible of container window to false')
expect(contents).to include(' set statusbar visible of container window to false')
expect(contents).to include(' set the bounds of container window to {100, 100, 750, 600}')
expect(contents).to include(' set theViewOptions to the icon view options of container window')
expect(contents).to include(' set arrangement of theViewOptions to not arranged')
expect(contents).to include(' set icon size of theViewOptions to 72')
expect(contents).to include(' set background picture of theViewOptions to file ".support:background.png"')
expect(contents).to include(' delay 5')
expect(contents).to include(' set position of item "project-1.2.3-2.pkg" of container window to {535, 50}')
expect(contents).to include(' update without registering applications')
expect(contents).to include(' delay 5')
expect(contents).to include(' end tell')
expect(contents).to include('end tell')
end
it 'runs the apple script' do
expect(subject).to receive(:shellout!)
.with <<-EOH.gsub(/^ {12}/, '')
osascript "#{staging_dir}/create_dmg.osascript"
EOH
subject.prettify_dmg
end
end
describe '#compress_dmg' do
it 'logs a message' do
output = capture_logging { subject.compress_dmg }
expect(output).to include('Compressing dmg')
end
it 'runs the magical command series' do
device = '/dev/sda1'
subject.instance_variable_set(:@device, device)
expect(subject).to receive(:shellout!)
.with <<-EOH.gsub(/^ {12}/, '')
chmod -Rf go-w /Volumes/Project
sync
hdiutil detach "#{device}"
hdiutil convert \\
"#{staging_dir}/project-writable.dmg" \\
-format UDZO \\
-imagekey \\
zlib-level=9 \\
-o "#{package_dir}/project-1.2.3-2.dmg"
rm -rf "#{staging_dir}/project-writable.dmg"
EOH
subject.compress_dmg
end
end
describe '#set_dmg_icon' do
it 'logs a message' do
output = capture_logging { subject.set_dmg_icon }
expect(output).to include('Setting dmg icon')
end
it 'runs the sips commands' do
icon = subject.resource_path('icon.png')
expect(subject).to receive(:shellout!)
.with <<-EOH.gsub(/^ {12}/, '')
# Convert the png to an icon
sips -i "#{icon}"
# Extract the icon into its own resource
DeRez -only icns "#{icon}" > tmp.rsrc
# Append the icon reosurce to the DMG
Rez -append tmp.rsrc -o "#{package_dir}/project-1.2.3-2.dmg"
# Source the icon
SetFile -a C "#{package_dir}/project-1.2.3-2.dmg"
EOH
subject.set_dmg_icon
end
end
describe '#package_name' do
it 'reflects the packager\'s unmodified package_name' do
expect(subject.package_name).to eq('project-1.2.3-2.dmg')
end
it 'reflects the packager\'s modified package_name' do
package_basename = 'projectsub-1.2.3-3'
allow(project.packager).to receive(:package_name)
.and_return("#{package_basename}.pkg")
expect(subject.package_name).to eq("#{package_basename}.dmg")
end
end
describe '#writable_dmg' do
it 'is in the staging_dir' do
expect(subject.writable_dmg).to include(staging_dir)
end
it 'is project-writable' do
expect(subject.writable_dmg).to include('project-writable.dmg')
end
end
describe '#volume_name' do
it 'is the project friendly_name' do
project.friendly_name('Friendly Bacon Bits')
expect(subject.volume_name).to eq('Friendly Bacon Bits')
end
end
end
end
| 32.593333 | 118 | 0.593475 |
5db78dfd119f6a0489aa3e90b94e7851a764b33f | 1,523 | require 'active_support/core_ext/array/wrap'
module ActsAsParanoid
module Validations
def self.included(base)
base.extend ClassMethods
end
class UniquenessWithoutDeletedValidator < ActiveRecord::Validations::UniquenessValidator
def validate_each(record, attribute, value)
finder_class = find_finder_class_for(record)
table = finder_class.arel_table
coder = record.class.serialized_attributes[attribute.to_s]
if value && coder
value = coder.dump value
end
relation = build_relation(finder_class, table, attribute, value)
[Array(finder_class.primary_key), Array(record.send(:id))].transpose.each do |pk_key, pk_value|
relation = relation.and(table[pk_key.to_sym].not_eq(pk_value))
end if record.persisted?
Array.wrap(options[:scope]).each do |scope_item|
scope_value = record.send(scope_item)
relation = relation.and(table[scope_item].eq(scope_value))
end
# Re-add ActsAsParanoid default scope conditions manually.
if finder_class.unscoped.where(finder_class.paranoid_default_scope_sql).where(relation).exists?
record.errors.add(attribute, :taken, options.except(:case_sensitive, :scope).merge(:value => value))
end
end
end
module ClassMethods
def validates_uniqueness_of_without_deleted(*attr_names)
validates_with UniquenessWithoutDeletedValidator, _merge_attributes(attr_names)
end
end
end
end
| 34.613636 | 110 | 0.705187 |
79f3a7dcb532c3641d886ed48525e843cc30d2fe | 1,260 | # frozen_string_literal: true
module Gitlab
module Utils
module LazyAttributes
extend ActiveSupport::Concern
include Gitlab::Utils::StrongMemoize
class_methods do
def lazy_attr_reader(*one_or_more_names, type: nil)
names = Array.wrap(one_or_more_names)
names.each { |name| define_lazy_reader(name, type: type) }
end
def lazy_attr_accessor(*one_or_more_names, type: nil)
names = Array.wrap(one_or_more_names)
names.each do |name|
define_lazy_reader(name, type: type)
define_lazy_writer(name)
end
end
private
def define_lazy_reader(name, type:)
define_method(name) do
strong_memoize("#{name}_lazy_loaded") do
value = instance_variable_get("@#{name}")
value = value.call if value.respond_to?(:call)
value = nil if type && !value.is_a?(type)
value
end
end
end
def define_lazy_writer(name)
define_method("#{name}=") do |value|
clear_memoization("#{name}_lazy_loaded")
instance_variable_set("@#{name}", value)
end
end
end
end
end
end
| 27.391304 | 68 | 0.580952 |
791cdf3f9aff4ea5a622494467867440b57f8154 | 68,604 | require_relative "spec_helper"
describe "DB#create_table" do
before do
@db = Sequel.mock
end
it "should accept the table name" do
@db.create_table(:cats){}.must_be_nil
@db.sqls.must_equal ['CREATE TABLE cats ()']
end
with_symbol_splitting "should accept the table name with splittable symbols" do
@db.create_table(:cats__cats) {}
@db.sqls.must_equal ['CREATE TABLE cats.cats ()']
end
it "should accept the table name in multiple formats" do
@db.create_table(Sequel[:cats][:cats]) {}
@db.create_table("cats__cats1") {}
@db.create_table(Sequel.identifier(:cats__cats2)) {}
@db.create_table(Sequel.qualify(:cats3, :cats)) {}
@db.sqls.must_equal ['CREATE TABLE cats.cats ()', 'CREATE TABLE cats__cats1 ()', 'CREATE TABLE cats__cats2 ()', 'CREATE TABLE cats3.cats ()']
end
it "should raise an error if the table name argument is not valid" do
proc{@db.create_table(1) {}}.must_raise(Sequel::Error)
proc{@db.create_table(Sequel.as(:cats, :c)) {}}.must_raise(Sequel::Error)
end
it "should remove cached schema entry" do
@db.instance_variable_set(:@schemas, {'cats'=>[]})
@db.create_table(:cats){Integer :a}
@db.instance_variable_get(:@schemas).must_be :empty?
end
it "should accept multiple columns" do
@db.create_table(:cats) do
column :id, :integer
column :name, :text
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer, name text)']
end
it "should accept method calls as data types" do
@db.create_table(:cats) do
integer :id
text :name
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer, name text)']
end
it "should transform types given as ruby classes to database-specific types" do
@db.create_table(:cats) do
String :a
Integer :b
Fixnum :c
Bignum :d
Float :e
BigDecimal :f
Date :g
DateTime :h
Time :i
Numeric :j
File :k
TrueClass :l
FalseClass :m
column :n, Integer
primary_key :o, :type=>String
foreign_key :p, :f, :type=>Date
end
@db.sqls.must_equal ['CREATE TABLE cats (o varchar(255) PRIMARY KEY AUTOINCREMENT, a varchar(255), b integer, c integer, d bigint, e double precision, f numeric, g date, h timestamp, i timestamp, j numeric, k blob, l boolean, m boolean, n integer, p date REFERENCES f)']
end
it "should respect default_string_column_size when transforming String types" do
@db.default_string_column_size = 50
@db.create_table(:cats) do
String :a
String :a2, :size=>13
String :a3, :fixed=>true
String :a4, :size=>13, :fixed=>true
String :a5, :text=>true
varchar :a6
varchar :a7, :size=>13
end
@db.sqls.must_equal ['CREATE TABLE cats (a varchar(50), a2 varchar(13), a3 char(50), a4 char(13), a5 text, a6 varchar(50), a7 varchar(13))']
end
it "should allow the use of modifiers with ruby class types" do
@db.create_table(:cats) do
String :a, :size=>50
String :b, :text=>true
String :c, :fixed=>true, :size=>40
Time :d, :only_time=>true
BigDecimal :e, :size=>[11,2]
end
@db.sqls.must_equal ['CREATE TABLE cats (a varchar(50), b text, c char(40), d time, e numeric(11, 2))']
end
it "should allow the use of classes whose name matches a handled ruby class" do
c = Class.new
def c.name; 'Fixnum'; end
@db.create_table(:cats) do
column :a, c
end
@db.sqls.must_equal ['CREATE TABLE cats (a integer)']
end
it "should raise an error if you use a ruby class that isn't handled" do
proc{@db.create_table(:cats){column :a, Class}}.must_raise(Sequel::Error)
end
it "should accept primary key definition" do
@db.create_table(:cats) do
primary_key :id
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer PRIMARY KEY AUTOINCREMENT)']
@db.create_table(:cats) do
primary_key :id, :serial, :auto_increment => false
end
@db.sqls.must_equal ['CREATE TABLE cats (id serial PRIMARY KEY)']
@db.create_table(:cats) do
primary_key :id, :type => :serial, :auto_increment => false
end
@db.sqls.must_equal ['CREATE TABLE cats (id serial PRIMARY KEY)']
@db.create_table(:cats) do
Integer :a
primary_key :id
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer PRIMARY KEY AUTOINCREMENT, a integer)']
@db.create_table(:cats) do
Integer :a
primary_key :id, :keep_order=>true
end
@db.sqls.must_equal ['CREATE TABLE cats (a integer, id integer PRIMARY KEY AUTOINCREMENT)']
end
it "should allow naming primary key constraint with :primary_key_constraint_name option" do
@db.create_table(:cats) do
primary_key :id, :primary_key_constraint_name=>:foo
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer CONSTRAINT foo PRIMARY KEY AUTOINCREMENT)']
end
it "should automatically set primary key column NOT NULL if database doesn't do it automatically" do
def @db.can_add_primary_key_constraint_on_nullable_columns?; false end
@db.create_table(:cats) do
primary_key :id
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer NOT NULL PRIMARY KEY AUTOINCREMENT)']
end
it "should automatically set primary key column NOT NULL when adding constraint if database doesn't do it automatically" do
def @db.can_add_primary_key_constraint_on_nullable_columns?; false end
@db.create_table(:cats) do
String :id
primary_key [:id]
end
@db.sqls.must_equal ['CREATE TABLE cats (id varchar(255) NOT NULL, PRIMARY KEY (id))']
end
it "should handle splitting named column constraints into table constraints if unsupported" do
def @db.supports_named_column_constraints?; false end
@db.create_table(:cats) do
primary_key :id, :primary_key_constraint_name=>:foo
foreign_key :cat_id, :cats, :unique=>true, :unique_constraint_name=>:bar, :foreign_key_constraint_name=>:baz, :deferrable=>true, :key=>:foo_id, :on_delete=>:cascade, :on_update=>:restrict
end
@db.sqls.must_equal ['CREATE TABLE cats (id integer AUTOINCREMENT, cat_id integer, CONSTRAINT foo PRIMARY KEY (id), CONSTRAINT baz FOREIGN KEY (cat_id) REFERENCES cats(foo_id) ON DELETE CASCADE ON UPDATE RESTRICT DEFERRABLE INITIALLY DEFERRED, CONSTRAINT bar UNIQUE (cat_id))']
end
it "should accept and literalize default values" do
@db.create_table(:cats) do
integer :id, :default => 123
text :name, :default => "abc'def"
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer DEFAULT 123, name text DEFAULT 'abc''def')"]
end
it "should accept not null definition" do
@db.create_table(:cats) do
integer :id
text :name, :null => false
text :name2, :allow_null => false
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer, name text NOT NULL, name2 text NOT NULL)"]
end
it "should accept null definition" do
@db.create_table(:cats) do
integer :id
text :name, :null => true
text :name2, :allow_null => true
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer, name text NULL, name2 text NULL)"]
end
it "should accept unique definition" do
@db.create_table(:cats) do
integer :id
text :name, :unique => true
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer, name text UNIQUE)"]
end
it "should allow naming unique constraint with :unique_constraint_name option" do
@db.create_table(:cats) do
text :name, :unique => true, :unique_constraint_name=>:foo
end
@db.sqls.must_equal ["CREATE TABLE cats (name text CONSTRAINT foo UNIQUE)"]
end
it "should handle not deferred unique constraints" do
@db.create_table(:cats) do
integer :id
text :name
unique :name, :deferrable=>false
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer, name text, UNIQUE (name) NOT DEFERRABLE)"]
end
it "should handle deferred unique constraints" do
@db.create_table(:cats) do
integer :id
text :name
unique :name, :deferrable=>true
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer, name text, UNIQUE (name) DEFERRABLE INITIALLY DEFERRED)"]
end
it "should handle deferred initially immediate unique constraints" do
@db.create_table(:cats) do
integer :id
text :name
unique :name, :deferrable=>:immediate
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer, name text, UNIQUE (name) DEFERRABLE INITIALLY IMMEDIATE)"]
end
it "should accept unsigned definition" do
@db.create_table(:cats) do
integer :value, :unsigned => true
end
@db.sqls.must_equal ["CREATE TABLE cats (value integer UNSIGNED)"]
end
it "should accept [SET|ENUM](...) types" do
@db.create_table(:cats) do
set :color, :elements => ['black', 'tricolor', 'grey']
end
@db.sqls.must_equal ["CREATE TABLE cats (color set('black', 'tricolor', 'grey'))"]
end
it "should accept varchar size" do
@db.create_table(:cats) do
varchar :name
end
@db.sqls.must_equal ["CREATE TABLE cats (name varchar(255))"]
@db.create_table(:cats) do
varchar :name, :size => 51
end
@db.sqls.must_equal ["CREATE TABLE cats (name varchar(51))"]
end
it "should use double precision for double type" do
@db.create_table(:cats) do
double :name
end
@db.sqls.must_equal ["CREATE TABLE cats (name double precision)"]
end
it "should accept foreign keys without options" do
@db.create_table(:cats) do
foreign_key :project_id
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer)"]
end
it "should accept foreign keys with options" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects)"]
end
it "should accept foreign keys with separate table argument" do
@db.create_table(:cats) do
foreign_key :project_id, :projects, :default=>3
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer DEFAULT 3 REFERENCES projects)"]
end
it "should allow naming foreign key constraint with :foreign_key_constraint_name option" do
@db.create_table(:cats) do
foreign_key :project_id, :projects, :foreign_key_constraint_name=>:foo
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer CONSTRAINT foo REFERENCES projects)"]
end
it "should raise an error if the table argument to foreign_key isn't a hash, symbol, or nil" do
proc{@db.create_table(:cats){foreign_key :project_id, Object.new, :default=>3}}.must_raise(Sequel::Error)
end
it "should accept foreign keys with arbitrary keys" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :key => :id
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects(id))"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :key => :zzz
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects(zzz))"]
end
it "should accept foreign keys with ON DELETE clause" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :restrict
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE RESTRICT)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :no_action
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE NO ACTION)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :set_null
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET NULL)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :set_default
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET DEFAULT)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => 'NO ACTION FOO'
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE NO ACTION FOO)"]
end
it "should accept foreign keys with ON UPDATE clause" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :restrict
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE RESTRICT)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :cascade
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE CASCADE)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :no_action
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE NO ACTION)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :set_null
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET NULL)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :set_default
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET DEFAULT)"]
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => 'SET DEFAULT FOO'
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET DEFAULT FOO)"]
end
it "should accept foreign keys with deferrable option" do
@db.create_table(:cats) do
foreign_key :project_id, :projects, :deferrable=>true
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects DEFERRABLE INITIALLY DEFERRED)"]
end
it "should accept collation" do
@db.create_table(:cats) do
String :name, :collate => :utf8_bin
end
@db.sqls.must_equal ['CREATE TABLE cats (name varchar(255) COLLATE utf8_bin)']
end
it "should accept collation as a String, treated literally" do
@db.create_table(:cats) do
String :name, :collate => '"utf8_bin"'
end
@db.sqls.must_equal ['CREATE TABLE cats (name varchar(255) COLLATE "utf8_bin")']
end
it "should accept inline index definition" do
@db.create_table(:cats) do
integer :id, :index => true
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"]
end
it "should accept inline index definition with a hash of options" do
@db.create_table(:cats) do
integer :id, :index => {:unique=>true}
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_index ON cats (id)"]
end
it "should accept inline index definition for foreign keys" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade, :index => true
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)",
"CREATE INDEX cats_project_id_index ON cats (project_id)"]
end
it "should accept inline index definition for foreign keys with a hash of options" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade, :index => {:unique=>true}
end
@db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)",
"CREATE UNIQUE INDEX cats_project_id_index ON cats (project_id)"]
end
it "should accept index definitions" do
@db.create_table(:cats) do
integer :id
index :id
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"]
end
it "should accept unique constraint definitions" do
@db.create_table(:cats) do
text :name
unique :name
end
@db.sqls.must_equal ["CREATE TABLE cats (name text, UNIQUE (name))"]
end
it "should accept partial index definitions" do
def @db.supports_partial_indexes?() true end
@db.create_table(:cats) do
integer :id
index :id, :where=>proc{id > 1}
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id) WHERE (id > 1)"]
end
it "should raise an error if partial indexes are not supported" do
proc do
@db.create_table(:cats) do
integer :id
index :id, :where=>proc{id > 1}
end
end.must_raise(Sequel::Error)
end
it "should not raise on index error for unsupported index definitions if ignore_index_errors is used" do
@db.create_table(:cats, :ignore_index_errors=>true) do
text :name
full_text_index :name
end
end
it "should raise on full-text index definitions" do
proc {
@db.create_table(:cats) do
text :name
full_text_index :name
end
}.must_raise(Sequel::Error)
end
it "should raise on spatial index definitions" do
proc {
@db.create_table(:cats) do
point :geom
spatial_index :geom
end
}.must_raise(Sequel::Error)
end
it "should raise on partial index definitions" do
proc {
@db.create_table(:cats) do
text :name
index :name, :where => {:something => true}
end
}.must_raise(Sequel::Error)
end
it "should raise index definitions with type" do
proc {
@db.create_table(:cats) do
text :name
index :name, :type => :hash
end
}.must_raise(Sequel::Error)
end
it "should ignore errors if the database raises an error on an index creation statement and the :ignore_index_errors option is used" do
@db.define_singleton_method(:execute_ddl){|*a| raise Sequel::DatabaseError if /blah/.match(a.first); super(*a)}
lambda{@db.create_table(:cats){Integer :id; index :blah; index :id}}.must_raise(Sequel::DatabaseError)
@db.sqls.must_equal ['CREATE TABLE cats (id integer)']
@db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id}
@db.sqls.must_equal ['CREATE TABLE cats (id integer)', 'CREATE INDEX cats_id_index ON cats (id)']
end
it "should not use savepoints around index creation if running inside a transaction if :ignore_index_errors option is used" do
@db.define_singleton_method(:execute_ddl){|*a| super(*a); raise Sequel::DatabaseError if /blah/.match(a.first)}
@db.transaction{@db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id}}
@db.sqls.must_equal ["BEGIN", "CREATE TABLE cats (id integer)", "CREATE INDEX cats_blah_index ON cats (blah)", "CREATE INDEX cats_id_index ON cats (id)", "COMMIT"]
end
it "should use savepoints around index creation if running inside a transaction if :ignore_index_errors option is used and transactional schema modifications are supported" do
@db.define_singleton_method(:supports_transactional_ddl?){true}
@db.define_singleton_method(:execute_ddl){|*a| super(*a); raise Sequel::DatabaseError if /blah/.match(a.first)}
@db.transaction{@db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id}}
@db.sqls.must_equal ["BEGIN", "CREATE TABLE cats (id integer)", "SAVEPOINT autopoint_1", "CREATE INDEX cats_blah_index ON cats (blah)", "ROLLBACK TO SAVEPOINT autopoint_1", "SAVEPOINT autopoint_1", "CREATE INDEX cats_id_index ON cats (id)", "RELEASE SAVEPOINT autopoint_1", "COMMIT"]
end
it "should accept multiple index definitions" do
@db.create_table(:cats) do
integer :id
index :id
index :name
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)", "CREATE INDEX cats_name_index ON cats (name)"]
end
it "should accept functional indexes" do
@db.create_table(:cats) do
integer :id
index Sequel.function(:lower, :name)
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_lower_name__index ON cats (lower(name))"]
end
it "should accept indexes with identifiers" do
@db.create_table(:cats) do
integer :id
index Sequel.identifier(:lower__name)
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_lower__name_index ON cats (lower__name)"]
end
it "should accept custom index names" do
@db.create_table(:cats) do
integer :id
index :id, :name => 'abc'
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX abc ON cats (id)"]
end
it "should accept unique index definitions" do
@db.create_table(:cats) do
integer :id
index :id, :unique => true
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_index ON cats (id)"]
end
it "should accept composite index definitions" do
@db.create_table(:cats) do
integer :id
index [:id, :name], :unique => true
end
@db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_name_index ON cats (id, name)"]
end
it "should accept unnamed constraint definitions with blocks" do
@db.create_table(:cats) do
integer :score
check{(x > 0) & (y < 1)}
end
@db.sqls.must_equal ["CREATE TABLE cats (score integer, CHECK ((x > 0) AND (y < 1)))"]
end
it "should accept unnamed constraint definitions with function calls" do
@db.create_table(:cats) do
integer :score
check{f(x)}
end
@db.sqls.must_equal ["CREATE TABLE cats (score integer, CHECK (f(x)))"]
end
it "should accept unnamed constraint definitions" do
@db.create_table(:cats) do
check 'price < ?', 100
end
@db.sqls.must_equal ["CREATE TABLE cats (CHECK (price < 100))"]
end
it "should accept arrays of pairs constraints" do
@db.create_table(:cats) do
check [[:price, 100]]
end
@db.sqls.must_equal ["CREATE TABLE cats (CHECK (price = 100))"]
end
it "should accept hash constraints" do
@db.create_table(:cats) do
check :price=>100
end
@db.sqls.must_equal ["CREATE TABLE cats (CHECK (price = 100))"]
end
it "should accept array constraints" do
@db.create_table(:cats) do
check [Sequel.expr(:x) > 0, Sequel.expr(:y) < 1]
end
@db.sqls.must_equal ["CREATE TABLE cats (CHECK ((x > 0) AND (y < 1)))"]
end
it "should accept expression constraints" do
@db.create_table(:cats) do
check Sequel.&(Sequel.expr(:x) > 0, Sequel.expr(:y) < 1)
end
@db.sqls.must_equal ["CREATE TABLE cats (CHECK ((x > 0) AND (y < 1)))"]
end
it "should accept named constraint definitions" do
@db.create_table(:cats) do
integer :score
constraint :valid_score, 'score <= 100'
end
@db.sqls.must_equal ["CREATE TABLE cats (score integer, CONSTRAINT valid_score CHECK (score <= 100))"]
end
it "should accept named constraint definitions with options" do
@db.create_table(:cats) do
integer :score
constraint({:name=>:valid_score, :deferrable=>true}, 'score <= 100')
end
@db.sqls.must_equal ["CREATE TABLE cats (score integer, CONSTRAINT valid_score CHECK (score <= 100) DEFERRABLE INITIALLY DEFERRED)"]
end
it "should accept named constraint definitions with block" do
@db.create_table(:cats) do
constraint(:blah_blah){(x.sql_number > 0) & (y.sql_number < 1)}
end
@db.sqls.must_equal ["CREATE TABLE cats (CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1)))"]
end
it "should raise an error if an invalid constraint type is used" do
proc{@db.create_table(:cats){unique [:a, :b], :type=>:bb}}.must_raise(Sequel::Error)
end
it "should accept composite primary keys" do
@db.create_table(:cats) do
integer :a
integer :b
primary_key [:a, :b]
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, PRIMARY KEY (a, b))"]
end
it "should accept named composite primary keys" do
@db.create_table(:cats) do
integer :a
integer :b
primary_key [:a, :b], :name => :cpk
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cpk PRIMARY KEY (a, b))"]
end
it "should accept composite foreign keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc)"]
end
it "should accept named composite foreign keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :name => :cfk
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cfk FOREIGN KEY (a, b) REFERENCES abc)"]
end
it "should accept composite foreign keys with arbitrary keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:real_a, :real_b]
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(real_a, real_b))"]
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:z, :x]
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(z, x))"]
end
it "should accept composite foreign keys with on delete and on update clauses" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_delete => :cascade
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE CASCADE)"]
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_update => :no_action
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON UPDATE NO ACTION)"]
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_delete => :restrict, :on_update => :set_default
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE RESTRICT ON UPDATE SET DEFAULT)"]
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:x, :y], :on_delete => :set_null, :on_update => :set_null
end
@db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(x, y) ON DELETE SET NULL ON UPDATE SET NULL)"]
end
it "should accept an :as option to create a table from the results of a dataset" do
@db.create_table(:cats, :as=>@db[:a])
@db.sqls.must_equal ['CREATE TABLE cats AS SELECT * FROM a']
end
it "should accept an :as option to create a table from a SELECT string" do
@db.create_table(:cats, :as=>'SELECT * FROM a')
@db.sqls.must_equal ['CREATE TABLE cats AS SELECT * FROM a']
end
it "should raise an Error if both a block and an :as argument are given" do
proc{@db.create_table(:cats, :as=>@db[:a]){}}.must_raise(Sequel::Error)
end
end
describe "DB#create_table!" do
before do
@db = Sequel.mock
end
it "should create the table if it does not exist" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.create_table!(:cats){|*a|}.must_be_nil
@db.sqls.must_equal ['CREATE TABLE cats ()']
end
it "should drop the table before creating it if it already exists" do
@db.define_singleton_method(:table_exists?){|a| true}
@db.create_table!(:cats){|*a|}
@db.sqls.must_equal ['DROP TABLE cats', 'CREATE TABLE cats ()']
end
it "should use IF EXISTS if the database supports it" do
@db.define_singleton_method(:supports_drop_table_if_exists?){true}
@db.create_table!(:cats){|*a|}
@db.sqls.must_equal ['DROP TABLE IF EXISTS cats', 'CREATE TABLE cats ()']
end
end
describe "DB#create_table?" do
before do
@db = Sequel.mock
end
it "should not create the table if the table already exists" do
@db.define_singleton_method(:table_exists?){|a| true}
@db.create_table?(:cats){|*a|}.must_be_nil
@db.sqls.must_equal []
end
it "should create the table if the table doesn't already exist" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.create_table?(:cats){|*a|}
@db.sqls.must_equal ['CREATE TABLE cats ()']
end
it "should use IF NOT EXISTS if the database supports that" do
@db.define_singleton_method(:supports_create_table_if_not_exists?){true}
@db.create_table?(:cats){|*a|}
@db.sqls.must_equal ['CREATE TABLE IF NOT EXISTS cats ()']
end
it "should not use IF NOT EXISTS if the indexes are created" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.define_singleton_method(:supports_create_table_if_not_exists?){true}
@db.create_table?(:cats){|*a| Integer :a, :index=>true}
@db.sqls.must_equal ['CREATE TABLE cats (a integer)', 'CREATE INDEX cats_a_index ON cats (a)']
@db.define_singleton_method(:table_exists?){|a| true}
@db.create_table?(:cats){|*a| Integer :a, :index=>true}
@db.sqls.must_equal []
end
end
describe "DB#create_join_table" do
before do
@db = Sequel.mock
end
it "should take a hash with foreign keys and table name values" do
@db.create_join_table(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should be able to have values be a hash of options" do
@db.create_join_table(:cat_id=>{:table=>:cats, :null=>true}, :dog_id=>{:table=>:dogs, :default=>0})
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NULL REFERENCES cats, dog_id integer DEFAULT 0 NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should be able to pass a second hash of table options" do
@db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :temp=>true)
@db.sqls.must_equal ['CREATE TEMPORARY TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should recognize :name option in table options" do
@db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :name=>:f)
@db.sqls.must_equal ['CREATE TABLE f (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX f_dog_id_cat_id_index ON f (dog_id, cat_id)']
end
it "should recognize :index_options option in table options" do
@db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :index_options=>{:name=>:foo_index})
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX foo_index ON cats_dogs (dog_id, cat_id)']
end
it "should recognize :no_index option in table options" do
@db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :no_index=>true)
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))']
end
it "should recognize :no_primary_key option in table options" do
@db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :no_primary_key=>true)
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs)', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should raise an error if the hash doesn't have 2 entries with table names" do
proc{@db.create_join_table({})}.must_raise(Sequel::Error)
proc{@db.create_join_table({:cat_id=>:cats})}.must_raise(Sequel::Error)
proc{@db.create_join_table({:cat_id=>:cats, :human_id=>:humans, :dog_id=>:dog})}.must_raise(Sequel::Error)
proc{@db.create_join_table({:cat_id=>:cats, :dog_id=>{}})}.must_raise(Sequel::Error)
end
end
describe "DB#create_join_table?" do
before do
@db = Sequel.mock
end
it "should create the table if it does not already exist" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should not create the table if it already exists" do
@db.define_singleton_method(:table_exists?){|a| true}
@db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs)
@db.sqls.must_equal []
end
it "should not use IF NOT EXISTS" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.define_singleton_method(:supports_create_table_if_not_exists?){true}
@db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs)
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
@db.define_singleton_method(:table_exists?){|a| true}
@db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs)
@db.sqls.must_equal []
end
it "should not use IF NOT EXISTS if no_index is used" do
@db.define_singleton_method(:supports_create_table_if_not_exists?){true}
@db.create_join_table?({:cat_id=>:cats, :dog_id=>:dogs}, :no_index=>true)
@db.sqls.must_equal ['CREATE TABLE IF NOT EXISTS cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))']
end
end
describe "DB#create_join_table!" do
before do
@db = Sequel.mock
end
it "should drop the table first if it already exists" do
@db.define_singleton_method(:table_exists?){|a| true}
@db.create_join_table!(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil
@db.sqls.must_equal ['DROP TABLE cats_dogs', 'CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should not drop the table if it doesn't exists" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.create_join_table!(:cat_id=>:cats, :dog_id=>:dogs)
@db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
it "should use IF EXISTS if the database supports it" do
@db.define_singleton_method(:supports_drop_table_if_exists?){true}
@db.create_join_table!(:cat_id=>:cats, :dog_id=>:dogs)
@db.sqls.must_equal ['DROP TABLE IF EXISTS cats_dogs', 'CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)']
end
end
describe "DB#drop_join_table" do
before do
@db = Sequel.mock
end
it "should take a hash with foreign keys and table name values and drop the table" do
@db.drop_join_table(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil
@db.sqls.must_equal ['DROP TABLE cats_dogs']
end
it "should be able to have values be a hash of options" do
@db.drop_join_table(:cat_id=>{:table=>:cats, :null=>true}, :dog_id=>{:table=>:dogs, :default=>0})
@db.sqls.must_equal ['DROP TABLE cats_dogs']
end
it "should respect a second hash of table options" do
@db.drop_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :cascade=>true)
@db.sqls.must_equal ['DROP TABLE cats_dogs CASCADE']
end
it "should respect :name option for table name" do
@db.drop_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :name=>:f)
@db.sqls.must_equal ['DROP TABLE f']
end
it "should raise an error if the hash doesn't have 2 entries with table names" do
proc{@db.drop_join_table({})}.must_raise(Sequel::Error)
proc{@db.drop_join_table({:cat_id=>:cats})}.must_raise(Sequel::Error)
proc{@db.drop_join_table({:cat_id=>:cats, :human_id=>:humans, :dog_id=>:dog})}.must_raise(Sequel::Error)
proc{@db.drop_join_table({:cat_id=>:cats, :dog_id=>{}})}.must_raise(Sequel::Error)
end
end
describe "DB#drop_table" do
before do
@db = Sequel.mock
end
it "should generate a DROP TABLE statement" do
@db.drop_table(:cats).must_be_nil
@db.sqls.must_equal ['DROP TABLE cats']
end
it "should drop multiple tables at once" do
@db.drop_table :cats, :dogs
@db.sqls.must_equal ['DROP TABLE cats', 'DROP TABLE dogs']
end
it "should take an options hash and support the :cascade option" do
@db.drop_table :cats, :dogs, :cascade=>true
@db.sqls.must_equal ['DROP TABLE cats CASCADE', 'DROP TABLE dogs CASCADE']
end
end
describe "DB#drop_table?" do
before do
@db = Sequel.mock
end
it "should drop the table if it exists" do
@db.define_singleton_method(:table_exists?){|a| true}
@db.drop_table?(:cats).must_be_nil
@db.sqls.must_equal ["DROP TABLE cats"]
end
it "should do nothing if the table does not exist" do
@db.define_singleton_method(:table_exists?){|a| false}
@db.drop_table?(:cats)
@db.sqls.must_equal []
end
it "should operate on multiple tables at once" do
@db.define_singleton_method(:table_exists?){|a| a == :cats}
@db.drop_table? :cats, :dogs
@db.sqls.must_equal ['DROP TABLE cats']
end
it "should take an options hash and support the :cascade option" do
@db.define_singleton_method(:table_exists?){|a| true}
@db.drop_table? :cats, :dogs, :cascade=>true
@db.sqls.must_equal ['DROP TABLE cats CASCADE', 'DROP TABLE dogs CASCADE']
end
it "should use IF NOT EXISTS if the database supports that" do
@db.define_singleton_method(:supports_drop_table_if_exists?){true}
@db.drop_table? :cats, :dogs
@db.sqls.must_equal ['DROP TABLE IF EXISTS cats', 'DROP TABLE IF EXISTS dogs']
end
it "should use IF NOT EXISTS with CASCADE if the database supports that" do
@db.define_singleton_method(:supports_drop_table_if_exists?){true}
@db.drop_table? :cats, :dogs, :cascade=>true
@db.sqls.must_equal ['DROP TABLE IF EXISTS cats CASCADE', 'DROP TABLE IF EXISTS dogs CASCADE']
end
end
describe "DB#alter_table" do
before do
@db = Sequel.mock
end
it "should allow adding not null constraint via set_column_allow_null with false argument" do
@db.alter_table(:cats) do
set_column_allow_null :score, false
end.must_be_nil
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score SET NOT NULL"]
end
it "should allow removing not null constraint via set_column_allow_null with true argument" do
@db.alter_table(:cats) do
set_column_allow_null :score, true
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score DROP NOT NULL"]
end
it "should allow adding not null constraint via set_column_not_null" do
@db.alter_table(:cats) do
set_column_not_null :score
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score SET NOT NULL"]
end
it "should allow removing not null constraint via set_column_allow_null without argument" do
@db.alter_table(:cats) do
set_column_allow_null :score
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score DROP NOT NULL"]
end
it "should support add_column" do
@db.alter_table(:cats) do
add_column :score, :integer
end
@db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN score integer"]
end
it "should support add_constraint" do
@db.alter_table(:cats) do
add_constraint :valid_score, 'score <= 100'
end
@db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)"]
end
it "should support add_constraint with options" do
@db.alter_table(:cats) do
add_constraint({:name=>:valid_score, :deferrable=>true}, 'score <= 100')
end
@db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100) DEFERRABLE INITIALLY DEFERRED"]
end
it "should support add_constraint with block" do
@db.alter_table(:cats) do
add_constraint(:blah_blah){(x.sql_number > 0) & (y.sql_number < 1)}
end
@db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1))"]
end
it "should support add_unique_constraint" do
@db.alter_table(:cats) do
add_unique_constraint [:a, :b]
end
@db.sqls.must_equal ["ALTER TABLE cats ADD UNIQUE (a, b)"]
@db.alter_table(:cats) do
add_unique_constraint [:a, :b], :name => :ab_uniq
end
@db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT ab_uniq UNIQUE (a, b)"]
end
it "should support add_foreign_key" do
@db.alter_table(:cats) do
add_foreign_key :node_id, :nodes
end
@db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN node_id integer REFERENCES nodes"]
end
it "should support add_foreign_key with composite foreign keys" do
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props
end
@db.sqls.must_equal ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"]
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :name => :cfk
end
@db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT cfk FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"]
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :key => [:nid, :pid]
end
@db.sqls.must_equal ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props(nid, pid)"]
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :on_delete => :restrict, :on_update => :cascade
end
@db.sqls.must_equal ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props ON DELETE RESTRICT ON UPDATE CASCADE"]
end
it "should support add_index" do
@db.alter_table(:cats) do
add_index :name
end
@db.sqls.must_equal ["CREATE INDEX cats_name_index ON cats (name)"]
end
it "should ignore errors if the database raises an error on an add_index call and the :ignore_errors option is used" do
@db.define_singleton_method(:execute_ddl){|*a| raise Sequel::DatabaseError}
lambda{@db.add_index(:cats, :id)}.must_raise(Sequel::DatabaseError)
@db.add_index(:cats, :id, :ignore_errors=>true)
@db.sqls.must_equal []
end
it "should support add_primary_key" do
@db.alter_table(:cats) do
add_primary_key :id
end
@db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN id integer PRIMARY KEY AUTOINCREMENT"]
end
it "should support add_primary_key with composite primary keys" do
@db.alter_table(:cats) do
add_primary_key [:id, :type]
end
@db.sqls.must_equal ["ALTER TABLE cats ADD PRIMARY KEY (id, type)"]
@db.alter_table(:cats) do
add_primary_key [:id, :type], :name => :cpk
end
@db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT cpk PRIMARY KEY (id, type)"]
end
it "should set primary key column NOT NULL when using add_primary_key if database doesn't handle it" do
def @db.can_add_primary_key_constraint_on_nullable_columns?; false end
@db.alter_table(:cats) do
add_primary_key :id
end
@db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN id integer NOT NULL PRIMARY KEY AUTOINCREMENT"]
end
it "should set primary key column NOT NULL when adding primary key constraint if database doesn't handle it" do
def @db.can_add_primary_key_constraint_on_nullable_columns?; false end
@db.alter_table(:cats) do
add_primary_key [:id, :type]
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN id SET NOT NULL", "ALTER TABLE cats ALTER COLUMN type SET NOT NULL", "ALTER TABLE cats ADD PRIMARY KEY (id, type)"]
end
it "should support drop_column" do
@db.alter_table(:cats) do
drop_column :score
end
@db.sqls.must_equal ["ALTER TABLE cats DROP COLUMN score"]
end
it "should support drop_column with :cascade=>true option" do
@db.alter_table(:cats) do
drop_column :score, :cascade=>true
end
@db.sqls.must_equal ["ALTER TABLE cats DROP COLUMN score CASCADE"]
end
it "should support drop_constraint" do
@db.alter_table(:cats) do
drop_constraint :valid_score
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT valid_score"]
end
it "should support drop_constraint with :cascade=>true option" do
@db.alter_table(:cats) do
drop_constraint :valid_score, :cascade=>true
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT valid_score CASCADE"]
end
it "should support drop_foreign_key" do
def @db.foreign_key_list(table_name)
[{:name=>:cats_node_id_fkey, :columns=>[:node_id]}]
end
@db.alter_table(:cats) do
drop_foreign_key :node_id
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT cats_node_id_fkey", "ALTER TABLE cats DROP COLUMN node_id"]
end
it "should support drop_foreign_key with :foreign_key_constraint_name option" do
@db.alter_table(:cats) do
drop_foreign_key :node_id, :foreign_key_constraint_name=>:foo
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT foo", "ALTER TABLE cats DROP COLUMN node_id"]
end
it "should support drop_foreign_key with :name option" do
@db.alter_table(:cats) do
drop_foreign_key :node_id, :name=>:foo
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT foo", "ALTER TABLE cats DROP COLUMN node_id"]
end
it "should support drop_foreign_key with composite foreign keys" do
def @db.foreign_key_list(table_name)
[{:name=>:cats_node_id_prop_id_fkey, :columns=>[:node_id, :prop_id]}]
end
@db.alter_table(:cats) do
drop_foreign_key [:node_id, :prop_id]
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT cats_node_id_prop_id_fkey"]
@db.alter_table(:cats) do
drop_foreign_key [:node_id, :prop_id], :name => :cfk
end
@db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT cfk"]
end
it "should have drop_foreign_key raise Error if no name is found" do
def @db.foreign_key_list(table_name)
[{:name=>:cats_node_id_fkey, :columns=>[:foo_id]}]
end
lambda{@db.alter_table(:cats){drop_foreign_key :node_id}}.must_raise(Sequel::Error)
end
it "should have drop_foreign_key raise Error if multiple foreign keys found" do
def @db.foreign_key_list(table_name)
[{:name=>:cats_node_id_fkey, :columns=>[:node_id]}, {:name=>:cats_node_id_fkey2, :columns=>[:node_id]}]
end
lambda{@db.alter_table(:cats){drop_foreign_key :node_id}}.must_raise(Sequel::Error)
end
it "should support drop_index" do
@db.alter_table(:cats) do
drop_index :name
end
@db.sqls.must_equal ["DROP INDEX cats_name_index"]
end
it "should support drop_index with a given name" do
@db.alter_table(:cats) do
drop_index :name, :name=>:blah_blah
end
@db.sqls.must_equal ["DROP INDEX blah_blah"]
end
it "should support rename_column" do
@db.alter_table(:cats) do
rename_column :name, :old_name
end
@db.sqls.must_equal ["ALTER TABLE cats RENAME COLUMN name TO old_name"]
end
it "should support set_column_default" do
@db.alter_table(:cats) do
set_column_default :score, 3
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score SET DEFAULT 3"]
end
it "should support set_column_type" do
@db.alter_table(:cats) do
set_column_type :score, :real
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score TYPE real"]
end
it "should support set_column_type with options" do
@db.alter_table(:cats) do
set_column_type :score, :integer, :unsigned=>true
set_column_type :score, :varchar, :size=>30
set_column_type :score, :enum, :elements=>['a', 'b']
end
@db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score TYPE integer UNSIGNED",
"ALTER TABLE cats ALTER COLUMN score TYPE varchar(30)",
"ALTER TABLE cats ALTER COLUMN score TYPE enum('a', 'b')"]
end
it "should combine operations into a single query if the database supports it" do
@db.define_singleton_method(:supports_combining_alter_table_ops?){true}
@db.alter_table(:cats) do
add_column :a, Integer
drop_column :b
set_column_not_null :c
rename_column :d, :e
set_column_default :f, 'g'
set_column_type :h, Integer
add_constraint(:i){a > 1}
drop_constraint :j
end
@db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN a integer, DROP COLUMN b, ALTER COLUMN c SET NOT NULL, RENAME COLUMN d TO e, ALTER COLUMN f SET DEFAULT 'g', ALTER COLUMN h TYPE integer, ADD CONSTRAINT i CHECK (a > 1), DROP CONSTRAINT j"]
end
it "should combine operations into consecutive groups of combinable operations if the database supports combining operations" do
@db.define_singleton_method(:supports_combining_alter_table_ops?){true}
@db.alter_table(:cats) do
add_column :a, Integer
drop_column :b
set_column_not_null :c
rename_column :d, :e
add_index :e
set_column_default :f, 'g'
set_column_type :h, Integer
add_constraint(:i){a > 1}
drop_constraint :j
end
@db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN a integer, DROP COLUMN b, ALTER COLUMN c SET NOT NULL, RENAME COLUMN d TO e",
"CREATE INDEX cats_e_index ON cats (e)",
"ALTER TABLE cats ALTER COLUMN f SET DEFAULT 'g', ALTER COLUMN h TYPE integer, ADD CONSTRAINT i CHECK (a > 1), DROP CONSTRAINT j"]
end
end
describe "Database#create_table" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.create_table :test do
primary_key :id, :integer, :null => false
column :name, :text
index :name, :unique => true
end
@db.sqls.must_equal ['CREATE TABLE test (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name text)',
'CREATE UNIQUE INDEX test_name_index ON test (name)']
end
it "should create a temporary table" do
@db.create_table :test_tmp, :temp => true do
primary_key :id, :integer, :null => false
column :name, :text
index :name, :unique => true
end
@db.sqls.must_equal ['CREATE TEMPORARY TABLE test_tmp (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name text)',
'CREATE UNIQUE INDEX test_tmp_name_index ON test_tmp (name)']
end
end
describe "Database#alter_table" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.alter_table :xyz do
add_column :aaa, :text, :null => false, :unique => true
drop_column :bbb
rename_column :ccc, :ddd
set_column_type :eee, :integer
set_column_default :hhh, 'abcd'
add_index :fff, :unique => true
drop_index :ggg
end
@db.sqls.must_equal ['ALTER TABLE xyz ADD COLUMN aaa text NOT NULL UNIQUE',
'ALTER TABLE xyz DROP COLUMN bbb',
'ALTER TABLE xyz RENAME COLUMN ccc TO ddd',
'ALTER TABLE xyz ALTER COLUMN eee TYPE integer',
"ALTER TABLE xyz ALTER COLUMN hhh SET DEFAULT 'abcd'",
'CREATE UNIQUE INDEX xyz_fff_index ON xyz (fff)',
'DROP INDEX xyz_ggg_index']
end
end
describe "Database#add_column" do
it "should construct proper SQL" do
db = Sequel.mock
db.add_column(:test, :name, :text, :unique => true).must_be_nil
db.sqls.must_equal ['ALTER TABLE test ADD COLUMN name text UNIQUE']
end
end
describe "Database#drop_column" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.drop_column(:test, :name).must_be_nil
@db.sqls.must_equal ['ALTER TABLE test DROP COLUMN name']
end
it "should use CASCADE for :cascade=>true option" do
@db.drop_column :test, :name, :cascade=>true
@db.sqls.must_equal ['ALTER TABLE test DROP COLUMN name CASCADE']
end
end
describe "Database#rename_column" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.rename_column(:test, :abc, :def).must_be_nil
@db.sqls.must_equal ['ALTER TABLE test RENAME COLUMN abc TO def']
end
end
describe "Database#set_column_type" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.set_column_type(:test, :name, :integer).must_be_nil
@db.sqls.must_equal ['ALTER TABLE test ALTER COLUMN name TYPE integer']
end
end
describe "Database#set_column_default" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.set_column_default(:test, :name, 'zyx').must_be_nil
@db.sqls.must_equal ["ALTER TABLE test ALTER COLUMN name SET DEFAULT 'zyx'"]
end
end
describe "Database#add_index" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.add_index(:test, :name, :unique => true).must_be_nil
@db.sqls.must_equal ['CREATE UNIQUE INDEX test_name_index ON test (name)']
end
it "should accept multiple columns" do
@db.add_index :test, [:one, :two]
@db.sqls.must_equal ['CREATE INDEX test_one_two_index ON test (one, two)']
end
end
describe "Database#drop_index" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.drop_index(:test, :name).must_be_nil
@db.sqls.must_equal ['DROP INDEX test_name_index']
end
end
describe "Database#drop_table" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.drop_table(:test).must_be_nil
@db.sqls.must_equal ['DROP TABLE test']
end
it "should accept multiple table names" do
@db.drop_table :a, :bb, :ccc
@db.sqls.must_equal ['DROP TABLE a', 'DROP TABLE bb', 'DROP TABLE ccc']
end
end
describe "Database#rename_table" do
before do
@db = Sequel.mock
end
it "should construct proper SQL" do
@db.rename_table(:abc, :xyz).must_be_nil
@db.sqls.must_equal ['ALTER TABLE abc RENAME TO xyz']
end
end
describe "Database#create_view" do
before do
@db = Sequel.mock
end
it "should construct proper SQL with raw SQL" do
@db.create_view(:test, "SELECT * FROM xyz").must_be_nil
@db.sqls.must_equal ['CREATE VIEW test AS SELECT * FROM xyz']
@db.create_view Sequel.identifier(:test), "SELECT * FROM xyz"
@db.sqls.must_equal ['CREATE VIEW test AS SELECT * FROM xyz']
end
it "should construct proper SQL with dataset" do
@db.create_view :test, @db[:items].select(:a, :b).order(:c)
@db.sqls.must_equal ['CREATE VIEW test AS SELECT a, b FROM items ORDER BY c']
end
it "should handle :columns option" do
@db.create_view :test, @db[:items].select(:a, :b).order(:c), :columns=>[:d, :e]
@db.sqls.must_equal ['CREATE VIEW test (d, e) AS SELECT a, b FROM items ORDER BY c']
@db.create_view :test, @db[:items].select(:a, :b).order(:c), :columns=>%w'd e'
@db.sqls.must_equal ['CREATE VIEW test (d, e) AS SELECT a, b FROM items ORDER BY c']
@db.create_view :test, @db[:items].select(:a, :b).order(:c), :columns=>[Sequel.identifier('d'), Sequel.lit('e')]
@db.sqls.must_equal ['CREATE VIEW test (d, e) AS SELECT a, b FROM items ORDER BY c']
end
it "should handle :check option" do
@db.create_view :test, @db[:items].select(:a, :b).order(:c), :check=>true
@db.sqls.must_equal ['CREATE VIEW test AS SELECT a, b FROM items ORDER BY c WITH CHECK OPTION']
@db.create_view :test, @db[:items].select(:a, :b).order(:c), :check=>:local
@db.sqls.must_equal ['CREATE VIEW test AS SELECT a, b FROM items ORDER BY c WITH LOCAL CHECK OPTION']
end
with_symbol_splitting "should handle create_or_replace_view with splittable symbols" do
@db.create_or_replace_view :sch__test, "SELECT * FROM xyz"
@db.sqls.must_equal ['DROP VIEW sch.test', 'CREATE VIEW sch.test AS SELECT * FROM xyz']
end
it "should handle create_or_replace_view" do
@db.create_or_replace_view :test, @db[:items].select(:a, :b).order(:c)
@db.sqls.must_equal ['DROP VIEW test', 'CREATE VIEW test AS SELECT a, b FROM items ORDER BY c']
@db.create_or_replace_view Sequel.identifier(:test), @db[:items].select(:a, :b).order(:c)
@db.sqls.must_equal ['DROP VIEW test', 'CREATE VIEW test AS SELECT a, b FROM items ORDER BY c']
end
it "should use CREATE OR REPLACE VIEW if such syntax is supported" do
def @db.supports_create_or_replace_view?() true end
@db.create_or_replace_view :test, @db[:items]
@db.sqls.must_equal ['CREATE OR REPLACE VIEW test AS SELECT * FROM items']
end
end
describe "Database#drop_view" do
before do
@db = Sequel.mock
end
with_symbol_splitting "should construct proper SQL for splittable symbols" do
@db.drop_view(:sch__test).must_be_nil
@db.sqls.must_equal ['DROP VIEW sch.test']
end
it "should construct proper SQL" do
@db.drop_view :test
@db.drop_view Sequel.identifier(:test)
@db.drop_view Sequel.qualify(:sch, :test)
@db.sqls.must_equal ['DROP VIEW test', 'DROP VIEW test', 'DROP VIEW sch.test']
end
it "should drop multiple views at once" do
@db.drop_view :cats, :dogs
@db.sqls.must_equal ['DROP VIEW cats', 'DROP VIEW dogs']
end
it "should support the :cascade option" do
@db.drop_view :cats, :dogs, :cascade=>true
@db.sqls.must_equal ['DROP VIEW cats CASCADE', 'DROP VIEW dogs CASCADE']
end
it "should support the :if_exists option" do
@db.drop_view :cats, :dogs, :if_exists=>true
@db.sqls.must_equal ['DROP VIEW IF EXISTS cats', 'DROP VIEW IF EXISTS dogs']
end
end
describe "Database#alter_table_sql" do
it "should raise error for an invalid op" do
proc {Sequel.mock.send(:alter_table_sql, :mau, :op => :blah)}.must_raise(Sequel::Error)
end
end
describe "Schema Parser" do
before do
@sqls = []
@db = Sequel::Database.new
end
it "should raise an error if there are no columns" do
@db.define_singleton_method(:schema_parse_table) do |t, opts|
[]
end
proc{@db.schema(:x)}.must_raise(Sequel::Error)
end
it "should cache data by default" do
@db.define_singleton_method(:schema_parse_table) do |t, opts|
[[:a, {}]]
end
@db.schema(:x).must_be_same_as(@db.schema(:x))
end
it "should not cache data if :reload=>true is given" do
@db.define_singleton_method(:schema_parse_table) do |t, opts|
[[:a, {}]]
end
@db.schema(:x).wont_be_same_as(@db.schema(:x, :reload=>true))
end
it "should not cache schema metadata if cache_schema is false" do
@db.cache_schema = false
@db.define_singleton_method(:schema_parse_table) do |t, opts|
[[:a, {}]]
end
@db.schema(:x).wont_be_same_as(@db.schema(:x))
end
it "should provide options if given a table name" do
c = nil
@db.define_singleton_method(:schema_parse_table) do |t, opts|
c = [t, opts]
[[:a, {:db_type=>t.to_s}]]
end
@db.schema(:x)
c.must_equal ["x", {}]
@db.schema(Sequel[:s][:x])
c.must_equal ["x", {:schema=>"s"}]
ds = @db[Sequel[:s][:y]]
@db.schema(ds)
c.must_equal ["y", {:schema=>"s", :dataset=>ds}]
end
with_symbol_splitting "should provide options if given a table name with splittable symbols" do
c = nil
@db.define_singleton_method(:schema_parse_table) do |t, opts|
c = [t, opts]
[[:a, {:db_type=>t.to_s}]]
end
@db.schema(:s__x)
c.must_equal ["x", {:schema=>"s"}]
ds = @db[:s__y]
@db.schema(ds)
c.must_equal ["y", {:schema=>"s", :dataset=>ds}]
end
it "should parse the schema correctly for a single table" do
sqls = @sqls
proc{@db.schema(:x)}.must_raise(Sequel::Error)
@db.define_singleton_method(:schema_parse_table) do |t, opts|
sqls << t
[[:a, {:db_type=>t.to_s}]]
end
@db.schema(:x).must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}]]
@sqls.must_equal ['x']
@db.schema(:x).must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}]]
@sqls.must_equal ['x']
@db.schema(:x, :reload=>true).must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}]]
@sqls.must_equal ['x', 'x']
end
it "should set :auto_increment to true by default if unset and a single integer primary key is used" do
@db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'integer'}]]}
@db.schema(:x).first.last[:auto_increment].must_equal true
end
it "should not set :auto_increment if already set" do
@db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'integer', :auto_increment=>false}]]}
@db.schema(:x).first.last[:auto_increment].must_equal false
end
it "should set :auto_increment to false by default if unset and a single nonintegery primary key is used" do
@db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'varchar'}]]}
@db.schema(:x).first.last[:auto_increment].must_equal false
end
it "should set :auto_increment to false by default if unset and a composite primary key" do
@db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'integer'}], [:b, {:primary_key=>true, :db_type=>'integer'}]]}
@db.schema(:x).first.last[:auto_increment].must_equal false
@db.schema(:x).last.last[:auto_increment].must_equal false
end
it "should set :auto_increment to true by default if set and not the first column" do
@db.define_singleton_method(:schema_parse_table){|*| [[:b, {}], [:a, {:primary_key=>true, :db_type=>'integer'}]]}
@db.schema(:x).last.last[:auto_increment].must_equal true
end
it "should convert various types of table name arguments" do
@db.define_singleton_method(:schema_parse_table) do |t, opts|
[[t, opts]]
end
s1 = @db.schema(:x)
s1.must_equal [['x', {:ruby_default=>nil}]]
@db.schema(:x).object_id.must_equal s1.object_id
@db.schema(Sequel.identifier(:x)).object_id.must_equal s1.object_id
s2 = @db.schema(Sequel[:x][:y])
s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil}]]
@db.schema(Sequel[:x][:y]).object_id.must_equal s2.object_id
@db.schema(Sequel.qualify(:x, :y)).object_id.must_equal s2.object_id
s2 = @db.schema(Sequel.qualify(:v, Sequel[:x][:y]))
s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.identifier('v')}]]
@db.schema(Sequel.qualify(:v, Sequel[:x][:y])).object_id.must_equal s2.object_id
@db.schema(Sequel.qualify(Sequel[:v][:x], :y)).object_id.must_equal s2.object_id
s2 = @db.schema(Sequel.qualify(Sequel[:u][:v], Sequel[:x][:y]))
s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.qualify('u', 'v')}]]
@db.schema(Sequel.qualify(Sequel[:u][:v], Sequel[:x][:y])).object_id.must_equal s2.object_id
@db.schema(Sequel.qualify(Sequel.qualify(:u, :v), Sequel.qualify(:x, :y))).object_id.must_equal s2.object_id
end
with_symbol_splitting "should convert splittable symbol arguments" do
@db.define_singleton_method(:schema_parse_table) do |t, opts|
[[t, opts]]
end
s1 = @db.schema(:x)
s1.must_equal [['x', {:ruby_default=>nil}]]
@db.schema(:x).object_id.must_equal s1.object_id
@db.schema(Sequel.identifier(:x)).object_id.must_equal s1.object_id
s2 = @db.schema(:x__y)
s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil}]]
@db.schema(:x__y).object_id.must_equal s2.object_id
@db.schema(Sequel.qualify(:x, :y)).object_id.must_equal s2.object_id
s2 = @db.schema(Sequel.qualify(:v, :x__y))
s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.identifier('v')}]]
@db.schema(Sequel.qualify(:v, :x__y)).object_id.must_equal s2.object_id
@db.schema(Sequel.qualify(:v__x, :y)).object_id.must_equal s2.object_id
s2 = @db.schema(Sequel.qualify(:u__v, :x__y))
s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.qualify('u', 'v')}]]
@db.schema(Sequel.qualify(:u__v, :x__y)).object_id.must_equal s2.object_id
@db.schema(Sequel.qualify(Sequel.qualify(:u, :v), Sequel.qualify(:x, :y))).object_id.must_equal s2.object_id
end
it "should correctly parse all supported data types" do
sm = Module.new do
def schema_parse_table(t, opts)
[[:x, {:db_type=>t.to_s, :type=>schema_column_type(t.to_s)}]]
end
end
@db.extend(sm)
@db.schema(:tinyint).first.last[:type].must_equal :integer
@db.schema(:int).first.last[:type].must_equal :integer
@db.schema(:integer).first.last[:type].must_equal :integer
@db.schema(:bigint).first.last[:type].must_equal :integer
@db.schema(:smallint).first.last[:type].must_equal :integer
@db.schema(:character).first.last[:type].must_equal :string
@db.schema(:"character varying").first.last[:type].must_equal :string
@db.schema(:varchar).first.last[:type].must_equal :string
@db.schema(:"varchar(255)").first.last[:type].must_equal :string
@db.schema(:"varchar(255)").first.last[:max_length].must_equal 255
@db.schema(:text).first.last[:type].must_equal :string
@db.schema(:date).first.last[:type].must_equal :date
@db.schema(:datetime).first.last[:type].must_equal :datetime
@db.schema(:timestamp).first.last[:type].must_equal :datetime
@db.schema(:"timestamp with time zone").first.last[:type].must_equal :datetime
@db.schema(:"timestamp without time zone").first.last[:type].must_equal :datetime
@db.schema(:time).first.last[:type].must_equal :time
@db.schema(:"time with time zone").first.last[:type].must_equal :time
@db.schema(:"time without time zone").first.last[:type].must_equal :time
@db.schema(:boolean).first.last[:type].must_equal :boolean
@db.schema(:real).first.last[:type].must_equal :float
@db.schema(:float).first.last[:type].must_equal :float
@db.schema(:double).first.last[:type].must_equal :float
@db.schema(:"double(1,2)").first.last[:type].must_equal :float
@db.schema(:"double precision").first.last[:type].must_equal :float
@db.schema(:number).first.last[:type].must_equal :decimal
@db.schema(:numeric).first.last[:type].must_equal :decimal
@db.schema(:decimal).first.last[:type].must_equal :decimal
@db.schema(:"number(10,0)").first.last[:type].must_equal :integer
@db.schema(:"numeric(10, 10)").first.last[:type].must_equal :decimal
@db.schema(:"decimal(10,1)").first.last[:type].must_equal :decimal
@db.schema(:bytea).first.last[:type].must_equal :blob
@db.schema(:blob).first.last[:type].must_equal :blob
@db.schema(:image).first.last[:type].must_equal :blob
@db.schema(:nchar).first.last[:type].must_equal :string
@db.schema(:nvarchar).first.last[:type].must_equal :string
@db.schema(:ntext).first.last[:type].must_equal :string
@db.schema(:smalldatetime).first.last[:type].must_equal :datetime
@db.schema(:binary).first.last[:type].must_equal :blob
@db.schema(:varbinary).first.last[:type].must_equal :blob
@db.schema(:enum).first.last[:type].must_equal :enum
@db = Sequel.mock(:host=>'postgres')
@db.extend(sm)
@db.schema(:interval).first.last[:type].must_equal :interval
@db.schema(:citext).first.last[:type].must_equal :string
@db = Sequel.mock(:host=>'mysql')
@db.extend(sm)
@db.schema(:set).first.last[:type].must_equal :set
@db.schema(:mediumint).first.last[:type].must_equal :integer
@db.schema(:mediumtext).first.last[:type].must_equal :string
end
end
| 38.071032 | 287 | 0.684989 |
bb9bda7377711daea8ac50d3fa35d454dcb785ec | 153 | class AddTntIdToDonations < ActiveRecord::Migration
def change
add_column :donations, :tnt_id, :string
add_index :donations, :tnt_id
end
end
| 21.857143 | 51 | 0.751634 |
9112b9d6306379b901cc4c3c3b8e384d6e6694c2 | 4,823 | # == Schema Information
#
# Table name: current_operators_serving_stop
#
# id :integer not null, primary key
# stop_id :integer not null
# operator_id :integer not null
# tags :hstore
# created_at :datetime
# updated_at :datetime
# created_or_updated_in_changeset_id :integer
# version :integer
#
# Indexes
#
# #c_operators_serving_stop_cu_in_changeset_id_index (created_or_updated_in_changeset_id)
# index_current_operators_serving_stop_on_operator_id (operator_id)
# index_current_operators_serving_stop_on_stop_id_and_operator_id (stop_id,operator_id) UNIQUE
#
describe OperatorServingStop do
it 'can be created' do
operator_serving_stop = create(:operator_serving_stop)
expect(OperatorServingStop.exists?(operator_serving_stop.id)).to be true
end
context 'through changesets' do
before(:each) do
@changeset1 = create(:changeset, payload: {
changes: [
{
action: 'createUpdate',
stop: {
onestopId: 's-9q8yt4b-19Hollway',
name: '19th Ave & Holloway St',
timezone: 'America/Los_Angeles',
geometry: { type: "Point", coordinates: [-122.475075, 37.721323] }
}
},
{
action: 'createUpdate',
operator: {
onestopId: 'o-9q8y-SFMTA',
name: 'SFMTA',
serves: ['s-9q8yt4b-19Hollway'],
geometry: { type: "Polygon", coordinates:[[[-121.56649700000001,37.00360599999999],[-122.23195700000001,37.48541199999998],[-122.38653400000001,37.600005999999965],[-122.412018,37.63110599999998],[-122.39432299999996,37.77643899999997],[-121.65072100000002,37.12908099999998],[-121.61080899999999,37.085774999999984],[-121.56649700000001,37.00360599999999]]]}
}
}
]
})
end
it 'can be created' do
@changeset1.apply!
expect(Stop.find_by_onestop_id!('s-9q8yt4b-19Hollway').operators).to include Operator.find_by_onestop_id!('o-9q8y-SFMTA')
expect(Operator.find_by_onestop_id!('o-9q8y-SFMTA').stops).to include Stop.find_by_onestop_id!('s-9q8yt4b-19Hollway')
expect(@changeset1.stops_created_or_updated).to match_array([
Stop.find_by_onestop_id!('s-9q8yt4b-19Hollway')
])
expect(@changeset1.operators_created_or_updated).to match_array([
Operator.find_by_onestop_id!('o-9q8y-SFMTA')
])
expect(@changeset1.operators_serving_stop_created_or_updated).to match_array([
OperatorServingStop.find_by_attributes({ operator_onestop_id: 'o-9q8y-SFMTA', stop_onestop_id: 's-9q8yt4b-19Hollway'})
])
end
it 'can be destroyed' do
@changeset1.apply!
changeset2 = create(:changeset, payload: {
changes: [
{
action: 'createUpdate',
operator: {
onestopId: 'o-9q8y-SFMTA',
doesNotServe: ['s-9q8yt4b-19Hollway']
}
}
]
})
changeset2.apply!
expect(OperatorServingStop.count).to eq 0
expect(OldOperatorServingStop.count).to eq 1
expect(Stop.find_by_onestop_id!('s-9q8yt4b-19Hollway').operators.count).to eq 0
end
it 'will be removed when stop is destroyed' do
@changeset1.apply!
changeset2 = create(:changeset, payload: {
changes: [
{
action: 'destroy',
stop: {
onestopId: 's-9q8yt4b-19Hollway'
}
}
]
})
changeset2.apply!
expect(OperatorServingStop.count).to eq 0
expect(OldOperatorServingStop.count).to eq 1
expect(Operator.find_by_onestop_id!('o-9q8y-SFMTA').stops.count).to eq 0
expect(OldOperatorServingStop.first.stop).to be_a OldStop
expect(OldStop.first.old_operators_serving_stop.first.operator).to eq Operator.find_by_onestop_id!('o-9q8y-SFMTA')
end
it 'will be removed when operator is destroyed' do
@changeset1.apply!
changeset2 = create(:changeset, payload: {
changes: [
{
action: 'destroy',
operator: {
onestopId: 'o-9q8y-SFMTA'
}
}
]
})
changeset2.apply!
expect(OperatorServingStop.count).to eq 0
expect(OldOperatorServingStop.count).to eq 1
expect(Stop.find_by_onestop_id!('s-9q8yt4b-19Hollway').operators.count).to eq 0
expect(OldOperatorServingStop.first.operator).to be_a OldOperator
expect(OldOperatorServingStop.first.stop).to eq Stop.find_by_onestop_id!('s-9q8yt4b-19Hollway')
end
end
end
| 37.679688 | 373 | 0.612482 |
5dc6bee33544a521df7fbf8a68651cf800c26929 | 2,739 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Auxiliary
include Msf::Auxiliary::Report
include Msf::Exploit::Remote::Udp
include Msf::Auxiliary::UDPScanner
include Msf::Auxiliary::NTP
include Msf::Auxiliary::DRDoS
def initialize(info = {})
super(update_info(info,
'Name' => 'NTP Clock Variables Disclosure',
'Description' => %q(
        This module reads an NTP server's internal system variables. These variables
        can contain potentially sensitive information, such as the NTP software version,
        operating system version, peers, and more.
),
'Author' =>
[
'Ewerson Guimaraes(Crash) <crash[at]dclabs.com.br>', # original Metasploit module
'Jon Hart <jon_hart[at]rapid7.com>' # UDPScanner version for faster scans
],
'License' => MSF_LICENSE,
'References' =>
[
['CVE', '2013-5211'], # see also scanner/ntp/ntp_monlist.rb
[ 'URL', 'https://www.rapid7.com/db/vulnerabilities/ntp-clock-variables-disclosure/' ]
]
)
)
end
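  # Parse each NTP control reply and group the responses by the host that sent them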
def scanner_process(data, shost, _sport)
@results[shost] ||= []
@results[shost] << Rex::Proto::NTP::NTPControl.new.read(data)
end
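  # Send the READVAR probe to each host, spoofing the source address when SRCIP is set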
def scan_host(ip)
if spoofed?
datastore['ScannerRecvWindow'] = 0
scanner_spoof_send(@probe, ip, datastore['RPORT'], datastore['SRCIP'], datastore['NUM_REQUESTS'])
else
scanner_send(@probe, ip, datastore['RPORT'])
end
end
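  # Build a single NTP mode 6 (control) READVAR probe that is reused for the whole batch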
def scanner_prescan(batch)
@results = {}
print_status("Sending NTP v2 READVAR probes to #{batch[0]}->#{batch[-1]} (#{batch.length} hosts)")
@probe = Rex::Proto::NTP::NTPControl.new
@probe.version = datastore['VERSION']
@probe.operation = 2
end
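  # Report each responding host as an NTP service and flag hosts usable for READVAR amplification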
def scanner_postscan(_batch)
@results.keys.each do |k|
# TODO: check to see if any of the responses are actually NTP before reporting
report_service(
host: k,
proto: 'udp',
port: rport,
name: 'ntp',
info: @results[k].map { |r| r.payload.slice(0,r.payload_size) }.join.inspect
)
peer = "#{k}:#{rport}"
response_map = { @probe => @results[k] }
vulnerable, proof = prove_amplification(response_map)
what = 'NTP Mode 6 READVAR DRDoS'
if vulnerable
print_good("#{peer} - Vulnerable to #{what}: #{proof}")
report_vuln(
host: k,
port: rport,
proto: 'udp',
name: what,
refs: references
)
else
vprint_status("#{peer} - Not vulnerable to #{what}: #{proof}")
end
end
end
end
| 31.125 | 103 | 0.606426 |
010cf7c575938f44121811ac92a4e01aec1e351d | 224 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'qonto'
unless defined?(SPEC_ROOT)
SPEC_ROOT = File.expand_path('../', __FILE__)
end
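# Require every shared spec support file under spec/support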
Dir[File.join(SPEC_ROOT, 'support/**/*.rb')].each { |f| require f }
| 24.888889 | 67 | 0.683036 |
ac62c977b48779e13b4175a65fbae2852430329d | 281 | class Shimo < Cask
version :latest
sha256 :no_check
url 'http://www.chungwasoft.com/files/Shimo_latest.zip'
appcast 'http://www.chungwasoft.com/library/appcasts/Shimo3/shimocast.php'
homepage 'http://www.chungwasoft.com/shimo/'
license :unknown
app 'Shimo.app'
end
| 23.416667 | 76 | 0.740214 |
6179b2009778cf200a25c90cf39307b3a026bd27 | 4,655 | require 'formula'
class Git < Formula
homepage 'http://git-scm.com'
url 'https://git-core.googlecode.com/files/git-1.9.0.tar.gz'
sha1 'e60667fc16e5a5f1cde46616b0458cc802707743'
head 'https://github.com/git/git.git', :shallow => false
bottle do
sha1 "78bb720052e624b889b7c39e47ec40e463fa13b0" => :mavericks
sha1 "95b604ef6dff8a8abbc6819b1769c6df6ac45b03" => :mountain_lion
sha1 "10d46b289e9877f866e953dfc65fde260c80acb8" => :lion
end
option 'with-blk-sha1', 'Compile with the block-optimized SHA1 implementation'
option 'without-completions', 'Disable bash/zsh completions from "contrib" directory'
option 'with-brewed-openssl', "Build with Homebrew OpenSSL instead of the system version"
option 'with-brewed-curl', "Use Homebrew's version of cURL library"
option 'with-persistent-https', 'Build git-remote-persistent-https from "contrib" directory'
depends_on 'pcre' => :optional
depends_on 'gettext' => :optional
depends_on 'openssl' if build.with? 'brewed-openssl'
depends_on 'curl' if build.with? 'brewed-curl'
depends_on 'go' => :build if build.with? 'persistent-https'
resource 'man' do
url 'http://git-core.googlecode.com/files/git-manpages-1.9.0.tar.gz'
sha1 'cff590c92b4d1c8a143c078473140b653cc5d56a'
end
resource 'html' do
url 'http://git-core.googlecode.com/files/git-htmldocs-1.9.0.tar.gz'
sha1 '65eb3f411f4699695c7081a7c716cabb9ce23d75'
end
def install
# If these things are installed, tell Git build system to not use them
ENV['NO_FINK'] = '1'
ENV['NO_DARWIN_PORTS'] = '1'
ENV['V'] = '1' # build verbosely
ENV['NO_R_TO_GCC_LINKER'] = '1' # pass arguments to LD correctly
ENV['PYTHON_PATH'] = which 'python'
ENV['PERL_PATH'] = which 'perl'
if MacOS.version >= :mavericks and MacOS.dev_tools_prefix
ENV['PERLLIB_EXTRA'] = "#{MacOS.dev_tools_prefix}/Library/Perl/5.16/darwin-thread-multi-2level"
end
unless quiet_system ENV['PERL_PATH'], '-e', 'use ExtUtils::MakeMaker'
ENV['NO_PERL_MAKEMAKER'] = '1'
end
ENV['BLK_SHA1'] = '1' if build.with? 'blk-sha1'
if build.with? 'pcre'
ENV['USE_LIBPCRE'] = '1'
ENV['LIBPCREDIR'] = Formula['pcre'].opt_prefix
end
ENV['NO_GETTEXT'] = '1' unless build.with? 'gettext'
ENV['GIT_DIR'] = cached_download/".git" if build.head?
system "make", "prefix=#{prefix}",
"sysconfdir=#{etc}",
"CC=#{ENV.cc}",
"CFLAGS=#{ENV.cflags}",
"LDFLAGS=#{ENV.ldflags}",
"install"
bin.install Dir["contrib/remote-helpers/git-remote-{hg,bzr}"]
# Install the OS X keychain credential helper
cd 'contrib/credential/osxkeychain' do
system "make", "CC=#{ENV.cc}",
"CFLAGS=#{ENV.cflags}",
"LDFLAGS=#{ENV.ldflags}"
bin.install 'git-credential-osxkeychain'
system "make", "clean"
end
# Install git-subtree
cd 'contrib/subtree' do
system "make", "CC=#{ENV.cc}",
"CFLAGS=#{ENV.cflags}",
"LDFLAGS=#{ENV.ldflags}"
bin.install 'git-subtree'
end
if build.with? 'persistent-https'
cd 'contrib/persistent-https' do
system "make"
bin.install 'git-remote-persistent-http',
'git-remote-persistent-https',
'git-remote-persistent-https--proxy'
end
end
unless build.without? 'completions'
# install the completion script first because it is inside 'contrib'
bash_completion.install 'contrib/completion/git-completion.bash'
bash_completion.install 'contrib/completion/git-prompt.sh'
zsh_completion.install 'contrib/completion/git-completion.zsh' => '_git'
cp "#{bash_completion}/git-completion.bash", zsh_completion
end
(share+'git-core').install 'contrib'
# We could build the manpages ourselves, but the build process depends
# on many other packages, and is somewhat crazy, this way is easier.
man.install resource('man')
(share+'doc/git-doc').install resource('html')
# Make html docs world-readable; check if this is still needed at 1.8.6
chmod 0644, Dir["#{share}/doc/git-doc/**/*.{html,txt}"]
end
def caveats; <<-EOS.undent
The OS X keychain credential helper has been installed to:
#{HOMEBREW_PREFIX}/bin/git-credential-osxkeychain
The 'contrib' directory has been installed to:
#{HOMEBREW_PREFIX}/share/git-core/contrib
EOS
end
test do
HOMEBREW_REPOSITORY.cd do
assert_equal 'bin/brew', `#{bin}/git ls-files -- bin`.strip
end
end
end
| 34.481481 | 101 | 0.654135 |
1d68c684bdfb887fdd313252c803287727ebbe43 | 802 | class Utils
# Common processing to handle a response that is expected to be JSON
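  # Illustrative usage sketch (not from the original source; the `render_items`
  # helper is an assumed name). The `success` argument is expected to respond to `call`:
  #
  #   Utils.handle_json(response, ->(json) { render_items(json) })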
def self.handle_json(response, success)
content_type = response.headers.get('content-type') || ''
isJson = content_type.include? 'json'
if response.status == 200 and isJson
response.json().then do |json|
      success.call(json)
end
else
footer = 'See server log for full details'
if isJson
response.json().then do |json|
# Pick out the exception
message = json['exception'] || ''
alert "#{response.status} #{response.statusText}\n#{message}\n#{footer}"
end
else # not JSON
        response.text().then do |text|
alert "#{response.status} #{response.statusText}\n#{text}\n#{footer}"
end
end
end
end
end
| 29.703704 | 82 | 0.604738 |
61b0bb6cd5f8c11fa3508ba0766caad5507bf93f | 4,435 | module Fog
module ContainerInfra
class TeleFonica < Fog::Service
SUPPORTED_VERSIONS = /v1/
SUPPORTED_MICROVERSION = '1.3'
requires :telefonica_auth_url
recognizes :telefonica_auth_token, :telefonica_management_url,
:persistent, :telefonica_service_type, :telefonica_service_name,
:telefonica_tenant, :telefonica_tenant_id,
:telefonica_api_key, :telefonica_username, :telefonica_identity_endpoint,
:current_user, :current_tenant, :telefonica_region,
:telefonica_endpoint_type, :telefonica_cache_ttl,
:telefonica_project_name, :telefonica_project_id,
:telefonica_project_domain, :telefonica_user_domain, :telefonica_domain_name,
:telefonica_project_domain_id, :telefonica_user_domain_id, :telefonica_domain_id,
:telefonica_identity_api_version
model_path 'fog/container_infra/telefonica/models'
model :bay
collection :bays
model :bay_model
collection :bay_models
model :certificate
collection :certificates
model :cluster
collection :clusters
model :cluster_template
collection :cluster_templates
request_path 'fog/container_infra/telefonica/requests'
# Bay CRUD
request :create_bay
request :delete_bay
request :get_bay
request :list_bays
request :update_bay
# Bay Model CRUD
request :create_bay_model
request :delete_bay_model
request :get_bay_model
request :list_bay_models
request :update_bay_model
# Certificate CRUD
request :create_certificate
request :get_certificate
# Cluster CRUD
request :create_cluster
request :delete_cluster
request :get_cluster
request :list_clusters
request :update_cluster
# Cluster Template CRUD
request :create_cluster_template
request :delete_cluster_template
request :get_cluster_template
request :list_cluster_templates
request :update_cluster_template
class Mock
def self.data
@data ||= Hash.new do |hash, key|
hash[key] = {
:users => {},
:tenants => {}
}
end
end
def self.reset
@data = nil
end
def initialize(options = {})
@telefonica_username = options[:telefonica_username]
@telefonica_tenant = options[:telefonica_tenant]
@telefonica_auth_uri = URI.parse(options[:telefonica_auth_url])
@auth_token = Fog::Mock.random_base64(64)
@auth_token_expiration = (Time.now.utc + 86400).iso8601
management_url = URI.parse(options[:telefonica_auth_url])
management_url.port = 9511
management_url.path = '/v1'
@telefonica_management_url = management_url.to_s
@data ||= {:users => {}}
unless @data[:users].find { |u| u['name'] == options[:telefonica_username] }
id = Fog::Mock.random_numbers(6).to_s
@data[:users][id] = {
'id' => id,
'name' => options[:telefonica_username],
'email' => "#{options[:telefonica_username]}@mock.com",
'tenantId' => Fog::Mock.random_numbers(6).to_s,
'enabled' => true
}
end
end
def data
self.class.data[@telefonica_username]
end
def reset_data
self.class.data.delete(@telefonica_username)
end
def credentials
{:provider => 'telefonica',
:telefonica_auth_url => @telefonica_auth_uri.to_s,
:telefonica_auth_token => @auth_token,
:telefonica_management_url => @telefonica_management_url}
end
end
class Real
include Fog::TeleFonica::Core
def self.not_found_class
Fog::ContainerInfra::TeleFonica::NotFound
end
def default_path_prefix
'v1'
end
def default_service_type
%w[container-infra]
end
def request(options = {})
options[:headers] = {'TeleFonica-API-Version' => "container-infra #{SUPPORTED_MICROVERSION}"}
super(options)
end
end
end
end
end
| 30.586207 | 103 | 0.600902 |
4a36f52712815d0bfc22c8005b53fb83adefbeeb | 478 | FactoryBot.define do
factory :sprint_market_position, class: 'IGMarkets::SprintMarketPosition' do
created_date { '2014-10-22T18:30:15' }
currency { 'USD' }
deal_id { 'DEAL' }
description { 'Description' }
direction { 'BUY' }
epic { 'FM.D.FTSE.FTSE.IP' }
expiry_time { '2014-10-22T19:30:14' }
instrument_name { 'Instrument' }
market_status { 'TRADEABLE' }
payout_amount { 210.8 }
size { 120.50 }
strike_level { 110.1 }
end
end
| 28.117647 | 78 | 0.635983 |
f7393ac2b99d34c020956a3b70ae7a486acd2243 | 411 | # If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
# Find the sum of all the multiples of 3 or 5 below 1000.
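# For example, multiples_of_3_and_5(10) should return 23 (3 + 5 + 6 + 9), matching the statement above.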
def multiples_of_3_and_5(num)
multiples = []
num.times do |int|
if int % 3 == 0
multiples << int
elsif int % 5 == 0
multiples << int
end
end
return multiples.reduce(:+)
end
p multiples_of_3_and_5(1000) | 24.176471 | 131 | 0.683698 |
b9b6fd8e2733e6a6702df90ec961355a2a3b3fa7 | 1,560 | require 'spec_helper'
require 'rake'
require 'stringio'
describe "mesh rake tasks" do # rubocop:disable RSpec/DescribeClass
let(:rake) { Rake::Application.new }
before do
Rake.application = rake
Rake.application.rake_require "mesh", [Rails.root.join('lib', 'tasks'), Rails.root.join('..', 'lib', 'tasks')], []
Rake::Task.define_task(:environment) # rspec has loaded rails
end
describe "mesh:import" do
let(:task_name) { "mesh:import" }
let(:output) { StringIO.new }
before do
$stdout = output # rubocop:disable RSpec/ExpectOutput # TODO: Explore how to remove this disable
end
after :all do
$stdout = STDOUT
end
it "has 'environment' as a prereq" do
expect(rake[task_name].prerequisites).to include("environment")
end
it "requires $MESH_FILE to be set" do
old_mesh_file = ENV.delete('MESH_FILE')
rake[task_name].invoke
output.seek(0)
expect(output.read).to match(/Need to set \$MESH_FILE with path to file to ingest/)
ENV['MESH_FILE'] = old_mesh_file
end
describe "create or update" do
let(:input) { StringIO.new("*NEWRECORD\nUI = 5\nMH = test\n") }
let(:term) { Qa::SubjectMeshTerm.find_by_term_id(5) }
before do
ENV['MESH_FILE'] = "dummy"
allow(File).to receive(:open).with("dummy").and_yield(input)
rake[task_name].invoke
end
it "creates or update all records in the config file" do
expect(term).not_to be_nil
expect(term.term).to eq("test")
end
end
end
end
| 31.836735 | 118 | 0.646154 |
01bede0d0ccecc367481ce5495701d7dc4086537 | 653 | require 'rspec'
require 'asciidoctor'
require 'asciidoctor-jats'
RSpec.describe Asciidoctor::JATS::Entity::Bold do
context '#to_s' do
it 'should wrap a word in <bold> tags' do
actual = build_strong('Example').to_s
expected = '<bold>Example</bold>'
expect(actual).to eq(expected)
end
it 'should wrap a phrase in <bold> tags' do
actual = build_strong('This is an example.').to_s
expected = '<bold>This is an example.</bold>'
expect(actual).to eq(expected)
end
end
private
def build_strong(text)
node = Asciidoctor::Inline.new(nil, :strong, text)
described_class.new(node)
end
end
| 23.321429 | 55 | 0.663093 |
bbf9a57e771805d5fef8056bd961737ea6742a43 | 3,297 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Suggestotron
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
#wfarr told me to put this here so the asset pipeline would work with a Rails 3 app :)
#the reason being that on heroku apps connect to the database using an environment variable
#but the way the heroku buildpack works for rails 3 and precompiling assets, the environment variables aren't available to the app when assets go to precompile
#so this disables the full app environment loading to precompile assets
config.assets.initialize_on_precompile = false
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
| 47.782609 | 163 | 0.740977 |
f76d88be79278da7a1c530b3ae1bd3d341ad1ad7 | 120 | require 'test_helper'
class HeaderTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15 | 42 | 0.7 |
18aaa41f423679968f3307583b437ccf462fd605 | 370 | require "bundler/setup"
require "versioned_item"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 24.666667 | 66 | 0.756757 |
ab258109c2b4b699b07da5ee0188eb7f1d335ca9 | 6,926 | module Fastlane
module Actions
module SharedValues
CREATE_PULL_REQUEST_HTML_URL = :CREATE_PULL_REQUEST_HTML_URL
end
class CreatePullRequestAction < Action
def self.run(params)
UI.message("Creating new pull request from '#{params[:head]}' to branch '#{params[:base]}' of '#{params[:repo]}'")
payload = {
'title' => params[:title],
'head' => params[:head],
'base' => params[:base]
}
payload['body'] = params[:body] if params[:body]
GithubApiAction.run(
server_url: params[:api_url],
api_token: params[:api_token],
http_method: 'POST',
path: "repos/#{params[:repo]}/pulls",
body: payload,
error_handlers: {
'*' => proc do |result|
UI.error("GitHub responded with #{result[:status]}: #{result[:body]}")
return nil
end
}
) do |result|
json = result[:json]
number = json['number']
html_url = json['html_url']
UI.success("Successfully created pull request ##{number}. You can see it at '#{html_url}'")
# Add labels to pull request
add_labels(params, number) if params[:labels]
Actions.lane_context[SharedValues::CREATE_PULL_REQUEST_HTML_URL] = html_url
return html_url
end
end
def self.add_labels(params, number)
payload = {
'labels' => params[:labels]
}
GithubApiAction.run(
server_url: params[:api_url],
api_token: params[:api_token],
http_method: 'PATCH',
path: "repos/#{params[:repo]}/issues/#{number}",
body: payload,
error_handlers: {
'*' => proc do |result|
UI.error("GitHub responded with #{result[:status]}: #{result[:body]}")
return nil
end
}
)
end
#####################################################
# @!group Documentation
#####################################################
def self.description
"This will create a new pull request on GitHub"
end
def self.output
[
['CREATE_PULL_REQUEST_HTML_URL', 'The HTML URL to the created pull request']
]
end
def self.available_options
[
FastlaneCore::ConfigItem.new(key: :api_token,
env_name: "GITHUB_PULL_REQUEST_API_TOKEN",
description: "Personal API Token for GitHub - generate one at https://github.com/settings/tokens",
sensitive: true,
code_gen_sensitive: true,
default_value: ENV["GITHUB_API_TOKEN"],
default_value_dynamic: true,
is_string: true,
optional: false),
FastlaneCore::ConfigItem.new(key: :repo,
env_name: "GITHUB_PULL_REQUEST_REPO",
description: "The name of the repository you want to submit the pull request to",
is_string: true,
optional: false),
FastlaneCore::ConfigItem.new(key: :title,
env_name: "GITHUB_PULL_REQUEST_TITLE",
description: "The title of the pull request",
is_string: true,
optional: false),
FastlaneCore::ConfigItem.new(key: :body,
env_name: "GITHUB_PULL_REQUEST_BODY",
description: "The contents of the pull request",
is_string: true,
optional: true),
FastlaneCore::ConfigItem.new(key: :labels,
env_name: "GITHUB_PULL_REQUEST_LABELS",
description: "The labels for the pull request",
type: Array,
optional: true),
FastlaneCore::ConfigItem.new(key: :head,
env_name: "GITHUB_PULL_REQUEST_HEAD",
description: "The name of the branch where your changes are implemented (defaults to the current branch name)",
is_string: true,
code_gen_sensitive: true,
default_value: Actions.git_branch,
default_value_dynamic: true,
optional: true),
FastlaneCore::ConfigItem.new(key: :base,
env_name: "GITHUB_PULL_REQUEST_BASE",
description: "The name of the branch you want your changes pulled into (defaults to `master`)",
is_string: true,
default_value: 'master',
optional: true),
FastlaneCore::ConfigItem.new(key: :api_url,
env_name: "GITHUB_PULL_REQUEST_API_URL",
description: "The URL of GitHub API - used when the Enterprise (default to `https://api.github.com`)",
is_string: true,
code_gen_default_value: 'https://api.github.com',
default_value: 'https://api.github.com',
optional: true)
]
end
def self.author
["seei", "tommeier"]
end
def self.is_supported?(platform)
return true
end
def self.return_value
"The pull request URL when successful"
end
def self.example_code
[
'create_pull_request(
api_token: "secret", # optional, defaults to ENV["GITHUB_API_TOKEN"]
repo: "fastlane/fastlane",
title: "Amazing new feature",
head: "my-feature", # optional, defaults to current branch name
base: "master", # optional, defaults to "master"
body: "Please pull this in!", # optional
api_url: "http://yourdomain/api/v3" # optional, for GitHub Enterprise, defaults to "https://api.github.com"
)'
]
end
def self.category
:source_control
end
end
end
end
| 42.231707 | 150 | 0.461161 |
ff751d232511cc1ae937ad75325b086b0e43a0a5 | 794 | # frozen_string_literal: true
# == Schema Information
#
# Table name: translations
#
# id :integer not null, primary key
# language_id :integer
# text :string
# resource_content_id :integer
# resource_type :string
# resource_id :integer
# language_name :string
# created_at :datetime not null
# updated_at :datetime not null
# resource_name :string
#
class Translation < ApplicationRecord
include LanguageFilterable
include TranslationSearchable
belongs_to :verse
belongs_to :resource_content
belongs_to :language
has_many :foot_notes
scope :approved, -> {joins(:resource_content).where('resource_contents.approved = ?', true)}
def es_analyzer
end
end
| 24.060606 | 94 | 0.653652 |
61ae0b112623ef2997200016110e8b2f87a67556 | 365 | require 'spec_helper'
require Rails.root.join('lib/migrations/ezine/20150408081234_enable_member_state.rb')
RSpec.describe SS::Migration20150408081234, dbscope: :example do
before do
member = create :ezine_member
member.unset :state
end
it do
expect { described_class.new.change }
.to change { Ezine::Member.enabled.count }.by 1
end
end
| 24.333333 | 85 | 0.742466 |
bbb94b14e5dccccaba6f10b45b91e0e550811052 | 6,417 | require 'spec_helper'
RSpec.describe Karafka::Params::Params do
describe 'class methods' do
subject { described_class }
describe '#build' do
let(:controller) { double }
let(:defaults) { double }
let(:merged_with_defaults) { double }
before do
expect(subject)
.to receive(:defaults)
.with(controller)
.and_return(defaults)
end
context 'when we build from a hash' do
let(:message) { { rand => rand } }
it 'expect to build a new based on defaults and merge a message' do
expect(defaults)
.to receive(:merge!)
.with(message)
.and_return(merged_with_defaults)
expect(subject.build(message, controller)).to eq merged_with_defaults
end
end
context 'when we build based on Karafka::Connection::Message' do
let(:content) { rand }
let(:message) do
instance_double(Karafka::Connection::Message, content: content)
end
it 'expect to build defaults and merge with additional values and content' do
Timecop.freeze do
expect(defaults).to receive(:merge!)
.with(parsed: false, received_at: Time.now, content: content)
.and_return(merged_with_defaults)
expect(subject.build(message, controller)).to eq merged_with_defaults
end
end
end
end
describe '#defaults' do
let(:worker) { double }
let(:parser) { double }
let(:topic) { double }
let(:controller) do
instance_double(
Karafka::BaseController,
worker: worker,
parser: parser,
topic: topic
)
end
it 'expect to return default params' do
params = subject.send(:defaults, controller)
expect(params).to be_a subject
expect(params[:controller]).to eq controller.class
expect(params[:worker]).to eq worker
expect(params[:parser]).to eq parser
expect(params[:topic]).to eq topic
end
end
end
describe 'instance methods' do
subject { described_class.send(:new, {}) }
describe '#retrieve' do
context 'when params are already parsed' do
before do
subject[:parsed] = true
end
it 'expect not to parse again and return self' do
expect(subject)
.not_to receive(:parse)
expect(subject)
.not_to receive(:merge!)
expect(subject.retrieve).to eq subject
end
end
context 'when params were not yet parsed' do
let(:content) { double }
before do
subject[:parsed] = false
subject[:content] = content
expect(subject)
.to receive(:parse)
.with(content)
.and_return(parsed_content)
end
context 'when parsed content does not contain same keys as already existing' do
let(:parsed_content) { { double => double } }
it 'expect to merge with parsed stuff that is under content key and remove this key' do
expect(subject.retrieve[parsed_content.keys[0]]).to eq parsed_content.values[0]
expect(subject.keys).not_to include :content
end
end
context 'when parsed content contains same keys as already existing' do
let(:parsed_content) { { received_at: rand } }
it 'expect not to overwrite existing keys' do
subject.retrieve
expect(subject[parsed_content[:received_at]]).not_to eq parsed_content[:received_at]
expect(subject.keys).not_to include :content
end
end
end
end
describe '#parse' do
let(:parser) { double }
let(:content) { double }
before do
subject[:parser] = parser
subject[:parsed] = false
end
context 'when we are able to successfully parse' do
let(:parsed_content) { { rand => rand } }
before do
expect(parser)
.to receive(:parse)
.with(content)
.and_return(parsed_content)
end
it 'expect to mark as parsed and return content in a message key' do
expect(subject.send(:parse, content)).to eq parsed_content
expect(subject[:parsed]).to eq true
end
end
context 'when parsing fails' do
before do
expect(parser)
.to receive(:parse)
.with(content)
.and_raise(::Karafka::Errors::ParserError)
end
it 'expect to monitor, mark as parsed and return content in a message key' do
expect(Karafka.monitor)
.to receive(:notice_error)
expect(subject.send(:parse, content)).to eq(message: content)
expect(subject[:parsed]).to eq true
end
end
end
describe '#merge!' do
subject { described_class.send(:new, base) }
context 'string based params merge with string key' do
let(:initial_value) { rand }
let(:key) { rand.to_s }
let(:base) { { key => initial_value } }
it 'expect to keep initial values' do
subject.send :merge!, key => rand
expect(subject[key]).to eq initial_value
end
end
context 'string based params merge with symbol key' do
let(:initial_value) { rand }
let(:key) { rand.to_s }
let(:base) { { key => initial_value } }
it 'expect to keep initial values' do
subject.send :merge!, key.to_sym => rand
expect(subject[key]).to eq initial_value
end
end
context 'symbol based params merge with symbol key' do
let(:initial_value) { rand }
let(:key) { rand.to_s.to_sym }
let(:base) { { key => initial_value } }
it 'expect to keep initial values' do
subject.send :merge!, key.to_sym => rand
expect(subject[key]).to eq initial_value
end
end
context 'symbol based params merge with string key' do
let(:initial_value) { rand }
let(:key) { rand.to_s.to_sym }
let(:base) { { key.to_s => initial_value } }
it 'expect to keep initial values' do
subject.send :merge!, key.to_sym => rand
expect(subject[key]).to eq initial_value
end
end
end
end
end
| 28.775785 | 97 | 0.580489 |
9158f9fb32490047a9fcc2c3aa0b17c2cee96b95 | 8,116 | =begin
= monitor.rb
Copyright (C) 2001 Shugo Maeda <[email protected]>
This library is distributed under the terms of the Ruby license.
You can freely distribute/modify this library.
== example
This is a simple example.
require 'monitor.rb'
buf = []
buf.extend(MonitorMixin)
empty_cond = buf.new_cond
# consumer
Thread.start do
loop do
buf.synchronize do
empty_cond.wait_while { buf.empty? }
print buf.shift
end
end
end
# producer
while line = ARGF.gets
buf.synchronize do
buf.push(line)
empty_cond.signal
end
end
The consumer thread waits for the producer thread to push a line
to buf while buf.empty?, and the producer thread (main thread)
reads a line from ARGF and push it to buf, then call
empty_cond.signal.
=end
#
# Adds monitor functionality to an arbitrary object by mixing the module with
# +include+. For example:
#
# require 'monitor.rb'
#
# buf = []
# buf.extend(MonitorMixin)
# empty_cond = buf.new_cond
#
# # consumer
# Thread.start do
# loop do
# buf.synchronize do
# empty_cond.wait_while { buf.empty? }
# print buf.shift
# end
# end
# end
#
# # producer
# while line = ARGF.gets
# buf.synchronize do
# buf.push(line)
# empty_cond.signal
# end
# end
#
# The consumer thread waits for the producer thread to push a line
# to buf while buf.empty?, and the producer thread (main thread)
# reads a line from ARGF and push it to buf, then call
# empty_cond.signal.
#
module MonitorMixin
#
# FIXME: This isn't documented in Nutshell.
#
# Since MonitorMixin.new_cond returns a ConditionVariable, and the example
# above calls while_wait and signal, this class should be documented.
#
class ConditionVariable
class Timeout < Exception; end
# Create a new timer with the argument timeout, and add the
# current thread to the list of waiters. Then the thread is
# stopped. It will be resumed when a corresponding #signal
# occurs.
def wait(timeout = nil)
@monitor.instance_eval {mon_check_owner()}
timer = create_timer(timeout)
Thread.critical = true
count = @monitor.instance_eval {mon_exit_for_cond()}
@waiters.push(Thread.current)
begin
Thread.stop
return true
rescue Timeout
return false
ensure
Thread.critical = true
begin
if timer && timer.alive?
Thread.kill(timer)
end
if @waiters.include?(Thread.current) # interrupted?
@waiters.delete(Thread.current)
end
@monitor.instance_eval {mon_enter_for_cond(count)}
ensure
Thread.critical = false
end
end
end
# call #wait while the supplied block returns +true+.
def wait_while
while yield
wait
end
end
# call #wait until the supplied block returns +true+.
def wait_until
until yield
wait
end
end
# Wake up and run the next waiter
def signal
@monitor.instance_eval {mon_check_owner()}
Thread.critical = true
t = @waiters.shift
t.wakeup if t
Thread.critical = false
Thread.pass
end
# Wake up all the waiters.
def broadcast
@monitor.instance_eval {mon_check_owner()}
Thread.critical = true
for t in @waiters
t.wakeup
end
@waiters.clear
Thread.critical = false
Thread.pass
end
def count_waiters
return @waiters.length
end
private
def initialize(monitor)
@monitor = monitor
@waiters = []
end
def create_timer(timeout)
if timeout
waiter = Thread.current
return Thread.start {
Thread.pass
sleep(timeout)
Thread.critical = true
waiter.raise(Timeout.new)
}
else
return nil
end
end
end
def self.extend_object(obj)
super(obj)
obj.instance_eval {mon_initialize()}
end
#
# Attempts to enter exclusive section. Returns +false+ if lock fails.
#
def mon_try_enter
result = false
Thread.critical = true
if @mon_owner.nil?
@mon_owner = Thread.current
end
if @mon_owner == Thread.current
@mon_count += 1
result = true
end
Thread.critical = false
return result
end
# For backward compatibility
alias try_mon_enter mon_try_enter
#
# Enters exclusive section.
#
def mon_enter
Thread.critical = true
mon_acquire(@mon_entering_queue)
@mon_count += 1
ensure
Thread.critical = false
end
#
# Leaves exclusive section.
#
def mon_exit
mon_check_owner
Thread.critical = true
@mon_count -= 1
if @mon_count == 0
mon_release
end
Thread.critical = false
Thread.pass
end
#
# Enters exclusive section and executes the block. Leaves the exclusive
# section automatically when the block exits. See example under
# +MonitorMixin+.
#
def mon_synchronize
mon_enter
begin
yield
ensure
mon_exit
end
end
alias synchronize mon_synchronize
#
# FIXME: This isn't documented in Nutshell.
#
# Create a new condition variable for this monitor.
# This facilitates control of the monitor with #signal and #wait.
#
def new_cond
return ConditionVariable.new(self)
end
private
def initialize(*args)
super
mon_initialize
end
# called by initialize method to set defaults for instance variables.
def mon_initialize
@mon_owner = nil
@mon_count = 0
@mon_entering_queue = []
@mon_waiting_queue = []
end
# Throw a ThreadError exception if the current thread
  # doesn't own the monitor
def mon_check_owner
if @mon_owner != Thread.current
raise ThreadError, "current thread not owner"
end
end
def mon_acquire(queue)
while @mon_owner && @mon_owner != Thread.current
queue.push(Thread.current)
Thread.stop
Thread.critical = true
end
@mon_owner = Thread.current
end
# mon_release requires Thread.critical == true
def mon_release
@mon_owner = nil
while t = @mon_waiting_queue.shift || @mon_entering_queue.shift
if t.alive?
t.wakeup
return
end
end
end
def mon_enter_for_cond(count)
mon_acquire(@mon_waiting_queue)
@mon_count = count
end
def mon_exit_for_cond
count = @mon_count
@mon_count = 0
return count
ensure
mon_release
end
end
# Monitors provide means of mutual exclusion for Thread programming.
# A critical region is created by means of the synchronize method,
# which takes a block.
# The condition variables (created with #new_cond) may be used
# to control the execution of a monitor with #signal and #wait.
#
# the Monitor class wraps MonitorMixin, and provides aliases
# alias try_enter try_mon_enter
# alias enter mon_enter
# alias exit mon_exit
# to access its methods more concisely.
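#
# A minimal usage sketch (illustrative, not from the original file):
#
#   lock = Monitor.new
#   lock.synchronize do
#     # ... access shared state exclusively here ...
#   end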
class Monitor
include MonitorMixin
alias try_enter try_mon_enter
alias enter mon_enter
alias exit mon_exit
end
# Documentation comments:
# - All documentation comes from Nutshell.
# - MonitorMixin.new_cond appears in the example, but is not documented in
# Nutshell.
# - All the internals (internal modules Accessible and Initializable, class
# ConditionVariable) appear in RDoc. It might be good to hide them, by
# making them private, or marking them :nodoc:, etc.
# - The entire example from the RD section at the top is replicated in the RDoc
# comment for MonitorMixin. Does the RD section need to remain?
# - RDoc doesn't recognise aliases, so we have mon_synchronize documented, but
# not synchronize.
# - mon_owner is in Nutshell, but appears as an accessor in a separate module
# here, so is hard/impossible to RDoc. Some other useful accessors
# (mon_count and some queue stuff) are also in this module, and don't appear
# directly in the RDoc output.
# - in short, it may be worth changing the code layout in this file to make the
# documentation easier
# Local variables:
# mode: Ruby
# tab-width: 8
# End:
| 22.733894 | 80 | 0.670034 |
e82f303c19ed7693160cc84abb9fc86bb176a711 | 1,811 | class Tcpsplit < Formula
desc "Break a packet trace into some number of sub-traces"
homepage "https://www.icir.org/mallman/software/tcpsplit/"
url "https://www.icir.org/mallman/software/tcpsplit/tcpsplit-0.2.tar.gz"
sha256 "885a6609d04eb35f31f1c6f06a0b9afd88776d85dec0caa33a86cef3f3c09d1d"
livecheck do
url :homepage
regex(/href=.*?tcpsplit[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "5bf4362d064b7f982e0fb8cb2e79010c80c19f555b79f18dd0e4f3a9dbfda8a1"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "f40f957faef51ed496030a97cda8ca0eb0716826969872185080bb8e94780f36"
sha256 cellar: :any_skip_relocation, monterey: "51e4f267ddd4cd76011a85b0e094d78d4df67b4a3d16d6dd918834a929cba203"
sha256 cellar: :any_skip_relocation, big_sur: "49781c99d1496c5c0c8ec3e56e2edc604f5e8643f36e93b0ff8b974d448363d1"
sha256 cellar: :any_skip_relocation, catalina: "ab3131cd8829f943cc4142dc616adfa696ff9d0af5dc21f94408d114f59434cd"
sha256 cellar: :any_skip_relocation, mojave: "b3a7f083a50a33edf1799fc16b6d52db71eee85bd69bad9d1d3d42e6de5cfa6f"
sha256 cellar: :any_skip_relocation, high_sierra: "0b603f1737a000ec2452bd3ac48df7c4e04d6cfb15fc48dabca96bd23137f40a"
sha256 cellar: :any_skip_relocation, sierra: "2e9d12ee609d30075f141527c3804ce78a8c312e5b72ce6eb655ed08521faf45"
sha256 cellar: :any_skip_relocation, el_capitan: "5014edcbc87913b2103c9347dd4b132ca1b4c3b1a007c853eda75213481e7d30"
sha256 cellar: :any_skip_relocation, yosemite: "c87bf331cb20c6301b922ee3fb37f0c92957f3e32d0391b07aa7b36980b20819"
end
uses_from_macos "libpcap"
def install
system "make"
bin.install "tcpsplit"
end
test do
system "#{bin}/tcpsplit", "--version"
end
end
| 50.305556 | 123 | 0.793484 |
035c3814c50014717ca99ce7101b9d82f7325d19 | 1,762 | # frozen_string_literal: true
module Spotlight
##
# A controller to handle the adminstration of site admin users
class AdminUsersController < Spotlight::ApplicationController
before_action :authenticate_user!
before_action :load_site
before_action :load_users
load_and_authorize_resource :site, class: 'Spotlight::Site'
def index; end
def create
if update_roles
Spotlight::InviteUsersService.call(resource: @site)
flash[:notice] = t('spotlight.admin_users.create.success')
else
flash[:error] = t('spotlight.admin_users.create.error')
end
redirect_to spotlight.admin_users_path
end
def update
user = Spotlight::Engine.user_class.find(params[:id])
if user
Spotlight::Role.create(user_key: user.email, role: 'admin', resource: @site).save
flash[:notice] = t('spotlight.admin_users.create.success')
else
flash[:error] = t('spotlight.admin_users.create.error')
end
redirect_to spotlight.admin_users_path
end
def destroy
user = Spotlight::Engine.user_class.find(params[:id])
if user.roles.where(resource: @site).first.destroy
flash[:notice] = t('spotlight.admin_users.destroy.success')
else
flash[:error] = t('spotlight.admin_users.destroy.error')
end
redirect_to spotlight.admin_users_path
end
private
def load_users
@users ||= ::User.all.reject(&:guest?)
end
def load_site
@site ||= Spotlight::Site.instance
end
def create_params
params.require(:user).permit(:email)
end
def update_roles
Spotlight::Role.create(user_key: create_params[:email], role: 'admin', resource: @site).save
end
end
end
| 26.298507 | 98 | 0.671396 |
f779e1336d1e5c95d29fa406935ca3ac8f94d50b | 1,131 | # == Schema Information
#
# Table name: dhcp4_options
#
# code :integer not null
# dhcp_client_class :string(128)
# formatted_value :text
# persistent :boolean default(FALSE), not null
# space :string(128)
# user_context :text
# value :binary
# dhcp4_subnet_id :bigint(8)
# host_id :integer
# option_id :integer not null, primary key
# scope_id :integer not null
#
# Indexes
#
# fk_dhcp4_options_host1_idx (host_id)
# fk_dhcp4_options_scope_idx (scope_id)
#
# Foreign Keys
#
# fk_dhcp4_option_scode (scope_id => dhcp_option_scope.scope_id) ON DELETE => cascade
# fk_options_host1 (host_id => hosts.host_id) ON DELETE => cascade
#
class KeaDhcp4Option < ApplicationRecord
def self.table_name
'dhcp4_options'
end
def self.primary_key
'option_id'
end
belongs_to :kea_dhcp_option_scope, class_name: 'KeaDhcpOptionScope', foreign_key: 'scope_id', primary_key: 'scope_id'
belongs_to :kea_host, class_name: 'KeaHost', foreign_key: 'host_id', primary_key: 'host_id'
end
| 26.928571 | 120 | 0.657825 |
e95c637930778dccb4aa060223c466bfbee496e8 | 840 | Pod::Spec.new do |s|
s.name = "WZYCamera"
s.version = "1.0.3"
s.summary = "WZYCamera is a lightweight custom camera controller. A line of code integration, bid farewell to invoke complex system API distress."
s.description = <<-DESC
WZYCamera is a lightweight custom camera controller. A line of code integration, bid farewell to invoke complex system API distress.
DESC
s.homepage = "https://github.com/CoderZYWang/WZYCamera"
s.license = "MIT"
s.author = { "CoderZYWang" => "[email protected]" }
s.social_media_url = "http://blog.csdn.net/felicity294250051"
s.platform = :ios
s.source = { :git => "https://github.com/CoderZYWang/WZYCamera.git", :tag => "1.0.3" }
s.source_files = "WZYCamera/*.{h,m}"
s.frameworks = 'UIKit', 'Foundation','AVFoundation','AssetsLibrary','CoreMotion'
end
| 44.210526 | 151 | 0.683333 |
91f8dd84e6c182a013b350b48724544635ac2575 | 8,488 | require File.dirname(__FILE__) + '/../sass'
require 'sass/tree/node'
require 'strscan'
module Sass
# :stopdoc:
module Tree
class Node
def to_sass(opts = {})
result = ''
children.each do |child|
result << "#{child.to_sass(0, opts)}\n"
end
result
end
end
class ValueNode
def to_sass(tabs, opts = {})
"#{value}\n"
end
end
class RuleNode
def to_sass(tabs, opts = {})
str = "\n#{' ' * tabs}#{rule}#{children.any? { |c| c.is_a? AttrNode } ? "\n" : ''}"
children.each do |child|
str << "#{child.to_sass(tabs + 1, opts)}"
end
str
end
end
class AttrNode
def to_sass(tabs, opts = {})
"#{' ' * tabs}#{opts[:alternate] ? '' : ':'}#{name}#{opts[:alternate] ? ':' : ''} #{value}\n"
end
end
class DirectiveNode
def to_sass(tabs, opts = {})
"#{' ' * tabs}#{value}#{children.map {|c| c.to_sass(tabs + 1, opts)}}\n"
end
end
end
# This class is based on the Ruby 1.9 ordered hashes.
# It keeps the semantics and most of the efficiency of normal hashes
# while also keeping track of the order in which elements were set.
class OrderedHash
Node = Struct.new(:key, :value, :next, :prev)
include Enumerable
def initialize
@hash = {}
end
def initialize_copy(other)
@hash = other.instance_variable_get('@hash').clone
end
def [](key)
@hash[key] && @hash[key].value
end
def []=(key, value)
node = Node.new(key, value)
if old = @hash[key]
if old.prev
old.prev.next = old.next
else # old is @first and @last
@first = @last = nil
end
end
if @first.nil?
@first = @last = node
else
node.prev = @last
@last.next = node
@last = node
end
@hash[key] = node
value
end
def each
return unless @first
yield [@first.key, @first.value]
node = @first
yield [node.key, node.value] while node = node.next
self
end
def values
self.map { |k, v| v }
end
end
# :startdoc:
# This class contains the functionality used in the +css2sass+ utility,
# namely converting CSS documents to Sass templates.
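  #
  # A minimal usage sketch (illustrative; the exact whitespace of the output may differ):
  #
  #   Sass::CSS.new("div { color: red; }").render
  #   #=> "div\n  :color red\n"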
class CSS
# Creates a new instance of Sass::CSS that will compile the given document
# to a Sass string when +render+ is called.
def initialize(template, options = {})
if template.is_a? IO
template = template.read
end
@options = options
@template = StringScanner.new(template)
end
# Processes the document and returns the result as a string
    # containing the Sass template.
def render
begin
build_tree.to_sass(@options).strip + "\n"
rescue Exception => err
line = @template.string[[email protected]].split("\n").size
err.backtrace.unshift "(css):#{line}"
raise err
end
end
private
def build_tree
root = Tree::Node.new({})
whitespace
rules root
expand_commas root
parent_ref_rules root
remove_parent_refs root
flatten_rules root
fold_commas root
root
end
def rules(root)
while r = rule
root << r
whitespace
end
end
def rule
return unless rule = @template.scan(/[^\{\};]+/)
rule.strip!
directive = rule[0] == ?@
if directive
node = Tree::DirectiveNode.new(rule, {})
return node if @template.scan(/;/)
assert_match /\{/
whitespace
rules(node)
return node
end
assert_match /\{/
node = Tree::RuleNode.new(rule, {})
attributes(node)
return node
end
def attributes(rule)
while @template.scan(/[^:\}\s]+/)
name = @template[0]
whitespace
assert_match /:/
value = ''
while @template.scan(/[^;\s\}]+/)
value << @template[0] << whitespace
end
assert_match /(;|(?=\}))/
rule << Tree::AttrNode.new(name, value, {})
end
assert_match /\}/
end
def whitespace
space = @template.scan(/\s*/) || ''
# If we've hit a comment,
# go past it and look for more whitespace
if @template.scan(/\/\*/)
@template.scan_until(/\*\//)
return space + whitespace
end
return space
end
def assert_match(re)
if [email protected](re)
line = @template.string[[email protected]].count "\n"
# Display basic regexps as plain old strings
expected = re.source == Regexp.escape(re.source) ? "\"#{re.source}\"" : re.inspect
raise Exception.new("Invalid CSS on line #{line}: expected #{expected}")
end
whitespace
end
# Transform
#
# foo, bar, baz
# color: blue
#
# into
#
# foo
# color: blue
# bar
# color: blue
# baz
# color: blue
#
# Yes, this expands the amount of code,
# but it's necessary to get nesting to work properly.
def expand_commas(root)
root.children.map! do |child|
next child unless Tree::RuleNode === child && child.rule.include?(',')
child.rule.split(',').map do |rule|
node = Tree::RuleNode.new(rule, {})
node.children = child.children
node
end
end
root.children.flatten!
end
# Make rules use parent refs so that
#
# foo
# color: green
# foo.bar
# color: blue
#
# becomes
#
# foo
# color: green
# &.bar
# color: blue
#
# This has the side effect of nesting rules,
# so that
#
# foo
# color: green
# foo bar
# color: red
# foo baz
# color: blue
#
# becomes
#
# foo
# color: green
# & bar
# color: red
# & baz
# color: blue
#
def parent_ref_rules(root)
rules = OrderedHash.new
root.children.select { |c| Tree::RuleNode === c }.each do |child|
root.children.delete child
first, rest = child.rule.scan(/^(&?(?: .|[^ ])[^.#: \[]*)([.#: \[].*)?$/).first
rules[first] ||= Tree::RuleNode.new(first, {})
if rest
child.rule = "&" + rest
rules[first] << child
else
rules[first].children += child.children
end
end
rules.values.each { |v| parent_ref_rules(v) }
root.children += rules.values
end
# Remove useless parent refs so that
#
# foo
# & bar
# color: blue
#
# becomes
#
# foo
# bar
# color: blue
#
def remove_parent_refs(root)
root.children.each do |child|
if child.is_a?(Tree::RuleNode)
child.rule.gsub! /^& /, ''
remove_parent_refs child
end
end
end
# Flatten rules so that
#
# foo
# bar
# baz
# color: red
#
# becomes
#
# foo bar baz
# color: red
#
# and
#
# foo
# &.bar
# color: blue
#
# becomes
#
# foo.bar
# color: blue
#
def flatten_rules(root)
root.children.each { |child| flatten_rule(child) if child.is_a?(Tree::RuleNode) }
end
def flatten_rule(rule)
while rule.children.size == 1 && rule.children.first.is_a?(Tree::RuleNode)
child = rule.children.first
if child.rule[0] == ?&
rule.rule = child.rule.gsub /^&/, rule.rule
else
rule.rule = "#{rule.rule} #{child.rule}"
end
rule.children = child.children
end
flatten_rules(rule)
end
# Transform
#
# foo
# bar
# color: blue
# baz
# color: blue
#
# into
#
# foo
# bar, baz
# color: blue
#
def fold_commas(root)
prev_rule = nil
root.children.map! do |child|
next child unless Tree::RuleNode === child
if prev_rule && prev_rule.children == child.children
prev_rule.rule << ", #{child.rule}"
next nil
end
fold_commas(child)
prev_rule = child
child
end
root.children.compact!
end
end
end
| 21.488608 | 102 | 0.513902 |
d5e3b0c8d5f525c51a0268ad78db960b1b0a0b7e | 11,284 | # frozen_string_literal: true
require "business_time"
require "set"
##
# AssignJudgesToHearingDays is used to assign judges to hearing days for a schedule period while filtering out
# blackout days for the judges. Full details of the algorithm can be
# found in `HearingSchedule.md` in the Appeals-team repo (link: https://github.com/department-of-veterans-affairs/appeals-team
# /blob/master/Project%20Folders/Caseflow%20Projects/Hearings/Hearing%20Schedule/Tech%20Specs/HearingSchedule.md.).
# WIKI: https://github.com/department-of-veterans-affairs/caseflow/wiki/Caseflow-Hearings#build-hearing-schedule
# This class is analogous to `GenerateHearingDaysSchedule`, which is the algorithm that creates hearing days prior to this point.
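#
# Illustrative usage sketch (not from the original file; `schedule_period` is assumed
# to be a persisted SchedulePeriod whose hearing days have already been allocated):
#
#   assigner = HearingSchedule::AssignJudgesToHearingDays.new(schedule_period)
#   assigned_days = assigner.match_hearing_days_to_judges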
##
class HearingSchedule::AssignJudgesToHearingDays
attr_reader :judges, :video_co_hearing_days
TB_ADDITIONAL_NA_DAYS = 3
CO_ROOM_NUM = "2"
class HearingDaysNotAllocated < StandardError; end
class NoJudgesProvided < StandardError; end
# sets @judges(hash) and @video_co_hearing_days(array) to be later used by algo
def initialize(schedule_period)
@video_co_hearing_days = []
@judges = {}
@schedule_period = schedule_period
@algo_counter = 0
fetch_judge_non_availabilities
fetch_judge_details
fetch_hearing_days_for_schedule_period
end
# Starting point of judge assignment algorithm
def match_hearing_days_to_judges
@assigned_hearing_days = []
@unassigned_hearing_days = @video_co_hearing_days.shuffle # shuffle is a ruby method that just shuffles arr values
evenly_assign_judges_to_hearing_days
assign_remaining_hearing_days
verify_assignments
@assigned_hearing_days.sort_by(&:scheduled_for_as_date)
end
private
def evenly_assign_judges_to_hearing_days
    sorted_judges = judges_sorted_by_available_days # prioritize judges with most non-available days
judge_count = sorted_judges.length
total_hearing_day_count = @unassigned_hearing_days.length
    # maximum number of days a judge should be assigned; can be 1 or greater
max_days_per_judge = [(total_hearing_day_count.to_f / judge_count).floor, 1].max
sorted_judges.each do |css_id|
days_assigned = 0
# iterate in order of hearing days, and assign judge to them; remove this day from list if assigned
@unassigned_hearing_days.delete_if do |current_hearing_day|
break if days_assigned >= max_days_per_judge # if we've already assigned max days to judges
if day_can_be_assigned?(current_hearing_day, css_id)
@assigned_hearing_days.push(*assign_judge_to_hearing_day(current_hearing_day, css_id))
days_assigned += 1
next true
end
end
end
end
def assign_remaining_hearing_days
@unassigned_hearing_days.delete_if do |current_hearing_day|
assigned = false
judges_sorted_by_assigned_days.each do |css_id|
next unless day_can_be_assigned?(current_hearing_day, css_id)
@assigned_hearing_days.push(*assign_judge_to_hearing_day(current_hearing_day, css_id))
assigned = true
break
end
next true if assigned
end
end
def judges_sorted_by_assigned_days
# Count the number of assigned days per judge
# Example output => {"BVA1"=>2, "BVA2"=>1}
days_by_judge = @assigned_hearing_days.reduce({}) do |acc, hearing_day|
acc[hearing_day.judge.css_id] ||= 0
acc[hearing_day.judge.css_id] += 1
acc
end
# Shuffle the above hash => [["BVA1", 2], ["BVA2", 1]]
# Sort by count of assigned days in ascending order => [["BVA2", 1], ["BVA1", 2]]
# return the css_ids => ["BVA2", "BVA1"]
days_by_judge.to_a.shuffle.sort_by { |e| e[1] }.map { |e| e[0] }
end
def day_can_be_assigned?(current_hearing_day, css_id)
scheduled_for = current_hearing_day.scheduled_for_as_date
judge_id = @judges[css_id][:staff_info].sattyid
# hearing day is a blackout day for judge OR
# hearing is already assigned OR
# judge was assigned for this hearing day OR
# hearing_day is a CO docket and judge was already assigned to one CO hearing day
problems = @judges[css_id][:non_availabilities].include?(scheduled_for) ||
hearing_day_already_assigned?(current_hearing_day.id) ||
judge_already_assigned_on_date?(judge_id, scheduled_for) ||
(current_hearing_day.central_office? && judge_already_assigned_to_co?(judge_id))
!problems
end
def hearing_day_already_assigned?(id)
@assigned_hearing_days.any? { |day| day.id == id }
end
def judge_already_assigned_on_date?(judge_id, date)
@assigned_hearing_days.any? do |day|
day.judge_id.to_s == judge_id.to_s && day.scheduled_for_as_date == date
end
end
def judge_already_assigned_to_co?(judge_id)
@assigned_hearing_days.any? do |day|
day.request_type == HearingDay::REQUEST_TYPES[:central] && day.judge_id.to_s == judge_id.to_s
end
end
def verify_assignments
if @assigned_hearing_days.length != @video_co_hearing_days.length
# if after running the algo 20 times there are unassigned days, fail
      if @algo_counter >= 20 # arbitrary algo count
dates = @unassigned_hearing_days.map(&:scheduled_for_as_date)
fail HearingSchedule::Errors::CannotAssignJudges.new(
"Hearing days on these dates couldn't be assigned #{dates}.",
dates: dates
)
end
@algo_counter += 1
match_hearing_days_to_judges # try to re-run the algorithm 20 times
end
end
# It's expected that the judge validations have been run before
# running the algorithm. This assumes that the judge information
# already exists in VACOLS and in Caseflow database.
#
# Example output =>
# {
# "BVA1" => {
# :non_availabilities => #<Set: {Sat, 14 Apr 2018, Sun, 15 Apr 2018},
# :staff_info => #<VACOLS::Staff:0x00007fe7ef1d3c68 stafkey: "1" ...
# },
# ...
# }
def fetch_judge_details
fail NoJudgesProvided if @judges.keys.empty?
VACOLS::Staff.load_users_by_css_ids(@judges.keys).map do |judge|
@judges[judge.sdomainid][:staff_info] = judge
end
end
# Sort judges in descending order of non-available days
# Example output =>
# give @judges
# {
# "BVA2"=> {:non_availabilities=>#<Set: {}>,
# "BVA1"=> {:non_availabilities=>#<Set: {Sat, 14 Apr 2018, Sun, 15 Apr 2018}>...},
# }
# =>
# {
# "BVA1"=> {:non_availabilities=>#<Set: {Sat, 14 Apr 2018, Sun, 15 Apr 2018}>...},
# "BVA2"=> {:non_availabilities=>#<Set: {}>,
# }
def judges_sorted_by_available_days
@judges.sort_by { |_k, v| v[:non_availabilities].count }.to_h.keys.reverse
end
  # fetch all CO hearing days for this date; can there be multiple CO hearings for a date?
def co_hearing_days_by_date(date)
@video_co_hearing_days
.select(&:central_office?)
.select { |day| day.scheduled_for_as_date == date }
end
def assign_judge_to_hearing_day(day, css_id)
hearing_days = day.central_office? ? co_hearing_days_by_date(day.scheduled_for_as_date) : [day]
hearing_days.map do |hearing_day|
# Doing `.new` here instead of `.create` (or similar) to mimic
# old behavior, and ensure backwards compatibility.
hearing_day.judge = User.new(
id: @judges[css_id][:staff_info].sattyid,
full_name: get_judge_name(css_id),
css_id: css_id
)
hearing_day
end
end
def get_judge_name(css_id)
staff_info = @judges[css_id][:staff_info]
"#{staff_info.snamel}, #{staff_info.snamemi} #{staff_info.snamef}"
end
# Example output =>
# { "BVA1"=>{:non_availabilities => #<Set: {Sat, 14 Apr 2018, Sun, 15 Apr 2018}> }... }
def fetch_judge_non_availabilities
non_availabilities = @schedule_period.non_availabilities
non_availabilities.each do |non_availability|
next unless non_availability.instance_of? JudgeNonAvailability
css_id = non_availability.object_identifier
@judges[css_id] ||= {}
@judges[css_id][:non_availabilities] ||= Set.new
@judges[css_id][:non_availabilities] << non_availability.date if non_availability.date
end
end
def fetch_hearing_days_for_schedule_period
# fetch all hearing days within the schedule period (video + CO)
hearing_days = HearingDayRange.new(@schedule_period.start_date, @schedule_period.end_date).load_days
# filter out days that have judges assigned
@video_co_hearing_days = filter_co_hearings(hearing_days).freeze
# raises an exception if hearing days have not already been allocated
fail HearingDaysNotAllocated if @video_co_hearing_days.empty?
# get list of upcoming travel hearing days for the schedule period
travel_board_hearing_days = TravelBoardScheduleRepository.load_tb_days_for_range(@schedule_period.start_date,
@schedule_period.end_date)
# add non-availibility days for judges who have travel board hearing days
filter_travel_board_hearing_days(travel_board_hearing_days)
end
def valid_co_day?(day)
day.central_office? && day.room == CO_ROOM_NUM
end
# from the video + co hearing days, select the days that don't have judges assigned
def filter_co_hearings(video_co_hearing_days)
video_co_hearing_days.select do |hearing_day|
(valid_co_day?(hearing_day) || !hearing_day.regional_office.nil?) && !hearing_day_already_assigned(hearing_day)
end
end
# if hearing day was assigned to a judge then add this day to the non_availabilities for the judge
def hearing_day_already_assigned(hearing_day)
assigned = !hearing_day.judge_id.nil?
if assigned
@judges.each do |css_id, judge|
if judge[:staff_info].sattyid == hearing_day.judge_id.to_s
@judges[css_id][:non_availabilities] << hearing_day.scheduled_for_as_date
end
end
end
assigned
end
  # Adds non-availability days from 3 business days before each travel board hearing's start date
  # to 3 business days after its end date, for every judge assigned to it.
def filter_travel_board_hearing_days(tb_hearing_days)
tb_hearing_days_formatted = TravelBoardScheduleMapper.convert_from_vacols_format(tb_hearing_days)
tb_hearing_days_formatted.each do |tb_record|
# assign non-availability days to all the travel board judges
tb_judge_ids = [tb_record[:tbmem_1], tb_record[:tbmem_2], tb_record[:tbmem_3], tb_record[:tbmem_4]].compact
# find judges that have travel board hearings
judges = @judges.select { |_key, judge| tb_judge_ids.include?(judge[:staff_info].sattyid) }
# travel board hearing days have a start and end date
      # for each judge that has a travel board hearing, add 3 days of padding before the start date and after the end date
# and add those days to the non_availabilities list
judges.each_value do |judge_staff_info|
css_id = judge_staff_info[:staff_info].sdomainid
@judges[css_id][:non_availabilities] ||= Set.new
@judges[css_id][:non_availabilities] +=
(
TB_ADDITIONAL_NA_DAYS.business_days
.before(tb_record[:start_date])..TB_ADDITIONAL_NA_DAYS.business_days
.after(tb_record[:end_date])
).reject(&:on_weekend?)
end
end
end
end
| 38.250847 | 120 | 0.712247 |
e9a9871125a652cb64b566fa138fc37a6cc80f4d | 952 | # encoding: utf-8
require 'webp-ffi'
module CarrierWave
module WebP
module Converter
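      # Illustrative usage sketch (not from the original file; the uploader class name and
      # the :quality option are assumptions about how this converter might be mounted):
      #
      #   class AvatarUploader < CarrierWave::Uploader::Base
      #     include CarrierWave::WebP::Converter
      #
      #     version :webp do
      #       process convert_to_webp: [{ quality: 80 }]
      #     end
      #   end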
def convert_to_webp(options = {})
manipulate! do |img|
img = yield(img) if block_given?
webp_path = "#{path}.webp"
old_filename = filename
          # Convert the image to RGB:
`convert -colorspace RGB #{path} #{path}`
::WebP.encode(path, webp_path, options)
# XXX: Hacks ahead!
          # I can't find any other way to store an almost exact copy
          # of the file for any particular version
instance_variable_set('@filename', "#{filename}.webp")
storage.store! SanitizedFile.new({
tempfile: webp_path, filename: webp_path,
content_type: 'image/webp'
})
FileUtils.rm(webp_path) rescue nil
instance_variable_set('@filename', old_filename)
img
end
end
end
end
end
| 25.72973 | 69 | 0.577731 |
ac7632168d2ef9a5ff19bf1483279d6b13f33493 | 1,273 | # encoding: utf-8
# This module contains the behaviour for getting the various states through which a
# resource can transition.
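#
# A minimal sketch of the resulting predicates (illustrative, derived from the methods below):
#
#   resource.new_record?  # => true until the resource is persisted
#   resource.persisted?   # => !new_record? && !destroyed?
#   resource.destroyed?   # => true only after a successful destroy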
module Tripod::State
extend ActiveSupport::Concern
attr_writer :destroyed, :new_record
# Returns true if the +Resource+ has not been persisted to the database,
# false if it has. This is determined by the variable @new_record
# and NOT if the object has an id.
#
# @example Is the resource new?
# person.new_record?
#
# @return [ true, false ] True if new, false if not.
def new_record?
@new_record ||= false
end
# Checks if the resource has been saved to the database. Returns false
# if the resource has been destroyed.
#
# @example Is the resource persisted?
# person.persisted?
#
# @return [ true, false ] True if persisted, false if not.
def persisted?
!new_record? && !destroyed?
end
  # Returns true if the +Resource+ has been successfully destroyed, and false
# if it hasn't. This is determined by the variable @destroyed and NOT
# by checking the database.
#
# @example Is the resource destroyed?
# person.destroyed?
#
# @return [ true, false ] True if destroyed, false if not.
def destroyed?
@destroyed ||= false
end
alias :deleted? :destroyed?
end | 27.085106 | 83 | 0.693637 |