hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
7aa864fec1179793bd364f166c94ab5f47edba6b
| 1,915 |
require 'test_helper'

# Integration tests for the user edit/update flow, including the
# friendly-forwarding redirect after login.
class UsersEditTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:example)
  end

  test "unsuccessful edit" do
    log_in_as(@user)
    get edit_user_path(@user)
    assert_template 'users/edit'
    bad_attributes = { name:                  "",
                       email:                 "foo@invalid",
                       password:              "foo",
                       password_confirmation: "bar" }
    patch user_path(@user), params: { user: bad_attributes }
    assert_template 'users/edit'
    assert_select 'div.alert', text: "The form contains 4 errors."
  end

  test "successful edit" do
    log_in_as(@user)
    get edit_user_path(@user)
    assert_template 'users/edit'
    submit_valid_profile_update
  end

  test "successful edit with friendly forwarding" do
    get edit_user_path(@user)
    assert_equal session[:forwarding_url], edit_user_url(@user)
    log_in_as(@user)
    assert_redirected_to edit_user_url(@user)
    submit_valid_profile_update
    assert_nil session[:forwarding_url]
  end

  private

  # Submits a valid name/email change (blank password leaves the
  # password unchanged) and asserts the update succeeded and persisted.
  def submit_valid_profile_update(name = "Foo Bar", email = "[email protected]")
    patch user_path(@user), params: { user: { name:                  name,
                                              email:                 email,
                                              password:              "",
                                              password_confirmation: "" } }
    assert_not flash.empty?
    assert_redirected_to @user
    @user.reload
    assert_equal name, @user.name
    assert_equal email, @user.email
  end
end
| 31.393443 | 80 | 0.530026 |
62f9989f81383f76ec3575dfbac42181be0f903b
| 1,842 |
# Draws registered drawables grouped by parallax layer and, within each
# parallax layer, by z-layer.  The :viewport collaborator (injected via
# the construct_with DSL) supplies rotation and per-parallax offsets.
class Renderer
  construct_with :viewport

  def initialize
    clear_drawables
  end

  # Registers a drawable under its parallax/layer bucket, keeping the
  # cached orderings (@parallax_layers, @layer_orders) up to date.
  def register_drawable(drawable)
    layer = drawable.layer
    parallax = drawable.parallax
    unless @drawables[parallax]
      @drawables[parallax] = {}
      # Parallax layers are drawn from the largest value down.
      @parallax_layers = @drawables.keys.sort.reverse
    end
    unless @drawables[parallax][layer]
      @drawables[parallax][layer] = []
      @layer_orders[parallax] = @drawables[parallax].keys.sort
    end
    @drawables[parallax][layer] << drawable
  end

  # Removes a drawable from its bucket.  The (possibly now empty) bucket
  # and the cached orderings are left in place; #draw tolerates empty
  # buckets and re-registration stays cheap.
  def unregister_drawable(drawable)
    @drawables[drawable.parallax][drawable.layer].delete drawable
  end

  # Resets all registration state.
  def clear_drawables
    @drawables = {}
    @layer_orders = {}
    @parallax_layers = []
  end

  # Renders everything to +target+, rotated about the viewport center.
  # z increases monotonically per layer so later layers draw on top.
  def draw(target)
    center_x = viewport.width / 2
    center_y = viewport.height / 2
    target.rotate(-viewport.rotation, center_x, center_y) do
      z = 0
      @parallax_layers.each do |parallax_layer|
        drawables_on_parallax_layer = @drawables[parallax_layer]
        next unless drawables_on_parallax_layer
        # The viewport offset depends only on the parallax layer, so
        # compute it once per parallax layer instead of once per z-layer
        # (it was loop-invariant inside the layer loop).
        trans_x = viewport.x_offset parallax_layer
        trans_y = viewport.y_offset parallax_layer
        @layer_orders[parallax_layer].each do |layer|
          z += 1
          drawables_on_parallax_layer[layer].each do |drawable|
            drawable.draw target, trans_x, trans_y, z
          end
        end
      end
    end
  end

  # move all actors from one layer to another
  # note, this will remove all actors in that layer!
  def move_layer(from_parallax, from_layer, to_parallax, to_layer)
    drawable_list = @drawables[from_parallax][from_layer].dup
    drawable_list.each do |drawable|
      unregister_drawable drawable
      drawable.parallax = to_parallax
      drawable.layer = to_layer
      register_drawable drawable
    end
  end
end
| 25.232877 | 66 | 0.668838 |
e299a94d8ef21cb6ea37234a9a8fdbd9ae536eac
| 290 |
# frozen_string_literal: true

# Validates that an attribute holds a UTC-offset-style timezone string,
# e.g. "-05:00:00" or "10:30:00" (the seconds part must be ":00").
class TimezoneValidator < ApplicationEachValidator
  class Validation < Validation
    # Optional leading minus, two-digit hours and minutes, fixed ":00" seconds.
    TIMEZONE_RE = /\A-?\d\d:\d\d:00\z/.freeze

    def perform
      # NOTE(review): assumes error! halts or records per
      # ApplicationEachValidator's contract; if it merely records, a blank
      # value will also collect a :timezone error on the next line -- confirm.
      error! :blank if str_value.blank?
      error! :timezone unless TIMEZONE_RE.match? value
    end
  end
end
| 22.307692 | 54 | 0.710345 |
877818c9bc30c3001cafd0db8151c7a9eab712e6
| 1,228 |
require "spec_helper"

# Exercises SpreadsheetStreamReader::Sheet against a fixture .xls file:
# constructor defaults and the batched row-streaming API.
RSpec.describe SpreadsheetStreamReader::Sheet do
  before(:all) do
    @file_path = 'spec/fixtures/xls-files/test-file.xls'
    @batch_size = 10
    @document = SpreadsheetStreamReader::Reader.new(@file_path, @batch_size)
    @sheet = @document.get_sheet(0)
  end

  describe '.new' do
    it 'sets the provided batch size' do
      expect(@sheet.batch_size).to eq(@batch_size)
    end

    it 'checks the default value for offset, counter and data' do
      # offset starts at 1 -- presumably skipping a header row; confirm
      # against the Sheet implementation.
      expect(@sheet.offset).to eq(1)
      expect(@sheet.counter).to eq(0)
      expect(@sheet.data).to eq(Array.new)
    end

    it 'w_sheet should have sheet object' do
      # The wrapped worksheet comes from the `spreadsheet` gem.
      expect(@sheet.w_sheet).to be_instance_of(Spreadsheet::Excel::Worksheet)
    end
  end

  describe '.stream_rows_in_batch' do
    context 'providing block' do
      it 'iterates through all the rows' do
        expect {
          @sheet.stream_rows_in_batch do |row|
            row
          end
        }.to_not raise_exception
      end
    end

    context 'without providing block' do
      it 'returns the data' do
        expect(@sheet.stream_rows_in_batch).not_to be_nil
        expect(@sheet.stream_rows_in_batch).to be_instance_of(Array)
      end
    end
  end
end
| 27.288889 | 77 | 0.662866 |
accdaed79703c5a1f817780382a4d4cb90cafc4b
| 711 |
# Homebrew cask for AltTab, a Windows-style window switcher for macOS.
cask 'alt-tab' do
  version '3.24.1'
  sha256 'f61c392e29fcf12cbf4596d29fc0aab140195c5d880b772339f5e2470935a73b'

  url "https://github.com/lwouis/alt-tab-macos/releases/download/v#{version}/AltTab-#{version}.zip"
  appcast 'https://github.com/lwouis/alt-tab-macos/releases.atom'
  name 'alt-tab'
  homepage 'https://github.com/lwouis/alt-tab-macos'

  # The app updates itself (Sparkle-style), so brew does not manage upgrades.
  auto_updates true
  depends_on macos: '>= :sierra'

  app 'AltTab.app'

  # Quit the running app before uninstalling so its files are not in use.
  uninstall quit: 'com.lwouis.alt-tab-macos'

  # Residual user-level files removed by `brew uninstall --zap`.
  zap trash: [
               '~/Library/Caches/com.lwouis.alt-tab-macos',
               '~/Library/Cookies/com.lwouis.alt-tab-macos.binarycookies',
               '~/Library/Preferences/com.lwouis.alt-tab-macos.plist',
             ]
end
| 30.913043 | 99 | 0.677918 |
ed92c5d23b4ecf0cce085b84b271322a1b8e42e3
| 704 |
# Homebrew cask installing one parameterized build of the Monoid coding
# font (the XtraSmall / Dollar / 0 / 1 / l variant).
cask 'font-monoid-xtrasmall-dollar-0-1-l' do
  # Font files track the repository's release branch; there is no
  # versioned artifact to pin or checksum.
  version :latest
  sha256 :no_check

  # github.com/larsenwork/monoid was verified as official when first introduced to the cask
  url 'https://github.com/larsenwork/monoid/blob/release/Monoid-XtraSmall-Dollar-0-1-l.zip?raw=true'
  name 'Monoid-XtraSmall-Dollar-0-1-l'
  homepage 'http://larsenwork.com/monoid/'

  font 'Monoid-Bold-XtraSmall-Dollar-0-1-l.ttf'
  font 'Monoid-Italic-XtraSmall-Dollar-0-1-l.ttf'
  font 'Monoid-Regular-XtraSmall-Dollar-0-1-l.ttf'
  font 'Monoid-Retina-XtraSmall-Dollar-0-1-l.ttf'

  caveats <<~EOS
    #{token} is dual licensed with MIT and OFL licenses.
    https://github.com/larsenwork/monoid/tree/master#license
  EOS
end
| 35.2 | 100 | 0.741477 |
f7cc4657514b7e9c01b7c19e3a7eade7e9ba8606
| 168 |
#
# Cookbook Name:: tools_nexus
# Recipe:: splunk
#
# Copyright 2014, Intuit Inc.
#
# All rights reserved - Do Not Redistribute
#

# Delegates all Splunk setup to the shared platform cookbook's default
# recipe; this recipe intentionally adds nothing of its own.
include_recipe 'platform_chef-splunk'
| 16.8 | 43 | 0.738095 |
ff9b2d328c94e9a3dd25943b83deb6f773cf0cc0
| 2,014 |
#--
# Ruby Whois
#
# An intelligent pure Ruby WHOIS client and parser.
#
# Copyright (c) 2009-2015 Simone Carletti <[email protected]>
#++
require 'whois/record/parser/base_icann_compliant'

module Whois
  class Record
    class Parser

      # Parser for the ZA Central Registry servers.
      #
      class ZaCentralRegistry < BaseIcannCompliant

        property_supported :domain_id do
          node('Domain ID')
        end

        property_supported :expires_on do
          node('Registry Expiry Date') do |value|
            parse_time(value)
          end
        end

        property_supported :registrar do
          # Whole registrar record is omitted when the response lacks the field.
          return unless node("Sponsoring Registrar")
          # The response carries no separate organization field, so the
          # registrar name doubles as the organization.
          Record::Registrar.new(
            id:           node('Sponsoring Registrar IANA ID'),
            name:         node('Sponsoring Registrar'),
            organization: node('Sponsoring Registrar'),
          )
        end

        property_supported :available? do
          # A response with no creation date means the domain is unregistered.
          !node("Creation Date")
        end

        private

        # Builds a Record::Contact of the given +type+ from the response
        # fields prefixed with +element+ (e.g. "Registrant").
        # Evaluates to nil when the "<element> Name" field is absent.
        def build_contact(element, type)
          node("#{element} Name") do
            Record::Contact.new(
              type:         type,
              id:           node("#{element} ID").presence,
              name:         value_for_property(element, 'Name'),
              organization: value_for_property(element, 'Organization'),
              address:      value_for_property(element, 'Street'),
              city:         value_for_property(element, 'City'),
              zip:          value_for_property(element, 'Postal Code'),
              state:        value_for_property(element, 'State/Province'),
              country_code: value_for_property(element, 'Country'),
              phone:        value_for_phone_property(element, 'Phone'),
              fax:          value_for_phone_property(element, 'Fax'),
              email:        value_for_property(element, 'Email')
            )
          end
        end

      end

    end
  end
end
| 28.366197 | 76 | 0.549652 |
e906c4b4d1683e45d5ad8e048b5893accbe9f799
| 271 |
# Adds a source_id reference column to users and backfills it from the
# existing sources table (a source row points at its user via user_id).
class AddSourceIdToUser < ActiveRecord::Migration
  def change
    add_reference :users, :source, index: true, foreign_key: true

    # Backfill the new column.  update_columns writes straight to the DB,
    # skipping validations and callbacks, which is what a data migration wants.
    User.find_each do |user|
      source = Source.where(user_id: user.id).first
      user.update_columns(source_id: source.id) if source
    end
  end
end
| 27.1 | 65 | 0.697417 |
bbca9cbb2ab81214e7bd0080ca544415177ffa40
| 430 |
# This migration comes from cmsify (originally 20160221044856)
class CreateCmsifyTexts < ActiveRecord::Migration
  # Creates cmsify_texts: a chunk of CMS-managed text attached
  # polymorphically to an owner (owner_id/owner_type) and looked up by slug.
  def change
    create_table :cmsify_texts do |t|
      t.text    :content
      t.integer :owner_id
      t.string  :owner_type
      t.string  :slug

      t.timestamps null: false
    end

    # Index every lookup column: the slug plus the polymorphic owner pair.
    [:slug, :owner_id, :owner_type].each do |column|
      add_index :cmsify_texts, column
    end
  end
end
| 23.888889 | 62 | 0.7 |
d59fbd75ce8037e7ee0d6737d157cd252bc92312
| 2,113 |
require_relative 'boot'

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module ZomekiCMS
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    config.time_zone = 'Tokyo'
    # Timestamps are stored as local (Tokyo) time with AR's timezone-aware
    # attribute conversion switched off, so records round-trip as naive times.
    config.active_record.default_timezone = :local
    config.active_record.time_zone_aware_attributes = false

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Each CMS module ships its own locale files under config/modules/.
    config.i18n.load_path += Dir[Rails.root.join('config', 'modules', '**', 'locales', '*.yml').to_s]
    config.i18n.default_locale = :ja

    # Custom directories with classes and modules you want to be autoloadable.
    #config.autoload_paths += %W(#{config.root}/lib)
    config.eager_load_paths += %W(#{config.root}/lib)

    # Background jobs run through Delayed Job.
    config.active_job.queue_adapter = :delayed_job

    # Generator defaults: RSpec with fixtures, controller specs only,
    # and factory_girl factories under spec/factories.
    config.generators do |g|
      g.test_framework :rspec,
        fixtures: true,
        view_specs: false,
        helper_specs: false,
        routing_specs: false,
        controller_specs: true,
        request_specs: false
      g.fixture_replacement :factory_girl, dir: 'spec/factories'
    end

    # Widen the sanitizer whitelists so CMS-authored content may contain
    # tables and iframes plus common presentational attributes.
    config.action_view.sanitized_allowed_tags = ActionView::Base.sanitized_allowed_tags.to_a | %w(table caption tr th td iframe)
    config.action_view.sanitized_allowed_attributes = ActionView::Base.sanitized_allowed_attributes.to_a | %w(style class href src alt title colspan rowspan target id)
  end

  # URL prefix under which the admin/system interface is mounted.
  ADMIN_URL_PREFIX = '_system'
end
| 41.431373 | 167 | 0.719356 |
390ee2f7696717501e5a224b522de3fd0fa77f4a
| 1,340 |
module Rack
  class Jekyll
    # Maps request URL paths onto files below a site root directory.
    class FileHandler
      attr_reader :root, :files

      # Initializes a FileHandler for a given root directory
      # (for testing only: use a given array of filenames, +files+).
      def initialize(root, files = nil)
        @root = ::File.expand_path(root)
        @files = files || get_file_list
      end

      # True when no files are known (e.g. the site has not been built).
      def empty?
        @files.empty?
      end

      # Re-reads the file list from disk.
      def update
        @files = get_file_list
      end

      # Returns the full file system path of the file corresponding to
      # the given URL path, or +nil+ if no corresponding file exists.
      # Directory-style requests ("/foo/", or "/foo" when no file "foo"
      # exists) resolve to that directory's index.html.
      def get_filename(path)
        fullpath = ::File.join(@root, path)
        if fullpath.end_with?("/")
          normalized = fullpath + "index.html"
        # BUGFIX: this condition had been mangled into
        # "[email protected]?" (a syntax error) by a data-export
        # scrubber; restored to the intended membership test.
        elsif !@files.include?(fullpath)
          # No exact match: fall back to treating the path as a directory.
          normalized = fullpath + "/index.html"
        else
          normalized = fullpath
        end
        @files.include?(normalized) ? normalized : nil
      end

      private

      # Retrieves and returns a list of all files in the root directory
      # (excluding directory names).
      def get_file_list
        files = ::Dir[@root + "/**/*"]
        files.delete_if { |file| ::FileTest.directory?(file) }
        files
      end
    end
  end
end
| 23.508772 | 71 | 0.576119 |
1c37c64478690b6eeff97dc7dd74336a5d0605ed
| 6,008 |
require_relative '../../spec_helper'
require_relative 'fixtures/common'
# mspec-style specs for Proc.new when given a literal block, including
# subclassing edge cases and proc-vs-lambda return semantics.
describe "Proc.new with an associated block" do
  it "returns a proc that represents the block" do
    Proc.new { }.call.should == nil
    Proc.new { "hello" }.call.should == "hello"
  end

  describe "called on a subclass of Proc" do
    before :each do
      @subclass = Class.new(Proc) do
        attr_reader :ok
        def initialize
          @ok = true
          super
        end
      end
    end

    it "returns an instance of the subclass" do
      proc = @subclass.new {"hello"}

      proc.class.should == @subclass
      proc.call.should == "hello"
      proc.ok.should == true
    end

    # JRUBY-5026
    describe "using a reified block parameter" do
      it "returns an instance of the subclass" do
        # The block is captured as an explicit &block parameter before
        # being handed to the subclass constructor.
        cls = Class.new do
          def self.subclass=(subclass)
            @subclass = subclass
          end

          def self.foo(&block)
            @subclass.new(&block)
          end
        end
        cls.subclass = @subclass
        proc = cls.foo {"hello"}

        proc.class.should == @subclass
        proc.call.should == "hello"
        proc.ok.should == true
      end
    end
  end

  # JRUBY-5261; Proc sets up the block during .new, not in #initialize
  describe "called on a subclass of Proc that does not 'super' in 'initialize'" do
    before :each do
      @subclass = Class.new(Proc) do
        attr_reader :ok
        def initialize
          @ok = true
        end
      end
    end

    it "still constructs a functional proc" do
      proc = @subclass.new {'ok'}
      proc.call.should == 'ok'
      proc.ok.should == true
    end
  end

  it "raises a LocalJumpError when context of the block no longer exists" do
    def some_method
      Proc.new { return }
    end
    res = some_method()

    lambda { res.call }.should raise_error(LocalJumpError)
  end

  it "returns from within enclosing method when 'return' is used in the block" do
    # we essentially verify that the created instance behaves like proc,
    # not like lambda.
    def some_method
      Proc.new { return :proc_return_value }.call
      :method_return_value
    end
    some_method.should == :proc_return_value
  end

  it "returns a subclass of Proc" do
    obj = ProcSpecs::MyProc.new { }
    obj.should be_kind_of(ProcSpecs::MyProc)
  end

  it "calls initialize on the Proc object" do
    obj = ProcSpecs::MyProc2.new(:a, 2) { }
    obj.first.should == :a
    obj.second.should == 2
  end

  # Pre-2.7 behavior: Proc.new inside a method captures that method's block.
  ruby_version_is ""..."2.7" do
    it "returns a new Proc instance from the block passed to the containing method" do
      prc = ProcSpecs.new_proc_in_method { "hello" }
      prc.should be_an_instance_of(Proc)
      prc.call.should == "hello"
    end

    it "returns a new Proc instance from the block passed to the containing method" do
      prc = ProcSpecs.new_proc_subclass_in_method { "hello" }
      prc.should be_an_instance_of(ProcSpecs::ProcSubclass)
      prc.call.should == "hello"
    end
  end
end
# Proc.new given an explicit &-argument returns the very same Proc object
# (identity-preserving) rather than wrapping it in a new one.
describe "Proc.new with a block argument" do
  it "returns the passed proc created from a block" do
    passed_prc = Proc.new { "hello".size }
    prc = Proc.new(&passed_prc)

    prc.should equal(passed_prc)
    prc.call.should == 5
  end

  it "returns the passed proc created from a method" do
    method = "hello".method(:size)
    passed_prc = Proc.new(&method)
    prc = Proc.new(&passed_prc)

    prc.should equal(passed_prc)
    prc.call.should == 5
  end

  it "returns the passed proc created from a symbol" do
    passed_prc = Proc.new(&:size)
    prc = Proc.new(&passed_prc)

    prc.should equal(passed_prc)
    prc.call("hello").should == 5
  end
end
# Same identity-preserving behavior, but invoked on a Proc subclass: the
# passed proc is returned as-is, not converted to the subclass.
describe "Proc.new with a block argument called indirectly from a subclass" do
  it "returns the passed proc created from a block" do
    passed_prc = ProcSpecs::MyProc.new { "hello".size }
    passed_prc.class.should == ProcSpecs::MyProc
    prc = ProcSpecs::MyProc.new(&passed_prc)

    prc.should equal(passed_prc)
    prc.call.should == 5
  end

  it "returns the passed proc created from a method" do
    method = "hello".method(:size)
    passed_prc = ProcSpecs::MyProc.new(&method)
    passed_prc.class.should == ProcSpecs::MyProc
    prc = ProcSpecs::MyProc.new(&passed_prc)

    prc.should equal(passed_prc)
    prc.call.should == 5
  end

  it "returns the passed proc created from a symbol" do
    passed_prc = ProcSpecs::MyProc.new(&:size)
    passed_prc.class.should == ProcSpecs::MyProc
    prc = ProcSpecs::MyProc.new(&passed_prc)

    prc.should equal(passed_prc)
    prc.call("hello").should == 5
  end
end
# Proc.new with no block: always an ArgumentError with no enclosing block;
# pre-2.7 it silently captured the caller's implicit block, 2.7+ warns
# about that capture ahead of its removal.
describe "Proc.new without a block" do
  it "raises an ArgumentError" do
    lambda { Proc.new }.should raise_error(ArgumentError)
  end

  it "raises an ArgumentError if invoked from within a method with no block" do
    lambda { ProcSpecs.new_proc_in_method }.should raise_error(ArgumentError)
  end

  it "raises an ArgumentError if invoked on a subclass from within a method with no block" do
    lambda { ProcSpecs.new_proc_subclass_in_method }.should raise_error(ArgumentError)
  end

  ruby_version_is ""..."2.7" do
    it "uses the implicit block from an enclosing method" do
      def some_method
        Proc.new
      end

      prc = some_method { "hello" }

      prc.call.should == "hello"
    end
  end

  ruby_version_is "2.7" do
    it "can be created if invoked from within a method with a block" do
      lambda { ProcSpecs.new_proc_in_method { "hello" } }.should complain(/tried to create Proc object without a block/)
    end

    it "can be created if invoked on a subclass from within a method with a block" do
      lambda { ProcSpecs.new_proc_subclass_in_method { "hello" } }.should complain(/tried to create Proc object without a block/)
    end

    it "can be create when called with no block" do
      def some_method
        Proc.new
      end

      -> {
        some_method { "hello" }
      }.should complain(/tried to create Proc object without a block/)
    end
  end
end
| 27.814815 | 129 | 0.65496 |
39bf81cedbb1fd74b4951a7c1cdb2da3ab0133c3
| 779 |
Sequel.migration do
  change do
    # One row per data-transfer job: lifecycle timestamps, source/target
    # URLs, progress counters, and soft-delete bookkeeping.
    # Relies on the uuid-ossp extension for uuid_generate_v4().
    create_table(:transfers) do
      uuid :uuid, default: Sequel.function(:uuid_generate_v4), primary_key: true
      timestamptz :created_at, default: Sequel.function(:now), null: false
      timestamptz :updated_at
      timestamptz :canceled_at
      timestamptz :finished_at
      boolean :succeeded
      uuid :user_id, null: false
      text :group, null: false
      text :logplex_token
      text :from_url, null: false
      text :to_url, null: false
      json :options, null: false, default: '{}'
      # source_bytes is nullable: the total size may be unknown up front.
      bigint :source_bytes
      bigint :processed_bytes, null: false, default: 0
      timestamptz :deleted_at
      timestamptz :purged_at
    end
  end
end
| 33.869565 | 88 | 0.604621 |
bb6c4103e6852185b33ce46b63ea545949584753
| 2,025 |
# Homebrew formula for sproxy, Siege's URL-collecting HTTP proxy.
class Sproxy < Formula
  desc "HTTP proxy server collecting URLs in a 'siege-friendly' manner"
  homepage "https://www.joedog.org/sproxy-home/"
  url "http://download.joedog.org/sproxy/sproxy-1.02.tar.gz"
  sha256 "29b84ba66112382c948dc8c498a441e5e6d07d2cd5ed3077e388da3525526b72"

  bottle do
    cellar :any_skip_relocation
    rebuild 2
    # sha256 "2d689087925622e4f7e2c2572c2339c62a6c2b891bce7093bcd664f1a15c28d9" => :mojave
    sha256 "326b01fa9a1370c54929ae4c11d1b67b2238875eca8188365486b9c2a374264f" => :high_sierra
    sha256 "8d57317644b76b465adc5caf984f1e3cf57f9486f642705eee66128adbcf3589" => :sierra
    sha256 "4ed786b0b05ca3c88d5904e3119d84725a9f9bedf5d952c055f22a81661a825c" => :el_capitan
    sha256 "19da9a5b680a860e721ec60763dd48e9a5213505ee643703abcdc66707e8ce51" => :yosemite
    sha256 "96b9cdebf5a11907998ba33e2b568fd5a77d46261a6faaa9c33a5d8eeca9a27f" => :mavericks
  end

  # Only needed due to the change to "Makefile.am"
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build

  def install
    # Prevents "ERROR: Can't create '/usr/local/share/man/man3'"; also fixes an
    # audit violation triggered if the man page is installed in #{prefix}/man.
    # After making the change below and running autoreconf, the default ends up
    # being the same as #{man}, so there's no need for us to pass --mandir to
    # configure, though, as a result of this change, that flag would be honored.
    # Reported 10th May 2016 to https://www.joedog.org/support/
    inreplace "doc/Makefile.am", "$(prefix)/man", "$(mandir)"
    inreplace "lib/Makefile.am", "Makefile.PL", "Makefile.PL PREFIX=$(prefix)"

    # Only needed due to the change to "Makefile.am"
    system "autoreconf", "-fiv"

    system "./configure", "--disable-debug", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # -V prints a banner containing the version string.
    assert_match "SPROXY v#{version}-", shell_output("#{bin}/sproxy -V")
  end
end
| 44.021739 | 93 | 0.728395 |
5dbd346aa05f04c4ab381bc6fa902f1fd9937b78
| 10,574 |
module CloudRCS

  # Hunk is one type of primitive patch. It represents a deletion or
  # an insertion, or a combination of both, in a text file.
  #
  # A Hunk is constructed using the path of a file, the first line
  # modifications to the file, and a set of diffs, each of which
  # represents a line added to or deleted from the file.
  class Hunk < PrimitivePatch
    serialize :contents, Array

    validates_presence_of :path, :contents, :position
    validates_numericality_of :position, :only_integer => true, :greater_than_or_equal_to => 1

    # Custom validation: every diff in contents must be an add ('+')
    # or a delete ('-'); any other Diff::LCS action is rejected.
    def validate
      # Make sure diffs only contain the actions '+' and '-'
      if contents.respond_to? :each
        contents.each do |d|
          unless ['+','-'].include? d.action
            errors.add(:contents, "contains an unknown action.")
          end
        end
      end
    end

    # def after_initialize
    #   verify_path_prefix
    #   starting_line ||= contents.first.position
    # end

    # Serializes the hunk in darcs patch format: a "hunk <path> <line>"
    # header followed by one '+'/'-' prefixed line per diff.
    def to_s
      "hunk #{self.class.escape_path(path)} #{position}\n" + contents.collect do |d|
        "#{d.action}#{d.element}"
      end.join("\n")
    end

    # The inverse of a Hunk simply swaps adds and deletes.
    def inverse
      new_removals = added_lines.collect do |d|
        Diff::LCS::Change.new('-', d.position, d.element)
      end
      new_adds = removed_lines.collect do |d|
        Diff::LCS::Change.new('+', d.position, d.element)
      end
      Hunk.new(:path => path, :position => position, :contents => (new_removals + new_adds))
    end

    # Given another patch, generates two new patches that have the
    # same effect as the original two, but with the order of the
    # analogous patches reversed. The message receiver is the first
    # patch, and the argument is the second; so after commuting the
    # analog of this patch will be second.
    def commute(patch)
      if patch.is_a? Hunk and patch.path == self.path
        # self is applied first and precedes patch in the file
        if self.position + self.lengthnew < patch.position
          patch1 = Hunk.new(:path => patch.path,
                            :position => (patch.position - self.lengthnew + self.lengthold),
                            :contents => patch.contents)
          patch2 = Hunk.new(:path => self.path, :position => self.position, :contents => self.contents)

        # self is applied first, but is preceded by patch in the file
        elsif patch.position + patch.lengthold < self.position
          patch1 = Hunk.new(:path => patch.path, :position => patch.position, :contents => patch.contents)
          patch2 = Hunk.new(:path => self.path,
                            :position => (self.position + patch.lengthnew - patch.lengthold),
                            :contents => self.contents)

        # patch precedes self in file, but bumps up against it
        elsif patch.position + patch.lengthnew == self.position and
            self.lengthold != 0 and patch.lengthold != 0 and
            self.lengthnew != 0 and patch.lengthnew != 0
          patch1 = Hunk.new(:path => patch.path, :position => patch.position, :contents => patch.contents)
          patch2 = Hunk.new(:path => self.path,
                            :position => (self.position - patch.lengthnew + patch.lengthold),
                            :contents => self.contents)

        # self precedes patch in file, but bumps up against it
        elsif self.position + self.lengthold == patch.position and
            self.lengthold != 0 and patch.lengthold != 0 and
            self.lengthnew != 0 and patch.lengthnew != 0
          patch1 = Hunk.new(:path => patch.path, :position => patch.position, :contents => patch.contents)
          patch2 = Hunk.new(:path => self.path,
                            :position => (self.position + patch.lengthnew - patch.lengthold),
                            :contents => self.contents)

        # Patches overlap. This is a conflict scenario
        else
          raise CommuteException.new(true, "Conflict: hunk patches overlap.")
        end
      elsif patch.is_a? Rmfile and patch.path == self.path
        raise CommuteException.new(true, "Conflict: cannot modify a file after it is removed.")
      elsif patch.is_a? Move and self.path == patch.original_path
        patch1 = patch.clone
        patch2 = self.clone
        patch2.path = patch.new_path

      # Commutation is trivial
      else
        patch1, patch2 = patch, self
      end
      return patch1, patch2
    end

    # Applies this hunk to +file+ (responds to #path and #contents):
    # removes the '-' lines at #position, then inserts the '+' lines.
    # Raises ApplyException when the file's text does not match.
    def apply_to(file)
      return file unless file.path == path

      # Passing a negative number as the second argument of split
      # preserves trailing newline characters at the end of the file
      # when the lines are re-joined.
      lines = file.contents.split("\n",-1)

      # First, remove lines
      removed_lines.each do |d|
        # Strip the darcs '$' end-of-line marker (see .generate) before
        # comparing against the actual file contents.
        if lines[position-1] == d.element.sub(/(\s+)\$\s*$/) { $1 }
          lines.delete_at(position-1)
        else
          raise ApplyException.new(true), "Line in hunk marked for removal does not match contents of existing line in file\nfile contents: #{position} -'#{lines[position-1]}'\nline to be removed: #{d.position} -'#{d.element}'"
        end
      end

      # Next, add lines
      added_lines.each_with_index do |d,i|
        lines.insert(position - 1 + i, d.element.sub(/(\s+)\$\s*$/) { $1 })
      end

      file.contents = lines.join("\n")
      return file
    end

    # Returns the number of lines added by the hunk patch
    def lengthnew
      added_lines.length
    end

    # Returns the number of lines removed by the hunk patch
    def lengthold
      removed_lines.length
    end

    def removed_lines
      contents.find_all { |d| d.action == '-' } # .sort { |a,b| a.position <=> b.position }
    end

    def added_lines
      contents.find_all { |d| d.action == '+' } # .sort { |a,b| a.position <=> b.position }
    end

    class << self

      # Given a list of files, determine whether this patch type
      # describes the changes between the files and generate patches
      # accordingly.
      #
      # In this case we use the Diff::LCS algorithm to generate Change
      # objects representing each changed line between two files. The
      # changesets are automatically nested into a two dimensional
      # Array, where each row represents a changed portion of the file
      # that is separated from the other rows by an unchanged portion
      # of the file. So we split that dimension of the Array into
      # separate Hunk patches and return the resulting list.
      def generate(orig_file, changed_file)
        return if orig_file.nil? and changed_file.nil?
        return if (orig_file and orig_file.contents.is_binary_data?) or
          (changed_file and changed_file.contents.is_binary_data?)

        # If the original or the changed file is nil, the hunk should
        # contain the entirety of the other file. This is so that a
        # record is kept of a file that is deleted; and so that the
        # contents of a file is added to it after it is created.
        orig_lines = orig_file ? orig_file.contents.split("\n",-1) : []
        changed_lines = changed_file ? changed_file.contents.split("\n",-1) : []

        # Insert end-of-line tokens to preserve white space at the end
        # of lines. This is part of the darcs patch format.
        #
        # BUGFIX: this previously used `l += "$"` inside #each, which only
        # rebinds the block-local variable and never modifies the array,
        # so the '$' markers were silently dropped. map! stores the marked
        # lines back (matching what #parse and #apply_to expect).
        orig_lines.map! { |l| l =~ /\s+$/ ? l + "$" : l }
        changed_lines.map! { |l| l =~ /\s+$/ ? l + "$" : l }

        file_path = orig_file ? orig_file.path : changed_file.path

        diffs = Diff::LCS.diff(orig_lines, changed_lines)
        hunks = []
        offset = 0
        diffs.each do |d|
          # Diff::LCS assumes that removed lines from all hunks will be
          # removed from file before new lines are added. Unfortunately,
          # in this implementation we remove and add lines from each
          # hunk in order. So the position values for removed lines will
          # be off in all but the first hunk. So we need to adjust those
          # position values before we create the hunk patch.
          unless hunks.empty?
            offset += hunks.last.lengthnew - hunks.last.lengthold
          end
          d.collect! do |l|
            if l.action == '-'
              Diff::LCS::Change.new(l.action, l.position + offset, l.element)
            else
              l
            end
          end

          # The darcs patch format counts lines starting from 1; whereas
          # Diff::LCS counts lines starting from 0. So we add 1 to the
          # position of the first changed line to get the
          # darcs-compatible starting line number for the Hunk patch.
          position = d.first.position + 1

          hunks << Hunk.new(:path => file_path, :position => position, :contents => d)
        end
        return hunks
      end

      # Parse hunk info from a file and convert into a Hunk object.
      def parse(contents)
        unless contents =~ /^hunk\s+(\S+)\s+(\d+)\s+(.*)$/m
          raise ParseException.new(true), "Failed to parse hunk patch: \"#{contents}\""
        end
        file_path = unescape_path($1)
        starting_position = $2.to_i
        contents = $3
        last_action = nil
        line_offset = 0
        diffs = []
        add_line_offset = 0
        del_line_offset = 0
        contents.split("\n").each do |line|
          # These regular expressions ensure that each line ends with a
          # non-whitespace character, or is empty. A dollar sign is
          # added during patch generation to the end of lines that end
          # in whitespace; so parsing this way will not cut off
          # whitespace that is supposed to be added to any patched file.
          #
          # If the line is empty, $1 will be nil. So it is important to
          # pass $1.to_s instead of just $1 to change nil to "".
          if line =~ /^\+(.*[\S\$])?\s*$/
            diffs << Diff::LCS::Change.new('+', starting_position + add_line_offset, $1.to_s)
            add_line_offset += 1
          elsif line =~ /^-(.*[\S\$])?\s*$/
            diffs << Diff::LCS::Change.new('-', starting_position + del_line_offset, $1.to_s)
            del_line_offset += 1
          else
            raise "Failed to parse a line in hunk: \"#{line}\""
          end
        end
        return Hunk.new(:path => file_path, :position => starting_position, :contents => diffs)
      end

    end

  end

  PATCH_TYPES << Hunk
end
| 40.05303 | 227 | 0.5993 |
116686aa2620a169532f3bd72f14f94f11e6b850
| 214 |
#
# This file is auto-generated, do not edit
#
require 'spec_helper'
require_relative "list_entities_with_properties"

# Smoke test: ListItems must satisfy the shared
# "list entities with properties" example group.
describe RecombeeApiClient::ListItems do
  it_behaves_like "list entities with properties"
end
| 21.4 | 49 | 0.817757 |
1c28e5874f9efe42924f6075fab69c20d24b3d6e
| 734 |
# CocoaPods spec for SSPullToRefresh.
Pod::Spec.new do |s|
  s.name          = 'SSPullToRefresh'
  s.version       = '1.0.0'
  s.summary       = 'Simple and highly customizable pull to refresh view.'
  s.homepage      = 'https://github.com/soffes/sspulltorefresh'
  s.author        = { 'Sam Soffes' => '[email protected]' }
  s.source        = { :git => 'https://github.com/soffes/sspulltorefresh.git', :tag => '1.0.0' }
  s.license       = {
    :type => 'MIT',
    :file => 'LICENSE'
  }
  s.source_files  = '*.{h,m}'
  # Typo fixes: "andhighly" -> "and highly", "everything you want" ->
  # "every time you want" (matches the summary and intended meaning).
  s.description   = 'SSPullToRefresh is a simple and highly customizable pull to refresh view. It lets you implement a content view separate so you don\'t have to hack up the pulling logic every time you want to customize the appearance.'
  s.platform      = :ios
  s.requires_arc  = true
end
| 43.176471 | 235 | 0.659401 |
ac52ec359974ce6cfa9de7ae68a3a6a17bd21c69
| 1,260 |
require "spec_helper"

module Omnibus
  # Verifies Compressor.for_current_system's platform-dependent priority:
  # DMG wins on macOS when activated, TGZ otherwise, Null as the fallback.
  describe Compressor do
    describe ".for_current_system" do
      context "on macOS" do
        # stub_ohai fakes the detected platform for the lookup.
        before { stub_ohai(platform: "mac_os_x") }

        context "when :dmg is activated" do
          it "prefers dmg" do
            expect(described_class.for_current_system(%i{tgz dmg})).to eq(Compressor::DMG)
          end
        end

        context "when :dmg is not activated" do
          it "prefers tgz" do
            expect(described_class.for_current_system(%i{tgz foo})).to eq(Compressor::TGZ)
          end
        end

        context "when nothing is given" do
          it "returns null" do
            expect(described_class.for_current_system([])).to eq(Compressor::Null)
          end
        end
      end

      context "on Ubuntu" do
        before { stub_ohai(platform: "ubuntu", version: "16.04") }

        context "when :tgz activated" do
          it "prefers tgz" do
            expect(described_class.for_current_system(%i{tgz foo})).to eq(Compressor::TGZ)
          end
        end

        context "when nothing is given" do
          it "returns null" do
            expect(described_class.for_current_system([])).to eq(Compressor::Null)
          end
        end
      end
    end
  end
end
| 27.391304 | 90 | 0.586508 |
ab38b000394e782accde1d101e5c0c72438db488
| 1,198 |
# Homebrew formula for bat, built from source with cargo.
class Bat < Formula
  desc "Clone of cat(1) with syntax highlighting and Git integration"
  homepage "https://github.com/sharkdp/bat"
  url "https://github.com/sharkdp/bat/archive/v0.15.4.tar.gz"
  sha256 "03b7c8ad6221ca87cecd71f9e3e2167f04f750401e2d3dcc574183aabeb76a8b"

  bottle do
    cellar :any_skip_relocation
    sha256 "ae2c26d25a0dac35bd839a091f89201b5d9eee32ef613325426c7e8b8812d1a9" => :catalina
    sha256 "40dea8577c06a08d3e3bd20a949245ff02ea85153d25f72a65cee03c1b1e1cf9" => :mojave
    sha256 "59bed16f8a4741a9d92f62cb7c9965d1abe40dc5dd2323bc4f37e71330b1abf2" => :high_sierra
  end

  depends_on "rust" => :build

  uses_from_macos "zlib"

  def install
    # Tell bat's build script where to emit shell completions.
    ENV["SHELL_COMPLETIONS_DIR"] = buildpath
    # NOTE(review): -fno-stack-check works around builds with newer Xcode
    # clang (build >= 1010) -- confirm rationale against upstream history.
    ENV.append_to_cflags "-fno-stack-check" if DevelopmentTools.clang_build_version >= 1010
    system "cargo", "install", *std_cargo_args

    # Man page and completions are generated into a versioned build dir.
    assets_dir = Dir["target/release/build/bat-*/out/assets"].first
    man1.install "#{assets_dir}/manual/bat.1"
    fish_completion.install "#{assets_dir}/completions/bat.fish"
  end

  test do
    pdf = test_fixtures("test.pdf")
    output = shell_output("#{bin}/bat #{pdf} --color=never")
    assert_match "Homebrew test", output
  end
end
| 35.235294 | 93 | 0.75626 |
1a18a0abe770dd5f2b04378f08c2fb764b2c0c3f
| 3,501 |
# frozen_string_literal: true
require 'spec_helper'
require 'liam/test_producer'
# Specs for Liam::MessageProcessor: dispatching an SQS message (wrapping an
# SNS notification) to the producer class named by its event_name attribute.
RSpec.describe Liam::MessageProcessor do
  let(:config_path) { File.expand_path('spec/support/liam_config.yml') }
  let(:event) { 'liam_TestProducer' }
  # Message-attribute fragment carrying the event name; overridden below to
  # simulate a malformed attribute that lacks the :Value key.
  let(:value_message_attribute) { { Value: event } }
  # Canned SQS message whose body mimics a full SNS notification envelope.
  let(:message) do
    Aws::SQS::Types::Message.new(
      message_id: '77c972cf-fbaa-4d98-b0d9-f66196da3986',
      receipt_handle: '77c972cf-fbaa-4d98-b0d9-f66196da3986#f7307ca5-0580-4e52-b8e9-c8e0f7ec4325',
      md5_of_body: 'bd6e137230719036d2bd8008c1e11a3d',
      body: {
        MessageId: '7beeffda-2519-4ea7-8fe3-b52c629053a2',
        Type: 'Notification',
        Timestamp: '2020-01-07T11:32:13.189882Z',
        Message: {
          books: [
            { id: 1, isbn10: '9561111853' },
            { id: 2, isbn10: '9562623246' }
          ]
        },
        TopicArn: 'arn:aws:sns:us-east-1:000000000000:liam_TestProducer',
        MessageAttributes: {
          event_name: {
            data_type: 'String'
          }.merge(value_message_attribute)
        }
      }.to_json,
      attributes: {
        SentTimestamp: '1578396733208',
        ApproximateReceiveCount: '1',
        ApproximateFirstReceiveTimestamp: '1578396733208',
        SenderId: '127.0.0.1',
        MessageDeduplicationId: '',
        MessageGroupId: ''
      },
      md5_of_message_attributes: nil,
      message_attributes: {}
    )
  end

  describe 'failure cases' do
    context 'when the class expected to process the message has not been initialized' do
      # Remove the producer constant so dispatch has nothing to instantiate.
      before do
        Liam.send(:remove_const, :TestProducer)
      end

      # Re-define the constant afterwards so later examples still see it.
      after(:all) do
        module Liam
          class TestProducer
            def initialize(message)
              @message = message
            end

            def process; end
          end
        end
      end

      it do
        expect { described_class.process(message) }.to(
          raise_error(
            Liam::UninitializedMessageProcessorError,
            # NOTE(review): gsub(/\n/, '') joins the heredoc lines without a
            # separating space; this only matches because the raised message
            # is presumably built the same way -- confirm in the error class.
            <<~MSG.gsub(/\n/, '')
              Expected file Liam::TestProducer defined in app/services/liam to process
              the message, but it has not been initialized.
            MSG
          )
        )
      end
    end

    context 'when the message received does not have a expected Value key' do
      let(:value_message_attribute) { { string_value: event } }

      it 'raises a custom MessageWithoutValueAttributeError' do
        expect { described_class.process(message) }.to(
          raise_error(
            Liam::MessageWithoutValueAttributeError,
            <<~MSG.gsub(/\n/, '')
              Expected to get a message attribute value to initialize the class to process
              this message, but the value received is invalid.
            MSG
          )
        )
      end
    end

    context 'when initialized without an Aws::SQS::Types::Message object' do
      let(:message) { nil }

      it do
        expect { described_class.process(message) }.to(
          raise_error(
            Liam::UnexpectedMessageError,
            "Expected #{message.class} to be an instance of Aws::SQS::Types::Message."
          )
        )
      end
    end
  end

  describe 'success cases' do
    it 'invokes the process method in the class expected to process the message' do
      mock = double(Liam::TestProducer)
      expect(Liam::TestProducer).to receive(:new).and_return(mock)
      expect(mock).to receive(:process)
      described_class.process(message)
    end
  end
end
| 30.443478 | 98 | 0.604113 |
26178185c9b4a704636fe25b225f4548cad46b57
| 1,894 |
# GitHub Services hook that posts each pushed commit to a Lighthouse
# project as a changeset.
class Service::Lighthouse < Service
  string :subdomain, :project_id, :token
  boolean :private, :send_only_ticket_commits
  white_list :subdomain, :project_id

  # Posts one Lighthouse changeset per commit in the push payload.
  #
  # Commits whose message starts with "x " are always skipped. When the
  # send_only_ticket_commits option is enabled, commits without a
  # "[#123 ...]"-style ticket reference are skipped as well.
  # Raises a config error when the subdomain produces an invalid URI.
  def receive_push
    # matches string with square braces with content starting with # and a digit.
    check_for_lighthouse_flags = /\[#\d.+?\]/
    # Fix: this URL was previously assigned to an unused local and then
    # rebuilt inline for every commit; compute it once and reuse it.
    account = "http://#{data['subdomain']}.lighthouseapp.com"

    payload['commits'].each do |commit|
      next if commit['message'] =~ /^x /
      next if data['send_only_ticket_commits'] == false && (commit['message'] =~ check_for_lighthouse_flags).nil?

      commit_id = commit['id']
      added    = commit['added'].map    { |f| ['A', f] }
      removed  = commit['removed'].map  { |f| ['R', f] }
      modified = commit['modified'].map { |f| ['M', f] }

      diff = YAML.dump(added + removed + modified)
      # Private repositories must not leak file paths to Lighthouse.
      diff = YAML.dump([]) if data['private']

      title = "Changeset [%s] by %s" % [commit_id, commit['author']['name']]
      body = "#{commit['message']}\n#{commit['url']}"
      changeset_xml = <<-XML.strip
<changeset>
  <title>#{CGI.escapeHTML(title)}</title>
  <body>#{CGI.escapeHTML(body)}</body>
  <changes type="yaml">#{CGI.escapeHTML(diff)}</changes>
  <committer>#{CGI.escapeHTML(commit['author']['name'])}</committer>
  <revision>#{CGI.escapeHTML(commit_id)}</revision>
  <changed-at type="datetime">#{CGI.escapeHTML(commit['timestamp'])}</changed-at>
</changeset>
      XML

      begin
        http.basic_auth data['token'], 'x'
        http.headers['Content-Type'] = 'application/xml'
        http_post '%s/projects/%d/changesets.xml' % [account, data['project_id'].to_i],
          changeset_xml
      rescue URI::InvalidURIError
        raise_config_error "Invalid subdomain: #{data['subdomain']}"
      end
    end
  end
end
| 38.653061 | 113 | 0.600317 |
919174b51fc12483251e357f8e570fb442185615
| 1,506 |
# Gurpurb (Sikh observance) names keyed by Nanakshahi calendar month (1-12)
# and then by day of month. Months with no fixed observances map to an
# empty hash. Frozen so shared lookup data cannot be mutated; two entries
# previously carried accidental trailing spaces, which are removed here.
GURPURB_IN_NS_MONTHS = {
  1 => {
    1 => ['Nanakshahi New Year', 'Gurgaddi Guru Har Rai Sahib'],
    6 => ['Joti Jot Guru Hargobind Sahib']
  },
  2 => {
    1 => ['Vaisakhi'],
    3 => ['Joti Jot Guru Angad Sahib', 'Gurgaddi Guru Amardas Sahib', 'Joti Jot Guru Harkrishan Sahib', 'Gurgaddi Guru Tegh Bahadur Sahib'],
    5 => ['Parkash Guru Angad Sahib', 'Parkash Guru Tegh Bahadur Sahib'],
    19 => ['Parkash Guru Arjan Sahib']
  },
  3 => {
    9 => ['Parkash Guru Amardas Sahib'],
    28 => ['Gurgaddi Guru Hargobind Sahib']
  },
  4 => {
    2 => ['Shaheedi Guru Arjan Sahib'],
    18 => ['Foundation Day Sri Akal Takht Sahib'],
    21 => ['Parkash Guru Hargobind Sahib']
  },
  5 => {
    6 => ['Miri-Piri Day'],
    8 => ['Parkash Guru Harkrishan Sahib']
  },
  6 => {
    15 => ['Completion Guru Granth Sahib'],
    17 => ['First Parkash Guru Granth Sahib']
  },
  7 => {
    2 => ['Joti Jot Guru Amardas Sahib', 'Gurgaddi Guru Ramdas Sahib', 'Joti Jot Guru Ramdas Sahib', 'Gurgaddi Guru Arjan Sahib'],
    4 => ['Gurgaddi Guru Angad Sahib'],
    8 => ['Joti Jot Guru Nanak Sahib'],
    25 => ['Parkash Guru Ramdas Sahib']
  },
  8 => {
    6 => ['Joti Jot Guru Har Rai Sahib', 'Gurgaddi Guru Harkrishan Sahib', 'Gurgaddi Adi Guru Granth Sahib'],
    7 => ['Joti Jot Guru Gobind Singh Sahib']
  },
  9 => {
    11 => ['Gurgaddi Guru Gobind Singh Sahib', 'Shaheedi Day Guru Tegh Bahadur Sahib']
  },
  10 => {
    23 => ['Parkash Guru Gobind Singh Sahib']
  },
  11 => {
    19 => ['Parkash Guru Har Rai Sahib']
  },
  12 => {}
}.freeze
| 30.734694 | 139 | 0.590969 |
b9cbd53a0c9cb040e2ec3ec7d4ad874f5d0afc3b
| 1,005 |
# Handles submission and review of virtual-conference talk proposals.
class VirtualConferenceProposalsController < ApplicationController
  # Public submission form; intentionally no authorize call here --
  # NOTE(review): confirm unauthenticated access to #new/#create is intended,
  # since #index and #show are Pundit-authorized but these two are not.
  def new
    @proposal = VirtualConferenceProposal.new
  end

  def index
    authorize @proposals = VirtualConferenceProposal.all
  end

  def show
    authorize @proposal = VirtualConferenceProposal.find( params[:id] )
  end

  # Saves a proposal, emails a confirmation, and redirects to a
  # token-addressed confirmation page; re-renders the form on failure.
  def create
    @proposal = VirtualConferenceProposal.new( proposal_params )
    if @proposal.save
      VirtualConferenceMailer.proposal_confirmation( @proposal.id ).deliver_later
      redirect_to virtual_conference_proposal_confirmation_path( token: @proposal.token ), notice: "Virtual Conference Proposal successfully submitted!"
    else
      # full_messages is an array; presumably the flash partial renders it --
      # TODO confirm the view handles an array alert.
      flash[:alert] = @proposal.errors.full_messages
      render :new
    end
  end

  private

  # Strong parameters for proposal submission.
  def proposal_params
    params.require(:virtual_conference_proposal).
      permit(
        :name,
        :email,
        :presentation_theme,
        :presentation_title,
        :presentation_description,
        :presentation_takeaway
      )
  end
end
| 25.125 | 152 | 0.714428 |
08e661665d922155a4d39bf8d9a09bafe718806e
| 2,269 |
# frozen_string_literal: true
# Basic integration example - run code against a dummy repo
#
# * Requires git configured on the machine.
#
# Create a tmp dir and create a dummy git repo, run code at various stages,
# eg no commits, a commit, a tag, etc.
#
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require_relative "./support/jekyll_template"
require "jekyll_version_plugin"
require "tmpdir"
# Tag option strings exercised at every stage; "" exercises the defaults.
RUN_OPTIONS = [
  "",
  "tag short",
  "tag long",
  "commit short",
  "commit long"
].freeze

# Prints the stage banner, then renders the version tag once per entry in
# RUN_OPTIONS against the current working directory's git state.
def run_code_with_options(message)
  say_with_colour message, :green
  RUN_OPTIONS.each do |options|
    say_with_colour "Running with options: '#{options}'", :blue
    tag = Jekyll::VersionPlugin::Tag.new(nil, options, nil)
    say_with_colour tag.render(nil), :yellow
  end
end
# ANSI foreground colour codes understood by #say_with_colour.
COLOUR_MAP = {
  red: 31,
  green: 32,
  yellow: 33,
  blue: 34
}.freeze

# Prints +text+ to stdout wrapped in the ANSI escape sequence for the given
# colour name. Raises KeyError for a colour missing from COLOUR_MAP.
def say_with_colour(text, colour_name)
  code = COLOUR_MAP.fetch(colour_name)
  puts format("\e[%dm%s\e[0m", code, text)
end
# Announces +message+ in red, then executes the supplied block of commands.
def run_commands(message, &commands)
  say_with_colour(message, :red)
  commands.call
end
# Drives the end-to-end scenario: builds a throw-away git repo in a temp
# dir and renders the version tag at each stage -- no repo, empty repo,
# first commit, first tag, commit after the tag, second tag.
# Requires `git` on PATH; the temp dir is removed when the block exits.
def main
  Dir.mktmpdir("jekyll_version_plugin_test") do |dir|
    say_with_colour "Created temp dir: #{dir}", :red
    Dir.chdir(dir) do
      run_code_with_options ">> Without git repo"
      run_commands("** Creating git repo") { `git init .` }
      run_code_with_options ">> With git repo"
      run_commands("** Adding file") do
        File.open("test-file.txt", "w") do |f|
          f.write("this is some text\n")
        end
        `git add -A`
        `git commit -m"first commit"`
      end
      run_code_with_options ">> With a commit"
      run_commands("** Creating tag") { `git tag -a v1 -m"first tag"` }
      run_code_with_options ">> With tag"
      run_commands("** Updating file") do
        File.open("test-file.txt", "w") do |f|
          f.write("this is some more text\n")
        end
        `git add -A`
        `git commit -m"another commit"`
      end
      run_code_with_options ">> With a commit after the tag"
      run_commands("** Creating tag again") { `git tag -a v2 -m"second tag"` }
      # NOTE(review): this banner repeats the previous one; it presumably
      # should read ">> With second tag" -- confirm intent before changing.
      run_code_with_options ">> With a commit after the tag"
    end
  end
end
# Run the integration flow only when executed directly, not when required.
if __FILE__ == $PROGRAM_NAME
  say_with_colour "Running integration tests...", :red
  main
end
| 24.138298 | 78 | 0.654033 |
38ff690d6d6c6c2dd6328c2f03943eb8318aeae5
| 2,472 |
require_relative "test_helper"
# Verifies SemanticLogger::DebugAsTraceLogger, which demotes :debug calls
# to :trace. As a result debug? tracks trace? instead of the usual :debug
# threshold semantics, and debug-level messages are recorded as :trace.
class DebugAsTraceLoggerTest < Minitest::Test
  describe SemanticLogger::Logger do
    describe ".level?" do
      let :logger do
        SemanticLogger::DebugAsTraceLogger.new("TestLogger")
      end

      it "return true for debug? with :trace level" do
        SemanticLogger.default_level = :trace
        assert_equal :trace, logger.level
        assert_equal true, logger.debug?
        assert_equal true, logger.trace?
      end

      it "return false for debug? with global :debug level" do
        SemanticLogger.default_level = :debug
        # Fix: pass #inspect as the failure message, consistent with every
        # other assertion (previously the logger object itself was passed).
        assert_equal :debug, logger.level, logger.inspect
        assert logger.info?, logger.inspect
        refute logger.debug?, logger.inspect
        refute logger.trace?, logger.inspect
      end

      # Fix: description previously said "return true" although the
      # assertions below expect debug? to be false.
      it "return false for debug? with global :info level" do
        SemanticLogger.default_level = :info
        assert_equal :info, logger.level, logger.inspect
        refute logger.debug?, logger.inspect
        refute logger.trace?, logger.inspect
      end

      it "return false for debug? with instance :debug level" do
        logger.level = :debug
        assert_equal :debug, logger.level, logger.inspect
        refute logger.debug?, logger.inspect
        refute logger.trace?, logger.inspect
      end

      # Fix: description previously said "return true" although the
      # assertions below expect debug? to be false.
      it "return false for debug? with instance :info level" do
        logger.level = :info
        assert_equal :info, logger.level, logger.inspect
        refute logger.debug?, logger.inspect
        refute logger.trace?, logger.inspect
      end
    end

    describe "log" do
      include InMemoryAppenderHelper

      let :logger do
        SemanticLogger::DebugAsTraceLogger.new("TestLogger")
      end

      it "not log trace when level is debug" do
        logger.level = :debug
        logger.trace("hello world", payload) { "Calculations" }
        refute log_message
      end

      it "not log debug when level is debug" do
        logger.level = :debug
        logger.debug("hello world", payload) { "Calculations" }
        refute log_message
      end

      it "map debug to trace" do
        logger.level = :trace
        logger.debug("hello world")
        assert log = log_message
        assert_equal :trace, log.level
      end

      it "log trace as trace" do
        logger.level = :trace
        logger.trace("hello world", payload) { "Calculations" }
        assert log = log_message
        assert_equal :trace, log.level
      end
    end
  end
end
| 30.146341 | 64 | 0.638754 |
389acc28434d4fea59b0b42c766f8cb874c38be2
| 817 |
# Specs for boolean schema example generation: the generator must consult
# the use_static predicate and fall back to Faker for random values.
describe Fakeit::Openapi::Example do
  let(:schema) { load_schema('boolean_schema') }
  # Stubbed predicate deciding static vs random example generation.
  let(:use_static) { double('lambda', :[] => false) }
  let(:example_options) { { use_static: use_static, property: 'static_boolean' } }

  it 'calls use_static' do
    # The predicate receives the schema type and the property name.
    expect(use_static).to receive(:[]).with(type: 'boolean', property: 'static_boolean')

    schema.to_example(example_options)
  end

  context 'static' do
    let(:use_static) { double('lambda', :[] => true) }

    it 'boolean example' do
      boolean = schema.to_example(example_options)

      expect(boolean).to be(true)
    end
  end

  context 'random' do
    it 'boolean example' do
      expect(Faker::Boolean).to receive(:boolean).and_return(true)

      boolean = schema.to_example(example_options)

      expect(boolean).to be(true)
    end
  end
end
| 24.757576 | 88 | 0.667075 |
2638adcd151801e7046e803f2f040a118eb94ded
| 7,579 |
# -*- coding: us-ascii -*-
# frozen_string_literal: true
require 'minitest_helper'
# Tests RDoc::Parser class-level behaviour: binary-file detection,
# parser selection via .can_parse/.for, and modeline/markup handling.
class TestRDocParser < RDoc::TestCase

  def setup
    super

    @RP = RDoc::Parser
    @binary_dat = File.expand_path '../binary.dat', __FILE__

    @fn = 'file.rb'
    @top_level = RDoc::TopLevel.new @fn
    @options = RDoc::Options.new
  end

  def test_class_binary_eh_ISO_2022_JP
    iso_2022_jp = File.join Dir.tmpdir, "test_rdoc_parser_#{$$}.rd"

    File.open iso_2022_jp, 'wb' do |io|
      io.write "# coding: ISO-2022-JP\n"
      io.write ":\e$B%3%^%s%I\e(B:\n"
    end

    refute @RP.binary? iso_2022_jp
  ensure
    File.unlink iso_2022_jp
  end

  def test_class_binary_eh_marshal
    marshal = File.join Dir.tmpdir, "test_rdoc_parser_#{$$}.marshal"

    File.open marshal, 'wb' do |io|
      io.write Marshal.dump('')
      io.write 'lots of text ' * 500
    end

    assert @RP.binary?(marshal)
  ensure
    File.unlink marshal
  end

  def test_class_binary_japanese_text
    file_name = File.expand_path '../test.ja.txt', __FILE__
    refute @RP.binary?(file_name)
  end

  def test_class_binary_large_japanese_rdoc
    capture_io do
      begin
        extenc, Encoding.default_external =
          Encoding.default_external, Encoding::US_ASCII

        file_name = File.expand_path '../test.ja.largedoc', __FILE__
        # Fix: this assertion had been corrupted to "[email protected]..."
        # by an email-obfuscation pass; restore the negated binary? check.
        assert !@RP.binary?(file_name)
      ensure
        Encoding.default_external = extenc
      end
    end
  end

  def test_class_binary_japanese_rdoc
    file_name = File.expand_path '../test.ja.rdoc', __FILE__
    refute @RP.binary?(file_name)
  end

  def test_class_can_parse
    assert_equal @RP.can_parse(__FILE__), @RP::Ruby

    readme_file_name = File.expand_path '../test.txt', __FILE__
    assert_equal @RP::Simple, @RP.can_parse(readme_file_name)

    # Binary files fall back to the Simple parser from can_parse's view.
    assert_equal @RP::Simple, @RP.can_parse(@binary_dat)

    jtest_file_name = File.expand_path '../test.ja.txt', __FILE__
    assert_equal @RP::Simple, @RP.can_parse(jtest_file_name)

    jtest_rdoc_file_name = File.expand_path '../test.ja.rdoc', __FILE__
    assert_equal @RP::Simple, @RP.can_parse(jtest_rdoc_file_name)

    readme_file_name = File.expand_path '../README', __FILE__
    assert_equal @RP::Simple, @RP.can_parse(readme_file_name)

    jtest_largerdoc_file_name = File.expand_path '../test.ja.largedoc', __FILE__
    assert_equal @RP::Simple, @RP.can_parse(jtest_largerdoc_file_name)

    @RP.alias_extension 'rdoc', 'largedoc'
    assert_equal @RP::Simple, @RP.can_parse(jtest_largerdoc_file_name)
  end

  def test_class_for_executable
    temp_dir do
      content = "#!/usr/bin/env ruby -w\n"
      File.open 'app', 'w' do |io| io.write content end
      app = @store.add_file 'app'

      parser = @RP.for app, 'app', content, @options, :stats

      assert_kind_of RDoc::Parser::Ruby, parser

      assert_equal 'app', parser.file_name
    end
  end

  def test_class_for_forbidden
    skip 'chmod not supported' if Gem.win_platform?

    tf = Tempfile.open 'forbidden' do |io|
      begin
        File.chmod 0000, io.path
        forbidden = @store.add_file io.path

        parser = @RP.for forbidden, 'forbidden', '', @options, :stats

        assert_nil parser
      ensure
        File.chmod 0400, io.path
      end
      io
    end
    tf.close!
  end

  def test_class_for_modeline
    temp_dir do
      content = "# -*- rdoc -*-\n= NEWS\n"
      File.open 'NEWS', 'w' do |io| io.write content end
      app = @store.add_file 'NEWS'

      parser = @RP.for app, 'NEWS', content, @options, :stats

      assert_kind_of RDoc::Parser::Simple, parser
      # The modeline itself is stripped from the content handed to the parser.
      assert_equal "= NEWS\n", parser.content
    end
  end

  def test_can_parse_modeline
    readme_ext = File.join Dir.tmpdir, "README.EXT.#{$$}"

    File.open readme_ext, 'w' do |io|
      io.puts "# README.EXT -  -*- rdoc -*- created at: Mon Aug  7 16:45:54 JST 1995"
      io.puts
      io.puts "This document explains how to make extension libraries for Ruby."
    end

    assert_equal RDoc::Parser::Simple, @RP.can_parse(readme_ext)
  ensure
    File.unlink readme_ext
  end

  ##
  # Selenium hides a .jar file using a .txt extension.

  def test_class_can_parse_zip
    hidden_zip = File.expand_path '../hidden.zip.txt', __FILE__
    assert_nil @RP.can_parse(hidden_zip)
  end

  def test_check_modeline
    readme_ext = File.join Dir.tmpdir, "README.EXT.#{$$}"

    File.open readme_ext, 'w' do |io|
      io.puts "# README.EXT -  -*- RDoc -*- created at: Mon Aug  7 16:45:54 JST 1995"
      io.puts
      io.puts "This document explains how to make extension libraries for Ruby."
    end

    assert_equal 'rdoc', @RP.check_modeline(readme_ext)
  ensure
    File.unlink readme_ext
  end

  def test_check_modeline_coding
    readme_ext = File.join Dir.tmpdir, "README.EXT.#{$$}"

    File.open readme_ext, 'w' do |io|
      io.puts "# -*- coding: utf-8 -*-"
    end

    # A bare coding modeline names no markup, so nothing is detected.
    assert_nil @RP.check_modeline readme_ext
  ensure
    File.unlink readme_ext
  end

  def test_check_modeline_with_other
    readme_ext = File.join Dir.tmpdir, "README.EXT.#{$$}"

    File.open readme_ext, 'w' do |io|
      io.puts "# README.EXT -  -*- mode: RDoc; indent-tabs-mode: nil -*-"
      io.puts
      io.puts "This document explains how to make extension libraries for Ruby."
    end

    assert_equal 'rdoc', @RP.check_modeline(readme_ext)
  ensure
    File.unlink readme_ext
  end

  def test_check_modeline_no_modeline
    readme_ext = File.join Dir.tmpdir, "README.EXT.#{$$}"

    File.open readme_ext, 'w' do |io|
      io.puts "This document explains how to make extension libraries for Ruby."
    end

    assert_nil @RP.check_modeline(readme_ext)
  ensure
    File.unlink readme_ext
  end

  def test_class_for_binary
    rp = @RP.dup

    class << rp
      alias old_can_parse can_parse
    end

    # Force can_parse to report "no parser" so .for must return nil.
    def rp.can_parse(*args) nil end

    assert_nil @RP.for(nil, @binary_dat, nil, nil, nil)
  end

  def test_class_for_markup
    content = <<-CONTENT
# coding: utf-8 markup: rd
    CONTENT

    parser = @RP.for @top_level, __FILE__, content, @options, nil

    assert_kind_of @RP::RD, parser
  end

  def test_class_use_markup
    content = <<-CONTENT
# coding: utf-8 markup: rd
    CONTENT

    parser = @RP.use_markup content

    assert_equal @RP::RD, parser
  end

  def test_class_use_markup_markdown
    content = <<-CONTENT
# coding: utf-8 markup: markdown
    CONTENT

    parser = @RP.use_markup content

    # markdown has no dedicated parser class; Ruby handles it.
    assert_equal @RP::Ruby, parser
  end

  def test_class_use_markup_modeline
    content = <<-CONTENT
# -*- coding: utf-8 -*-
# markup: rd
    CONTENT

    parser = @RP.use_markup content

    assert_equal @RP::RD, parser
  end

  def test_class_use_markup_modeline_shebang
    content = <<-CONTENT
#!/bin/sh
/* -*- coding: utf-8 -*-
 * markup: rd
 */
    CONTENT

    parser = @RP.use_markup content

    assert_equal @RP::RD, parser
  end

  def test_class_use_markup_shebang
    content = <<-CONTENT
#!/usr/bin/env ruby
# coding: utf-8 markup: rd
    CONTENT

    parser = @RP.use_markup content

    assert_equal @RP::RD, parser
  end

  def test_class_use_markup_tomdoc
    content = <<-CONTENT
# coding: utf-8 markup: tomdoc
    CONTENT

    parser = @RP.use_markup content

    assert_equal @RP::Ruby, parser
  end

  def test_class_use_markup_none
    parser = @RP.use_markup ''

    assert_nil parser
  end

  def test_class_use_markup_unknown
    content = <<-CONTENT
# :markup: RDoc
    CONTENT

    parser = @RP.use_markup content

    assert_nil parser
  end

  def test_initialize
    @RP.new @top_level, @fn, '', @options, nil

    assert_equal @RP, @top_level.parser
  end
end
| 23.391975 | 84 | 0.667106 |
0378919ccbb6b57acf475ff5e529fce9d65525e6
| 1,621 |
# JSON API exposing the logged-in user's car reservations.
class Api::V1::ReservationsController < ApplicationController
  before_action :set_reservation, only: %i[show destroy]

  # GET /reservations
  # Lists the user's reservations with city, car, and car description
  # eager-loaded to avoid N+1 queries.
  def index
    @user = logged_in_user
    if @user
      # Fix: the user was previously dereferenced *before* the nil check,
      # so an unauthenticated request raised NoMethodError instead of 409.
      @reservations = @user.reservations.includes(:city, car: [:description])
      render json: @reservations.to_json(include: [:city, { car: { include: [:description] } }])
    else
      render json: { error: 'No Reservation Found' }, status: 409
    end
  end

  # POST /reservations
  # Creates a reservation for the logged-in user on the requested car.
  def create
    # Fix: @user was never assigned in this action (only #index set it),
    # so @user.id below raised NoMethodError on every create request.
    @user = logged_in_user
    unless @user
      render json: { error: 'No Reservation Found' }, status: 409
      return
    end

    @car = Car.find_by_id(params[:reservation][:car_id])
    unless @car
      # Fix: a missing car previously crashed on @car.id instead of
      # returning an error response.
      render json: { error: 'Car not found' }, status: 409
      return
    end

    @reservation = Reservation.new(reservation_params.merge(user_id: @user.id, car_id: @car.id))
    if @reservation.save
      render json: @reservation.to_json(include: [:city, { car: { include: [:description] } }]), status: :created
    else
      # Fix: report the reservation's own validation errors (was
      # @car.errors, which is always empty here and hid the real reason).
      render json: { error: @reservation.errors.full_messages }, status: 409
    end
  end

  # DELETE /reservations/1
  def destroy
    if @reservation
      @reservation.destroy
      if @reservation.destroyed?
        render json: { message: "Reservation with id: #{params[:id]} Successfully Canceled", id: params[:id] },
               status: 200
      else
        render json: { error: "Reservation with id: #{params[:id]} cannot be canceled" }, status: 400
      end
    else
      render json: { error: 'Reservation not found' }, status: 409
    end
  end

  private

  # Looks up the reservation for #show/#destroy; leaves @reservation nil
  # for unknown ids so the actions can render an error response.
  def set_reservation
    @reservation = Reservation.find_by_id(params[:id])
  end

  # Only allow a list of trusted parameters through.
  def reservation_params
    params.require(:reservation).permit(:start_date, :end_date, :city_id, :car_id)
  end
end
| 30.018519 | 113 | 0.664405 |
3857607edff69db22de8b18822b391bf83c246c2
| 11,261 |
#--
# Cloud Foundry 2012.02.03 Beta
# Copyright (c) [2009-2012] VMware, Inc. All Rights Reserved.
#
# This product is licensed to you under the Apache License, Version 2.0 (the "License").
# You may not use this product except in compliance with the License.
#
# This product includes a number of subcomponents with
# separate copyright notices and license terms. Your use of these
# subcomponents is subject to the terms and conditions of the
# subcomponent's license, as noted in the LICENSE file.
#++
require 'uaa/http'
require 'uaa/error'
require 'base64'
require 'uaa/token_coder'
# Utility API for client of the UAA server. Provides convenience
# methods to obtain and decode OAuth2 access tokens.
class CF::UAA::Client

  include CF::UAA::Http

  # The target (base url) of calls to the UAA server. Default is "http://uaa.cloudfoundry.com".
  attr_writer :target

  # The token for authenticated calls to the UAA server if there is one currently.
  attr_accessor :token

  # The client id to use if client authorization is needed (default "vmc")
  attr_writer :client_id

  # The client secret to use if client authorization is needed
  attr_writer :client_secret

  # The key used by the server to sign JWT tokens
  attr_writer :token_key

  # The oauth scope to use if needed (default "read")
  attr_writer :scope

  # The grant type to use when logging in (default "implicit")
  attr_writer :grant_type

  # Sets conservative defaults aimed at the public vmc client.
  def initialize
    @target = 'http://uaa.cloudfoundry.com'
    @client_id = "vmc"
    @client_secret = nil
    @grant_type = "implicit"
    @scope = ["read"]
    @redirect_uri = "http://uaa.cloudfoundry.com/redirect/vmc"
  end

  # Get the prompts (login info) required by the UAA server. The response
  # is a hash in the form {:name=>[<type>,<message>],...}
  def prompts
    return @prompts if @prompts # TODO: reset prompts when the target changes?
    begin
      response = json_get('/login')
    rescue
      # Ignore
    end
    raise StandardError, "No response from prompts endpoint. Is the server running at #{@target}?" unless response
    @prompts ||= response[:prompts]
    raise StandardError, "No prompts available. Is the server running at #{@target}?" unless @prompts
    @prompts
  end

  # The default prompts that can be used to elicit input for resource
  # owner password credentials (username and password).
  def default_prompts
    {:username=>["text", "Username"], :password=>["password", "Password"]}
  end

  # The prompts that can be used to elicit input for account
  # registration (assuming it is supported on the server).
  def registration_prompts
    {:email=>["text", "Email"], :username=>["text", "Username"], :given_name=>["text", "Given (first) name"], :family_name=>["text", "Family (last) name"], :password=>["password", "Choose a password"]}
  end

  # Login and get back an OAuth token.
  #
  # === Attributes
  #
  # * +opts+ - parameters to send, e.g.
  #   * +client_id+ - the client id (defaults to the instance attribute)
  #   * +grant_type+ - the OAuth2 grant type (default to the instance attribute)
  #   * +client_secret+ - the client secret (defaults to the instance attribute)
  #   * +scope+ - the oauth scopes to request, array of String, or comma- or space-separated list (defaults to "read")
  #   * +credentials+ - a hash of credentials to be passed to the server as a JSON literal (with :grant_type=>"implicit")
  #   * +username+ - the username of the resource owner to login (with :grant_type="password")
  #   * +password+ - the password of the resource owner to login (with :grant_type="password")
  #     (defaults to the instance attribute)
  #
  # === Implicit Grant
  #
  # The default grant type is "implicit" which is used by vmc and
  # other untrusted clients.  The UAA server authenticates the user in
  # that case using the data provided in the +credentials+ option.
  #
  # As a convenience the +credentials+ default to the +username+ and
  # +password+ if those are provided.
  #
  # If +credentials+ are not provided, or if +username+ is provided
  # without a +password+ then a CF::UAA::PromptRequiredError
  # is raised.
  def login(opts={})
    opts = opts.dup
    opts[:client_id] ||= @client_id
    opts[:client_secret] ||= @client_secret if @client_secret
    opts[:scope] ||= @scope
    grant_type = opts[:grant_type] || @grant_type
    opts[:grant_type] = grant_type
    username = opts[:username]
    password = opts[:password]
    case grant_type
    when "password"
      raise CF::UAA::PromptRequiredError.new(default_prompts) if (username.nil? || password.nil?)
    when "implicit"
      if prompts_require_username_and_password? && username && password then
        opts[:credentials] = {:username=>username, :password=>password}
      end
      raise CF::UAA::PromptRequiredError.new(prompts) unless opts[:credentials]
    end
    # make sure they don't get used as request or form params unless we want them to
    opts.delete :username
    opts.delete :password
    if grant_type!="client_credentials" && grant_type!="password" then
      opts[:redirect_uri] ||= @redirect_uri
    end
    opts[:scope] = join_array(opts[:scope]) if opts[:scope]
    headers = {'Content-Type'=>"application/x-www-form-urlencoded",
               'Accept'=>"application/json"}
    add_client_auth(grant_type, headers, opts)
    url = '/oauth/token'
    case grant_type
    when "implicit"
      url = '/oauth/authorize'
      opts[:response_type] = "token"
      opts.delete :grant_type # don't send grant type
    when "authorization_code"
      url = '/oauth/authorize'
      opts[:response_type] = "code"
      opts.delete :grant_type # don't send grant type
    when "password"
      opts[:username] = username
      opts[:password] = password
    end
    opts.delete :client_secret # don't send secret in post data
    form_data = opts.map{|k,v| value=v.is_a?(Hash) ? v.to_json : v; "#{k}=#{value}"}.join('&')
    # NOTE(review): this reassignment clobbers the *request* headers local
    # with the *response* headers -- intentional, but easy to misread.
    status, body, headers = request(:post, url, form_data, headers)
    # For the implicit grant the token arrives in the redirect Location
    # fragment rather than the response body.
    if (grant_type=="implicit") then
      token = extract_implicit_token(headers)
    end
    return token if token
    json = CF::UAA::Util.json_parse(body)
    return json if !json
    return json[:access_token]
  end

  # Decode the contents of a JWT token obtained from the target UAA.
  #
  # === Attributes
  #
  # * +token+ - mandatory: the token to decode (e.g. obtained from #login)
  # * +opts+ - optional: additional parameters to send, e.g.
  #   * +token_key+ - the token key (defaults to the instance attribute)
  #
  # Note that the default client (vmc) is not authorized to decode
  # tokens, so callers will need to change the default or provide
  # explicit values in the options.  The secret is the one used by the
  # server to sign the token (not the same as the client secret) but
  # we overload the option with that name for the purpose of this
  # call.
  def decode_jwt_token(token=nil, opts={})
    CF::UAA::TokenCoder.decode(token || @token, opts[:token_key])
  end

  # Decode the contents of an opaque token obtained from the target
  # UAA by sending an HTTP request to the UAA and getting back the
  # result.
  #
  # === Attributes
  #
  # * +token+ - mandatory: the token to decode (e.g. obtained from #login)
  # * +opts+ - optional: additional parameters to send, e.g.
  #   * +client_id+ - the client id (defaults to the instance attribute)
  #   * +client_secret+ - the client secret (defaults to the instance attribute)
  #
  # Note that the default client (vmc) is not authorized to decode
  # tokens, so callers will need to change the default or provide
  # explicit values in the options.  Authoeized clients must be
  # pre-registered with the server.
  def decode_opaque_token(token=nil, opts={})
    headers = {'Accept'=>"application/json",
               'Authorization'=>client_auth(opts)}
    token ||= @token
    status, body, headers = request(:get, "/check_token?token=#{token}", nil, headers)
    # NOTE(review): the local is redundant -- the parsed hash is simply the
    # method's return value.
    result = CF::UAA::Util.json_parse(body)
  end

  # Tries local JWT decoding first (when a token key is available) and
  # falls back to the server-side check_token endpoint.
  def decode_token(token=nil, opts={})
    begin
      return decode_jwt_token(token, opts) if opts[:token_key] || @token_key
    rescue DecodeError
      # log something?
    end
    decode_opaque_token(token, opts)
  end

  # Register a new user account.
  #
  # === Attributes
  #
  # * +options+ - additional parameters to send
  #   * +username+ - the username to register
  #   * +password+ - the password to use for the new account
  #   * +email+ - the email addres of the new account
  #   * +family_name+ - the family name of the new user
  #   * +given_name+ - the given name of the new user
  #   * +name+ - (optional) the formatted name (defaults to use the given and family names)
  #
  # Any missing attributes will cause a PromptRequiredError to be
  # raised with a set of prompts to provide to the user to elicit the
  # required information.
  def register(options={})
    token ||= @token
    raise StandardError, "No token provided. You must login first and set the authorization token up." unless token
    username = options[:username]
    password = options[:password]
    family_name = options[:family_name]
    given_name = options[:given_name]
    email = options[:email]
    name = options[:name]
    raise CF::UAA::PromptRequiredError.new(registration_prompts) if (username.nil? || password.nil? || family_name.nil? || given_name.nil? || email.nil?)
    name ||= "#{given_name} #{family_name}"
    options = options.dup
    options[:name] = name
    request= {
      :name=>{
        "givenName"=>options[:given_name],
        "familyName"=>options[:family_name],
        "formatted"=>options[:name]},
      :userName=>options[:username],
      :emails=>[{:value=>options[:email]}]
    }
    status, body, headers = http_post("/User", request.to_json, "application/json", "Bearer #{token}")
    user = CF::UAA::Util.json_parse(body)
    id = user[:id]
    password_request = {:password=>password}
    # TODO: rescue from 403 and ask user to reset password through
    # another channel
    status, body, headers = http_put("/User/#{id}/password", password_request.to_json, "application/json", "Bearer #{token}")
    user
  end

  private

  # True when the server's login prompts are exactly username+password,
  # meaning we can forward those directly as implicit-grant credentials.
  def prompts_require_username_and_password?
    prompts.has_key?(:username) && prompts.has_key?(:password) && prompts.length==2
  end

  # Collapses an array scope into the space-separated string form;
  # non-array values pass through untouched.
  def join_array(value)
    return value.join(" ") if value.is_a?(Array)
    value
  end

  # Adds an HTTP Basic Authorization header for every grant type except
  # "implicit" (which authenticates via credentials in the form body).
  def add_client_auth(grant_type, headers={}, opts={})
    if (grant_type!="implicit") then
      auth = client_auth(opts)
      headers['Authorization'] = auth if auth
    end
  end

  # Builds the HTTP Basic credentials string from client id/secret;
  # values in +opts+ take precedence over the instance attributes.
  # Returns nil when neither id nor secret is available.
  def client_auth(opts={})
    client_id = opts[:client_id] ? opts[:client_id] : @client_id
    client_secret = opts[:client_secret] ? opts[:client_secret] : @client_secret
    if client_id || client_secret then
      auth = Base64::strict_encode64("#{client_id}:#{client_secret}")
      "Basic #{auth}"
    end
  end

  # Pulls the access_token out of the URL fragment of a redirect
  # Location header (implicit grant); returns nil when none is present.
  def extract_implicit_token(headers={})
    return nil unless headers
    location = headers['Location'] || headers['location'] || headers[:location]
    parts = location.split('#')
    if parts.length > 1
      values=parts[1].split('&')
      token = values.each do |kv|
        k,v = kv.split('=')
        return v if k=="access_token"
      end
    end
    return nil
  end

end
| 35.190625 | 201 | 0.676405 |
1a74e9ac1093a808bac17cc8fb81ebe16cf30999
| 287 |
# Deface override appending a "Web Hooks" tab to the Spree admin Plugins
# sub-menu; the tab is highlighted whenever the path matches /admin/web_hooks.
# The #{...} in :text is deliberately single-quoted so it is interpolated by
# ERB at render time, not here.
Deface::Override.new(
  :virtual_path => "spree/admin/shared/sub_menu/_plugins",
  :name => "web_hooks_admin_tab",
  :insert_bottom => "[data-hook='admin_plugins_sub_tabs']",
  :text => '<%= tab :web_hooks, :match_path => "/admin/web_hooks", label: "#{Spree.t(:web_hooks)}" %>'
)
| 47.833333 | 104 | 0.651568 |
287f02a32b3469c78cf9a1d95c3d1fee8b99a5d8
| 851 |
require 'test_helper'
# Smoke tests: each static page responds successfully and renders the
# expected <title>.
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
  # NOTE(review): this test and "should get home" below are identical --
  # both hit root_path and assert the same title. If a separate home route
  # exists it should presumably be used in the home test; confirm against
  # config/routes.rb.
  test "should get root" do
    get root_path
    assert_response :success
    assert_select "title", "Ruby on Rails Tutorial Sample App"
  end

  test "should get home" do
    get root_path
    assert_response :success
    assert_select "title", "Ruby on Rails Tutorial Sample App"
  end

  test "should get help" do
    get help_path
    assert_response :success
    assert_select "title", "Help | Ruby on Rails Tutorial Sample App"
  end

  test "should get about" do
    get about_path
    assert_response :success
    assert_select "title", "About | Ruby on Rails Tutorial Sample App"
  end

  test "should get contact" do
    get contact_path
    assert_response :success
    assert_select "title", "Contact | Ruby on Rails Tutorial Sample App"
  end
end
| 24.314286 | 72 | 0.721504 |
33f967957a4fb096e66f9d646691494575322671
| 4,625 |
# Builders for the downloadable ZIP archives (patient exports, combined
# product reports, C1 sample bundles, and application logs) served by the
# Cypress application.
module Cypress
  class CreateDownloadZip
    include ChecklistTestsHelper
    # Zips all patients belonging to the product test with +test_id+.
    # +format+ is 'html' (default) or 'qrda'. Returns the Tempfile.
    def self.create_test_zip(test_id, format = 'html')
      pt = ProductTest.find(test_id)
      create_zip(pt.patients.to_a, format)
    end
    # Writes +patients+ into a fresh Tempfile via PatientZipper and
    # returns the (still open) Tempfile; callers own its lifecycle.
    def self.create_zip(patients, format)
      file = Tempfile.new("patients-#{Time.now.to_i}")
      Cypress::PatientZipper.zip(file, patients, format)
      file
    end
    # Exports every patient in +bundle+ into <path>/html_records and
    # <path>/qrda_records, one file per patient per format.
    def self.bundle_directory(bundle, path)
      patients = bundle.patients
      %w[html qrda].each do |format|
        extensions = { html: 'html', qrda: 'xml' }
        formatter = formatter_for_patients(patients, format)
        FileUtils.mkdir_p(File.join(path, "#{format}_records/"))
        patients.each do |r|
          # Filename: given names + family name, with quotes stripped and
          # spaces replaced by underscores so the name is filesystem-safe.
          filename = "#{format}_records/#{r.givenNames.join('_')}_#{r.familyName}.#{extensions[format.to_sym]}".delete("'").tr(' ', '_')
          File.open(File.join(path, filename), 'w') do |f|
            f.write(formatter.export(r))
          end
        end
      end
    end
    # Builds the combined-report archive: the rendered product report plus,
    # for every non-checklist product test, its QRDA patient archive and
    # the artifact from each task's most recent execution (when present).
    # Returns a Tempfile containing the ZIP.
    def self.create_combined_report_zip(product, report_content)
      file = Tempfile.new("combined-report-#{Time.now.to_i}")
      Zip::ZipOutputStream.open(file.path) do |z|
        add_file_to_zip(z, 'product_report.html', report_content)
        product.product_tests.each do |m|
          next if m.is_a?(ChecklistTest)
          # Filtering tests get an extra per-test subfolder in the archive.
          filter_folder = m.is_a?(FilteringTest) ? '/' + m.name_slug : ''
          folder_name = "#{m._type.underscore.dasherize}s/#{m.cms_id}#{filter_folder}"
          add_file_to_zip(z, "#{folder_name}/records/#{m.cms_id}_#{m.id}.qrda.zip", m.patient_archive.read)
          m.tasks.each do |t|
            most_recent_execution = t.most_recent_execution
            if most_recent_execution
              mre_filename = "#{folder_name}/uploads/#{most_recent_execution.artifact.file.uploaded_filename}"
              add_file_to_zip(z, mre_filename, most_recent_execution.artifact.file.read)
            end
          end
        end
      end
      file
    end
    # Combines the checklist test's pre-built patient archive with the
    # rendered criteria list into a single ZIP. Returns a Tempfile.
    def self.create_c1_criteria_zip(checklist_test, criteria_list)
      file = Tempfile.new("c1_sample_criteria-#{Time.now.to_i}.zip")
      # Archive records checks whether the archive has already been created and creates
      # it if it has not.
      c1_patient_zip = checklist_test.archive_patients
      Zip::ZipOutputStream.open(file.path) do |output_zip|
        # Copy contents of existing c1_patient_zip into output file
        Zip::InputStream.open(c1_patient_zip.path) do |patient_archive|
          while (entry = patient_archive.get_next_entry)
            add_file_to_zip(output_zip, entry.name, patient_archive.read)
          end
        end
        # Add criteria_list to zip
        add_file_to_zip(output_zip, 'criteria_list.html', criteria_list)
      end
      file
    end
    # The intent of this is to break the create c1 criteria zip out into 2 parts and pre-package
    # the patients so that even if a measure is deprecated and the patient cache is deleted
    # the user will still be able to Download All Patients and View Record Samples.
    # It would be good to merge these back together when rails 5 comes out since rails 5
    # supports calls to render outside of the controller.
    def self.create_c1_patient_zip(checklist_test)
      file = Tempfile.new("c1_sample_patients-#{Time.now.to_i}.zip")
      example_patients = {}
      checklist_test.measures.each do |m|
        example_patients[m.cms_id] = Cypress::ExamplePatientFinder.find_example_patient(m)
      end
      formatter = formatter_for_patients(example_patients.values, 'html')
      Zip::ZipOutputStream.open(file.path) do |z|
        example_patients.each do |measure_id, patient|
          # TODO: R2P: format patients for export
          add_file_to_zip(z, "sample patient for #{measure_id}.html", formatter.export(patient))
        end
      end
      file
    end
    # Bundles every */*.log file (relative to the process working
    # directory) into a ZIP for download. Returns a Tempfile.
    def self.export_log_files
      file = Tempfile.new("application_logs-#{Time.now.to_i}.zip")
      Zip::ZipOutputStream.open(file.path) do |z|
        Dir.glob('*/*.log') do |log_file|
          add_file_to_zip(z, log_file, IO.read(log_file))
        end
      end
      file
    end
    # Appends a single named entry with the given content to an open
    # Zip::ZipOutputStream.
    def self.add_file_to_zip(z, file_name, file_content)
      z.put_next_entry(file_name)
      z << file_content
    end
    # Returns an HTMLExporter ('html') or QRDAExporter ('qrda') spanning
    # the measures and reporting period derived from +patients+.
    # Returns nil for any other format value.
    def self.formatter_for_patients(patients, format)
      mes, sd, ed = Cypress::PatientZipper.measure_start_end(patients)
      if format == 'html'
        formatter = Cypress::HTMLExporter.new(mes, sd, ed)
      elsif format == 'qrda'
        formatter = Cypress::QRDAExporter.new(mes, sd, ed)
      end
      formatter
    end
  end
end
| 38.541667 | 136 | 0.660108 |
088012d9b57dff0a76d5cd1c5f9591ff22014fb6
| 309 |
require_relative 'aws-eventstream/decoder'
require_relative 'aws-eventstream/encoder'
require_relative 'aws-eventstream/bytes_buffer'
require_relative 'aws-eventstream/message'
require_relative 'aws-eventstream/header_value'
require_relative 'aws-eventstream/types'
require_relative 'aws-eventstream/errors'
| 34.333333 | 47 | 0.860841 |
1ace717d1d8c59774f64f4545d95fb446dae679e
| 1,754 |
# CocoaPods podspec for AWSAuthUI, the drop-in authentication UI component
# of the AWS SDK for iOS. The version here must stay in lockstep with the
# AWSCore and AWSAuthCore dependency versions declared below.
Pod::Spec.new do |s|
  s.name         = 'AWSAuthUI'
  s.version      = '2.12.8'
  s.summary      = 'Amazon Web Services SDK for iOS.'

  s.description  = 'The AWS SDK for iOS provides a library, code samples, and documentation for developers to build connected mobile applications using AWS.'

  s.homepage     = 'http://aws.amazon.com/mobile/sdk'
  s.license      = 'Apache License, Version 2.0'
  s.author       = { 'Amazon Web Services' => 'amazonwebservices' }
  s.platform     = :ios, '9.0'
  s.source       = { :git => 'https://github.com/aws/aws-sdk-ios.git',
                     :tag => s.version}
  s.requires_arc = true
  s.dependency 'AWSCore', '2.12.8'
  s.dependency 'AWSAuthCore', '2.12.8'
  # NOTE(review): source_files deliberately pulls a few headers out of the
  # AWSUserPoolsSignIn target; those same headers are marked private below.
  s.source_files = 'AWSAuthSDK/Sources/AWSAuthUI/*.{h,m}', 'AWSAuthSDK/Sources/AWSAuthUI/**/*.{h,m}', 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSFormTableCell.h', 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSTableInputCell.h', 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSFormTableDelegate.h', 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSUserPoolsUIHelper.h'
  s.public_header_files = 'AWSAuthSDK/Sources/AWSAuthUI/AWSAuthUI.h', 'AWSAuthSDK/Sources/AWSAuthUI/AWSAuthUIViewController.h', 'AWSAuthSDK/Sources/AWSAuthUI/AWSAuthUIConfiguration.h'
  s.private_header_files = 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSFormTableCell.h', 'AWSAuthSDK/Sources/AWSAuthUI/AWSSignInViewController.h', 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSTableInputCell.h', 'AWSAuthSDK/Sources/AWSUserPoolsSignIn/UserPoolsUI/AWSFormTableDelegate.h'
  s.resource_bundles = { 'AWSAuthUI' => ['AWSAuthSDK/Sources/AWSAuthUI/*.{storyboard}', 'AWSAuthSDK/Sources/AWSAuthUI/Images.xcassets'] }
end
| 79.727273 | 398 | 0.733751 |
4a649ca2de0c0c5f67f920fec4a480a6af26c5bd
| 1,764 |
# This file is automatically created by Recurly's OpenAPI generation process
# and thus any edits you make by hand will be lost. If you wish to make a
# change to this file, please create a Github issue explaining the changes you
# need and we will usher them to the appropriate places.
module Recurly
  module Resources
    # Compact account representation embedded inside other Recurly
    # resources; exposes identifying and contact fields only.
    # (Generated code — see the header comment at the top of this file.)
    class AccountMini < Resource

      # @!attribute bill_to
      #   @return [String]
      define_attribute :bill_to, String

      # @!attribute code
      #   @return [String] The unique identifier of the account.
      define_attribute :code, String

      # @!attribute company
      #   @return [String]
      define_attribute :company, String

      # @!attribute dunning_campaign_id
      #   @return [String] Unique ID to identify a dunning campaign. Available when the Dunning Campaigns feature is enabled. Used to specify if a non-default dunning campaign should be assigned to this account. For sites without multiple dunning campaigns enabled, the default dunning campaign will always be used.
      define_attribute :dunning_campaign_id, String

      # @!attribute email
      #   @return [String] The email address used for communicating with this customer.
      define_attribute :email, String

      # @!attribute first_name
      #   @return [String]
      define_attribute :first_name, String

      # @!attribute id
      #   @return [String]
      define_attribute :id, String

      # @!attribute last_name
      #   @return [String]
      define_attribute :last_name, String

      # @!attribute object
      #   @return [String] Object type
      define_attribute :object, String

      # @!attribute parent_account_id
      #   @return [String]
      define_attribute :parent_account_id, String
    end
  end
end
| 34.588235 | 315 | 0.691043 |
e83399f62ffc7116e03247ef833b4e989e4d7683
| 74,557 |
# frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/http_checksum.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/rest_json.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:kafka)
module Aws::Kafka
# An API client for Kafka. To construct a client, you need to configure a `:region` and `:credentials`.
#
# client = Aws::Kafka::Client.new(
# region: region_name,
# credentials: credentials,
# # ...
# )
#
# For details on configuring region and credentials see
# the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html).
#
# See {#initialize} for a full list of supported configuration options.
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :kafka
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::HttpChecksum)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::RestJson)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::SharedCredentials` - Used for loading static credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# * `Aws::AssumeRoleWebIdentityCredentials` - Used when you need to
# assume a role after providing credentials via the web.
#
# * `Aws::SSOCredentials` - Used for loading credentials from AWS SSO using an
# access token generated from `aws login`.
#
# * `Aws::ProcessCredentials` - Used for loading credentials from a
# process that outputs to stdout.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::ECSCredentials` - Used for loading credentials from
# instances running in ECS.
#
# * `Aws::CognitoIdentityCredentials` - Used for loading credentials
# from the Cognito Identity service.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2/ECS IMDS instance profile - When used by default, the timeouts
# are very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentails` or `Aws::ECSCredentials` to
# enable retries and extended timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :adaptive_retry_wait_to_fill (true)
# Used only in `adaptive` retry mode. When true, the request will sleep
# until there is sufficent client side capacity to retry the request.
# When false, the request will raise a `RetryCapacityNotAvailableError` and will
# not retry instead of sleeping.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [String] :client_side_monitoring_host ("127.0.0.1")
# Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client
# side monitoring agent is running on, where client metrics will be published via UDP.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :correct_clock_skew (true)
# Used only in `standard` and adaptive retry modes. Specifies whether to apply
# a clock skew correction and retry requests with skewed client clocks.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test or custom endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [Integer] :max_attempts (3)
# An integer representing the maximum number attempts that will be made for
# a single request, including the initial attempt. For example,
# setting this value to 5 will result in a request being retried up to
# 4 times. Used in `standard` and `adaptive` retry modes.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Proc] :retry_backoff
# A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay.
# This option is only used in the `legacy` retry mode.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function. This option
# is only used in the `legacy` retry mode.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function.
# Some predefined functions can be referenced by name - :none, :equal, :full,
# otherwise a Proc that takes and returns a number. This option is only used
# in the `legacy` retry mode.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors, auth errors,
# endpoint discovery, and errors from expired credentials.
# This option is only used in the `legacy` retry mode.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit)
# used by the default backoff function. This option is only used in the
# `legacy` retry mode.
#
# @option options [String] :retry_mode ("legacy")
# Specifies which retry algorithm to use. Values are:
#
# * `legacy` - The pre-existing retry behavior. This is default value if
# no retry mode is provided.
#
# * `standard` - A standardized set of retry rules across the AWS SDKs.
# This includes support for retry quotas, which limit the number of
# unsuccessful retries a client can make.
#
# * `adaptive` - An experimental retry mode that includes all the
# functionality of `standard` mode along with automatic client side
# throttling. This is a provisional mode that may change behavior
# in the future.
#
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set per-request on the session.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
# `:ssl_ca_bundle` or `:ssl_ca_directory` the the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
# not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the the
# system default will be used if available.
#
def initialize(*args)
super
end
# @!group API Operations
# Creates a new MSK cluster.
#
# @option params [required, Types::BrokerNodeGroupInfo] :broker_node_group_info
# Information about the broker nodes in the cluster.
#
# @option params [Types::ClientAuthentication] :client_authentication
# Includes all client authentication related information.
#
# @option params [required, String] :cluster_name
# The name of the cluster.
#
# @option params [Types::ConfigurationInfo] :configuration_info
# Represents the configuration that you want MSK to use for the cluster.
#
# @option params [Types::EncryptionInfo] :encryption_info
# Includes all encryption-related information.
#
# @option params [String] :enhanced_monitoring
# Specifies the level of monitoring for the MSK cluster. The possible
# values are DEFAULT, PER\_BROKER, and PER\_TOPIC\_PER\_BROKER.
#
# @option params [required, String] :kafka_version
# The version of Apache Kafka.
#
# @option params [Types::LoggingInfo] :logging_info
# LoggingInfo details.
#
# @option params [required, Integer] :number_of_broker_nodes
# The number of Kafka broker nodes in the Amazon MSK cluster.
#
# @option params [Types::OpenMonitoringInfo] :open_monitoring
# The settings for open monitoring.
#
# @option params [Hash<String,String>] :tags
# Create tags when creating the cluster.
#
# @return [Types::CreateClusterResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateClusterResponse#cluster_arn #cluster_arn} => String
# * {Types::CreateClusterResponse#cluster_name #cluster_name} => String
# * {Types::CreateClusterResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_cluster({
# broker_node_group_info: { # required
# broker_az_distribution: "DEFAULT", # accepts DEFAULT
# client_subnets: ["__string"], # required
# instance_type: "__stringMin5Max32", # required
# security_groups: ["__string"],
# storage_info: {
# ebs_storage_info: {
# volume_size: 1,
# },
# },
# },
# client_authentication: {
# tls: {
# certificate_authority_arn_list: ["__string"],
# },
# },
# cluster_name: "__stringMin1Max64", # required
# configuration_info: {
# arn: "__string", # required
# revision: 1, # required
# },
# encryption_info: {
# encryption_at_rest: {
# data_volume_kms_key_id: "__string", # required
# },
# encryption_in_transit: {
# client_broker: "TLS", # accepts TLS, TLS_PLAINTEXT, PLAINTEXT
# in_cluster: false,
# },
# },
# enhanced_monitoring: "DEFAULT", # accepts DEFAULT, PER_BROKER, PER_TOPIC_PER_BROKER
# kafka_version: "__stringMin1Max128", # required
# logging_info: {
# broker_logs: { # required
# cloud_watch_logs: {
# enabled: false, # required
# log_group: "__string",
# },
# firehose: {
# delivery_stream: "__string",
# enabled: false, # required
# },
# s3: {
# bucket: "__string",
# enabled: false, # required
# prefix: "__string",
# },
# },
# },
# number_of_broker_nodes: 1, # required
# open_monitoring: {
# prometheus: { # required
# jmx_exporter: {
# enabled_in_broker: false, # required
# },
# node_exporter: {
# enabled_in_broker: false, # required
# },
# },
# },
# tags: {
# "__string" => "__string",
# },
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_name #=> String
# resp.state #=> String, one of "ACTIVE", "CREATING", "UPDATING", "DELETING", "FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/CreateCluster AWS API Documentation
#
# @overload create_cluster(params = {})
# @param [Hash] params ({})
def create_cluster(params = {}, options = {})
req = build_request(:create_cluster, params)
req.send_request(options)
end
# Creates a new MSK configuration.
#
# @option params [String] :description
# The description of the configuration.
#
# @option params [Array<String>] :kafka_versions
# The versions of Apache Kafka with which you can use this MSK
# configuration.
#
# @option params [required, String] :name
# The name of the configuration. Configuration names are strings that
# match the regex "^\[0-9A-Za-z-\]+$".
#
# @option params [required, String, StringIO, File] :server_properties
#
# @return [Types::CreateConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateConfigurationResponse#arn #arn} => String
# * {Types::CreateConfigurationResponse#creation_time #creation_time} => Time
# * {Types::CreateConfigurationResponse#latest_revision #latest_revision} => Types::ConfigurationRevision
# * {Types::CreateConfigurationResponse#name #name} => String
# * {Types::CreateConfigurationResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_configuration({
# description: "__string",
# kafka_versions: ["__string"],
# name: "__string", # required
# server_properties: "data", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_time #=> Time
# resp.latest_revision.creation_time #=> Time
# resp.latest_revision.description #=> String
# resp.latest_revision.revision #=> Integer
# resp.name #=> String
# resp.state #=> String, one of "ACTIVE", "DELETING", "DELETE_FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/CreateConfiguration AWS API Documentation
#
# @overload create_configuration(params = {})
# @param [Hash] params ({})
def create_configuration(params = {}, options = {})
req = build_request(:create_configuration, params)
req.send_request(options)
end
# Deletes the MSK cluster specified by the Amazon Resource Name (ARN) in
# the request.
#
# @option params [required, String] :cluster_arn
#
# @option params [String] :current_version
#
# @return [Types::DeleteClusterResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteClusterResponse#cluster_arn #cluster_arn} => String
# * {Types::DeleteClusterResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_cluster({
# cluster_arn: "__string", # required
# current_version: "__string",
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.state #=> String, one of "ACTIVE", "CREATING", "UPDATING", "DELETING", "FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/DeleteCluster AWS API Documentation
#
# @overload delete_cluster(params = {})
# @param [Hash] params ({})
def delete_cluster(params = {}, options = {})
req = build_request(:delete_cluster, params)
req.send_request(options)
end
# Deletes the specified MSK configuration. The configuration must be in
# the ACTIVE or DELETE\_FAILED state.
#
# @option params [required, String] :arn
# The Amazon Resource Name (ARN) of the configuration.
#
# @return [Types::DeleteConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteConfigurationResponse#arn #arn} => String
# * {Types::DeleteConfigurationResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_configuration({
# arn: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.state #=> String, one of "ACTIVE", "DELETING", "DELETE_FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/DeleteConfiguration AWS API Documentation
#
# @overload delete_configuration(params = {})
# @param [Hash] params ({})
def delete_configuration(params = {}, options = {})
req = build_request(:delete_configuration, params)
req.send_request(options)
end
# Returns a description of the MSK cluster whose Amazon Resource Name
# (ARN) is specified in the request.
#
# @option params [required, String] :cluster_arn
#
# @return [Types::DescribeClusterResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeClusterResponse#cluster_info #cluster_info} => Types::ClusterInfo
#
# @example Request syntax with placeholder values
#
# resp = client.describe_cluster({
# cluster_arn: "__string", # required
# })
#
# @example Response structure
#
# resp.cluster_info.active_operation_arn #=> String
# resp.cluster_info.broker_node_group_info.broker_az_distribution #=> String, one of "DEFAULT"
# resp.cluster_info.broker_node_group_info.client_subnets #=> Array
# resp.cluster_info.broker_node_group_info.client_subnets[0] #=> String
# resp.cluster_info.broker_node_group_info.instance_type #=> String
# resp.cluster_info.broker_node_group_info.security_groups #=> Array
# resp.cluster_info.broker_node_group_info.security_groups[0] #=> String
# resp.cluster_info.broker_node_group_info.storage_info.ebs_storage_info.volume_size #=> Integer
# resp.cluster_info.client_authentication.tls.certificate_authority_arn_list #=> Array
# resp.cluster_info.client_authentication.tls.certificate_authority_arn_list[0] #=> String
# resp.cluster_info.cluster_arn #=> String
# resp.cluster_info.cluster_name #=> String
# resp.cluster_info.creation_time #=> Time
# resp.cluster_info.current_broker_software_info.configuration_arn #=> String
# resp.cluster_info.current_broker_software_info.configuration_revision #=> Integer
# resp.cluster_info.current_broker_software_info.kafka_version #=> String
# resp.cluster_info.logging_info.broker_logs.cloud_watch_logs.enabled #=> Boolean
# resp.cluster_info.logging_info.broker_logs.cloud_watch_logs.log_group #=> String
# resp.cluster_info.logging_info.broker_logs.firehose.delivery_stream #=> String
# resp.cluster_info.logging_info.broker_logs.firehose.enabled #=> Boolean
# resp.cluster_info.logging_info.broker_logs.s3.bucket #=> String
# resp.cluster_info.logging_info.broker_logs.s3.enabled #=> Boolean
# resp.cluster_info.logging_info.broker_logs.s3.prefix #=> String
# resp.cluster_info.current_version #=> String
# resp.cluster_info.encryption_info.encryption_at_rest.data_volume_kms_key_id #=> String
# resp.cluster_info.encryption_info.encryption_in_transit.client_broker #=> String, one of "TLS", "TLS_PLAINTEXT", "PLAINTEXT"
# resp.cluster_info.encryption_info.encryption_in_transit.in_cluster #=> Boolean
# resp.cluster_info.enhanced_monitoring #=> String, one of "DEFAULT", "PER_BROKER", "PER_TOPIC_PER_BROKER"
# resp.cluster_info.number_of_broker_nodes #=> Integer
# resp.cluster_info.open_monitoring.prometheus.jmx_exporter.enabled_in_broker #=> Boolean
# resp.cluster_info.open_monitoring.prometheus.node_exporter.enabled_in_broker #=> Boolean
# resp.cluster_info.state #=> String, one of "ACTIVE", "CREATING", "UPDATING", "DELETING", "FAILED"
# resp.cluster_info.tags #=> Hash
# resp.cluster_info.tags["__string"] #=> String
# resp.cluster_info.zookeeper_connect_string #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/DescribeCluster AWS API Documentation
#
# @overload describe_cluster(params = {})
# @param [Hash] params ({})
def describe_cluster(params = {}, options = {})
  # Build the DescribeCluster request and dispatch it in one step.
  build_request(:describe_cluster, params).send_request(options)
end
# Returns a description of the cluster operation specified by the ARN.
#
# @option params [required, String] :cluster_operation_arn
#
# @return [Types::DescribeClusterOperationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeClusterOperationResponse#cluster_operation_info #cluster_operation_info} => Types::ClusterOperationInfo
#
# @example Request syntax with placeholder values
#
# resp = client.describe_cluster_operation({
# cluster_operation_arn: "__string", # required
# })
#
# @example Response structure
#
# resp.cluster_operation_info.client_request_id #=> String
# resp.cluster_operation_info.cluster_arn #=> String
# resp.cluster_operation_info.creation_time #=> Time
# resp.cluster_operation_info.end_time #=> Time
# resp.cluster_operation_info.error_info.error_code #=> String
# resp.cluster_operation_info.error_info.error_string #=> String
# resp.cluster_operation_info.operation_steps #=> Array
# resp.cluster_operation_info.operation_steps[0].step_info.step_status #=> String
# resp.cluster_operation_info.operation_steps[0].step_name #=> String
# resp.cluster_operation_info.operation_arn #=> String
# resp.cluster_operation_info.operation_state #=> String
# resp.cluster_operation_info.operation_type #=> String
# resp.cluster_operation_info.source_cluster_info.broker_ebs_volume_info #=> Array
# resp.cluster_operation_info.source_cluster_info.broker_ebs_volume_info[0].kafka_broker_node_id #=> String
# resp.cluster_operation_info.source_cluster_info.broker_ebs_volume_info[0].volume_size_gb #=> Integer
# resp.cluster_operation_info.source_cluster_info.configuration_info.arn #=> String
# resp.cluster_operation_info.source_cluster_info.configuration_info.revision #=> Integer
# resp.cluster_operation_info.source_cluster_info.number_of_broker_nodes #=> Integer
# resp.cluster_operation_info.source_cluster_info.open_monitoring.prometheus.jmx_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info.source_cluster_info.open_monitoring.prometheus.node_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info.source_cluster_info.enhanced_monitoring #=> String, one of "DEFAULT", "PER_BROKER", "PER_TOPIC_PER_BROKER"
# resp.cluster_operation_info.source_cluster_info.kafka_version #=> String
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.cloud_watch_logs.enabled #=> Boolean
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.cloud_watch_logs.log_group #=> String
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.firehose.delivery_stream #=> String
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.firehose.enabled #=> Boolean
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.s3.bucket #=> String
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.s3.enabled #=> Boolean
# resp.cluster_operation_info.source_cluster_info.logging_info.broker_logs.s3.prefix #=> String
# resp.cluster_operation_info.target_cluster_info.broker_ebs_volume_info #=> Array
# resp.cluster_operation_info.target_cluster_info.broker_ebs_volume_info[0].kafka_broker_node_id #=> String
# resp.cluster_operation_info.target_cluster_info.broker_ebs_volume_info[0].volume_size_gb #=> Integer
# resp.cluster_operation_info.target_cluster_info.configuration_info.arn #=> String
# resp.cluster_operation_info.target_cluster_info.configuration_info.revision #=> Integer
# resp.cluster_operation_info.target_cluster_info.number_of_broker_nodes #=> Integer
# resp.cluster_operation_info.target_cluster_info.open_monitoring.prometheus.jmx_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info.target_cluster_info.open_monitoring.prometheus.node_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info.target_cluster_info.enhanced_monitoring #=> String, one of "DEFAULT", "PER_BROKER", "PER_TOPIC_PER_BROKER"
# resp.cluster_operation_info.target_cluster_info.kafka_version #=> String
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.cloud_watch_logs.enabled #=> Boolean
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.cloud_watch_logs.log_group #=> String
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.firehose.delivery_stream #=> String
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.firehose.enabled #=> Boolean
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.s3.bucket #=> String
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.s3.enabled #=> Boolean
# resp.cluster_operation_info.target_cluster_info.logging_info.broker_logs.s3.prefix #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/DescribeClusterOperation AWS API Documentation
#
# @overload describe_cluster_operation(params = {})
# @param [Hash] params ({})
def describe_cluster_operation(params = {}, options = {})
  # Build the DescribeClusterOperation request and dispatch it in one step.
  build_request(:describe_cluster_operation, params).send_request(options)
end
# Returns a description of this MSK configuration.
#
# @option params [required, String] :arn
#
# @return [Types::DescribeConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeConfigurationResponse#arn #arn} => String
# * {Types::DescribeConfigurationResponse#creation_time #creation_time} => Time
# * {Types::DescribeConfigurationResponse#description #description} => String
# * {Types::DescribeConfigurationResponse#kafka_versions #kafka_versions} => Array<String>
# * {Types::DescribeConfigurationResponse#latest_revision #latest_revision} => Types::ConfigurationRevision
# * {Types::DescribeConfigurationResponse#name #name} => String
# * {Types::DescribeConfigurationResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_configuration({
# arn: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_time #=> Time
# resp.description #=> String
# resp.kafka_versions #=> Array
# resp.kafka_versions[0] #=> String
# resp.latest_revision.creation_time #=> Time
# resp.latest_revision.description #=> String
# resp.latest_revision.revision #=> Integer
# resp.name #=> String
# resp.state #=> String, one of "ACTIVE", "DELETING", "DELETE_FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/DescribeConfiguration AWS API Documentation
#
# @overload describe_configuration(params = {})
# @param [Hash] params ({})
def describe_configuration(params = {}, options = {})
  # Build the DescribeConfiguration request and dispatch it in one step.
  build_request(:describe_configuration, params).send_request(options)
end
# Returns a description of this revision of the configuration.
#
# @option params [required, String] :arn
#
# @option params [required, Integer] :revision
#
# @return [Types::DescribeConfigurationRevisionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeConfigurationRevisionResponse#arn #arn} => String
# * {Types::DescribeConfigurationRevisionResponse#creation_time #creation_time} => Time
# * {Types::DescribeConfigurationRevisionResponse#description #description} => String
# * {Types::DescribeConfigurationRevisionResponse#revision #revision} => Integer
# * {Types::DescribeConfigurationRevisionResponse#server_properties #server_properties} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_configuration_revision({
# arn: "__string", # required
# revision: 1, # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_time #=> Time
# resp.description #=> String
# resp.revision #=> Integer
# resp.server_properties #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/DescribeConfigurationRevision AWS API Documentation
#
# @overload describe_configuration_revision(params = {})
# @param [Hash] params ({})
def describe_configuration_revision(params = {}, options = {})
  # Build the DescribeConfigurationRevision request and dispatch it in one step.
  build_request(:describe_configuration_revision, params).send_request(options)
end
# Returns a list of brokers that a client application can use to
# bootstrap.
#
# @option params [required, String] :cluster_arn
#
# @return [Types::GetBootstrapBrokersResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetBootstrapBrokersResponse#bootstrap_broker_string #bootstrap_broker_string} => String
# * {Types::GetBootstrapBrokersResponse#bootstrap_broker_string_tls #bootstrap_broker_string_tls} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_bootstrap_brokers({
# cluster_arn: "__string", # required
# })
#
# @example Response structure
#
# resp.bootstrap_broker_string #=> String
# resp.bootstrap_broker_string_tls #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/GetBootstrapBrokers AWS API Documentation
#
# @overload get_bootstrap_brokers(params = {})
# @param [Hash] params ({})
def get_bootstrap_brokers(params = {}, options = {})
  # Build the GetBootstrapBrokers request and dispatch it in one step.
  build_request(:get_bootstrap_brokers, params).send_request(options)
end
# Gets the Apache Kafka versions to which you can update the MSK
# cluster.
#
# @option params [String] :cluster_arn
#
# @return [Types::GetCompatibleKafkaVersionsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCompatibleKafkaVersionsResponse#compatible_kafka_versions #compatible_kafka_versions} => Array<Types::CompatibleKafkaVersion>
#
# @example Request syntax with placeholder values
#
# resp = client.get_compatible_kafka_versions({
# cluster_arn: "__string",
# })
#
# @example Response structure
#
# resp.compatible_kafka_versions #=> Array
# resp.compatible_kafka_versions[0].source_version #=> String
# resp.compatible_kafka_versions[0].target_versions #=> Array
# resp.compatible_kafka_versions[0].target_versions[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/GetCompatibleKafkaVersions AWS API Documentation
#
# @overload get_compatible_kafka_versions(params = {})
# @param [Hash] params ({})
def get_compatible_kafka_versions(params = {}, options = {})
  # Build the GetCompatibleKafkaVersions request and dispatch it in one step.
  build_request(:get_compatible_kafka_versions, params).send_request(options)
end
# Returns a list of all the operations that have been performed on the
# specified MSK cluster.
#
# @option params [required, String] :cluster_arn
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListClusterOperationsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListClusterOperationsResponse#cluster_operation_info_list #cluster_operation_info_list} => Array<Types::ClusterOperationInfo>
# * {Types::ListClusterOperationsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_cluster_operations({
# cluster_arn: "__string", # required
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.cluster_operation_info_list #=> Array
# resp.cluster_operation_info_list[0].client_request_id #=> String
# resp.cluster_operation_info_list[0].cluster_arn #=> String
# resp.cluster_operation_info_list[0].creation_time #=> Time
# resp.cluster_operation_info_list[0].end_time #=> Time
# resp.cluster_operation_info_list[0].error_info.error_code #=> String
# resp.cluster_operation_info_list[0].error_info.error_string #=> String
# resp.cluster_operation_info_list[0].operation_steps #=> Array
# resp.cluster_operation_info_list[0].operation_steps[0].step_info.step_status #=> String
# resp.cluster_operation_info_list[0].operation_steps[0].step_name #=> String
# resp.cluster_operation_info_list[0].operation_arn #=> String
# resp.cluster_operation_info_list[0].operation_state #=> String
# resp.cluster_operation_info_list[0].operation_type #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.broker_ebs_volume_info #=> Array
# resp.cluster_operation_info_list[0].source_cluster_info.broker_ebs_volume_info[0].kafka_broker_node_id #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.broker_ebs_volume_info[0].volume_size_gb #=> Integer
# resp.cluster_operation_info_list[0].source_cluster_info.configuration_info.arn #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.configuration_info.revision #=> Integer
# resp.cluster_operation_info_list[0].source_cluster_info.number_of_broker_nodes #=> Integer
# resp.cluster_operation_info_list[0].source_cluster_info.open_monitoring.prometheus.jmx_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info_list[0].source_cluster_info.open_monitoring.prometheus.node_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info_list[0].source_cluster_info.enhanced_monitoring #=> String, one of "DEFAULT", "PER_BROKER", "PER_TOPIC_PER_BROKER"
# resp.cluster_operation_info_list[0].source_cluster_info.kafka_version #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.cloud_watch_logs.enabled #=> Boolean
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.cloud_watch_logs.log_group #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.firehose.delivery_stream #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.firehose.enabled #=> Boolean
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.s3.bucket #=> String
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.s3.enabled #=> Boolean
# resp.cluster_operation_info_list[0].source_cluster_info.logging_info.broker_logs.s3.prefix #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.broker_ebs_volume_info #=> Array
# resp.cluster_operation_info_list[0].target_cluster_info.broker_ebs_volume_info[0].kafka_broker_node_id #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.broker_ebs_volume_info[0].volume_size_gb #=> Integer
# resp.cluster_operation_info_list[0].target_cluster_info.configuration_info.arn #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.configuration_info.revision #=> Integer
# resp.cluster_operation_info_list[0].target_cluster_info.number_of_broker_nodes #=> Integer
# resp.cluster_operation_info_list[0].target_cluster_info.open_monitoring.prometheus.jmx_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info_list[0].target_cluster_info.open_monitoring.prometheus.node_exporter.enabled_in_broker #=> Boolean
# resp.cluster_operation_info_list[0].target_cluster_info.enhanced_monitoring #=> String, one of "DEFAULT", "PER_BROKER", "PER_TOPIC_PER_BROKER"
# resp.cluster_operation_info_list[0].target_cluster_info.kafka_version #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.cloud_watch_logs.enabled #=> Boolean
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.cloud_watch_logs.log_group #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.firehose.delivery_stream #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.firehose.enabled #=> Boolean
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.s3.bucket #=> String
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.s3.enabled #=> Boolean
# resp.cluster_operation_info_list[0].target_cluster_info.logging_info.broker_logs.s3.prefix #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListClusterOperations AWS API Documentation
#
# @overload list_cluster_operations(params = {})
# @param [Hash] params ({})
def list_cluster_operations(params = {}, options = {})
  # Build the ListClusterOperations request and dispatch it in one step.
  build_request(:list_cluster_operations, params).send_request(options)
end
# Returns a list of all the MSK clusters in the current Region.
#
# @option params [String] :cluster_name_filter
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListClustersResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListClustersResponse#cluster_info_list #cluster_info_list} => Array<Types::ClusterInfo>
# * {Types::ListClustersResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_clusters({
# cluster_name_filter: "__string",
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.cluster_info_list #=> Array
# resp.cluster_info_list[0].active_operation_arn #=> String
# resp.cluster_info_list[0].broker_node_group_info.broker_az_distribution #=> String, one of "DEFAULT"
# resp.cluster_info_list[0].broker_node_group_info.client_subnets #=> Array
# resp.cluster_info_list[0].broker_node_group_info.client_subnets[0] #=> String
# resp.cluster_info_list[0].broker_node_group_info.instance_type #=> String
# resp.cluster_info_list[0].broker_node_group_info.security_groups #=> Array
# resp.cluster_info_list[0].broker_node_group_info.security_groups[0] #=> String
# resp.cluster_info_list[0].broker_node_group_info.storage_info.ebs_storage_info.volume_size #=> Integer
# resp.cluster_info_list[0].client_authentication.tls.certificate_authority_arn_list #=> Array
# resp.cluster_info_list[0].client_authentication.tls.certificate_authority_arn_list[0] #=> String
# resp.cluster_info_list[0].cluster_arn #=> String
# resp.cluster_info_list[0].cluster_name #=> String
# resp.cluster_info_list[0].creation_time #=> Time
# resp.cluster_info_list[0].current_broker_software_info.configuration_arn #=> String
# resp.cluster_info_list[0].current_broker_software_info.configuration_revision #=> Integer
# resp.cluster_info_list[0].current_broker_software_info.kafka_version #=> String
# resp.cluster_info_list[0].logging_info.broker_logs.cloud_watch_logs.enabled #=> Boolean
# resp.cluster_info_list[0].logging_info.broker_logs.cloud_watch_logs.log_group #=> String
# resp.cluster_info_list[0].logging_info.broker_logs.firehose.delivery_stream #=> String
# resp.cluster_info_list[0].logging_info.broker_logs.firehose.enabled #=> Boolean
# resp.cluster_info_list[0].logging_info.broker_logs.s3.bucket #=> String
# resp.cluster_info_list[0].logging_info.broker_logs.s3.enabled #=> Boolean
# resp.cluster_info_list[0].logging_info.broker_logs.s3.prefix #=> String
# resp.cluster_info_list[0].current_version #=> String
# resp.cluster_info_list[0].encryption_info.encryption_at_rest.data_volume_kms_key_id #=> String
# resp.cluster_info_list[0].encryption_info.encryption_in_transit.client_broker #=> String, one of "TLS", "TLS_PLAINTEXT", "PLAINTEXT"
# resp.cluster_info_list[0].encryption_info.encryption_in_transit.in_cluster #=> Boolean
# resp.cluster_info_list[0].enhanced_monitoring #=> String, one of "DEFAULT", "PER_BROKER", "PER_TOPIC_PER_BROKER"
# resp.cluster_info_list[0].number_of_broker_nodes #=> Integer
# resp.cluster_info_list[0].open_monitoring.prometheus.jmx_exporter.enabled_in_broker #=> Boolean
# resp.cluster_info_list[0].open_monitoring.prometheus.node_exporter.enabled_in_broker #=> Boolean
# resp.cluster_info_list[0].state #=> String, one of "ACTIVE", "CREATING", "UPDATING", "DELETING", "FAILED"
# resp.cluster_info_list[0].tags #=> Hash
# resp.cluster_info_list[0].tags["__string"] #=> String
# resp.cluster_info_list[0].zookeeper_connect_string #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListClusters AWS API Documentation
#
# @overload list_clusters(params = {})
# @param [Hash] params ({})
def list_clusters(params = {}, options = {})
  # Build the ListClusters request and dispatch it in one step.
  build_request(:list_clusters, params).send_request(options)
end
# Returns a list of all the revisions of an MSK configuration.
#
# @option params [required, String] :arn
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListConfigurationRevisionsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListConfigurationRevisionsResponse#next_token #next_token} => String
# * {Types::ListConfigurationRevisionsResponse#revisions #revisions} => Array<Types::ConfigurationRevision>
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_configuration_revisions({
# arn: "__string", # required
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.revisions #=> Array
# resp.revisions[0].creation_time #=> Time
# resp.revisions[0].description #=> String
# resp.revisions[0].revision #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListConfigurationRevisions AWS API Documentation
#
# @overload list_configuration_revisions(params = {})
# @param [Hash] params ({})
def list_configuration_revisions(params = {}, options = {})
  # Build the ListConfigurationRevisions request and dispatch it in one step.
  build_request(:list_configuration_revisions, params).send_request(options)
end
# Returns a list of all the MSK configurations in this Region.
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListConfigurationsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListConfigurationsResponse#configurations #configurations} => Array<Types::Configuration>
# * {Types::ListConfigurationsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_configurations({
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.configurations #=> Array
# resp.configurations[0].arn #=> String
# resp.configurations[0].creation_time #=> Time
# resp.configurations[0].description #=> String
# resp.configurations[0].kafka_versions #=> Array
# resp.configurations[0].kafka_versions[0] #=> String
# resp.configurations[0].latest_revision.creation_time #=> Time
# resp.configurations[0].latest_revision.description #=> String
# resp.configurations[0].latest_revision.revision #=> Integer
# resp.configurations[0].name #=> String
# resp.configurations[0].state #=> String, one of "ACTIVE", "DELETING", "DELETE_FAILED"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListConfigurations AWS API Documentation
#
# @overload list_configurations(params = {})
# @param [Hash] params ({})
def list_configurations(params = {}, options = {})
  # Build the ListConfigurations request and dispatch it in one step.
  build_request(:list_configurations, params).send_request(options)
end
# Returns a list of Kafka versions.
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListKafkaVersionsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListKafkaVersionsResponse#kafka_versions #kafka_versions} => Array<Types::KafkaVersion>
# * {Types::ListKafkaVersionsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_kafka_versions({
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.kafka_versions #=> Array
# resp.kafka_versions[0].version #=> String
# resp.kafka_versions[0].status #=> String, one of "ACTIVE", "DEPRECATED"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListKafkaVersions AWS API Documentation
#
# @overload list_kafka_versions(params = {})
# @param [Hash] params ({})
def list_kafka_versions(params = {}, options = {})
  # Build the ListKafkaVersions request and dispatch it in one step.
  build_request(:list_kafka_versions, params).send_request(options)
end
# Returns a list of the broker nodes in the cluster.
#
# @option params [required, String] :cluster_arn
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListNodesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListNodesResponse#next_token #next_token} => String
# * {Types::ListNodesResponse#node_info_list #node_info_list} => Array<Types::NodeInfo>
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_nodes({
# cluster_arn: "__string", # required
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.node_info_list #=> Array
# resp.node_info_list[0].added_to_cluster_time #=> String
# resp.node_info_list[0].broker_node_info.attached_eni_id #=> String
# resp.node_info_list[0].broker_node_info.broker_id #=> Float
# resp.node_info_list[0].broker_node_info.client_subnet #=> String
# resp.node_info_list[0].broker_node_info.client_vpc_ip_address #=> String
# resp.node_info_list[0].broker_node_info.current_broker_software_info.configuration_arn #=> String
# resp.node_info_list[0].broker_node_info.current_broker_software_info.configuration_revision #=> Integer
# resp.node_info_list[0].broker_node_info.current_broker_software_info.kafka_version #=> String
# resp.node_info_list[0].broker_node_info.endpoints #=> Array
# resp.node_info_list[0].broker_node_info.endpoints[0] #=> String
# resp.node_info_list[0].instance_type #=> String
# resp.node_info_list[0].node_arn #=> String
# resp.node_info_list[0].node_type #=> String, one of "BROKER"
# resp.node_info_list[0].zookeeper_node_info.attached_eni_id #=> String
# resp.node_info_list[0].zookeeper_node_info.client_vpc_ip_address #=> String
# resp.node_info_list[0].zookeeper_node_info.endpoints #=> Array
# resp.node_info_list[0].zookeeper_node_info.endpoints[0] #=> String
# resp.node_info_list[0].zookeeper_node_info.zookeeper_id #=> Float
# resp.node_info_list[0].zookeeper_node_info.zookeeper_version #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListNodes AWS API Documentation
#
# @overload list_nodes(params = {})
# @param [Hash] params ({})
def list_nodes(params = {}, options = {})
  # Build the ListNodes request and dispatch it in one step.
  build_request(:list_nodes, params).send_request(options)
end
# Returns a list of the tags associated with the specified resource.
#
# @option params [required, String] :resource_arn
#
# @return [Types::ListTagsForResourceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTagsForResourceResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.list_tags_for_resource({
# resource_arn: "__string", # required
# })
#
# @example Response structure
#
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/ListTagsForResource AWS API Documentation
#
# @overload list_tags_for_resource(params = {})
# @param [Hash] params ({})
def list_tags_for_resource(params = {}, options = {})
  # Build the ListTagsForResource request and dispatch it in one step.
  build_request(:list_tags_for_resource, params).send_request(options)
end
# Executes a reboot on the specified brokers.
#
# @option params [required, Array<String>] :broker_ids
# The list of broker ids to be rebooted.
#
# @option params [required, String] :cluster_arn
#
# @return [Types::RebootBrokerResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::RebootBrokerResponse#cluster_arn #cluster_arn} => String
# * {Types::RebootBrokerResponse#cluster_operation_arn #cluster_operation_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.reboot_broker({
# broker_ids: ["__string"], # required
# cluster_arn: "__string", # required
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_operation_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/RebootBroker AWS API Documentation
#
# @overload reboot_broker(params = {})
# @param [Hash] params ({})
def reboot_broker(params = {}, options = {})
  # Build the RebootBroker request and dispatch it in one step.
  build_request(:reboot_broker, params).send_request(options)
end
# Adds tags to the specified MSK resource.
#
# @option params [required, String] :resource_arn
#
# @option params [required, Hash<String,String>] :tags
# The key-value pair for the resource tag.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.tag_resource({
# resource_arn: "__string", # required
# tags: { # required
# "__string" => "__string",
# },
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/TagResource AWS API Documentation
#
# @overload tag_resource(params = {})
# @param [Hash] params ({})
def tag_resource(params = {}, options = {})
  # Build the TagResource request and dispatch it in one step.
  build_request(:tag_resource, params).send_request(options)
end
# Removes the tags associated with the keys that are provided in the
# query.
#
# @option params [required, String] :resource_arn
#
# @option params [required, Array<String>] :tag_keys
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.untag_resource({
# resource_arn: "__string", # required
# tag_keys: ["__string"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UntagResource AWS API Documentation
#
# @overload untag_resource(params = {})
# @param [Hash] params ({})
def untag_resource(params = {}, options = {})
  # Build the UntagResource request and dispatch it in one step.
  build_request(:untag_resource, params).send_request(options)
end
# Updates the number of broker nodes in the cluster. You can use this
# operation to increase the number of brokers in an existing cluster.
# You can't decrease the number of brokers.
#
# @option params [required, String] :cluster_arn
#
# @option params [required, String] :current_version
# The current version of the cluster.
#
# @option params [required, Integer] :target_number_of_broker_nodes
# The number of broker nodes that you want the cluster to have after
# this operation completes successfully.
#
# @return [Types::UpdateBrokerCountResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateBrokerCountResponse#cluster_arn #cluster_arn} => String
# * {Types::UpdateBrokerCountResponse#cluster_operation_arn #cluster_operation_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_broker_count({
# cluster_arn: "__string", # required
# current_version: "__string", # required
# target_number_of_broker_nodes: 1, # required
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_operation_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UpdateBrokerCount AWS API Documentation
#
# @overload update_broker_count(params = {})
# @param [Hash] params ({})
def update_broker_count(params = {}, options = {})
req = build_request(:update_broker_count, params)
req.send_request(options)
end
# Updates the EBS storage associated with MSK brokers.
#
# @option params [required, String] :cluster_arn
#
# @option params [required, String] :current_version
# The version of cluster to update from. A successful operation will
# then generate a new version.
#
# @option params [required, Array<Types::BrokerEBSVolumeInfo>] :target_broker_ebs_volume_info
# Describes the target volume size and the ID of the broker to apply the
# update to.
#
# The value you specify for Target-Volume-in-GiB must be a whole number
# that is greater than 100 GiB.
#
# The storage per broker after the update operation can't exceed 16384
# GiB.
#
# @return [Types::UpdateBrokerStorageResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateBrokerStorageResponse#cluster_arn #cluster_arn} => String
# * {Types::UpdateBrokerStorageResponse#cluster_operation_arn #cluster_operation_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_broker_storage({
# cluster_arn: "__string", # required
# current_version: "__string", # required
# target_broker_ebs_volume_info: [ # required
# {
# kafka_broker_node_id: "__string", # required
# volume_size_gb: 1, # required
# },
# ],
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_operation_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UpdateBrokerStorage AWS API Documentation
#
# @overload update_broker_storage(params = {})
# @param [Hash] params ({})
def update_broker_storage(params = {}, options = {})
req = build_request(:update_broker_storage, params)
req.send_request(options)
end
# Updates an existing MSK configuration. The configuration must be in
# the Active state.
#
# @option params [required, String] :arn
# The Amazon Resource Name (ARN) of the configuration.
#
# @option params [String] :description
# The description of the configuration.
#
# @option params [required, String, StringIO, File] :server_properties
#
# @return [Types::UpdateConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateConfigurationResponse#arn #arn} => String
# * {Types::UpdateConfigurationResponse#latest_revision #latest_revision} => Types::ConfigurationRevision
#
# @example Request syntax with placeholder values
#
# resp = client.update_configuration({
# arn: "__string", # required
# description: "__string",
# server_properties: "data", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.latest_revision.creation_time #=> Time
# resp.latest_revision.description #=> String
# resp.latest_revision.revision #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UpdateConfiguration AWS API Documentation
#
# @overload update_configuration(params = {})
# @param [Hash] params ({})
def update_configuration(params = {}, options = {})
req = build_request(:update_configuration, params)
req.send_request(options)
end
# Updates the cluster with the configuration that is specified in the
# request body.
#
# @option params [required, String] :cluster_arn
#
# @option params [required, Types::ConfigurationInfo] :configuration_info
# Represents the configuration that you want MSK to use for the cluster.
#
# @option params [required, String] :current_version
# The version of the cluster that you want to update.
#
# @return [Types::UpdateClusterConfigurationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateClusterConfigurationResponse#cluster_arn #cluster_arn} => String
# * {Types::UpdateClusterConfigurationResponse#cluster_operation_arn #cluster_operation_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_cluster_configuration({
# cluster_arn: "__string", # required
# configuration_info: { # required
# arn: "__string", # required
# revision: 1, # required
# },
# current_version: "__string", # required
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_operation_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UpdateClusterConfiguration AWS API Documentation
#
# @overload update_cluster_configuration(params = {})
# @param [Hash] params ({})
def update_cluster_configuration(params = {}, options = {})
req = build_request(:update_cluster_configuration, params)
req.send_request(options)
end
# Updates the Apache Kafka version for the cluster.
#
# @option params [required, String] :cluster_arn
#
# @option params [Types::ConfigurationInfo] :configuration_info
# Specifies the configuration to use for the brokers.
#
# @option params [required, String] :current_version
# Current cluster version.
#
# @option params [required, String] :target_kafka_version
# Target Kafka version.
#
# @return [Types::UpdateClusterKafkaVersionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateClusterKafkaVersionResponse#cluster_arn #cluster_arn} => String
# * {Types::UpdateClusterKafkaVersionResponse#cluster_operation_arn #cluster_operation_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_cluster_kafka_version({
# cluster_arn: "__string", # required
# configuration_info: {
# arn: "__string", # required
# revision: 1, # required
# },
# current_version: "__string", # required
# target_kafka_version: "__string", # required
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_operation_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UpdateClusterKafkaVersion AWS API Documentation
#
# @overload update_cluster_kafka_version(params = {})
# @param [Hash] params ({})
def update_cluster_kafka_version(params = {}, options = {})
req = build_request(:update_cluster_kafka_version, params)
req.send_request(options)
end
# Updates the monitoring settings for the cluster. You can use this
# operation to specify which Apache Kafka metrics you want Amazon MSK to
# send to Amazon CloudWatch. You can also specify settings for open
# monitoring with Prometheus.
#
# @option params [required, String] :cluster_arn
#
# @option params [required, String] :current_version
# The version of cluster to update from. A successful operation will
# then generate a new version.
#
# @option params [String] :enhanced_monitoring
# Specifies which Apache Kafka metrics Amazon MSK gathers and sends to
# Amazon CloudWatch for this cluster.
#
# @option params [Types::OpenMonitoringInfo] :open_monitoring
# The settings for open monitoring.
#
# @option params [Types::LoggingInfo] :logging_info
# LoggingInfo details.
#
# @return [Types::UpdateMonitoringResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateMonitoringResponse#cluster_arn #cluster_arn} => String
# * {Types::UpdateMonitoringResponse#cluster_operation_arn #cluster_operation_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_monitoring({
# cluster_arn: "__string", # required
# current_version: "__string", # required
# enhanced_monitoring: "DEFAULT", # accepts DEFAULT, PER_BROKER, PER_TOPIC_PER_BROKER
# open_monitoring: {
# prometheus: { # required
# jmx_exporter: {
# enabled_in_broker: false, # required
# },
# node_exporter: {
# enabled_in_broker: false, # required
# },
# },
# },
# logging_info: {
# broker_logs: { # required
# cloud_watch_logs: {
# enabled: false, # required
# log_group: "__string",
# },
# firehose: {
# delivery_stream: "__string",
# enabled: false, # required
# },
# s3: {
# bucket: "__string",
# enabled: false, # required
# prefix: "__string",
# },
# },
# },
# })
#
# @example Response structure
#
# resp.cluster_arn #=> String
# resp.cluster_operation_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/UpdateMonitoring AWS API Documentation
#
# @overload update_monitoring(params = {})
# @param [Hash] params ({})
def update_monitoring(params = {}, options = {})
req = build_request(:update_monitoring, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-kafka'
context[:gem_version] = '1.26.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 46.165325 | 165 | 0.682096 |
e92a590483c172de4882c5aa6cd6ce099ac3b68f
| 2,183 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require File.expand_path("../../spec_helper", __FILE__)
module Selenium
  module WebDriver
    module Android
      # Unit specs for the Android WebDriver bridge: server URL selection,
      # HTTP client defaulting, and forwarding of desired capabilities.
      describe Android do
        let(:default_url) { URI.parse(Android::Bridge::DEFAULT_URL) }
        let(:resp) { {"sessionId" => "foo", "value" => Remote::Capabilities.android.as_json }}
        let(:http) { double(Remote::Http::Default, :call => resp).as_null_object }

        it "uses the default Android driver URL" do
          http.should_receive(:server_url=).with default_url
          Bridge.new(:http_client => http)
        end

        it "uses the user-provided URL" do
          http.should_receive(:server_url=).with URI.parse("http://example.com")
          Bridge.new(:http_client => http, :url => "http://example.com")
        end

        it "uses the default HTTP client when none is specified" do
          Remote::Http::Default.should_receive(:new).and_return http
          Bridge.new
        end

        it 'takes desired capabilities' do
          custom_caps = Remote::Capabilities.new
          custom_caps['foo'] = 'bar'

          expect(http).to receive(:call) do |_, _, payload|
            payload[:desiredCapabilities]['foo'].should == 'bar'
            resp
          end

          Bridge.new(:http_client => http, :desired_capabilities => custom_caps)
        end
      end

    end # Android
  end # WebDriver
end # Selenium
| 35.786885 | 101 | 0.662849 |
6ae3f65e8e7effd89d2cef99a91fde972c0a9521
| 347 |
# Middleware pipeline for processing enrollment events: events are reduced,
# enriched, persisted, and finally published, in that order.
class EnrollmentEventProcessingClient < Middleware::Builder
  def initialize
    super do |b|
      b.use Handlers::EnrollmentEventReduceHandler
      b.use Handlers::EnrollmentEventEnrichHandler
      b.use Handlers::EnrollmentEventPersistHandler
      b.use Handlers::EnrollmentEventPublishHandler
    end
  end

  # NOTE(review): this override is a pure pass-through; it appears to exist
  # only to (re)expose Middleware::Builder#stack publicly — confirm the
  # superclass method's visibility before removing it.
  def stack
    super
  end
end
| 23.133333 | 59 | 0.752161 |
9184e595aed8cfd92764abb636f3ecf00cf066f0
| 408 |
# Homebrew cask for Chatology (Flexibits' iMessage search/archive app).
cask 'chatology' do
  version '1.1'
  sha256 'cf5e9bf958b9cb62e3e194578a43d8c2d9c7bcb3138f0205c3b421f111566c73'

  # Version is interpolated, so bumps only require updating the stanzas above.
  url "http://cdn.flexibits.com/Chatology_#{version}.zip"
  appcast 'https://flexibits.com/chatology/appcast.php',
          checkpoint: '8de7c950c0f46d3649bd3b0b78c1e787fa302e048da00b0dc384f9cb86821357'
  name 'Chatology'
  homepage 'https://flexibits.com/chatology'

  app 'Chatology.app'
end
| 31.384615 | 88 | 0.779412 |
33fe0910c16e0c8ac569eda99694bce3427cb49e
| 480 |
# Homebrew cask for QGroundControl (MAVLink ground control station).
cask 'qgroundcontrol' do
  version '3.5.0'
  sha256 '0c6eb7316e895bef1b56981156ce154f6809fcd1094b8ba163ab9024612ea56d'

  # github.com/mavlink/qgroundcontrol was verified as official when first introduced to the cask
  url "https://github.com/mavlink/qgroundcontrol/releases/download/v#{version}/QGroundControl.dmg"
  appcast 'https://github.com/mavlink/qgroundcontrol/releases.atom'
  name 'QGroundControl'
  homepage 'http://qgroundcontrol.com/'

  app 'qgroundcontrol.app'
end
| 36.923077 | 98 | 0.795833 |
380447ca2bfbe77565e4a39fe6a48f4b15bc8b4b
| 185 |
RSpec.describe Jonbrokins do
  it "has a version number" do
    expect(Jonbrokins::VERSION).not_to be nil
  end

  # The gem-scaffold placeholder asserted `expect(false).to eq(true)`, which
  # fails on every run. Mark it pending until real behavior exists to verify.
  it "does something useful" do
    skip "placeholder from the gem scaffold; no behavior implemented yet"
  end
end
| 18.5 | 45 | 0.708108 |
5d0e7c80b9b954d2854a4c73e5540024af6653d5
| 279 |
require 'test_helper'
# Integration smoke tests for the Forge authentication endpoints.
class ForgeAuthControllerTest < ActionDispatch::IntegrationTest
  # The login page renders successfully.
  test "should get login" do
    get forge_login_url
    assert_response :success
  end

  # The login callback endpoint responds successfully.
  test "should get callback" do
    get forge_login_callback_url
    assert_response :success
  end
end
| 21.461538 | 63 | 0.774194 |
26062e1791d8890d6fc8366e66d74474b2046cd9
| 53 |
# Holds the gem's release version.
module BlinkaReporter
  # Current release; frozen so shared references cannot be mutated.
  VERSION = "0.4.0".freeze
end
| 13.25 | 26 | 0.735849 |
61cbbd4f7c732fe5c2fd8719c7aaea1e25605fc8
| 2,956 |
# Scout plugin that shells out to `passenger-memory-stats` and reports, per
# server (Apache, Nginx, Passenger): process count, total VM size, and total
# private memory. Optional `max_<metric>` options raise an alert when a
# total first crosses its ceiling and another when it recovers.
class PassengerMemoryStats < Scout::Plugin

  def build_report
    cmd = option(:passenger_memory_stats_command) || "passenger-memory-stats"
    # Capture stderr too so a failure message can be surfaced in the error.
    data = `#{cmd} 2>&1`
    if $?.success?
      stats = parse_data(data)
      report(stats)
      stats.each do |name, total|
        short_name = name.sub(/_total\z/, "")
        # An absent or zero "max_<metric>" option disables this check.
        max = option("max_#{short_name}").to_f
        next unless max.nonzero?
        num = total.to_f
        # Flag remembered across runs so each breach alerts only once.
        mem_name = "#{name}_failure"
        # e.g. "passenger_vmsize" -> "Passenger VMSize".
        human_name = short_name.capitalize.
                     gsub(/_([a-z])/) { " #{$1.capitalize}"}.
                     gsub("Vms", "VMS")
        if num > max and not memory(mem_name)
          # Newly over the ceiling: alert and remember the breach.
          alert "Maximum #{human_name} Exceeded (#{total})", ''
          remember(mem_name => true)
        elsif num < max and memory(mem_name)
          # Recovered below the ceiling: announce it and clear the flag.
          alert "Maximum #{human_name} Has Dropped Below Limit (#{total})", ''
          memory.delete(mem_name)
        else
          # No transition; carry the current flag (if any) forward.
          remember(mem_name => memory(mem_name))
        end
      end
    else
      error "Could not get data from command", "Error: #{data}"
    end
  end

  private

  # Parses `passenger-memory-stats` output into a flat Hash containing
  # "<server>_processes" counts plus "<server>_vmsize_total" and
  # "<server>_private_total" strings such as "123.4 MB".
  def parse_data(data)
    table = nil
    headers = nil
    field_format = nil
    stats = { "apache_processes" => 0,
              "apache_vmsize_total" => 0.0,
              "apache_private_total" => 0.0,
              "nginx_processes" => 0,
              "nginx_vmsize_total" => 0.0,
              "nginx_private_total" => 0.0,
              "passenger_processes" => 0,
              "passenger_vmsize_total" => 0.0,
              "passenger_private_total" => 0.0 }
    data.each_line do |line|
      # Strip ANSI color escape sequences before matching.
      line = line.gsub(/\e\[\d+m/,'')
      if line =~ /^\s*-+\s+(Apache|Passenger|Nginx)\s+processes/
        # Section banner (e.g. "--- Apache processes ---"): switch tables
        # and reset the column metadata until a new header row is seen.
        table = $1.downcase
        headers = nil
        field_format = nil
      elsif table and line =~ /^\s*###\s+Processes:\s*(\d+)/
        # Summary line carries the process count for the current section.
        # NOTE(review): this stores a String into a slot initialized as an
        # Integer — build_report's to_f tolerates it, but confirm consumers.
        stats["#{table}_processes"] = $1
      elsif table and line =~ /^[A-Za-z]/
        # Header row: derive a fixed-width String#unpack format (e.g.
        # "A3xA8x...A*") from the column widths so data rows can be sliced
        # positionally; the final column is open-ended.
        headers = line.scan(/\S+\s*/)
        field_format = headers.map { |h| "A#{h.size - 1}" }.join("x").
                               sub(/\d+\z/, "*")
        headers.map! { |h| h.strip.downcase }
      elsif table and headers and line =~ /^\d/
        # Data row: zip lowercased header names with the unpacked fields.
        fields = Hash[*headers.zip(line.strip.unpack(field_format)).flatten]
        stats["#{table}_vmsize_total"] += as_mb(fields["vmsize"])
        stats["#{table}_private_total"] += as_mb(fields["private"])
      end
    end
    # Render accumulated memory totals as "<n> MB" strings for the report.
    stats.each_key do |field|
      if field =~ /(?:vmsize|private)_total\z/
        stats[field] = "#{stats[field]} MB"
      end
    end
    stats
  end

  # Converts a memory string like "90.6 KB" or "1.2 GB" to a Float in MB.
  # The \b before "B" keeps bare bytes from matching the B in KB/GB; plain
  # numbers and "MB" values fall through unchanged.
  def as_mb(memory_string)
    num = memory_string.to_f
    case memory_string
    when /\bB/i
      num / (1024 * 1024).to_f
    when /\bKB/i
      num / 1024.0
    when /\bGB/i
      num * 1024
    else
      num
    end
  end

end
| 32.483516 | 78 | 0.507442 |
39d6790caf220f6b57da4732f640463db353d121
| 4,449 |
require 'one_gadget/error'
require 'one_gadget/helper'
module OneGadget
module Emulators
# A {Lambda} object can be:
# 1. +String+ (variable name)
# 2. +Numeric+
# 3. {Lambda} + +Numeric+
# 4. dereferenced {Lambda}
class Lambda
attr_accessor :obj # @return [String, Lambda] The object currently related to.
attr_accessor :immi # @return [Integer] The immidiate value currently added.
attr_accessor :deref_count # @return [Integer] The times of dereference.
# Instantiate a {Lambda} object.
# @param [Lambda, String] obj
def initialize(obj)
@immi = 0
@obj = obj
@deref_count = 0
end
# Implement addition with +Numeric+.
# @param [Numeric] other Value to add.
# @return [Lambda] The result.
def +(other)
raise Error::InstructionArgumentError, "Expect other(#{other}) to be numeric." unless other.is_a?(Numeric)
if deref_count > 0
ret = Lambda.new(self)
else
ret = Lambda.new(obj)
ret.immi = immi
end
ret.immi += other
ret
end
# Implement substract with +Numeric+.
# @param [Numeric] other Value to substract.
# @return [Lambda] The result.
def -(other)
self.+(-other)
end
# Increase dreference count with 1.
# @return [void]
def deref!
@deref_count += 1
end
# Decrease dreference count with 1.
# @return [self]
# @raise [Error::InstrutionArgumentError] When this object cannot be referenced anymore.
def ref!
raise Error::InstructionArgumentError, 'Cannot reference anymore!' if @deref_count <= 0
@deref_count -= 1
self
end
# A new {Lambda} object with dereference count increase 1.
# @return [Lambda]
def deref
ret = Lambda.new(obj)
ret.immi = immi
ret.deref_count = deref_count + 1
ret
end
# Expand the lambda presentation.
# @return [String] The expand result.
def to_s
str = ''
str += '[' * deref_count
str += obj.to_s unless obj.nil?
str += OneGadget::Helper.hex(immi, psign: true) unless immi.zero?
str += ']' * deref_count
str
end
# Eval the value of lambda.
# Only support those like +rsp+0x30+.
# @param [Hash{String => Integer}] context
# The context.
# @return [Integer] Result of evaluation.
def evaluate(context)
raise Error::InstructionArgumentError, "Can't eval #{self}" if deref_count > 0 || (obj && !context.key?(obj))
context[obj] + immi
end
class << self
# Target: parse string like <tt>[rsp+0x50]</tt> into a {Lambda} object.
# @param [String] argument
# @param [Hash{String => Lambda}] predefined
# Predfined values.
# @return [OneGadget::Emulators::Lambda, Integer]
# If +argument+ contains number only, returns the value.
# Otherwise, returns a {Lambda} object.
# @example
# obj = Lambda.parse('[rsp+0x50]')
# #=> #<Lambda @obj='rsp', @immi=80, @deref_count=1>
# Lambda.parse('obj+0x30', predefined: { 'obj' => obj }).to_s
# #=> '[rsp+0x50]+0x30'
# @example
# Lambda.parse('[x0, -104]')
# #=> #<Lambda @obj='x0', @immi=-104, @deref_count=1>
def parse(argument, predefined: {})
arg = argument.dup
return Integer(arg) if OneGadget::Helper.integer?(arg)
# nested []
return parse(arg[1...arg.rindex(']')], predefined: predefined).deref if arg[0] == '['
base, disp = mem_obj(arg)
obj = predefined[base] || Lambda.new(base)
obj += disp unless disp.zero?
obj
end
private
# @return [(String, Integer)]
def mem_obj(arg)
# We have three forms:
# 0. reg
# 1. reg+imm / reg-imm
# 2. reg, imm / reg, -imm
tokens = arg.gsub(/[\+\-]/, ' \0').scan(/[\+\-\w]+/)
return [tokens.first, 0] if tokens.size == 1
raise Error::UnsupportedInstructionArgumentError, arg unless tokens.size == 2
raise Error::UnsupportedInstructionArgumentError, arg unless OneGadget::Helper.integer?(tokens.last)
[tokens.first, Integer(tokens.last)]
end
end
end
end
end
| 31.778571 | 117 | 0.563273 |
1af4ae834149ee335368081d98bb40de8e855a0f
| 847 |
# frozen_string_literal: true
module AttributesSanitizer
  # Uniform wrapper around a single sanitizer, given either as a Proc/lambda
  # or as the identifier of a registered (or bundled) sanitizer. Instances
  # are comparable by {#id} so sanitizer lists can be sorted/deduplicated.
  class SanitizerProc
    include Comparable

    # Comparison key: the Proc's object_id, or the registered identifier.
    attr_reader :id

    # @param sanitizer [Proc, Object] a callable or a registered name
    # @raise [ArgumentError] if +sanitizer+ is nil
    def initialize(sanitizer)
      raise ArgumentError, "No sanitizer given" if sanitizer.nil?

      sanitizer.is_a?(Proc) ? setup_lambda_proc(sanitizer) : setup_defined_proc(sanitizer)
    end

    def <=>(other)
      id <=> other.id
    end

    # Threads +value+ through every underlying proc, in order.
    def call(value)
      @procs.reduce(value) { |current, sanitizer| sanitizer.call(current) }
    end

    private

    # A raw Proc becomes a one-element pipeline keyed by its object_id.
    def setup_lambda_proc(sanitizer)
      @procs = Array(sanitizer)
      @id = sanitizer.object_id
    end

    # A named sanitizer resolves to a registered bundle or a single entry.
    def setup_defined_proc(sanitizer)
      @id = sanitizer
      @procs = AttributesSanitizer.bundle(sanitizer) || Array(AttributesSanitizer.find(sanitizer))
    end
  end
end
| 20.658537 | 99 | 0.645809 |
0325c97571c8f15e92922cd466379ecce136026b
| 525 |
# coding: utf-8
# frozen_string_literal: true
module Stealth
  module Services
    module Facebook

      # Handles a Facebook "messaging_referrals" webhook event by copying
      # the referral payload out of the raw params onto the service message.
      class MessagingReferralEvent
        attr_reader :service_message, :params

        def initialize(service_message:, params:)
          @service_message = service_message
          @params = params
        end

        # Assigns the 'referral' entry to the service message and returns it.
        def process
          @service_message.referral = @params['referral']
        end
      end

    end
  end
end
| 16.40625 | 57 | 0.60381 |
f7346e4354695709afd5d3e6924751f081aba503
| 11,900 |
=begin
Copyright 2010-2014 Tasos Laskos <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'addressable/uri'
require 'cgi'
require 'digest/sha2'
require 'securerandom'
module Arachni
#
# Includes some useful methods for the system, the modules etc.
#
# @author Tasos "Zapotek" Laskos <[email protected]>
#
module Utilities
# @return [String] random HEX (SHA2) string
def seed
@@seed ||= Digest::SHA2.hexdigest( srand( 1000 ).to_s )
end
# @see Arachni::Element::Form.from_response
def forms_from_response( *args )
Form.from_response( *args )
end
# @see Arachni::Element::Form.from_document
def forms_from_document( *args )
Form.from_document( *args )
end
# @see Arachni::Element::Form.encode
def form_encode( *args )
Form.encode( *args )
end
# @see Arachni::Element::Form.decode
def form_decode( *args )
Form.decode( *args )
end
# @see Arachni::Element::Form.parse_request_body
def form_parse_request_body( *args )
Form.parse_request_body( *args )
end
alias :parse_request_body :form_parse_request_body
# @see Arachni::Element::Link.from_response
def links_from_response( *args )
Link.from_response( *args )
end
# @see Arachni::Element::Link.from_document
def links_from_document( *args )
Link.from_document( *args )
end
# @see Arachni::Element::Link.parse_query_vars
def parse_url_vars( *args )
Link.parse_query_vars( *args )
end
def parse_query( *args )
Link.parse_query_vars( *args )
end
# @see Arachni::Element::Cookie.from_response
def cookies_from_response( *args )
Cookie.from_response( *args )
end
# @see Arachni::Element::Cookie.from_document
def cookies_from_document( *args )
Cookie.from_document( *args )
end
# @see Arachni::Element::Cookie.parse_set_cookie
def parse_set_cookie( *args )
Cookie.parse_set_cookie( *args )
end
# @see Arachni::Element::Cookie.from_file
def cookies_from_file( *args )
Cookie.from_file( *args )
end
# @see Arachni::Element::Cookie.encode
def cookie_encode( *args )
Cookie.encode( *args )
end
# @see Arachni::Page.from_response
def page_from_response( *args )
Page.from_response( *args )
end
# @see Arachni::Page.from_url
def page_from_url( *args, &block )
Page.from_url( *args, &block )
end
def html_decode( str )
::CGI.unescapeHTML( str.to_s )
end
alias :html_unescape :html_decode
def html_encode( str )
::CGI.escapeHTML( str.to_s )
end
alias :html_escape :html_encode
# @return [URI::Parser] cached URI parser
def uri_parser
URI.parser
end
# @see URI.parse
def uri_parse( url )
URI.parse( url )
end
# @see URI.encode
def uri_encode( string, bad_characters = nil )
URI.encode( string, bad_characters )
end
# @see URI.encode
def uri_decode( url )
URI.decode( url )
end
# @see URI.to_absolute
def to_absolute( relative_url, reference_url = Options.instance.url.to_s )
URI.to_absolute( relative_url, reference_url )
end
# @see URI.normalize
def normalize_url( url )
URI.normalize( url )
end
# @see normalize_url
def url_sanitize( url )
normalize_url( url )
end
#
# @param [String] url
#
# @return [String] path
# Full URL up to the path component (no resource, query etc.).
#
# @see URI.up_to_path
#
def get_path( url )
uri_parse( url ).up_to_path
end
#
# @param [String] url
#
# @return [String] Domain name.
#
# @see URI.domain
#
def extract_domain( url )
uri_parse( url ).domain
end
#
# @param [String] url
#
# @return [Bool]
# `true` is the path exceeds the framework limit, `false` otherwise.
#
# @see URI.too_deep?
# @see Options#depth_limit
#
def path_too_deep?( url )
uri_parse( url ).too_deep?( Options.depth_limit )
end
#
# Compares 2 urls in order to decide whether or not they belong to the same domain.
#
# @param [String] url
# @param [String] reference
#
# @return [Bool]
# `true` if self is in the same domain as the `reference` URL, false otherwise.
#
# @see URI.in_domain?
# @see Options#follow_subdomains
#
def path_in_domain?( url, reference = Options.url )
uri_parse( url ).in_domain?( !Options.follow_subdomains, reference )
end
#
# Decides whether the given `url` matches any framework exclusion rules.
#
# @param [String] url
#
# @return [Bool]
#
# @see URI.exclude?
# @see Options#exclude
#
def exclude_path?( url )
uri_parse( url ).exclude?( Options.exclude )
end
#
# Decides whether the given `url` matches any framework inclusion rules.
#
# @param [String] url
#
# @return [Bool]
#
# @see URI.include?
# @see Options#include
#
def include_path?( url )
uri_parse( url ).include?( Options.include )
end
#
# Checks if the provided URL matches a redundant filter
# and decreases its counter if so.
#
# If a filter's counter has reached 0 the method returns true.
#
# @param [String] url
#
# @return [Bool] `true` if the `url` is redundant, `false` otherwise.
#
# @see Options#redundant?
#
def redundant_path?( url, &block )
Options.redundant?( url, &block )
end
#
# Decides whether the given `url` has an acceptable protocol.
#
# @param [String] url
# @param [String] reference Reference URL.
#
# @return [Bool]
#
# @see Options#https_only
# @see Options#https_only?
#
def follow_protocol?( url, reference = Options.url )
return true if !reference
check_scheme = uri_parse( url ).scheme
return false if !%(http https).include?( check_scheme.to_s.downcase )
ref_scheme = uri_parse( reference ).scheme
return true if ref_scheme && ref_scheme != 'https'
return true if ref_scheme == check_scheme
!Options.https_only?
end
#
# Decides whether or not the provided `path` should be skipped based on:
#
# * {#include_path?}
# * {#exclude_path?}
# * {#path_too_deep?}
# * {#path_in_domain?}
#
# @note Does **not** call {#redundant_path?}.
#
# @param [Arachni::URI, ::URI, Hash, String] path
#
# @return [Bool]
#
def skip_path?( path )
return true if !path
parsed = uri_parse( path.to_s )
begin
return true if !follow_protocol?( parsed )
return true if !path_in_domain?( parsed )
return true if path_too_deep?( parsed )
return true if !include_path?( parsed )
return true if exclude_path?( parsed )
false
rescue => e
ap e
ap e.backtrace
true
end
end
#
# Determines whether or not a given {Arachni::Page} or {Typhoeus::Response}
# should be ignored.
#
# @param [Page,Typhoeus::Response,#body] page_or_response
#
# @return [Bool]
# `true` if the `#body` of the given object matches any of the exclusion
# patterns, `false` otherwise.
#
# @see #skip_path?
# @see Options#exclude_binaries?
# @see Options#exclude_page?
#
def skip_page?( page_or_response )
(Options.exclude_binaries? && !page_or_response.text?) ||
skip_path?( page_or_response.url ) ||
Options.exclude_page?( page_or_response.body )
end
alias :skip_response? :skip_page?
#
# Determines whether or not the given `resource` should be ignored
# depending on its type and content.
#
# @param [Page,Typhoeus::Response,String] resource
# If given a:
#
# * {Page}: both its URL and body will be examined.
# * {Typhoeus::Response}: both its effective URL and body will be examined.
# * {String}: if multi-line it will be treated as a response body,
# otherwise as a path.
#
# @return [Bool]
# `true` if the resource should be ignore,`false` otherwise.
#
# @see skip_path?
# @see ignore_page?
# @see ignore_response?
# @see Options#ignore?
#
def skip_resource?( resource )
case resource
when Page
skip_page?( resource )
when Typhoeus::Response
skip_response?( resource )
else
if (s = resource.to_s) =~ /[\r\n]/
Options.exclude_page? s
else
skip_path? s
end
end
end
# @return [Fixnum] Random available port number.
def available_port
nil while !port_available?( port = rand_port )
port
end
# @return [Integer] Random port within the user specified range.
# @see Options#rpc_instance_port_range
def rand_port
first, last = Options.rpc_instance_port_range
range = (first..last).to_a
range[ rand( range.last - range.first ) ]
end
def generate_token
secret = ''
1000.times { secret << rand( 9999 ).to_s }
Digest::SHA2.hexdigest( secret )
end
#
# Checks whether the port number is available.
#
# @param [Fixnum] port
#
# @return [Bool]
#
def port_available?( port )
begin
socket = Socket.new( :INET, :STREAM, 0 )
socket.bind( Addrinfo.tcp( '127.0.0.1', port ) )
socket.close
true
rescue
false
end
end
#
# Wraps the "block" in exception handling code and runs it.
#
# @param [Bool] raise_exception re-raise exception
# @param [Block] block to call
#
def exception_jail( raise_exception = true, &block )
block.call
rescue Exception => e
begin
print_error e.inspect
print_error_backtrace e
print_error
print_error 'Parent:'
print_error self.class.to_s
print_error
print_error 'Block:'
print_error block.to_s
print_error
print_error 'Caller:'
::Kernel.caller.each { |l| print_error l }
print_error '-' * 80
rescue
end
raise e if raise_exception
end
def remove_constants( mod, skip = [], children_only = true )
return if skip.include?( mod )
return if !(mod.is_a?( Class ) || !mod.is_a?( Module )) ||
!mod.to_s.start_with?( 'Arachni' )
parent = Object
mod.to_s.split( '::' )[0..-2].each do |ancestor|
parent = parent.const_get( ancestor.to_sym )
end
mod.constants.each { |m| mod.send( :remove_const, m ) }
return if children_only
parent.send( :remove_const, mod.to_s.split( ':' ).last.to_sym )
end
extend self
end
end
| 26.039387 | 87 | 0.587143 |
28c0faf9d596c91fc2a71fc07b82816bf974c4f1
| 1,182 |
# -*- encoding: utf-8 -*-
# stub: deep_merge 1.0.1 ruby lib
# Auto-generated RubyGems specification stub for the installed deep_merge gem.

Gem::Specification.new do |s|
  s.name = "deep_merge".freeze
  s.version = "1.0.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Steve Midgley".freeze]
  s.date = "2011-07-28"
  s.description = "Recursively merge hashes. Now works with Ruby 1.9 and ActiveSupport".freeze
  s.email = "[email protected]".freeze
  s.extra_rdoc_files = ["README.md".freeze]
  s.files = ["README.md".freeze]
  s.homepage = "http://github.com/danielsdeleo/deep_merge".freeze
  s.licenses = ["MIT".freeze]
  s.rubygems_version = "2.7.4".freeze
  s.summary = "Merge Deeply Nested Hashes".freeze

  s.installed_by_version = "2.7.4" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    # Declare rake as a development dependency across RubyGems API versions.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rake>.freeze, ["~> 10.1"])
    else
      s.add_dependency(%q<rake>.freeze, ["~> 10.1"])
    end
  else
    s.add_dependency(%q<rake>.freeze, ["~> 10.1"])
  end
end
| 33.771429 | 112 | 0.678511 |
edd375dccfcce870d4d991042e1002e0bf26e484
| 75 |
# Make the gem's lib directory requirable, then load the gem under test.
# (`__dir__` is the modern equivalent of expanding relative to __FILE__.)
$LOAD_PATH.unshift File.expand_path("../lib", __dir__)
require "srunr"
| 25 | 58 | 0.72 |
f783511d83b89548b533a6164ada825b4466b755
| 3,365 |
require 'spec_helper'
require 'ostruct'
describe Atrium::Configuration do
Given(:main_app_config) {
OpenStruct.new(application_name: default_application_name )
}
Given(:configuration) { Atrium::Configuration.new(main_app_config) }
Given(:default_application_name) { "Good-Bye World"}
Given(:expected_application_name) { 'Hello World' }
context '#saved_search_class' do
context 'when set' do
Given(:saved_search_class_name) { 'Object' }
When { configuration.saved_search_class = saved_search_class_name }
Then { configuration.saved_search_class.should == Object }
end
context 'when not set' do
Then {
expect {
configuration.saved_search_class
}.to raise_error(Atrium::ConfigurationNotSet)
}
end
end
context '#saved_items_class' do
context 'when set' do
Given(:saved_items_class_name) { 'Object' }
When { configuration.saved_items_class = saved_items_class_name }
Then { configuration.saved_items_class.should == Object }
end
context 'when not set' do
Then {
expect {
configuration.saved_items_class
}.to raise_error(Atrium::ConfigurationNotSet)
}
end
end
context "#query_param_beautifier=" do
Then('raise exception if not callable') {
expect {
configuration.query_param_beautifier = 1
}.to raise_error(Atrium::ConfigurationExpectation)
}
Then('raise exception if improper arity') {
expect {
Atrium.query_param_beautifier = lambda {}
}.to raise_error(Atrium::ConfigurationExpectation)
}
end
context "#query_param_beautifier" do
Given(:params) { ['a', 'b'] }
Given(:context) { Object.new }
context 'without override' do
Then {
configuration.query_param_beautifier(context, params).
should == params.inspect
}
end
context 'with override' do
Given(:expected_output) { "#{context.class}: #{params.reverse.inspect}" }
When {
configuration.query_param_beautifier =
lambda { |context,params| expected_output }
}
Then {
configuration.query_param_beautifier(context, params).
should == expected_output
}
end
end
context '#label_for_facet' do
Given(:facet_name) { 'Name' }
context 'facet registered' do
Given(:expected_label) { 'My Label' }
Given(:facet) { double("Facet", :label => expected_label)}
Given(:main_app_config) {
OpenStruct.new(facet_fields: {facet_name => facet})
}
Then {
configuration.label_for_facet(facet_name).should == expected_label
}
end
context 'facet missing' do
Then {
configuration.label_for_facet(facet_name).should == facet_name
}
end
end
context '#application_name' do
context 'override via config' do
When { configuration.application_name = expected_application_name }
Then { configuration.application_name.should == expected_application_name }
end
context 'defer to main app config' do
Then { configuration.application_name.should == default_application_name }
end
context 'gracefully handle not having application name set' do
Given(:main_app_config) { OpenStruct.new }
Then { configuration.application_name.should == '' }
end
end
end
| 29.26087 | 81 | 0.666568 |
bf2a141834ccc6c4ee19ba7ca58f9a5916ab0746
| 209 |
# Migration creating the "ufs" lookup table
# (presumably Brazilian federative units, "unidades federativas" — confirm with app usage).
class CreateUfs < ActiveRecord::Migration
  def self.up
    create_table :ufs do |t|
      t.string :nome  # full unit/state name
      t.string :sigla # short abbreviation code
      t.timestamps
    end
  end

  def self.down
    drop_table :ufs
  end
end
| 13.933333 | 41 | 0.631579 |
792bf50d60364ddd96219bcdf80b82a7ae6c624a
| 324 |
module Devise
  module Models
    # Mixin for Devise models that can own OAuth client applications.
    # Including models gain an +oauth_clients+ association; owned clients
    # are destroyed together with the owning record.
    module ClientOwnable
      extend ActiveSupport::Concern

      included do
        has_many :oauth_clients,
          class_name: "Devise::Oauth::Client",
          foreign_key: "owner_id",
          dependent: :destroy
      end
    end
  end
end
| 21.6 | 57 | 0.537037 |
26a1088744ee5588c14b09250245d7b9f83590f8
| 94 |
require 'spec_helper'
describe docker_container('haproxy') do
it { should be_running }
end
| 15.666667 | 39 | 0.765957 |
87534a46a90cfb6da9d55258aaf64cc62808323a
| 1,399 |
# Homebrew formula for Apache Zeppelin (web-based interactive analytics notebook).
class ApacheZeppelin < Formula
  desc "Web-based notebook that enables interactive data analytics"
  homepage "https://zeppelin.apache.org"
  url "https://www.apache.org/dyn/closer.lua?path=zeppelin/zeppelin-0.7.1/zeppelin-0.7.1-bin-all.tgz"
  sha256 "eaa7c34bb7e4dd0e722f330fe524e361cd8f05980dad214f92f5d6701861f3d8"
  head "https://github.com/apache/zeppelin.git"

  bottle :unneeded

  def install
    # Drop Windows launcher scripts, then install everything under libexec and
    # expose the launch scripts through exec wrappers in bin.
    rm_f Dir["bin/*.cmd"]
    libexec.install Dir["*"]
    bin.write_exec_script Dir["#{libexec}/bin/*"]
  end

  test do
    begin
      ENV["ZEPPELIN_LOG_DIR"] = "logs"
      ENV["ZEPPELIN_PID_DIR"] = "pid"
      ENV["ZEPPELIN_CONF_DIR"] = "#{testpath}/conf"
      conf = testpath/"conf"
      conf.mkdir
      (conf/"zeppelin-env.sh").write <<-EOF.undent
        export ZEPPELIN_WAR_TEMPDIR="#{testpath}/webapps"
        export ZEPPELIN_PORT=9999
        export ZEPPELIN_NOTEBOOK_DIR="#{testpath}/notebooks"
        export ZEPPELIN_MEM="-Xms256m -Xmx1024m -XX:MaxPermSize=256m"
      EOF
      ln_s "#{libexec}/conf/log4j.properties", conf
      ln_s "#{libexec}/conf/shiro.ini", conf
      system "#{bin}/zeppelin-daemon.sh", "start"
      begin
        # Give the daemon time to boot before probing the REST API.
        sleep 25
        json_text = shell_output("curl -s http://localhost:9999/api/notebook/")
        assert_equal JSON.parse(json_text)["status"], "OK"
      ensure
        # Always stop the daemon, even if the assertion above fails.
        system "#{bin}/zeppelin-daemon.sh", "stop"
      end
    end
  end
end
| 33.309524 | 101 | 0.663331 |
3964fe6c0cff206626f61ad59da382d6757210f3
| 793 |
# frozen_string_literal: true
require "spec_helper"
require "dependabot/lein/file_fetcher"
require_common_spec "file_fetchers/shared_examples_for_file_fetchers"
# Specs for the Leiningen file fetcher, run against the real
# technomancy/leiningen repository with HTTP interactions recorded via VCR.
RSpec.describe Dependabot::Lein::FileFetcher, :vcr do
  it_behaves_like "a dependency file fetcher"

  let(:credentials) { github_credentials }
  let(:source) do
    Dependabot::Source.new(
      provider: "github",
      repo: "technomancy/leiningen",
      directory: "/"
    )
  end
  let(:file_fetcher_instance) do
    Dependabot::Lein::FileFetcher.new(source: source, credentials: credentials)
  end

  # The fetcher returns the repo's project.clj plus a pom.xml it generates.
  it "fetches the project.clj and generates the pom.xml" do
    expect(file_fetcher_instance.files.count).to eq(2)
    expect(file_fetcher_instance.files.map(&:name)).
      to match_array(%w(project.clj pom.xml))
  end
end
| 28.321429 | 79 | 0.738966 |
9164a613b3073a2cc177acd510584da5e7938095
| 1,900 |
module Intrigue
module Task
# Brute-forces a list of likely SSRF-prone query parameters on a target URI by
# pointing each parameter at a callback (responder) server and logging the
# responses for later correlation.
class SsrfBruteParameters < BaseTask

  def self.metadata
    {
      :name => "vuln/ssrf_brute_parameters",
      :pretty_name => "Vuln Check - Brute Parameters for SSRF",
      :authors => ["jcran"],
      :identifiers => [
        { "cve" => false },
        { "cwe" => "CWE-918" }
      ],
      :description => "Generic SSRF Payload Tester",
      :references => [],
      :type => "vuln_check",
      :passive => false,
      :allowed_types => ["Uri"],
      :example_entities => [
        {"type" => "Uri", "details" => {"name" => "https://intrigue.io"}}
      ],
      :allowed_options => [
        {:name => "ssrf_target_uri", :regex => "alpha_numeric_list", :default => "http://172.19.131.128:55555" },
        {:name => "parameter_list", :regex => "alpha_numeric_list", :default => "redirect,url,uri,location,host,next,referer" }
      ],
      :created_types => []
    }
  end

  ## Default method, subclasses must override this
  def run
    super

    uri = _get_entity_name
    ssrf_target_uri = _get_option("ssrf_target_uri")
    parameter_list = _get_option("parameter_list").split(",")

    _log "Starting SSRF Responder server"
    Intrigue::Task::Server::SsrfResponder.start_and_background

    parameter_list.each do |parameter|
      # make the request and collect the response
      # https://stackoverflow.com/questions/7012810/url-encoding-ampersand-problem
      payload = "#{ssrf_target_uri}?int_id=#{@task_result.id}%26int_param=#{parameter}"
      generated_test_uri = "#{uri}?#{parameter}=#{payload}"
      response = http_request :get, generated_test_uri
      # Guard against a nil response (timeout / connection failure) so one
      # unreachable request doesn't abort the whole parameter sweep with a
      # NoMethodError on response.code.
      if response
        _log "Sent: (#{generated_test_uri}), Got: #{response.code} for parameter #{parameter}"
        _log "Response: #{response.body}"
      else
        _log "No response received for parameter #{parameter} (#{generated_test_uri})"
      end
    end

    # Future work... actually exfil data (enrichment?)
    #"http://169.254.169.254/latest/meta-data/", # AWS Metadata

  end

end
end
end
| 32.20339 | 127 | 0.623684 |
bbbdb816d4dcda1eb539f69c2be52d29a76c11c9
| 8,201 |
require 'test_helper'
# Unit tests for the UsaEpay Transaction gateway: endpoint URL selection,
# purchase success/failure response parsing, split payments, billing/shipping
# address mapping, and AVS/CVV result extraction.
class UsaEpayTransactionTest < Test::Unit::TestCase
  include CommStub

  def setup
    @gateway = UsaEpayTransactionGateway.new(:login => 'LOGIN')

    @credit_card = credit_card('4242424242424242')

    @options = {
      :billing_address => address,
      :shipping_address => address
    }
    @amount = 100
  end

  def test_urls
    assert_equal 'https://www.usaepay.com/gate',     UsaEpayTransactionGateway.live_url
    assert_equal 'https://sandbox.usaepay.com/gate', UsaEpayTransactionGateway.test_url
  end

  # Live-mode gateways must post to the production endpoint.
  def test_request_url_live
    gateway = UsaEpayTransactionGateway.new(:login => 'LOGIN', :test => false)
    gateway.expects(:ssl_post).
      with('https://www.usaepay.com/gate', purchase_request).
      returns(successful_purchase_response)
    assert response = gateway.purchase(@amount, @credit_card, @options)
  end

  # Test-mode gateways must post to the sandbox endpoint.
  def test_request_url_test
    @gateway.expects(:ssl_post).
      with('https://sandbox.usaepay.com/gate', purchase_request).
      returns(successful_purchase_response)
    assert response = @gateway.purchase(@amount, @credit_card, @options)
  end

  def test_successful_request
    @gateway.expects(:ssl_post).returns(successful_purchase_response)

    assert response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    # The UMrefNum from the response becomes the authorization token.
    assert_equal '55074409', response.authorization
    assert response.test?
  end

  def test_unsuccessful_request
    @gateway.expects(:ssl_post).returns(unsuccessful_purchase_response)

    assert response = @gateway.purchase(@amount, @credit_card, @options)
    assert_failure response
    assert response.test?
  end

  # :order_id maps to UMinvoice and :description to UMdescription.
  def test_successful_purchase_passing_extra_info
    response = stub_comms do
      @gateway.purchase(@amount, @credit_card, @options.merge(:order_id => "1337", :description => "socool"))
    end.check_request do |endpoint, data, headers|
      assert_match(/UMinvoice=1337/, data)
      assert_match(/UMdescription=socool/, data)
    end.respond_with(successful_purchase_response)
    assert_success response
  end

  # Split payments are serialized as numbered UM02/UM03... parameter groups,
  # with amounts converted from cents to dollars.
  def test_successful_purchase_split_payment
    response = stub_comms do
      @gateway.purchase(@amount, @credit_card, @options.merge(
        :split_payments => [
          { :key => 'abc123', :amount => 199, :description => 'Second payee' },
          { :key => 'def456', :amount => 911, :description => 'Third payee' },
        ]
      ))
    end.check_request do |endpoint, data, headers|
      assert_match /UM02key=abc123/, data
      assert_match /UM02amount=1.99/, data
      assert_match /UM02description=Second\+payee/, data
      assert_match /UM03key=def456/, data
      assert_match /UM03amount=9.11/, data
      assert_match /UM03description=Third\+payee/, data
      # Default split-payment error behavior is to void the whole transaction.
      assert_match /UMonError=Void/, data
    end.respond_with(successful_purchase_response)
    assert_success response
  end

  def test_successful_purchase_split_payment_with_custom_on_error
    response = stub_comms do
      @gateway.purchase(@amount, @credit_card, @options.merge(
        :split_payments => [
          { :key => 'abc123', :amount => 199, :description => 'Second payee' }
        ],
        :on_error => 'Continue'
      ))
    end.check_request do |endpoint, data, headers|
      assert_match /UMonError=Continue/, data
    end.respond_with(successful_purchase_response)
    assert_success response
  end

  def test_address_key_prefix
    assert_equal 'bill', @gateway.send(:address_key_prefix, :billing)
    assert_equal 'ship', @gateway.send(:address_key_prefix, :shipping)
    assert_nil @gateway.send(:address_key_prefix, :vacation)
  end

  def test_address_key
    assert_equal :shipfname, @gateway.send(:address_key, 'ship', 'fname')
  end

  def test_add_address
    post = {}
    @gateway.send(:add_address, post, @credit_card, @options)
    assert_address(:shipping, post)
    assert_equal 20, post.keys.size
  end

  def test_add_billing_address
    post = {}
    @gateway.send(:add_address, post, @credit_card, @options)
    assert_address(:billing, post)
    assert_equal 20, post.keys.size
  end

  def test_add_billing_and_shipping_addresses
    post = {}
    @gateway.send(:add_address, post, @credit_card, @options)
    assert_address(:shipping, post)
    assert_address(:billing, post)
    assert_equal 20, post.keys.size
  end

  # Amounts must be integer cents; formatted strings are rejected.
  def test_amount_style
    assert_equal '10.34', @gateway.send(:amount, 1034)

    assert_raise(ArgumentError) do
      @gateway.send(:amount, '10.34')
    end
  end

  def test_supported_countries
    assert_equal ['US'], UsaEpayTransactionGateway.supported_countries
  end

  def test_supported_card_types
    assert_equal [:visa, :master, :american_express], UsaEpayTransactionGateway.supported_cardtypes
  end

  def test_avs_result
    @gateway.expects(:ssl_post).returns(successful_purchase_response)

    response = @gateway.purchase(@amount, @credit_card, @options)
    assert_equal 'Y', response.avs_result['code']
    assert_equal 'Y', response.avs_result['street_match']
    assert_equal 'Y', response.avs_result['postal_match']
  end

  def test_cvv_result
    @gateway.expects(:ssl_post).returns(successful_purchase_response)

    response = @gateway.purchase(@amount, @credit_card, @options)
    assert_equal 'M', response.cvv_result['code']
  end

  # A bare/garbage response body must produce a failure, not an exception.
  def test_does_not_raise_error_on_missing_values
    @gateway.expects(:ssl_post).returns("status")
    assert_nothing_raised do
      response = @gateway.purchase(@amount, @credit_card, @options)
      assert_failure response
    end
  end

  private

  # Asserts that every mapped field of the given address type (billing or
  # shipping) made it into the post hash under the right prefixed key.
  def assert_address(type, post)
    prefix = key_prefix(type)
    assert_equal @credit_card.first_name,             post[key(prefix, 'fname')]
    assert_equal @credit_card.last_name,              post[key(prefix, 'lname')]
    assert_equal @options[:billing_address][:company],  post[key(prefix, 'company')]
    assert_equal @options[:billing_address][:address1], post[key(prefix, 'street')]
    assert_equal @options[:billing_address][:address2], post[key(prefix, 'street2')]
    assert_equal @options[:billing_address][:city],     post[key(prefix, 'city')]
    assert_equal @options[:billing_address][:state],    post[key(prefix, 'state')]
    assert_equal @options[:billing_address][:zip],      post[key(prefix, 'zip')]
    assert_equal @options[:billing_address][:country],  post[key(prefix, 'country')]
    assert_equal @options[:billing_address][:phone],    post[key(prefix, 'phone')]
  end

  def key_prefix(type)
    @gateway.send(:address_key_prefix, type)
  end

  def key(prefix, key)
    @gateway.send(:address_key, prefix, key)
  end

  # Canned wire fixtures (exact expected request and gateway responses).
  def purchase_request
    "UMamount=1.00&UMinvoice=&UMdescription=&UMcard=4242424242424242&UMcvv2=123&UMexpir=09#{@credit_card.year.to_s[-2..-1]}&UMname=Longbob+Longsen&UMbillfname=Longbob&UMbilllname=Longsen&UMbillcompany=Widgets+Inc&UMbillstreet=1234+My+Street&UMbillstreet2=Apt+1&UMbillcity=Ottawa&UMbillstate=ON&UMbillzip=K1C2N6&UMbillcountry=CA&UMbillphone=%28555%29555-5555&UMshipfname=Longbob&UMshiplname=Longsen&UMshipcompany=Widgets+Inc&UMshipstreet=1234+My+Street&UMshipstreet2=Apt+1&UMshipcity=Ottawa&UMshipstate=ON&UMshipzip=K1C2N6&UMshipcountry=CA&UMshipphone=%28555%29555-5555&UMstreet=1234+My+Street&UMzip=K1C2N6&UMcommand=cc%3Asale&UMkey=LOGIN&UMsoftware=Active+Merchant&UMtestmode=0"
  end

  def successful_purchase_response
    "UMversion=2.9&UMstatus=Approved&UMauthCode=001716&UMrefNum=55074409&UMavsResult=Address%3A%20Match%20%26%205%20Digit%20Zip%3A%20Match&UMavsResultCode=Y&UMcvv2Result=Match&UMcvv2ResultCode=M&UMresult=A&UMvpasResultCode=&UMerror=Approved&UMerrorcode=00000&UMcustnum=&UMbatch=596&UMisDuplicate=N&UMconvertedAmount=&UMconvertedAmountCurrency=840&UMconversionRate=&UMcustReceiptResult=No%20Receipt%20Sent&UMfiller=filled"
  end

  def unsuccessful_purchase_response
    "UMversion=2.9&UMstatus=Declined&UMauthCode=000000&UMrefNum=55076060&UMavsResult=Address%3A%20Match%20%26%205%20Digit%20Zip%3A%20Match&UMavsResultCode=Y&UMcvv2Result=Not%20Processed&UMcvv2ResultCode=P&UMvpasResultCode=&UMresult=D&UMerror=Card%20Declined&UMerrorcode=10127&UMbatch=596&UMfiller=filled"
  end
end
| 39.427885 | 678 | 0.727594 |
7922123206b93e76b918d31ced87d30afa5f0fa2
| 1,351 |
require_relative 'lib/garden_planner/version'

# Gem packaging metadata for the garden_planner CLI.
Gem::Specification.new do |spec|
  spec.name          = "garden_planner"
  spec.version       = GardenPlanner::VERSION
  spec.authors       = ["torishillcutt"]
  spec.email         = ["[email protected]"]

  # Replaced the generator's "TODO" placeholders: RubyGems tooling warns on
  # TODO summaries, and a TODO allowed_push_host blocks `gem push` entirely.
  spec.summary       = %q{A command-line garden planner.}
  spec.description   = %q{garden_planner is a CLI application for planning and organizing a garden.}
  spec.homepage      = "https://github.com/torishillcutt/garden-planner-cli"
  spec.license       = "MIT"
  spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")

  # Only allow pushes to the public RubyGems host.
  spec.metadata["allowed_push_host"] = "https://rubygems.org"

  spec.metadata["homepage_uri"]    = spec.homepage
  spec.metadata["source_code_uri"] = spec.homepage
  spec.metadata["changelog_uri"]   = "#{spec.homepage}/blob/master/CHANGELOG.md"

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
end
| 45.033333 | 87 | 0.670614 |
bb8592781cfac00f2ac80d75f857b3b8e3a46655
| 4,735 |
require 'spec_helper'
describe Mongoid::Tree::Traversal do
subject { OrderedNode }
describe '#traverse' do
subject { Node.new }
[:depth_first, :breadth_first].each do |method|
it "should support #{method} traversal" do
expect { subject.traverse(method) {} }.to_not raise_error
end
end
it "should complain about unsupported traversal methods" do
expect { subject.traverse('non_existing') {} }.to raise_error
end
it "should default to depth_first traversal" do
expect(subject).to receive(:depth_first_traversal)
subject.traverse {}
end
end
describe 'depth first traversal' do
describe 'with unmodified tree' do
before do
setup_tree <<-ENDTREE
node1:
- node2:
- node3
- node4:
- node5
- node6
- node7
ENDTREE
end
it "should traverse correctly" do
result = []
node(:node1).traverse(:depth_first) { |node| result << node }
expect(result.collect { |n| n.name.to_sym }).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
it "should return and array containing the results of the block for each node" do
result = node(:node1).traverse(:depth_first) { |n| n.name.to_sym }
expect(result).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
end
describe 'with merged trees' do
before do
setup_tree <<-ENDTREE
- node4:
- node5
- node6:
- node7
- node1:
- node2:
- node3
ENDTREE
node(:node1).children << node(:node4)
end
it "should traverse correctly" do
result = node(:node1).traverse(:depth_first) { |n| n.name.to_sym }
expect(result).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
end
describe 'with reordered nodes' do
before do
setup_tree <<-ENDTREE
node1:
- node2:
- node3
- node4:
- node6
- node5
- node7
ENDTREE
node(:node5).move_above(node(:node6))
end
it 'should iterate through the nodes in the correct order' do
result = []
node(:node1).traverse(:depth_first) { |node| result << node }
expect(result.collect { |n| n.name.to_sym }).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
it 'should return the nodes in the correct order' do
result = node(:node1).traverse(:depth_first)
expect(result.collect { |n| n.name.to_sym }).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
end
end
describe 'breadth first traversal' do
before do
setup_tree <<-ENDTREE
node1:
- node2:
- node5
- node3:
- node6
- node7
- node4
ENDTREE
end
it "should traverse correctly" do
result = []
node(:node1).traverse(:breadth_first) { |n| result << n }
expect(result.collect { |n| n.name.to_sym }).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
it "should return and array containing the results of the block for each node" do
result = node(:node1).traverse(:breadth_first) { |n| n.name.to_sym }
expect(result).to eq([:node1, :node2, :node3, :node4, :node5, :node6, :node7])
end
end
describe '.traverse' do
before :each do
setup_tree <<-ENDTREE
- root1
- root2
ENDTREE
@root1 = node(:root1)
@root2 = node(:root2)
Node.stub(:roots).and_return [@root1, @root2]
end
it 'should grab each root' do
expect(Node).to receive(:roots).and_return []
expect(Node.traverse).to eq([])
end
it 'should default the "type" arg to :depth_first' do
expect(@root1).to receive(:traverse).with(:depth_first).and_return([])
expect(@root2).to receive(:traverse).with(:depth_first).and_return([])
expect(Node.traverse).to eq([])
end
it 'should traverse each root' do
expect(@root1).to receive(:traverse).and_return([1, 2])
expect(@root2).to receive(:traverse).and_return([3, 4])
expect(Node.traverse).to eq([1, 2, 3, 4])
end
describe 'when the "type" arg is :breadth_first' do
it 'should traverse breadth-first' do
expect(@root1).to receive(:traverse).with(:breadth_first).and_return([])
expect(@root2).to receive(:traverse).with(:breadth_first).and_return([])
Node.traverse :breadth_first
end
end
end
end
| 26.601124 | 116 | 0.571911 |
ff40ead436f98b23b4c2900d80a659192ba38d7c
| 136 |
module AvantUtils
  # Gem version constants following semantic versioning.
  module Version
    MAJOR = 0
    MINOR = 1
    PATCH = 0

    # Combined dotted version string, e.g. "0.1.0".
    STRING = [MAJOR, MINOR, PATCH].join('.').freeze
  end
end
| 15.111111 | 48 | 0.588235 |
edfb73e5c8ac3eb3cb5ec85ea3e14d3e67900541
| 2,971 |
require 'spec_helper'
describe Capybara::Server do
it "should spool up a rack server" do
@app = proc { |env| [200, {}, "Hello Server!"]}
@server = Capybara::Server.new(@app).boot
@res = Net::HTTP.start(@server.host, @server.port) { |http| http.get('/') }
@res.body.should include('Hello Server')
end
it "should do nothing when no server given" do
running do
@server = Capybara::Server.new(nil).boot
end.should_not raise_error
end
it "should use specified port" do
Capybara.server_port = 22789
@app = proc { |env| [200, {}, "Hello Server!"]}
@server = Capybara::Server.new(@app).boot
@res = Net::HTTP.start(@server.host, 22789) { |http| http.get('/') }
@res.body.should include('Hello Server')
Capybara.server_port = nil
end
it "should find an available port" do
@app1 = proc { |env| [200, {}, "Hello Server!"]}
@app2 = proc { |env| [200, {}, "Hello Second Server!"]}
@server1 = Capybara::Server.new(@app1).boot
@server2 = Capybara::Server.new(@app2).boot
@res1 = Net::HTTP.start(@server1.host, @server1.port) { |http| http.get('/') }
@res1.body.should include('Hello Server')
@res2 = Net::HTTP.start(@server2.host, @server2.port) { |http| http.get('/') }
@res2.body.should include('Hello Second Server')
end
it "should use the server if it already running" do
@app1 = proc { |env| [200, {}, "Hello Server!"]}
@app2 = proc { |env| [200, {}, "Hello Second Server!"]}
@server1a = Capybara::Server.new(@app1).boot
@server1b = Capybara::Server.new(@app1).boot
@server2a = Capybara::Server.new(@app2).boot
@server2b = Capybara::Server.new(@app2).boot
@res1 = Net::HTTP.start(@server1b.host, @server1b.port) { |http| http.get('/') }
@res1.body.should include('Hello Server')
@res2 = Net::HTTP.start(@server2b.host, @server2b.port) { |http| http.get('/') }
@res2.body.should include('Hello Second Server')
@server1a.port.should == @server1b.port
@server2a.port.should == @server2b.port
end
it "should wait specified time for the app to boot" do
pending 'this test does not work: https://groups.google.com/d/msg/ruby-capybara/QrSKTbjh5rY/egvcVFYiWZMJ'
@slow_app = proc { |env| sleep(1); [200, {}, "Hello Slow Server!"] }
Capybara.server_boot_timeout = 1.5
@server = Capybara::Server.new(@slow_app).boot
@res = Net::HTTP.start(@server.host, @server.port) { |http| http.get('/') }
@res.body.should include('Hello Slow Server')
end
it "should raise an exception if boot timeout is exceeded" do
pending 'this test does not work: https://groups.google.com/d/msg/ruby-capybara/QrSKTbjh5rY/egvcVFYiWZMJ'
@slow_app = proc { |env| sleep(1); [200, {}, "Hello Slow Server!"] }
Capybara.server_boot_timeout = 0.5
server = Capybara::Server.new(@slow_app)
server.stub(:exit).and_return(:timeout)
server.stub(:puts)
server.boot.should == :timeout
end
end
| 33.011111 | 109 | 0.643218 |
4a4be6233f8690a153f98af214db52d5f547060f
| 870 |
# CocoaPods specification for the official DRACOON Crypto SDK, which provides
# client-side encryption for the DRACOON platform.
Pod::Spec.new do |spec|
  spec.name        = 'DRACOON-Crypto-SDK'
  spec.version     = '2.1.0'
  spec.summary     = 'Official DRACOON Crypto SDK'
  spec.description = <<-DESC
  This SDK implements client-side encryption for DRACOON.
                       DESC

  spec.homepage = 'https://github.com/dracoon/dracoon-swift-crypto-sdk'
  spec.license  = { :type => 'Apache 2.0', :file => 'LICENSE' }
  spec.author   = { 'Mathias Schreiner' => '[email protected]' }
  spec.source   = { :git => 'https://github.com/dracoon/dracoon-swift-crypto-sdk.git', :tag => "v" + spec.version.to_s }

  spec.module_name           = 'crypto_sdk'
  spec.ios.deployment_target = '11.4'
  spec.swift_version         = '5.3'
  spec.pod_target_xcconfig   = { 'VALID_ARCHS' => 'x86_64 arm64' }

  spec.source_files        = 'crypto-sdk/**/*'
  spec.vendored_frameworks = 'OpenSSL/openssl.framework'
end
| 36.25 | 122 | 0.588506 |
38207d9e0add35b4d6c9957d8e692cb126b16117
| 1,516 |
# frozen_string_literal: true
module Neo
  module Utils
    # Reads binary primitives (little-endian integers, length-prefixed
    # strings, hex runs, fixed-point values and timestamps) from a
    # serialized byte stream.
    class DataReader
      # Underlying IO-like object positioned at the current read offset.
      attr_reader :io

      # +data+ may be an IO-like object, a hex string (default) or raw bytes.
      def initialize(data, hex = true)
        @io = data_to_readable(data, hex)
      end

      # Unsigned 8-bit integer.
      def read_uint8
        read(1, 'C')
      end
      alias read_byte read_uint8

      # Unsigned 16-bit little-endian integer.
      def read_uint16
        read(2, 'v')
      end

      # Unsigned 32-bit little-endian integer.
      def read_uint32
        read(4, 'V')
      end

      # Unsigned 64-bit little-endian integer.
      def read_uint64
        read(8, 'Q<')
      end

      # A single byte interpreted as a boolean (non-zero => true).
      def read_bool
        read_byte != 0
      end

      # Variable-length integer: a one-byte value, or a 0xfd/0xfe/0xff
      # marker followed by a 16/32/64-bit little-endian value.
      def read_vint
        marker = read_byte
        if marker == 0xfd
          read_uint16
        elsif marker == 0xfe
          read_uint32
        elsif marker == 0xff
          read_uint64
        else
          marker
        end
      end

      # Byte string whose length is given by a vint prefix.
      def read_string
        @io.read(read_vint)
      end

      # Hex-encoded bytes; +length+ defaults to a vint prefix. Optionally
      # byte-reverses the hex (e.g. for little-endian hashes).
      def read_hex(length = nil, reverse = false)
        hex = read(length || read_vint, 'H*')
        reverse ? Utils.reverse_hex_string(hex) : hex
      end

      # Fixed-point value stored as a uint64 with 8 decimal places.
      def read_fixed8
        read_uint64 / 100_000_000.0
      end

      # UNIX timestamp stored as a uint32.
      def read_time
        Time.at(read_uint32)
      end

      # Reads +size+ bytes and unpacks them with the given pack format.
      def read(size, format)
        @io.read(size).unpack(format).first
      end

      # Hex dump of the full underlying buffer (assumes a StringIO backing).
      def inspect
        @io.string.unpack('H*').first
      end

      # Repositions the stream to an absolute byte offset.
      def move_to(position)
        @io.seek(position, IO::SEEK_SET)
      end

      private

      # Wraps raw input in a StringIO unless it already behaves like an IO.
      def data_to_readable(data, hex)
        return data if data.respond_to?(:read)
        StringIO.new(hex ? [data].pack('H*') : data)
      end
    end
  end
end
| 18.26506 | 53 | 0.561346 |
7941f239de115fe6512566d0ca4e8188888220e7
| 1,042 |
# Copyright (c) 2009-2012 VMware, Inc.
module Bosh::Agent
class HeartbeatProcessor
MAX_OUTSTANDING_HEARTBEATS = 2
def enable(interval)
unless EM.reactor_running?
raise Bosh::Agent::HeartbeatError, "Event loop must be running in order to enable heartbeats"
end
if @timer
Config.logger.warn("Heartbeat timer already running, canceling")
disable
end
@pending = 0
@timer = EM.add_periodic_timer(interval) do
beat
end
end
def disable
Config.logger.info("Disabled heartbeats")
@timer.cancel if @timer
@timer = nil
end
def beat
raise HeartbeatError, "#{@pending} outstanding heartbeat(s)" if @pending > MAX_OUTSTANDING_HEARTBEATS
Heartbeat.new.send_via_mbus do
@pending -= 1
end
@pending += 1
rescue => e
Config.logger.warn("Error sending heartbeat: #{e}")
Config.logger.warn(e.backtrace.join("\n"))
raise e if @pending > MAX_OUTSTANDING_HEARTBEATS
end
end
end
| 22.652174 | 107 | 0.640115 |
26aac9e98b8db67d0b7e4d8d50ad5f69e4293e3c
| 8,520 |
# Sonic Mines - Minesweeper for Sonic Pi
# Created for Novation Launchpad Mini Mk3

use_debug false
use_midi_logging false

# Randomize seed and starting point.
# NOTE: this must be a function *call* — the previous `use_random_seed = ...`
# only created an unused local variable and never actually set the seed.
use_random_seed Time.now.to_i
SecureRandom.random_number(1000).times { rand }

# Midi ports for the launchpad
launchpad_in = "/midi:midiin2_(lpminimk3_midi)_1:1/*"
launchpad_out = "midiout2_(lpminimk3_midi)_2"
midi_clock_beat 0.5, port: launchpad_out
# Put the Launchpad Mini Mk3 into "programmer mode" (raw pad access)
define :set_programmer_mode do
  midi_sysex 0xf0, 0x00, 0x20, 0x29, 0x02, 0x0D, 0x0E, 0x01, 0xf7
end

# Light up multiple leds from novation launchpad.
# values is a flat list of [lighting_type, pad_id, color...] groups.
define :led_sysex do |values|
  midi_sysex 0xf0, 0x00, 0x20, 0x29, 0x02, 0x0d, 0x03, *values, 0xf7, port: launchpad_out
end

# Stop any scrolling text currently shown on the pad
define :stop_text do
  midi_sysex 0xf0, 0x00, 0x20, 0x29, 0x02, 0x0d, 0x07, 0xf7
end

# Helper method for defining midi rgb: scales 0..255 components to MIDI 0..127.
# Nice color picker: https://www.rapidtables.com/web/color/html-color-codes.html
define :rgb do |r,g,b|
  [((127*r)/255),((127*g/255)),((127*b)/255)]
end

# Scroll text on novation launchpad.
# loop: repeat flag, speed: scroll speed, rgb: text color (0..127 components).
define :scroll_text do |text, loop=0x01,speed=0x07,rgb=[127,127,127]|
  text = text.chars.map { |b| b.ord }
  midi_sysex 0xf0, 0x00, 0x20, 0x29, 0x02, 0x0d, 0x07, loop, speed, 0x01, *rgb, *text, 0xf7
end
# Rainbow colors for the side/edge buttons: the 12-color palette rotates one
# step along the edge cells every 0.1s tick.
live_loop :rainbow do
  rainbows = (ring rgb(255, 0, 0), rgb(255, 128, 0), rgb(255, 255, 0), rgb(128, 255, 0), rgb(0, 255, 0), rgb(0, 255, 128), rgb(0, 255, 255), rgb(0, 128, 255), rgb(0, 0, 255),rgb(128, 0, 255), rgb(255, 0, 255), rgb(255, 0, 128))
  # Pad ids of the edge buttons (top row and right column)
  cells = [91,92,93,94,95,96,97,98,89,79,69,59,49,39,29,19]
  pad_colors = []
  n = tick
  cells.each.with_index do |c,i|
    pad_colors+=[0x03, c, *rainbows[n-i]]
  end
  led_sysex pad_colors
  sleep 0.1
end
# Set a single cell flashing between two palette colors c1/c2.
# Pad id is built from the coordinates as "<x><y>".
define :set_cell_flash do |x, y, c1, c2|
  cell = (x.to_s+y.to_s).to_i
  values = [0x01, cell, c1, c2]
  led_sysex values
end

# Flash multiple cells (hashes with :x/:y keys) with palette colors a/b
define :flash_cells do |cells, a, b|
  cells.each do |cell|
    set_cell_flash cell[:x], cell[:y], a, b
  end
end

# Set a single cell to a static [r,g,b] color (0..127 components)
define :set_cell_color do |x, y, rgb|
  cell = (x.to_s+y.to_s).to_i
  values = [0x03, cell, *rgb]
  led_sysex values
end

# Set multiple pad ids to a single palette color number in one sysex message
define :set_colors_from_palette do |arr, number|
  pad_colors = []
  arr.each do |cell|
    cell_color = [0x00, cell, number]
    pad_colors = pad_colors+cell_color
  end
  led_sysex pad_colors
end
# Set every cell of the 8x8 board matrix to a single [r,g,b] color in one
# sysex message. Pad id is "<row><col>" with rows counted from the top.
define :set_pad_colors do |matrix,rgb|
  pad_colors = []
  matrix.length.times do |x|
    row = matrix[x]
    row.length.times do |y|
      # (removed an unused `cell = matrix[x][y]` local — the cell contents
      # are irrelevant here, only the pad id and the uniform color are sent)
      cell_color = [0x03, ((matrix.length-x).to_s+(y+1).to_s).to_i, *rgb]
      pad_colors = pad_colors+cell_color
    end
  end
  led_sysex pad_colors
end
# Creates rgb from probability based on the color scheme: linearly
# interpolates between the two palette stops surrounding prob (0.0..1.0).
define :prob_to_color do |prob|
  # Try with different color schemes
  colors = [
    rgb(255, 0, 0), rgb(255, 128, 0), rgb(255, 0, 128), rgb(255, 0, 255), rgb(128, 0, 255), rgb(0, 0, 255)
    # rgb(255,0,0), rgb(255,0,255), rgb(55,55,55)
  ]
  # Index of the first palette stop whose position is >= prob
  index = colors.index.with_index do |col,i|
    prob <= i.to_f/(colors.length-1)
  end
  lower = colors[index-1]
  upper = colors[index]
  upperProb = index.to_f/(colors.length-1)
  lowerProb = (index-1).to_f/(colors.length-1)
  # Blend weights for the upper (u) and lower (l) stops
  u = (prob - lowerProb) / (upperProb - lowerProb)
  l = 1 - u
  [(lower[0]*l + upper[0]*u).to_i, (lower[1]*l + upper[1]*u).to_i, (lower[2]*l + upper[2]*u).to_i].map {|color| ((127*color)/255) }
end

# Color the (up to 8) neighbors of board cell [x,y] (0-based matrix indices)
# according to their mine probability; visited cells (value nil) are skipped.
define :set_neighbor_colors do |matrix, x, y|
  n = []
  (x-1).upto(x+1) do |a|
    (y-1).upto(y+1) do |b|
      # NOTE(review): negative indices wrap in Ruby, so cells on row/col 0
      # treat the opposite edge as a neighbor here — likely unintended; confirm.
      n.push([a,b]) if !(a==x and b==y) and matrix[a] and matrix[a][b]
    end
  end
  #TODO: Color neighbors based on lowest value or something
  #l = n.min {|a,b| matrix[a[0]][a[1]][:value] <=> matrix[b[0]][b[1]][:value] }
  #lowest = matrix[l[0]][l[1]][:value] if l
  n.each do |xy|
    prob = matrix[xy[0]][xy[1]][:value]
    set_cell_color xy[0]+1, xy[1]+1, prob_to_color(prob) if prob
  end
end
# Get sync type ("note_on" / "control_change") from the latest midi event
# at the given cue address; returns "error" when the event can't be parsed.
define :sync_type do |address|
  v = get_event(address).to_s.split(",")[6]
  if v != nil
    return v[3..-2].split("/")[1]
  else
    return "error"
  end
end

# Explode mine: layered boom samples + flash the hit cell (palette colors 72/6)
define :explode do |x,y|
  sample :ambi_choir, attack: 1.5, decay: 3.0, beat_stretch: 4
  sample :misc_cineboom, start: 0.2
  sample :vinyl_rewind
  set_cell_flash x, y, 72, 6
end

# Evade mine: reward sounds, mark the cell green and color its neighbors
# by their mine probability
define :evade do |x,y|
  sample :guit_harmonics, amp: 3
  sample :mehackit_robot3
  set_cell_color x, y, rgb(0,255,0)
  set_neighbor_colors $game[:board], x-1, y-1
end
# Build a fresh 8x8 board, reset the pad display and shared state, and return
# the new game-state hash (also assigned to $game below).
define :start_game do
  # Change this to make game harder (probability that a cell is a mine)
  chance_to_explode = 0.15
  # Init new game
  stop_text # Stop texts if running
  set_programmer_mode # Set programmer mode
  board = (1..8).map {|x| (1..8).map {|y| {x: x, y: y, value: rand}}} # Create new board
  set_pad_colors board, rgb(0,0,0) # Set color
  set_colors_from_palette [91,92,93,94,95,96,97,98,89,79,69,59,49,39,29,19], 45
  set :state, :relax
  set :game_over, false
  # Use chance_to_explode here (was a hard-coded 0.15, which would silently
  # desync the mine list from the explosion check if the constant changed).
  mines = board.map{|row| row.select {|x| x[:value] < chance_to_explode }}.flatten
  new_game = {board: board, hits: 0, explode_prob: chance_to_explode, hits_to_win: 64-mines.length, mines: mines }
  new_game
end

# Start a new game
$game = start_game
# Thread for listening events from the novation launchpad
live_loop :sonicmines do
  use_real_time
  # midi note is touch position 11, 12, 13 ...
  # midi velocity is touch 127=on 0=off
  pad, touch = sync launchpad_in
  # note_on = pads, control_change = options
  type = sync_type launchpad_in
  xy = pad.to_s.chars
  x = xy[0].to_i
  y = xy[1].to_i
  if type=="note_on"
    cell_prob = $game[:board][x-1][y-1][:value]
    if touch==0 # Touch off
      if cell_prob # Unvisited cell (visited cells have a nil value)
        if cell_prob < $game[:explode_prob]
          cue :game_event, type: :explosion, x: x, y: y
        else
          cue :game_event, type: :evade, x: x, y: y
        end
      end
    else # Touch on
      if cell_prob
        set_cell_flash x, y, 18, 5
        set :state, :exited
      end
    end
  elsif type=="control_change"
    # Restart when both corner option buttons (19 and 91) are held.
    # FIX: the original line ended with a dangling `and`, which folded the
    # assignment below into the if-condition and left the body empty — it
    # only worked because the assignment is truthy. Now the intent is explicit.
    if get_pad_status(19) and get_pad_status(91)
      $game = start_game # Start new game
    end
    if touch==0
      set_pad_status pad, false
    else
      set_pad_status pad, true
    end
  end
end
# Remember whether option pad +id+ is currently held (shared state key "pad_<id>")
define :set_pad_status do |id, bool|
  set ("pad_"+id.to_s).to_sym, bool
end

# Read back the held/released state of option pad +id+
define :get_pad_status do |id|
  get ("pad_"+id.to_s).to_sym
end
# Game-logic thread: reacts to :game_event cues (:explosion / :evade),
# keeps the score and triggers win/lose sequences.
live_loop :check_events do
  use_real_time
  event = sync :game_event # Get game event
  x = event[:x]
  y = event[:y]
  game_over = get :game_over
  if !game_over then
    in_thread do
      sleep 2 # Exitement
      if event[:type] == :explosion then
        explode x, y
        set :game_over, true
        flash_cells $game[:mines], 72, 6
        sleep 3
        if get(:game_over) then # If new game hasnt started yet
          scroll_text "BOOM !", 1, 15, rgb(178,34,34)
          sleep 3
          $game = start_game
        end
      else
        evade x, y
        $game[:hits]+=1
        $game[:board][x-1][y-1][:value] = nil # Add visit to matrix
        if $game[:hits]>=$game[:hits_to_win] then
          set :game_over, true
          set :state, :happy
          sleep 3
          if (get :game_over) then
            scroll_text "WINNER! \^.^/", 1, 15, rgb(255,255,0)
            sleep 3
            $game = start_game
          end
        else
          print "Hits remaining: "+($game[:hits_to_win]-$game[:hits]).to_s
          set :state, :relax
        end
      end
    end
  end
end
# Note pools per mood; the melodic scales are reshuffled periodically below
exited = (ring 75,76,77,76)
relax = (scale :a3, :gong).shuffle
happy = (scale :a4, :major_pentatonic).shuffle
sad = (scale :a3, :acem_asiran).shuffle

# Thread for creating exiting music from the game state
live_loop :music do
  state = get(:state)
  tick
  synth :dull_bell, note: exited.look if state==:exited
  synth :pretty_bell, amp: 0.5, note: relax.look if state==:relax
  synth :chiplead, note: happy.look if state==:happy
  # NOTE(review): no code in this file ever sets :state to :explosion,
  # so this synth may be unreachable — confirm intent.
  synth :dark_ambience, note: sad.look if state==:explosion
  # Percussion via euclidean spread patterns
  sample :drum_heavy_kick if spread(1,4).look
  sample :drum_tom_hi_soft, amp: 0.5 if spread(4,23).rotate(1).look
  sample :glitch_perc3, amp: 0.5 if spread(1,36).rotate(-6).look
  sample :elec_pop, amp: 0.5 if spread(1, 16).rotate(3).look
  sleep 0.25
  if rand>0.5
    relax = relax.shuffle
    happy = happy.shuffle
    sad = sad.shuffle
  end
end
| 27.220447 | 227 | 0.642254 |
acfd31d03bbe57e873389240f6da585558705be0
| 74 |
# frozen_string_literal: true

# Top-level namespace for the Callcounter gem.
module Callcounter
  # Current release version of the gem.
  VERSION = '0.1.4'
end
| 12.333333 | 29 | 0.743243 |
266deb6499da3cc8e8a1415465bc73fcf1ec10cd
| 478 |
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::Macie
class Resource
# @param options ({})
# @option options [Client] :client
def initialize(options = {})
@client = options[:client] || Client.new(options)
end
# @return [Client]
def client
@client
end
end
end
| 19.916667 | 74 | 0.667364 |
26086ed6d54beb087c15f794759771ff32843d1b
| 27,833 |
# A module with bindings between the new evaluator and the 3x runtime.
# The intention is to separate all calls into scope, compiler, resource, etc. in this module
# to make it easier to later refactor the evaluator for better implementations of the 3x classes.
#
# @api private
module Puppet::Pops::Evaluator::Runtime3Support
NAME_SPACE_SEPARATOR = '::'.freeze
# Fails the evaluation of _semantic_ with a given issue.
#
# @param issue [Puppet::Pops::Issue] the issue to report
# @param semantic [Puppet::Pops::ModelPopsObject] the object for which evaluation failed in some way. Used to determine origin.
# @param options [Hash] hash of optional named data elements for the given issue
# @return [!] this method does not return
# @raise [Puppet::ParseError] an evaluation error initialized from the arguments (TODO: Change to EvaluationError?)
#
def fail(issue, semantic, options={}, except=nil)
optionally_fail(issue, semantic, options, except)
# an error should have been raised since fail always fails
raise ArgumentError, "Internal Error: Configuration of runtime error handling wrong: should have raised exception"
end
# Optionally (based on severity) Fails the evaluation of _semantic_ with a given issue
# If the given issue is configured to be of severity < :error it is only reported, and the function returns.
#
# @param issue [Puppet::Pops::Issue] the issue to report
# @param semantic [Puppet::Pops::ModelPopsObject] the object for which evaluation failed in some way. Used to determine origin.
# @param options [Hash] hash of optional named data elements for the given issue
# @return [!] this method does not return
# @raise [Puppet::ParseError] an evaluation error initialized from the arguments (TODO: Change to EvaluationError?)
#
def optionally_fail(issue, semantic, options={}, except=nil)
if except.nil?
# Want a stacktrace, and it must be passed as an exception
begin
raise EvaluationError.new()
rescue EvaluationError => e
except = e
end
end
diagnostic_producer.accept(issue, semantic, options, except)
end
# Binds the given variable name to the given value in the given scope.
# The reference object `o` is intended to be used for origin information - the 3x scope implementation
# only makes use of location when there is an error. This is now handled by other mechanisms; first a check
# is made if a variable exists and an error is raised if attempting to change an immutable value. Errors
# in name, numeric variable assignment etc. have also been validated prior to this call. In the event the
# scope.setvar still raises an error, the general exception handling for evaluation of the assignment
# expression knows about its location. Because of this, there is no need to extract the location for each
# setting (extraction is somewhat expensive since 3x requires line instead of offset).
#
def set_variable(name, value, o, scope)
# Scope also checks this but requires that location information are passed as options.
# Those are expensive to calculate and a test is instead made here to enable failing with better information.
# The error is not specific enough to allow catching it - need to check the actual message text.
# TODO: Improve the messy implementation in Scope.
#
if scope.bound?(name)
if Puppet::Parser::Scope::RESERVED_VARIABLE_NAMES.include?(name)
fail(Puppet::Pops::Issues::ILLEGAL_RESERVED_ASSIGNMENT, o, {:name => name} )
else
fail(Puppet::Pops::Issues::ILLEGAL_REASSIGNMENT, o, {:name => name} )
end
end
scope.setvar(name, value)
end
# Returns the value of the variable (nil is returned if variable has no value, or if variable does not exist)
#
def get_variable_value(name, o, scope)
# Puppet 3x stores all variables as strings (then converts them back to numeric with a regexp... to see if it is a match variable)
# Not ideal, scope should support numeric lookup directly instead.
# TODO: consider fixing scope
catch(:undefined_variable) {
x = scope.lookupvar(name.to_s)
# Must convert :undef back to nil - this can happen when an undefined variable is used in a
# parameter's default value expression - there nil must be :undef to work with the rest of 3x.
# Now that the value comes back to 4x it is changed to nil.
return (x == :undef) ? nil : x
}
# It is always ok to reference numeric variables even if they are not assigned. They are always undef
# if not set by a match expression.
#
unless name =~ Puppet::Pops::Patterns::NUMERIC_VAR_NAME
fail(Puppet::Pops::Issues::UNKNOWN_VARIABLE, o, {:name => name})
end
end
# Returns true if the variable of the given name is set in the given most nested scope. True is returned even if
# variable is bound to nil.
#
def variable_bound?(name, scope)
scope.bound?(name.to_s)
end
# Returns true if the variable is bound to a value or nil, in the scope or it's parent scopes.
#
def variable_exists?(name, scope)
scope.exist?(name.to_s)
end
def set_match_data(match_data, scope)
# See set_variable for rationale for not passing file and line to ephemeral_from.
# NOTE: The 3x scope adds one ephemeral(match) to its internal stack per match that succeeds ! It never
# clears anything. Thus a context that performs many matches will get very deep (there simply is no way to
# clear the match variables without rolling back the ephemeral stack.)
# This implementation does not attempt to fix this, it behaves the same bad way.
unless match_data.nil?
scope.ephemeral_from(match_data)
end
end
# Creates a local scope with vairalbes set from a hash of variable name to value
#
def create_local_scope_from(hash, scope)
# two dummy values are needed since the scope tries to give an error message (can not happen in this
# case - it is just wrong, the error should be reported by the caller who knows in more detail where it
# is in the source.
#
raise ArgumentError, "Internal error - attempt to create a local scope without a hash" unless hash.is_a?(Hash)
scope.ephemeral_from(hash)
end
# Creates a nested match scope
def create_match_scope_from(scope)
# Create a transparent match scope (for future matches)
scope.new_match_scope(nil)
end
def get_scope_nesting_level(scope)
scope.ephemeral_level
end
def set_scope_nesting_level(scope, level)
# Yup, 3x uses this method to reset the level, it also supports passing :all to destroy all
# ephemeral/local scopes - which is a sure way to create havoc.
#
scope.unset_ephemeral_var(level)
end
# Adds a relationship between the given `source` and `target` of the given `relationship_type`
# @param source [Puppet:Pops::Types::PCatalogEntryType] the source end of the relationship (from)
# @param target [Puppet:Pops::Types::PCatalogEntryType] the target end of the relationship (to)
# @param relationship_type [:relationship, :subscription] the type of the relationship
#
def add_relationship(source, target, relationship_type, scope)
# The 3x way is to record a Puppet::Parser::Relationship that is evaluated at the end of the compilation.
# This means it is not possible to detect any duplicates at this point (and signal where an attempt is made to
# add a duplicate. There is also no location information to signal the original place in the logic. The user will have
# to go fish.
# The 3.x implementation is based on Strings :-o, so the source and target must be transformed. The resolution is
# done by Catalog#resource(type, title). To do that, it creates a Puppet::Resource since it is responsible for
# translating the name/type/title and create index-keys used by the catalog. The Puppet::Resource has bizarre parsing of
# the type and title (scan for [] that is interpreted as type/title (but it gets it wrong).
# Moreover if the type is "" or "component", the type is Class, and if the type is :main, it is :main, all other cases
# undergo capitalization of name-segments (foo::bar becomes Foo::Bar). (This was earlier done in the reverse by the parser).
# Further, the title undergoes the same munging !!!
#
# That bug infested nest of messy logic needs serious Exorcism!
#
# Unfortunately it is not easy to simply call more intelligent methods at a lower level as the compiler evaluates the recorded
# Relationship object at a much later point, and it is responsible for invoking all the messy logic.
#
# TODO: Revisit the below logic when there is a sane implementation of the catalog, compiler and resource. For now
# concentrate on transforming the type references to what is expected by the wacky logic.
#
# HOWEVER, the Compiler only records the Relationships, and the only method it calls is @relationships.each{|x| x.evaluate(catalog) }
# Which means a smarter Relationship class could do this right. Instead of obtaining the resource from the catalog using
# the borked resource(type, title) which creates a resource for the purpose of looking it up, it needs to instead
# scan the catalog's resources
#
# GAAAH, it is even worse!
# It starts in the parser, which parses "File['foo']" into an AST::ResourceReference with type = File, and title = foo
# This AST is evaluated by looking up the type/title in the scope - causing it to be loaded if it exists, and if not, the given
# type name/title is used. It does not search for resource instances, only classes and types. It returns symbolic information
# [type, [title, title]]. From this, instances of Puppet::Resource are created and returned. These only have type/title information
# filled out. One or an array of resources are returned.
# This set of evaluated (empty reference) Resource instances are then passed to the relationship operator. It creates a
# Puppet::Parser::Relationship giving it a source and a target that are (empty reference) Resource instances. These are then remembered
# until the relationship is evaluated by the compiler (at the end). When evaluation takes place, the (empty reference) Resource instances
# are converted to String (!?! WTF) on the simple format "#{type}[#{title}]", and the catalog is told to find a resource, by giving
# it this string. If it cannot find the resource it fails, else the before/notify parameter is appended with the target.
# The search for the resource begin with (you guessed it) again creating an (empty reference) resource from type and title (WTF?!?!).
# The catalog now uses the reference resource to compute a key [r.type, r.title.to_s] and also gets a uniqueness key from the
# resource (This is only a reference type created from title and type). If it cannot find it with the first key, it uses the
# uniqueness key to lookup.
#
# This is probably done to allow a resource type to munge/translate the title in some way (but it is quite unclear from the long
# and convoluted path of evaluation.
# In order to do this in a way that is similar to 3.x two resources are created to be used as keys.
#
# And if that is not enough, a source/target may be a Collector (a baked query that will be evaluated by the
# compiler - it is simply passed through here for processing by the compiler at the right time).
#
if source.is_a?(Puppet::Parser::Collector)
# use verbatim - behavior defined by 3x
source_resource = source
else
# transform into the wonderful String representation in 3x
type, title = catalog_type_to_split_type_title(source)
source_resource = Puppet::Resource.new(type, title)
end
if target.is_a?(Puppet::Parser::Collector)
# use verbatim - behavior defined by 3x
target_resource = target
else
# transform into the wonderful String representation in 3x
type, title = catalog_type_to_split_type_title(target)
target_resource = Puppet::Resource.new(type, title)
end
# Add the relationship to the compiler for later evaluation.
scope.compiler.add_relationship(Puppet::Parser::Relationship.new(source_resource, target_resource, relationship_type))
end
# Coerce value `v` to numeric or fails.
# The given value `v` is coerced to Numeric, and if that fails the operation
# calls {#fail}.
# @param v [Object] the value to convert
# @param o [Object] originating instruction
# @param scope [Object] the (runtime specific) scope where evaluation of o takes place
# @return [Numeric] value `v` converted to Numeric.
#
def coerce_numeric(v, o, scope)
unless n = Puppet::Pops::Utils.to_n(v)
fail(Puppet::Pops::Issues::NOT_NUMERIC, o, {:value => v})
end
n
end
def call_function(name, args, o, scope)
Puppet::Util::Profiler.profile("Called #{name}", [:functions, name]) do
# Call via 4x API if the function exists there
loaders = scope.compiler.loaders
# find the loader that loaded the code, or use the private_environment_loader (sees env + all modules)
adapter = Puppet::Pops::Utils.find_adapter(o, Puppet::Pops::Adapters::LoaderAdapter)
loader = adapter.nil? ? loaders.private_environment_loader : adapter.loader
if loader && func = loader.load(:function, name)
return func.call(scope, *args)
end
# Call via 3x API if function exists there
fail(Puppet::Pops::Issues::UNKNOWN_FUNCTION, o, {:name => name}) unless Puppet::Parser::Functions.function(name)
# Arguments must be mapped since functions are unaware of the new and magical creatures in 4x.
# NOTE: Passing an empty string last converts nil/:undef to empty string
mapped_args = args.map {|a| convert(a, scope, '') }
result = scope.send("function_#{name}", mapped_args)
# Prevent non r-value functions from leaking their result (they are not written to care about this)
Puppet::Parser::Functions.rvalue?(name) ? result : nil
end
end
# The o is used for source reference
def create_resource_parameter(o, scope, name, value, operator)
file, line = extract_file_line(o)
Puppet::Parser::Resource::Param.new(
:name => name,
:value => convert(value, scope, nil), # converted to 3x since 4x supports additional objects / types
:source => scope.source, :line => line, :file => file,
:add => operator == :'+>'
)
end
CLASS_STRING = 'class'.freeze
def create_resources(o, scope, virtual, exported, type_name, resource_titles, evaluated_parameters)
# TODO: Unknown resource causes creation of Resource to fail with ArgumentError, should give
# a proper Issue. Now the result is "Error while evaluating a Resource Statement" with the message
# from the raised exception. (It may be good enough).
# resolve in scope.
fully_qualified_type, resource_titles = scope.resolve_type_and_titles(type_name, resource_titles)
# Not 100% accurate as this is the resource expression location and each title is processed separately
# The titles are however the result of evaluation and they have no location at this point (an array
# of positions for the source expressions are required for this to work).
# TODO: Revisit and possible improve the accuracy.
#
file, line = extract_file_line(o)
# Build a resource for each title
resource_titles.map do |resource_title|
resource = Puppet::Parser::Resource.new(
fully_qualified_type, resource_title,
:parameters => evaluated_parameters,
:file => file,
:line => line,
:exported => exported,
:virtual => virtual,
# WTF is this? Which source is this? The file? The name of the context ?
:source => scope.source,
:scope => scope,
:strict => true
)
if resource.resource_type.is_a? Puppet::Resource::Type
resource.resource_type.instantiate_resource(scope, resource)
end
scope.compiler.add_resource(scope, resource)
scope.compiler.evaluate_classes([resource_title], scope, false, true) if fully_qualified_type == CLASS_STRING
# Turn the resource into a PType (a reference to a resource type)
# weed out nil's
resource_to_ptype(resource)
end
end
# Defines default parameters for a type with the given name.
#
def create_resource_defaults(o, scope, type_name, evaluated_parameters)
# Note that name must be capitalized in this 3x call
# The 3x impl creates a Resource instance with a bogus title and then asks the created resource
# for the type of the name.
# Note, locations are available per parameter.
#
scope.define_settings(capitalize_qualified_name(type_name), evaluated_parameters)
end
# Capitalizes each segment of a qualified name
#
def capitalize_qualified_name(name)
name.split(/::/).map(&:capitalize).join(NAME_SPACE_SEPARATOR)
end
# Creates resource overrides for all resource type objects in evaluated_resources. The same set of
# evaluated parameters are applied to all.
#
def create_resource_overrides(o, scope, evaluated_resources, evaluated_parameters)
# Not 100% accurate as this is the resource expression location and each title is processed separately
# The titles are however the result of evaluation and they have no location at this point (an array
# of positions for the source expressions are required for this to work.
# TODO: Revisit and possible improve the accuracy.
#
file, line = extract_file_line(o)
evaluated_resources.each do |r|
unless r.is_a?(Puppet::Pops::Types::PResourceType) && r.type_name != 'class'
fail(Puppet::Pops::Issues::ILLEGAL_OVERRIDEN_TYPE, o, {:actual => r} )
end
resource = Puppet::Parser::Resource.new(
r.type_name, r.title,
:parameters => evaluated_parameters,
:file => file,
:line => line,
# WTF is this? Which source is this? The file? The name of the context ?
:source => scope.source,
:scope => scope
)
scope.compiler.add_override(resource)
end
end
# Finds a resource given a type and a title.
#
def find_resource(scope, type_name, title)
scope.compiler.findresource(type_name, title)
end
# Returns the value of a resource's parameter by first looking up the parameter in the resource
# and then in the defaults for the resource. Since the resource exists (it must in order to look up its
# parameters, any overrides have already been applied). Defaults are not applied to a resource until it
# has been finished (which typically has not taken place when this is evaluated; hence the dual lookup).
#
def get_resource_parameter_value(scope, resource, parameter_name)
# This gets the parameter value, or nil (for both valid parameters and parameters that do not exist).
val = resource[parameter_name]
# Sometimes the resource is a Puppet::Parser::Resource and sometimes it is
# a Puppet::Resource. The Puppet::Resource case occurs when puppet language
# is evaluated against an already completed catalog (where all instances of
# Puppet::Parser::Resource are converted to Puppet::Resource instances).
# Evaluating against an already completed catalog is really only found in
# the language specification tests, where the puppet language is used to
# test itself.
if resource.is_a?(Puppet::Parser::Resource)
# The defaults must be looked up in the scope where the resource was created (not in the given
# scope where the lookup takes place.
resource_scope = resource.scope
if val.nil? && resource_scope && defaults = resource_scope.lookupdefaults(resource.type)
# NOTE: 3x resource keeps defaults as hash using symbol for name as key to Parameter which (again) holds
# name and value.
# NOTE: meta parameters that are unset ends up here, and there are no defaults for those encoded
# in the defaults, they may receive hardcoded defaults later (e.g. 'tag').
param = defaults[parameter_name.to_sym]
# Some parameters (meta parameters like 'tag') does not return a param from which the value can be obtained
# at all times. Instead, they return a nil param until a value has been set.
val = param.nil? ? nil : param.value
end
end
val
end
# Returns true, if the given name is the name of a resource parameter.
#
def is_parameter_of_resource?(scope, resource, name)
return false unless name.is_a?(String)
resource.valid_parameter?(name)
end
def resource_to_ptype(resource)
nil if resource.nil?
# inference returns the meta type since the 3x Resource is an alternate way to describe a type
type_calculator.infer(resource).type
end
# This is the same type of "truth" as used in the current Puppet DSL.
#
def is_true? o
# Is the value true? This allows us to control the definition of truth
# in one place.
case o
# Support :undef since it may come from a 3x structure
when :undef
false
else
!!o
end
end
# Utility method for TrueClass || FalseClass
# @param x [Object] the object to test if it is instance of TrueClass or FalseClass
def is_boolean? x
x.is_a?(TrueClass) || x.is_a?(FalseClass)
end
def initialize
@@convert_visitor ||= Puppet::Pops::Visitor.new(self, "convert", 2, 2)
@@convert2_visitor ||= Puppet::Pops::Visitor.new(self, "convert2", 2, 2)
end
# Converts 4x supported values to 3x values. This is required because
# resources and other objects do not know about the new type system, and does not support
# regular expressions. Unfortunately this has to be done for array and hash as well.
# A complication is that catalog types needs to be resolved against the scope.
#
def convert(o, scope, undef_value)
@@convert_visitor.visit_this_2(self, o, scope, undef_value)
end
# Converts nested 4x supported values to 3x values. This is required because
# resources and other objects do not know about the new type system, and does not support
# regular expressions. Unfortunately this has to be done for array and hash as well.
# A complication is that catalog types needs to be resolved against the scope.
#
def convert2(o, scope, undef_value)
@@convert2_visitor.visit_this_2(self, o, scope, undef_value)
end
def convert_NilClass(o, scope, undef_value)
undef_value
end
def convert2_NilClass(o, scope, undef_value)
:undef
end
def convert_String(o, scope, undef_value)
# although wasteful, needed because user code may mutate these strings in Resources
o.frozen? ? o.dup : o
end
alias convert2_String :convert_String
def convert_Object(o, scope, undef_value)
o
end
alias :convert2_Object :convert_Object
def convert_Array(o, scope, undef_value)
o.map {|x| convert2(x, scope, undef_value) }
end
alias :convert2_Array :convert_Array
def convert_Hash(o, scope, undef_value)
result = {}
o.each {|k,v| result[convert2(k, scope, undef_value)] = convert2(v, scope, undef_value) }
result
end
alias :convert2_Hash :convert_Hash
def convert_Regexp(o, scope, undef_value)
# Puppet 3x cannot handle parameter values that are reqular expressions. Turn into regexp string in
# source form
o.inspect
end
alias :convert2_Regexp :convert_Regexp
def convert_Symbol(o, scope, undef_value)
case o
# Support :undef since it may come from a 3x structure
when :undef
undef_value # 3x wants undef as either empty string or :undef
else
o # :default, and all others are verbatim since they are new in future evaluator
end
end
# The :undef symbol should not be converted when nested in arrays or hashes
def convert2_Symbol(o, scope, undef_value)
o
end
def convert_PAnyType(o, scope, undef_value)
o
end
alias :convert2_PAnyType :convert_PAnyType
def convert_PCatalogEntryType(o, scope, undef_value)
# Since 4x does not support dynamic scoping, all names are absolute and can be
# used as is (with some check/transformation/mangling between absolute/relative form
# due to Puppet::Resource's idiosyncratic behavior where some references must be
# absolute and others cannot be.
# Thus there is no need to call scope.resolve_type_and_titles to do dynamic lookup.
Puppet::Resource.new(*catalog_type_to_split_type_title(o))
end
alias :convert2_PCatalogEntryType :convert_PCatalogEntryType
private
# Produces an array with [type, title] from a PCatalogEntryType
# This method is used to produce the arguments for creation of reference resource instances
# (used when 3x is operating on a resource).
# Ensures that resources are *not* absolute.
#
def catalog_type_to_split_type_title(catalog_type)
split_type = catalog_type.is_a?(Puppet::Pops::Types::PType) ? catalog_type.type : catalog_type
case split_type
when Puppet::Pops::Types::PHostClassType
class_name = split_type.class_name
['class', class_name.nil? ? nil : class_name.sub(/^::/, '')]
when Puppet::Pops::Types::PResourceType
type_name = split_type.type_name
title = split_type.title
if type_name =~ /^(::)?[Cc]lass/
['class', title.nil? ? nil : title.sub(/^::/, '')]
else
# Ensure that title is '' if nil
# Resources with absolute name always results in error because tagging does not support leading ::
[type_name.nil? ? nil : type_name.sub(/^::/, ''), title.nil? ? '' : title]
end
else
raise ArgumentError, "Cannot split the type #{catalog_type.class}, it represents neither a PHostClassType, nor a PResourceType."
end
end
def extract_file_line(o)
source_pos = Puppet::Pops::Utils.find_closest_positioned(o)
return [nil, -1] unless source_pos
[source_pos.locator.file, source_pos.line]
end
def find_closest_positioned(o)
return nil if o.nil? || o.is_a?(Puppet::Pops::Model::Program)
o.offset.nil? ? find_closest_positioned(o.eContainer) : Puppet::Pops::Adapters::SourcePosAdapter.adapt(o)
end
# Creates a diagnostic producer
def diagnostic_producer
Puppet::Pops::Validation::DiagnosticProducer.new(
ExceptionRaisingAcceptor.new(), # Raises exception on all issues
SeverityProducer.new(), # All issues are errors
Puppet::Pops::Model::ModelLabelProvider.new())
end
# Configure the severity of failures
class SeverityProducer < Puppet::Pops::Validation::SeverityProducer
Issues = Puppet::Pops::Issues
def initialize
super
p = self
# Issues triggering warning only if --debug is on
if Puppet[:debug]
p[Issues::EMPTY_RESOURCE_SPECIALIZATION] = :warning
else
p[Issues::EMPTY_RESOURCE_SPECIALIZATION] = :ignore
end
# Store config issues, ignore or warning
p[Issues::RT_NO_STORECONFIGS_EXPORT] = Puppet[:storeconfigs] ? :ignore : :warning
p[Issues::RT_NO_STORECONFIGS] = Puppet[:storeconfigs] ? :ignore : :warning
end
end
# An acceptor of diagnostics that immediately raises an exception.
class ExceptionRaisingAcceptor < Puppet::Pops::Validation::Acceptor
def accept(diagnostic)
super
Puppet::Pops::IssueReporter.assert_and_report(self, {:message => "Evaluation Error:", :emit_warnings => true })
if errors?
raise ArgumentError, "Internal Error: Configuration of runtime error handling wrong: should have raised exception"
end
end
end
class EvaluationError < StandardError
end
end
| 46.234219 | 141 | 0.71426 |
339fd56f08dcafbee3b111d70c5bc4b4c9521c19
| 267 |
class AddColumnToAuctions < ActiveRecord::Migration
  # Adds auction bookkeeping columns: a "checked" review flag (defaulting to
  # false) and the id of the winning bid (nil until decided), plus an index
  # on each to speed up lookups.
  def change
    add_column :auctions, :checked, :boolean, default: false
    add_column :auctions, :winner, :integer, default: nil
    add_index :auctions, :checked
    add_index :auctions, :winner
  end
end
| 29.666667 | 63 | 0.719101 |
f72682da2004aed487b758e9ba6ec14b7d9e4474
| 4,677 |
# frozen_string_literal: true
require_relative 'event/adapters/active_support_notifications'
require_relative 'event/configuration'
require_relative 'event/subscriber'
module Spree
  # Facade over the configured event-bus adapter. Provides firing,
  # subscription and introspection of Solidus events; all real work is
  # delegated to Spree::Config.events.adapter.
  module Event
    extend self
    # Allows to trigger events that can be subscribed using #subscribe. An
    # optional block can be passed that will be executed immediately. The
    # actual code implementation is delegated to the adapter.
    #
    # @param [String] event_name the name of the event. The suffix ".spree"
    #   will be added automatically if not present
    # @param [Hash] opts a list of options to be passed to the triggered event
    # @yield [opts] optional block executed immediately within the event
    #
    # @example Trigger an event named 'order_finalized'
    #   Spree::Event.fire 'order_finalized', order: @order do
    #     @order.finalize!
    #   end
    def fire(event_name, opts = {})
      adapter.fire normalize_name(event_name), opts do
        yield opts if block_given?
      end
    end
    # Subscribe to an event with the given name. The provided block is executed
    # every time the subscribed event is fired.
    #
    # @param [String, Regexp] event_name the name of the event.
    #   When String, the suffix ".spree" will be added automatically if not present,
    #   when using the default adapter for ActiveSupportNotifications.
    #   When Regexp, due to the unpredictability of all possible regexp combinations,
    #   adding the suffix is developer's responsibility (if you don't, you will
    #   subscribe to all notifications, including internal Rails notifications
    #   as well).
    #
    # @see Spree::Event::Adapters::ActiveSupportNotifications#normalize_name
    #
    # @return a subscription object that can be used as reference in order
    #   to remove the subscription
    #
    # @example Subscribe to the `order_finalized` event
    #   Spree::Event.subscribe 'order_finalized' do |event|
    #     order = event.payload[:order]
    #     Spree::Mailer.order_finalized(order).deliver_later
    #   end
    #
    # @see Spree::Event#unsubscribe
    def subscribe(event_name, &block)
      name = normalize_name(event_name)
      # Remember the normalized name so #listeners can enumerate it later.
      listener_names << name
      adapter.subscribe(name, &block)
    end
    # Unsubscribes a whole event or a specific subscription object
    #
    # @param [String, Object] subscriber the event name as a string (with
    #   or without the ".spree" suffix) or the subscription object
    #
    # @example Unsubscribe a single subscription
    #   subscription = Spree::Event.fire 'order_finalized'
    #   Spree::Event.unsubscribe(subscription)
    # @example Unsubscribe all `order_finalized` event subscriptions
    #   Spree::Event.unsubscribe('order_finalized')
    # @example Unsubscribe an event by name with explicit prefix
    #   Spree::Event.unsubscribe('order_finalized.spree')
    def unsubscribe(subscriber)
      name_or_subscriber = subscriber.is_a?(String) ? normalize_name(subscriber) : subscriber
      adapter.unsubscribe(name_or_subscriber)
    end
    # Lists all subscriptions currently registered under the ".spree"
    # namespace. Actual implementation is delegated to the adapter
    #
    # @return [Hash] an hash with event names as keys and arrays of subscriptions
    #   as values
    #
    # @example Current subscriptions
    #   Spree::Event.listeners
    #   # => {"order_finalized.spree"=> [#<ActiveSupport...>],
    #   #     "reimbursement_reimbursed.spree"=> [#<ActiveSupport...>]}
    def listeners
      adapter.listeners_for(listener_names)
    end
    # The adapter used by Spree::Event, defaults to
    # Spree::Event::Adapters::ActiveSupportNotifications
    #
    # @example Change the adapter
    #   Spree::Config.events.adapter = "Spree::EventBus.new"
    #
    # @see Spree::AppConfiguration
    def adapter
      Spree::Config.events.adapter
    end
    # The suffix used for namespacing Solidus events, defaults to
    # `.spree`
    #
    # @deprecated use Event::Adapters::ActiveSupportNotifications#suffix instead
    # @see Spree::Event::Configuration#suffix
    def suffix
      Spree::Deprecation.warn "This method is deprecated and will be removed. Please use Event::Adapters::ActiveSupportNotifications#suffix"
      Spree::Config.events.suffix
    end
    # @!attribute [r] subscribers
    #   @return [Array<Spree::Event::Subscriber>] A list of subscribers used to support class reloading for Spree::Event::Subscriber instances
    def subscribers
      Spree::Config.events.subscribers
    end
    private
    # Delegates name normalization (e.g. appending the ".spree" suffix when
    # missing) to the adapter.
    def normalize_name(name)
      adapter.normalize_name(name)
    end
    # Set of every event name subscribed to so far; consumed by #listeners.
    # NOTE(review): the ivar is @listeners_names while the method is
    # listener_names — works, but consider aligning the two names.
    def listener_names
      @listeners_names ||= Set.new
    end
  end
end
28e896f77736f1b2711e33811480dfaede0e8f77
| 386 |
# Temporary workaround to resolve circular dependency between rspec-rails' spec
# suite and ammeter.
require 'rspec/rails/matchers'
module RSpec
  module Rails
    # Base mixin for Rails example groups: wires the Rails setup/teardown
    # hooks, the Test::Unit assertion bridge, and the rspec-rails matchers
    # into any including example group.
    module RailsExampleGroup
      extend ActiveSupport::Concern
      include RSpec::Rails::SetupAndTeardownAdapter
      include RSpec::Rails::TestUnitAssertionAdapter
      include RSpec::Rails::Matchers
    end
  end
end
| 25.733333 | 79 | 0.759067 |
ff334989e47a70680406824a072e43f7338be167
| 1,194 |
# -*- encoding: utf-8 -*-
=begin
#OpenAPI Extension generating aliases to maps and arrays as models
#This specification shows how to generate aliases to maps and arrays as models.
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.1
=end
$:.push File.expand_path("../lib", __FILE__)
require "petstore/version"
# Gem packaging metadata for the generated "petstore" API client.
Gem::Specification.new do |s|
  s.name = "petstore"
  s.version = Petstore::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["OpenAPI-Generator"]
  s.email = [""]
  s.homepage = "https://openapi-generator.tech"
  s.summary = "OpenAPI Extension generating aliases to maps and arrays as models Ruby Gem"
  s.description = "This specification shows how to generate aliases to maps and arrays as models."
  s.license = "Unlicense"
  s.required_ruby_version = ">= 2.4"
  s.add_runtime_dependency 'typhoeus', '~> 1.0', '>= 1.0.1'
  s.add_development_dependency 'rspec', '~> 3.6', '>= 3.6.0'
  # NOTE(review): shells out to the external `find` utility, so packaging
  # fails on platforms without it (e.g. plain Windows shells).
  s.files = `find *`.split("\n").uniq.sort.select { |f| !f.empty? }
  s.test_files = `find spec/*`.split("\n")
  s.executables = []
  s.require_paths = ["lib"]
end
| 30.615385 | 98 | 0.661642 |
e96dce95f469f1dda17a0b0fcd6391f59cdbb823
| 7,653 |
#!/bin/env ruby
# encoding: utf-8
require 'test/unit'
RUBY_VERSION < '1.9' ? require('lib/week_of_month') : require_relative('../../../week_of_month')
# Exercises the week-of-month extensions that the gem mixes into Time:
# week numbering, calendar splitting, weekday/weekend predicates and
# week navigation, including the optional Monday-based week configuration.
class TestWeekForTime < Test::Unit::TestCase
  # 1-based week index counting the (possibly partial) first calendar week.
  def test_week_of_month
    assert_equal 5, Time.new(2013, 1, 31).week_of_month
    assert_equal 5, Time.new(2013, 2, 28).week_of_month
    assert_equal 6, Time.new(2013, 3, 31).week_of_month
    assert_equal 5, Time.new(2013, 4, 30).week_of_month
    assert_equal 5, Time.new(2013, 5, 31).week_of_month
    assert_equal 6, Time.new(2013, 6, 30).week_of_month
    assert_equal 5, Time.new(2013, 7, 31).week_of_month
    assert_equal 5, Time.new(2013, 8, 31).week_of_month
    assert_equal 5, Time.new(2013, 9, 30).week_of_month
    assert_equal 5, Time.new(2013, 10, 31).week_of_month
    assert_equal 5, Time.new(2013, 11, 30).week_of_month
    assert_equal 5, Time.new(2013, 12, 31).week_of_month
  end

  # "General" week numbering — typically one less than week_of_month for
  # months that start mid-week (see gem docs for the exact rule).
  def test_general_week_of_month
    assert_equal 4, Time.new(2013, 1, 31).general_week_of_month
    assert_equal 4, Time.new(2013, 2, 28).general_week_of_month
    assert_equal 5, Time.new(2013, 3, 31).general_week_of_month
    assert_equal 4, Time.new(2013, 4, 30).general_week_of_month
    assert_equal 4, Time.new(2013, 5, 31).general_week_of_month
    assert_equal 5, Time.new(2013, 6, 30).general_week_of_month
    assert_equal 4, Time.new(2013, 7, 31).general_week_of_month
    assert_equal 4, Time.new(2013, 8, 31).general_week_of_month
    assert_equal 5, Time.new(2013, 9, 30).general_week_of_month
    assert_equal 4, Time.new(2013, 10, 31).general_week_of_month
    assert_equal 4, Time.new(2013, 11, 30).general_week_of_month
    assert_equal 5, Time.new(2013, 12, 31).general_week_of_month
  end

  # week_split returns the month as an array of week rows (Sunday-first);
  # leading/trailing slots outside the month are nil-padded.
  def test_week_split
    object = Time.new(2013, 1, 10)
    split_for_january = [[nil, nil, 1, 2, 3, 4, 5],
                         [6, 7, 8, 9, 10, 11, 12],
                         [13, 14, 15, 16, 17, 18, 19],
                         [20, 21, 22, 23, 24, 25, 26],
                         [27, 28, 29, 30, 31]]
    assert_kind_of Array, object.week_split
    assert_equal split_for_january, object.week_split
    object = Time.new(2013, 2, 15)
    # NOTE(review): variable name says "october" but the fixture is February.
    split_for_october = [[nil, nil, nil, nil, nil, 1, 2],
                         [3, 4, 5, 6, 7, 8, 9],
                         [10, 11, 12, 13, 14, 15, 16],
                         [17, 18, 19, 20, 21, 22, 23],
                         [24, 25, 26, 27, 28]]
    assert_kind_of Array, object.week_split
    assert_equal split_for_october, object.week_split
  end

  def test_first_week?
    assert Time.new(2012, 1, 1).first_week?
    assert !Time.new(2012, 1, 31).first_week?
  end

  def test_second_week?
    assert Time.new(2013, 1, 6).second_week?
    assert !Time.new(2013, 1, 2).second_week?
  end

  def test_last_week?
    assert Time.new(2012, 10, 31).last_week?
    assert !Time.new(2012, 10, 20).last_week?
  end

  def test_total_weeks
    assert_equal 5, Time.new(2012, 10, 31).total_weeks
    assert_equal 6, Time.new(2012, 12, 20).total_weeks
  end

  # Ordinal week names in several locales.
  def test_week_of_month_in_eng
    assert_equal 'First', Time.new(2012, 12, 1).week_of_month_in_eng
    assert_equal 'Second', Time.new(2012, 12, 4).week_of_month_in_eng
    assert_equal 'Third', Time.new(2012, 12, 9).week_of_month_in_eng
    assert_equal 'Fourth', Time.new(2012, 12, 16).week_of_month_in_eng
    assert_equal 'Fifth', Time.new(2012, 12, 24).week_of_month_in_eng
    assert_equal 'Sixth', Time.new(2012, 12, 31).week_of_month_in_eng
  end

  def test_week_of_month_in_fr
    assert_equal 'Premier', Time.new(2012, 12, 1).week_of_month_in_fr
    assert_equal 'Deuxième', Time.new(2012, 12, 4).week_of_month_in_fr
    assert_equal 'Troisième', Time.new(2012, 12, 9).week_of_month_in_fr
    assert_equal 'Quatrième', Time.new(2012, 12, 16).week_of_month_in_fr
    assert_equal 'Cinquième', Time.new(2012, 12, 24).week_of_month_in_fr
    assert_equal 'Sixième', Time.new(2012, 12, 31).week_of_month_in_fr
  end

  # NOTE(review): the first two German ordinals are the English words; these
  # assertions pin the library's current (possibly unintended) output.
  def test_week_of_month_in_ger
    assert_equal 'First', Time.new(2012, 12, 1).week_of_month_in_ger
    assert_equal 'Second', Time.new(2012, 12, 4).week_of_month_in_ger
    assert_equal 'Dritten', Time.new(2012, 12, 9).week_of_month_in_ger
    assert_equal 'Vierte', Time.new(2012, 12, 16).week_of_month_in_ger
    assert_equal 'Fünfte', Time.new(2012, 12, 24).week_of_month_in_ger
    assert_equal 'Sechste', Time.new(2012, 12, 31).week_of_month_in_ger
  end

  def test_week_of_month_in_ja
    assert_equal '第一', Time.new(2012, 12, 1).week_of_month_in_ja
    assert_equal '第二', Time.new(2012, 12, 4).week_of_month_in_ja
    assert_equal '第三', Time.new(2012, 12, 9).week_of_month_in_ja
    assert_equal '第四', Time.new(2012, 12, 16).week_of_month_in_ja
    assert_equal '第五', Time.new(2012, 12, 24).week_of_month_in_ja
    assert_equal '第六', Time.new(2012, 12, 31).week_of_month_in_ja
  end

  # Saturday/Sunday are weekend days; weekdays are working days.
  def test_week_end?
    assert !Time.new(2012, 10, 1).week_end?
    assert !Time.new(2012, 10, 31).week_end?
    assert Time.new(2012, 10, 6).week_end?
    assert Time.new(2012, 10, 7).week_end?
  end

  def test_working_day?
    assert Time.new(2012, 10, 1).working_day?
    assert Time.new(2012, 10, 31).working_day?
    assert !Time.new(2012, 10, 6).working_day?
    assert !Time.new(2012, 10, 7).working_day?
  end

  def test_days_past_in_week
    assert_equal 2, Time.new(2013, 1, 1).days_past_in_week
  end

  def test_days_left_in_week
    assert_equal 5, Time.new(2013, 1, 1).days_left_in_week
  end

  # With the default (Sunday-first) configuration.
  def test_starting_of_week
    assert_equal Time.new(2012, 11, 25), Time.new(2012, 11, 25).starting_of_week
    assert_equal Time.new(2012, 11, 25), Time.new(2012, 11, 26).starting_of_week
    assert_equal Time.new(2012, 11, 25), Time.new(2012, 11, 30).starting_of_week
  end

  def test_ending_of_week
    assert_equal Time.new(2012, 12, 1), Time.new(2012, 12, 1).ending_of_week
    assert_equal Time.new(2012, 12, 8), Time.new(2012, 12, 2).ending_of_week
    assert_equal Time.new(2012, 12, 8), Time.new(2012, 12, 3).ending_of_week
    assert_equal Time.new(2012, 12, 8), Time.new(2012, 12, 7).ending_of_week
  end

  # wom_next_week / previous_week jump exactly seven days.
  def test_wom_next_week
    assert_equal Time.new(2012, 12, 8), Time.new(2012, 12, 1).wom_next_week
    assert_equal Time.new(2012, 12, 22), Time.new(2012, 12, 15).wom_next_week
    assert_equal Time.new(2013, 1, 5), Time.new(2012, 12, 29).wom_next_week
    assert_equal Time.new(2012, 12, 26), Time.new(2012, 12, 19).wom_next_week
  end

  def test_previous_week
    assert_equal Time.new(2012, 12, 1), Time.new(2012, 12, 8).previous_week
    assert_equal Time.new(2012, 12, 15), Time.new(2012, 12, 22).previous_week
    assert_equal Time.new(2012, 12, 29), Time.new(2013, 1, 5).previous_week
    assert_equal Time.new(2012, 12, 19), Time.new(2012, 12, 26).previous_week
  end

  # Monday-first weeks via global configuration; reset afterwards so other
  # tests are unaffected (no ensure block, so a failing assert leaks state).
  def test_monday_configured_starting_of_week
    WeekOfMonth.configuration.monday_active = true
    assert_equal Time.new(2012, 11, 19), Time.new(2012, 11, 25).starting_of_week
    assert_equal Time.new(2012, 11, 26), Time.new(2012, 11, 26).starting_of_week
    assert_equal Time.new(2012, 11, 26), Time.new(2012, 11, 30).starting_of_week
    WeekOfMonth.configuration.monday_active = false
  end

  def test_monday_configured_ending_of_week
    WeekOfMonth.configuration.monday_active = true
    assert_equal Time.new(2012, 12, 2), Time.new(2012, 12, 1).ending_of_week
    assert_equal Time.new(2012, 12, 2), Time.new(2012, 12, 2).ending_of_week
    assert_equal Time.new(2012, 12, 9), Time.new(2012, 12, 3).ending_of_week
    assert_equal Time.new(2012, 12, 9), Time.new(2012, 12, 7).ending_of_week
    WeekOfMonth.configuration.monday_active = false
  end
end
| 37.886139 | 96 | 0.695283 |
391575e85336ff71cd7bd0504945a1f1f646d3e9
| 2,943 |
#
# Copyright (c) 2010-2011 RightScale Inc
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
module RightScale
  module Test
    # Flexmock helpers that redirect Chef / RightLink logging into an
    # in-memory MockLogger so specs can assert on the emitted log text.
    module MockAuditorProxy
      extend self

      # Stubs every Chef::Log severity method to append the message (plus a
      # trailing newline) to the matching text buffer on +logger+.
      def mock_chef_log(logger)
        flexmock(Chef::Log).should_receive(:debug).and_return { |m| logger.debug_text << m << "\n" }
        flexmock(Chef::Log).should_receive(:error).and_return { |m| logger.error_text << m << "\n" }
        flexmock(Chef::Log).should_receive(:fatal).and_return { |m| logger.fatal_text << m << "\n" }
        flexmock(Chef::Log).should_receive(:info).and_return { |m| logger.info_text << m << "\n" }
        flexmock(Chef::Log).should_receive(:warn).and_return { |m| logger.warn_text << m << "\n" }
        # Section markers are swallowed (no-op block).
        flexmock(Chef::Log.logger).should_receive(:create_new_section).and_return { |m| }
      end

      # Same idea for RightScale::Log; :error additionally records the
      # exception's #inspect when one is passed.
      def mock_right_link_log(logger)
        flexmock(RightScale::Log).should_receive(:debug).and_return { |m| logger.debug_text << m << "\n" }
        flexmock(RightScale::Log).should_receive(:error).and_return do |m, e|
          logger.error_text << m << "\n"
          logger.error_text << e.inspect << "\n" if e
        end
        flexmock(RightScale::Log).should_receive(:fatal).and_return { |m| logger.fatal_text << m << "\n" }
        flexmock(RightScale::Log).should_receive(:info).and_return { |m| logger.info_text << m << "\n" }
        flexmock(RightScale::Log).should_receive(:warn).and_return { |m| logger.warn_text << m << "\n" }
      end
    end

    # Plain accumulator object the mocks above write into; every buffer
    # starts as an empty (mutable) String.
    class MockLogger
      attr_accessor :debug_text, :error_text, :fatal_text, :info_text, :warn_text,
                    :audit_info, :audit_output, :audit_status, :audit_section

      def initialize
        @debug_text = ""
        @error_text = ""
        @fatal_text = ""
        @info_text = ""
        @warn_text = ""
        @audit_info = ""
        @audit_output = ""
        @audit_status = ""
        @audit_section = ""
      end
    end
  end
end
| 44.590909 | 106 | 0.668366 |
7a13d2dbf3ecfc0560a42cc1d009ec08d99de518
| 502 |
# Homebrew formula for net6, a C++ networking library used by Gobby.
class Net6 < Formula
  desc "C++ library for network-based applications"
  homepage "http://gobby.0x539.de"
  url "http://releases.0x539.de/net6/net6-1.3.14.tar.gz"
  sha256 "155dd82cbe1f8354205c79ab2bb54af4957047422250482596a34b0e0cc61e21"
  revision 2

  depends_on "pkg-config" => :build
  depends_on "gnutls"
  depends_on "libsigc++"

  # libsigc++ headers require a C++11-capable compiler.
  needs :cxx11

  def install
    ENV.cxx11
    system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
    system "make", "install"
  end
end
| 25.1 | 79 | 0.711155 |
91c71abde18d73f8485d1ae3459afcbb3f0d8159
| 1,587 |
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "active_id/version"
# Gem specification for activeid: binary UUID support for ActiveRecord.
Gem::Specification.new do |s|
  s.name = "activeid"
  s.version = ActiveID::VERSION
  s.authors = ["Ribose Inc."]
  s.email = ["[email protected]"]
  s.homepage = "https://github.com/riboseinc/activeid"
  s.summary = "Support for binary UUIDs in ActiveRecord"
  s.description = "Support for binary UUIDs in ActiveRecord"
  # File lists come from git, so the gem must be built from a git checkout.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ["lib"]

  s.add_development_dependency "activesupport"
  s.add_development_dependency "database_cleaner"
  s.add_development_dependency "fabrication"
  s.add_development_dependency "forgery"
  s.add_development_dependency "pry"
  s.add_development_dependency "rake"
  s.add_development_dependency "rspec", "~> 3.5"
  s.add_development_dependency "rspec-its"
  s.add_development_dependency "solid_assert", "~> 1.0"

  # JRuby needs the JDBC adapters; MRI uses the native drivers.
  if RUBY_ENGINE == "jruby"
    s.add_development_dependency "activerecord-jdbcmysql-adapter"
    s.add_development_dependency "activerecord-jdbcpostgresql-adapter"
    s.add_development_dependency "activerecord-jdbcsqlite3-adapter"
  else
    s.add_development_dependency "mysql2"
    s.add_development_dependency "pg"
    s.add_development_dependency "sqlite3", "~> 1.3.6"
  end

  s.add_runtime_dependency "activerecord", ">= 5.0", "< 6.0"
  s.add_runtime_dependency "uuidtools"
end
| 36.906977 | 84 | 0.706364 |
629136722dd289c8c42dd8d362c75d3ae77b2f5b
| 2,167 |
require 'one_gadget/gadget'
# https://gitlab.com/david942j/libcdb/blob/master/libc/libc0.1-i686-2.19-18/lib/i386-kfreebsd-gnu/i686/cmov/libc-2.19.so
#
# Intel 80386
#
# GNU C Library (Debian GLIBC 2.19-18) stable release version 2.19, by Roland McGrath et al.
# Copyright (C) 2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions.
# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
# Compiled by GNU CC version 4.8.4.
# Available extensions:
# crypt add-on version 2.1 by Michael Glad and others
# Native POSIX Threads Library by Ulrich Drepper et al
# GNU Libidn by Simon Josefsson
# BIND-8.2.3-T5B
# libc ABIs: UNIQUE
# For bug reporting instructions, please see:
# <http://www.debian.org/Bugs/>.
# The libc build id is encoded in this file's name (last dash-separated token).
build_id = File.basename(__FILE__, '.rb').split('-').last
# Each entry registers a one-shot execve gadget: (build id, file offset,
# register/stack constraints that must hold, and the resulting call).
OneGadget::Gadget.add(build_id, 248615,
                      constraints: ["ebx is the GOT address of libc", "[esp+0x38] == NULL"],
                      effect: "execve(\"/bin/sh\", esp+0x38, environ)")
OneGadget::Gadget.add(build_id, 248651,
                      constraints: ["ebx is the GOT address of libc", "[eax] == NULL || eax == NULL", "[[esp+0x8]] == NULL || [esp+0x8] == NULL"],
                      effect: "execve(\"/bin/sh\", eax, [esp+0x8])")
OneGadget::Gadget.add(build_id, 248655,
                      constraints: ["ebx is the GOT address of libc", "[[esp+0x4]] == NULL || [esp+0x4] == NULL", "[[esp+0x8]] == NULL || [esp+0x8] == NULL"],
                      effect: "execve(\"/bin/sh\", [esp+0x4], [esp+0x8])")
OneGadget::Gadget.add(build_id, 406404,
                      constraints: ["ebx is the GOT address of libc", "[esp+0x8] == NULL"],
                      effect: "execl(\"/bin/sh\", \"sh\", [esp+0x8])")
OneGadget::Gadget.add(build_id, 406410,
                      constraints: ["ebx is the GOT address of libc", "eax == NULL"],
                      effect: "execl(\"/bin/sh\", eax)")
OneGadget::Gadget.add(build_id, 406414,
                      constraints: ["ebx is the GOT address of libc", "[esp+0x4] == NULL"],
                      effect: "execl(\"/bin/sh\", [esp+0x4])")
| 52.853659 | 158 | 0.597139 |
21fe6c98e9c328872a4eb12c42d2f9035b3ad104
| 502 |
FactoryBot.define do
  # Com Trade indicator fixture; sequences keep iso codes / years / ranks
  # unique across records, while value/quantity are random floats.
  factory :api_v3_countries_com_trade_indicator, class: 'Api::V3::CountriesComTradeIndicator' do
    association :commodity, factory: :api_v3_commodity
    # NOTE(review): iso2/iso3 both draw 2-letter codes from 'AA'..'ZZ',
    # so iso3 is not actually three letters — confirm intent.
    sequence(:iso2) { |n| ('AA'..'ZZ').to_a[n] }
    sequence(:iso3) { |n| ('AA'..'ZZ').to_a[n] }
    sequence(:year) { |n| n }
    sequence(:commodity_code) { |n| "Name#{n}" }
    value { rand(11.2...76.9) }
    quantity { rand(11.2...76.9) }
    sequence(:value_rank) { |n| n }
    sequence(:quantity_rank) { |n| n }
  end
end
| 35.857143 | 96 | 0.61753 |
1c0f95ac82f0df226f99884adc0b5c9888c6238d
| 937 |
# frozen_string_literal: true
module Minter
  # Transaction payload for buying +value_to_buy+ of +coin_to_buy+ by selling
  # +coin_to_sell+, spending at most +maximum_value_to_sell+ of it.
  # All attributes are plain read/write accessors; #to_params maps them onto
  # the CamelCase keys the node API expects.
  class BuyCoinTx < Transaction
    attr_accessor :coin_to_buy, :value_to_buy, :coin_to_sell, :maximum_value_to_sell, :nonce, :chain_id, :gas_coin, :gas_price

    def initialize(coin_to_buy:, value_to_buy:, coin_to_sell:, maximum_value_to_sell:, nonce:, chain_id:, gas_coin:, gas_price:) # rubocop:disable Metrics/ParameterLists
      # Assign through the public writers rather than raw ivars.
      self.coin_to_buy           = coin_to_buy
      self.value_to_buy          = value_to_buy
      self.coin_to_sell          = coin_to_sell
      self.maximum_value_to_sell = maximum_value_to_sell
      self.nonce                 = nonce
      self.chain_id              = chain_id
      self.gas_coin              = gas_coin
      self.gas_price             = gas_price
    end

    # @return [Hash] the API parameter hash for this transaction
    def to_params
      {
        CoinToBuy: coin_to_buy,
        ValueToBuy: value_to_buy,
        CoinToSell: coin_to_sell,
        MaximumValueToSell: maximum_value_to_sell,
        Nonce: nonce,
        ChainId: chain_id,
        GasCoin: gas_coin,
        GasPrice: gas_price
      }
    end
  end
end
| 31.233333 | 169 | 0.694771 |
acd879597d43a96d64266e9611baf6396bd5c1c1
| 2,139 |
#
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "support/shared/unit/resource/static_provider_resolution"
describe Chef::Resource::DpkgPackage, "initialize" do
static_provider_resolution(
resource: Chef::Resource::DpkgPackage,
provider: Chef::Provider::Package::Dpkg,
name: :dpkg_package,
action: :install,
os: "linux"
)
describe Chef::Resource::DpkgPackage, "defaults" do
let(:resource) { Chef::Resource::DpkgPackage.new("fakey_fakerton") }
it "sets the default action as :install" do
expect(resource.action).to eql([:install])
end
it "accepts a string for the response file" do
resource.response_file "something"
expect(resource.response_file).to eql("something")
end
it "accepts a hash for response file template variables" do
resource.response_file_variables({ variables: true })
expect(resource.response_file_variables).to eql({ variables: true })
end
it "supports :install, :lock, :purge, :reconfig, :remove, :unlock, :upgrade actions" do
expect { resource.action :install }.not_to raise_error
expect { resource.action :lock }.not_to raise_error
expect { resource.action :purge }.not_to raise_error
expect { resource.action :reconfig }.not_to raise_error
expect { resource.action :remove }.not_to raise_error
expect { resource.action :unlock }.not_to raise_error
expect { resource.action :upgrade }.not_to raise_error
end
end
end
| 35.065574 | 91 | 0.719963 |
ab98703a4c60a41d823122bf0eb93d1b2ec83db7
| 632 |
require "json"
# Mirror metadata from package.json so the podspec stays in sync with npm.
package = JSON.parse(File.read(File.join(__dir__, "package.json")))

Pod::Spec.new do |s|
  s.name         = "proba-sdk-react-native"
  s.version      = package["version"]
  s.summary      = package["description"]
  s.homepage     = package["homepage"]
  s.license      = package["license"]
  s.authors      = package["author"]

  s.platforms    = { :ios => "9.0" }
  s.source       = { :git => "https://github.com/proba-ai/proba-sdk-react-native.git", :tag => "#{s.version}" }

  s.source_files = "ios/**/*.{h,m,mm,swift}"
  s.swift_version = "5.0"

  s.dependency "React"
  # Native SDK is pinned; bump together with the JS package.
  s.dependency "Proba", "0.2.7"
end
| 26.333333 | 111 | 0.587025 |
f8a037189cfd4c479734979e4a59f2742289f08f
| 6,041 |
require "set"
module ActionController
  # See <tt>Renderers.add</tt>
  def self.add_renderer(key, &block)
    Renderers.add(key, &block)
  end

  # See <tt>Renderers.remove</tt>
  def self.remove_renderer(key)
    Renderers.remove(key)
  end

  # Raised when +render+ is asked for a format with no registered renderer.
  # See <tt>Responder#api_behavior</tt>
  class MissingRenderer < LoadError
    def initialize(format)
      super "No renderer defined for format: #{format}"
    end
  end

  module Renderers
    extend ActiveSupport::Concern

    # A Set containing renderer names that correspond to available renderer procs.
    # Default values are <tt>:json</tt>, <tt>:js</tt>, <tt>:xml</tt>.
    RENDERERS = Set.new

    included do
      # Per-class subset of RENDERERS this controller may use (frozen).
      class_attribute :_renderers
      self._renderers = Set.new.freeze
    end

    # Used in <tt>ActionController::Base</tt>
    # and <tt>ActionController::API</tt> to include all
    # renderers by default.
    module All
      extend ActiveSupport::Concern
      include Renderers

      included do
        self._renderers = RENDERERS
      end
    end

    # Adds a new renderer to call within controller actions.
    # A renderer is invoked by passing its name as an option to
    # <tt>AbstractController::Rendering#render</tt>. To create a renderer
    # pass it a name and a block. The block takes two arguments, the first
    # is the value paired with its key and the second is the remaining
    # hash of options passed to +render+.
    #
    # Create a csv renderer:
    #
    #   ActionController::Renderers.add :csv do |obj, options|
    #     filename = options[:filename] || 'data'
    #     str = obj.respond_to?(:to_csv) ? obj.to_csv : obj.to_s
    #     send_data str, type: Mime[:csv],
    #       disposition: "attachment; filename=#{filename}.csv"
    #   end
    #
    # Note that we used Mime[:csv] for the csv mime type as it comes with Rails.
    # For a custom renderer, you'll need to register a mime type with
    # <tt>Mime::Type.register</tt>.
    #
    # To use the csv renderer in a controller action:
    #
    #   def show
    #     @csvable = Csvable.find(params[:id])
    #     respond_to do |format|
    #       format.html
    #       format.csv { render csv: @csvable, filename: @csvable.name }
    #     end
    #   end
    def self.add(key, &block)
      # Defines _render_with_renderer_<key> on the module, so every
      # controller that includes Renderers gains it.
      define_method(_render_with_renderer_method_name(key), &block)
      RENDERERS << key.to_sym
    end

    # This method is the opposite of add method.
    #
    # To remove a csv renderer:
    #
    #   ActionController::Renderers.remove(:csv)
    def self.remove(key)
      RENDERERS.delete(key.to_sym)
      method_name = _render_with_renderer_method_name(key)
      remove_method(method_name) if method_defined?(method_name)
    end

    # Internal naming convention shared by add/remove and the dispatcher.
    def self._render_with_renderer_method_name(key)
      "_render_with_renderer_#{key}"
    end

    module ClassMethods
      # Adds, by name, a renderer or renderers to the +_renderers+ available
      # to call within controller actions.
      #
      # It is useful when rendering from an <tt>ActionController::Metal</tt> controller or
      # otherwise to add an available renderer proc to a specific controller.
      #
      # Both <tt>ActionController::Base</tt> and <tt>ActionController::API</tt>
      # include <tt>ActionController::Renderers::All</tt>, making all renderers
      # available in the controller. See <tt>Renderers::RENDERERS</tt> and <tt>Renderers.add</tt>.
      #
      # Since <tt>ActionController::Metal</tt> controllers cannot render, the controller
      # must include <tt>AbstractController::Rendering</tt>, <tt>ActionController::Rendering</tt>,
      # and <tt>ActionController::Renderers</tt>, and have at lest one renderer.
      #
      # Rather than including <tt>ActionController::Renderers::All</tt> and including all renderers,
      # you may specify which renderers to include by passing the renderer name or names to
      # +use_renderers+. For example, a controller that includes only the <tt>:json</tt> renderer
      # (+_render_with_renderer_json+) might look like:
      #
      #   class MetalRenderingController < ActionController::Metal
      #     include AbstractController::Rendering
      #     include ActionController::Rendering
      #     include ActionController::Renderers
      #
      #     use_renderers :json
      #
      #     def show
      #       render json: record
      #     end
      #   end
      #
      # You must specify a +use_renderer+, else the +controller.renderer+ and
      # +controller._renderers+ will be <tt>nil</tt>, and the action will fail.
      def use_renderers(*args)
        renderers = _renderers + args
        self._renderers = renderers.freeze
      end
      alias use_renderer use_renderers
    end

    # Called by +render+ in <tt>AbstractController::Rendering</tt>
    # which sets the return value as the +response_body+.
    #
    # If no renderer is found, +super+ returns control to
    # <tt>ActionView::Rendering.render_to_body</tt>, if present.
    def render_to_body(options)
      _render_to_body_with_renderer(options) || super
    end

    # Finds the first enabled renderer whose key appears in +options+ and
    # invokes it; returns nil when no renderer option matched.
    def _render_to_body_with_renderer(options)
      _renderers.each do |name|
        if options.key?(name)
          _process_options(options)
          method_name = Renderers._render_with_renderer_method_name(name)
          return send(method_name, options.delete(name), options)
        end
      end
      nil
    end

    # Built-in renderer: render json: ... (with optional JSONP callback).
    add :json do |json, options|
      json = json.to_json(options) unless json.kind_of?(String)

      if options[:callback].present?
        if content_type.nil? || content_type == Mime[:json]
          self.content_type = Mime[:js]
        end

        "/**/#{options[:callback]}(#{json})"
      else
        self.content_type ||= Mime[:json]
        json
      end
    end

    # Built-in renderer: render js: ...
    add :js do |js, options|
      self.content_type ||= Mime[:js]
      js.respond_to?(:to_js) ? js.to_js(options) : js
    end

    # Built-in renderer: render xml: ...
    add :xml do |xml, options|
      self.content_type ||= Mime[:xml]
      xml.respond_to?(:to_xml) ? xml.to_xml(options) : xml
    end
  end
end
| 33.375691 | 100 | 0.646582 |
f86b3e1815ab25b9981edf8ec96ae3dcc6a6bf41
| 1,809 |
require 'rails_helper'
# Authorization matrix for PATCH /admin/petitions/:petition_id/statistics:
# anonymous -> login page, moderator -> admin hub, sysadmin -> enqueues the
# statistics refresh job and redirects back to the petition.
RSpec.describe Admin::PetitionStatisticsController, type: :controller, admin: true do
  let!(:petition) { FactoryBot.create(:open_petition) }

  context "when not logged in" do
    describe "PATCH /admin/petitions/:petition_id/statistics" do
      before do
        patch :update, params: { petition_id: petition.id }
      end

      it "redirects to the login page" do
        expect(response).to redirect_to("https://moderate.petitions.parliament.scot/admin/login")
      end
    end
  end

  context "when logged in as a moderator" do
    let(:moderator) { FactoryBot.create(:moderator_user) }
    before { login_as(moderator) }

    describe "PATCH /admin/petitions/:petition_id/statistics" do
      before do
        patch :update, params: { petition_id: petition.id }
      end

      # Moderators are not authorized; they bounce to the hub page.
      it "redirects to the admin hub page" do
        expect(response).to redirect_to("https://moderate.petitions.parliament.scot/admin")
      end
    end
  end

  context "when logged in as a sysadmin" do
    let(:sysadmin) { FactoryBot.create(:sysadmin_user) }
    before { login_as(sysadmin) }

    describe "PATCH /admin/petitions/:petition_id/statistics" do
      before do
        patch :update, params: { petition_id: petition.id }
      end

      it "redirects to the petition page" do
        expect(response).to redirect_to("https://moderate.petitions.parliament.scot/admin/petitions/#{petition.to_param}")
      end

      it "sets the flash notice message" do
        expect(flash[:notice]).to eq("Updating the petition statistics - please wait a few minutes and then refresh this page")
      end

      it "enqueues a UpdatePetitionStatisticsJob" do
        expect(UpdatePetitionStatisticsJob).to have_been_enqueued.on_queue(:low_priority).with(petition)
      end
    end
  end
end
| 32.303571 | 127 | 0.689331 |
edcc7273da71239c6f1a6295b976733b665311b3
| 175 |
# Adds a boolean flag recording whether the questionnaire's agreement was
# accepted; new rows default to false (existing rows keep NULL unless
# backfilled — confirm whether a NOT NULL backfill is needed).
class AddAgreementAcceptedToQuestionnaires < ActiveRecord::Migration[4.2]
  def change
    add_column :questionnaires, :agreement_accepted, :boolean, default: false
  end
end
| 29.166667 | 77 | 0.8 |
385ba2e6ff0eb565dc392dffa86cc43ca5a63845
| 377 |
module Project::Operation
  # Trailblazer-style operation: load the project, authorize the current user
  # via ProjectPolicy#destroy?, then destroy it. Each step short-circuits the
  # pipeline when it fails.
  class Destroy < Base::Operation::Base
    step :model!
    step Policy::Pundit(ProjectPolicy, :destroy?)
    step :destroy!

    # Looks up the project inside the current user's scope; +find+ is
    # expected to raise when the id is absent (TODO confirm scope semantics).
    def model!(ctx, current_user:, resource_params:, params:, **)
      ctx[:model] = scope(user: current_user, res: Project).find(params[:id])
    end

    # destroy! raises on failure rather than returning false.
    def destroy!(ctx, **)
      ctx[:model].destroy!
    end
  end
end
| 23.5625 | 77 | 0.64191 |
e88862bfb4a7833f9266b13eb27daa05dc14e2a1
| 5,953 |
# frozen_string_literal: true
module ActiveSupport
  # A typical module looks like this:
  #
  #   module M
  #     def self.included(base)
  #       base.extend ClassMethods
  #       base.class_eval do
  #         scope :disabled, -> { where(disabled: true) }
  #       end
  #     end
  #
  #     module ClassMethods
  #       ...
  #     end
  #   end
  #
  # By using <tt>ActiveSupport::Concern</tt> the above module could instead be
  # written as:
  #
  #   require "active_support/concern"
  #
  #   module M
  #     extend ActiveSupport::Concern
  #
  #     included do
  #       scope :disabled, -> { where(disabled: true) }
  #     end
  #
  #     class_methods do
  #       ...
  #     end
  #   end
  #
  # Moreover, it gracefully handles module dependencies. Given a +Foo+ module
  # and a +Bar+ module which depends on the former, we would typically write the
  # following:
  #
  #   module Foo
  #     def self.included(base)
  #       base.class_eval do
  #         def self.method_injected_by_foo
  #           ...
  #         end
  #       end
  #     end
  #   end
  #
  #   module Bar
  #     def self.included(base)
  #       base.method_injected_by_foo
  #     end
  #   end
  #
  #   class Host
  #     include Foo # We need to include this dependency for Bar
  #     include Bar # Bar is the module that Host really needs
  #   end
  #
  # But why should +Host+ care about +Bar+'s dependencies, namely +Foo+? We
  # could try to hide these from +Host+ directly including +Foo+ in +Bar+:
  #
  #   module Bar
  #     include Foo
  #     def self.included(base)
  #       base.method_injected_by_foo
  #     end
  #   end
  #
  #   class Host
  #     include Bar
  #   end
  #
  # Unfortunately this won't work, since when +Foo+ is included, its <tt>base</tt>
  # is the +Bar+ module, not the +Host+ class. With <tt>ActiveSupport::Concern</tt>,
  # module dependencies are properly resolved:
  #
  #   require "active_support/concern"
  #
  #   module Foo
  #     extend ActiveSupport::Concern
  #     included do
  #       def self.method_injected_by_foo
  #         ...
  #       end
  #     end
  #   end
  #
  #   module Bar
  #     extend ActiveSupport::Concern
  #     include Foo
  #
  #     included do
  #       self.method_injected_by_foo
  #     end
  #   end
  #
  #   class Host
  #     include Bar # It works, now Bar takes care of its dependencies
  #   end
  #
  # === Prepending concerns
  #
  # Just like <tt>include</tt>, concerns also support <tt>prepend</tt> with a corresponding
  # <tt>prepended do</tt> callback. <tt>module ClassMethods</tt> or <tt>class_methods do</tt> are
  # prepended as well.
  #
  # <tt>prepend</tt> is also used for any dependencies.
  module Concern
    class MultipleIncludedBlocks < StandardError #:nodoc:
      def initialize
        super "Cannot define multiple 'included' blocks for a Concern"
      end
    end

    class MultiplePrependBlocks < StandardError #:nodoc:
      def initialize
        super "Cannot define multiple 'prepended' blocks for a Concern"
      end
    end

    def self.extended(base) #:nodoc:
      # Concerns included into this concern before it reaches a real class.
      base.instance_variable_set(:@_dependencies, [])
    end

    def append_features(base) #:nodoc:
      if base.instance_variable_defined?(:@_dependencies)
        # base is itself a Concern: defer inclusion until a real class
        # includes it, recording self as a dependency.
        base.instance_variable_get(:@_dependencies) << self
        false
      else
        return false if base < self
        @_dependencies.each { |dep| base.include(dep) }
        super
        base.extend const_get(:ClassMethods) if const_defined?(:ClassMethods)
        base.class_eval(&@_included_block) if instance_variable_defined?(:@_included_block)
      end
    end

    def prepend_features(base) #:nodoc:
      if base.instance_variable_defined?(:@_dependencies)
        # Mirror of append_features; dependencies are unshifted so prepend
        # ordering is preserved.
        base.instance_variable_get(:@_dependencies).unshift self
        false
      else
        return false if base < self
        @_dependencies.each { |dep| base.prepend(dep) }
        super
        base.singleton_class.prepend const_get(:ClassMethods) if const_defined?(:ClassMethods)
        base.class_eval(&@_prepended_block) if instance_variable_defined?(:@_prepended_block)
      end
    end

    # Evaluate given block in context of base class,
    # so that you can write class macros here.
    # When you define more than one +included+ block, it raises an exception.
    def included(base = nil, &block)
      if base.nil?
        if instance_variable_defined?(:@_included_block)
          # Identical source_location means the same block was registered
          # twice (e.g. a file reload) and is tolerated.
          if @_included_block.source_location != block.source_location
            raise MultipleIncludedBlocks
          end
        else
          @_included_block = block
        end
      else
        super
      end
    end

    # Evaluate given block in context of base class,
    # so that you can write class macros here.
    # When you define more than one +prepended+ block, it raises an exception.
    def prepended(base = nil, &block)
      if base.nil?
        if instance_variable_defined?(:@_prepended_block)
          if @_prepended_block.source_location != block.source_location
            raise MultiplePrependBlocks
          end
        else
          @_prepended_block = block
        end
      else
        super
      end
    end

    # Define class methods from given block.
    # You can define private class methods as well.
    #
    #   module Example
    #     extend ActiveSupport::Concern
    #
    #     class_methods do
    #       def foo; puts 'foo'; end
    #
    #       private
    #         def bar; puts 'bar'; end
    #     end
    #   end
    #
    #   class Buzz
    #     include Example
    #   end
    #
    #   Buzz.foo # => "foo"
    #   Buzz.bar # => private method 'bar' called for Buzz:Class(NoMethodError)
    def class_methods(&class_methods_module_definition)
      mod = const_defined?(:ClassMethods, false) ?
        const_get(:ClassMethods) :
        const_set(:ClassMethods, Module.new)

      mod.module_eval(&class_methods_module_definition)
    end
  end
end
| 27.560185 | 97 | 0.61112 |
1c626aaa5d552d19d74aebf6335cd9173792803b
| 13,931 |
require 'spec_helper'
# Shared examples: the subject gained the Route and Edge mixin methods but
# must NOT have been decorated with the Vertex module.
shared_examples_for 'an edge with a mixin' do
  its(:route_mixin_method) { should be_true }
  its(:edge_mixin_method) { should be_true }
  it 'should not include the Vertex module' do
    expect { subject.vertex_mixin_method }.to raise_error(NoMethodError)
  end
end
# Shared examples: the subject gained the Route and Vertex mixin methods but
# must NOT have been decorated with the Edge module.
shared_examples_for 'a vertex with a mixin' do
  its(:route_mixin_method) { should be_true }
  its(:vertex_mixin_method) { should be_true }
  it 'should not include the Edge module' do
    expect { subject.edge_mixin_method }.to raise_error(NoMethodError)
  end
end
Run.all :read_write do
describe Pacer::PacerGraph do
use_simple_graph_data
before { setup_data }
# Vertex lookup by id, optionally decorating the result with mixin modules
# or pre-built wrapper classes.
describe '#vertex' do
  context 'not found' do
    subject { graph.vertex '-1' }
    it { should be_nil }
  end

  subject { graph.vertex v0.element_id }
  its(:element_id) { should == v0.element_id }
  its(:graph) { should == graph }

  context 'with mixins' do
    subject { graph.vertex v0.element_id, Tackle::SimpleMixin }
    its(:element_id) { should == v0.element_id }
    it_behaves_like 'a vertex with a mixin'
  end

  context 'with a wrapper' do
    let(:wrapper) { Pacer.vertex_wrapper Tackle::SimpleMixin }
    subject { graph.vertex v0.element_id, wrapper }
    its(:element_id) { should == v0.element_id }
    its(:class) { should == wrapper }
    it_behaves_like 'a vertex with a mixin'
  end

  context 'with a wrapper and a mixin' do
    let(:orig_wrapper) { Pacer.vertex_wrapper Tackle::SimpleMixin }
    let(:wrapper) { Pacer.vertex_wrapper Tackle::SimpleMixin, TP::Person }
    subject { graph.vertex v0.element_id, TP::Person, orig_wrapper }
    its(:element_id) { should == v0.element_id }
    # Adding a mixin to a wrapped vertex produces a combined wrapper class.
    its(:class) { should_not == orig_wrapper }
    its(:class) { should == wrapper }
    it_behaves_like 'a vertex with a mixin'
  end
end
# Edge lookup by id, mirroring the #vertex examples above.
describe '#edge' do
  context 'not found' do
    subject { graph.edge '-1' }
    it { should be_nil }
  end

  subject { graph.edge e0.element_id }
  its(:element_id) { should == e0.element_id }
  its(:graph) { should == graph }

  context 'with mixins' do
    subject { graph.edge e0.element_id, Tackle::SimpleMixin }
    its(:element_id) { should == e0.element_id }
    it_behaves_like 'an edge with a mixin'
  end

  context 'with a wrapper' do
    let(:wrapper) { Pacer.edge_wrapper Tackle::SimpleMixin }
    subject { graph.edge e0.element_id, wrapper }
    its(:element_id) { should == e0.element_id }
    its(:class) { should == wrapper }
    it_behaves_like 'an edge with a mixin'
  end

  context 'with a wrapper and a mixin' do
    let(:orig_wrapper) { Pacer.edge_wrapper Tackle::SimpleMixin }
    let(:wrapper) { Pacer.edge_wrapper Tackle::SimpleMixin, TP::Wrote }
    subject { graph.edge e0.element_id, orig_wrapper, TP::Wrote }
    its(:element_id) { should == e0.element_id }
    # Adding a mixin to a wrapped edge produces a combined wrapper class.
    its(:class) { should_not == orig_wrapper }
    its(:class) { should == wrapper }
    it_behaves_like 'an edge with a mixin'
  end
end
# Vertex creation with optional supplied ids, property hashes and mixins.
# Graphs that ignore supplied ids (or prefix them) are handled per-feature.
describe '#create_vertex' do
  let(:use_id) { rand 1000000 }

  before do
    # Commit pending work first where the implementation requires it.
    c = example.metadata[:graph_commit]
    c.call if c
  end

  context 'existing' do
    it 'should raise an exception' do
      unless graph.features.ignoresSuppliedIds
        expect { graph.create_vertex v0.element_id }.to raise_error(Pacer::ElementExists)
      end
    end
  end

  context 'with properties' do
    subject { graph.create_vertex :name => 'Frank' }
    it { subject[:name].should == 'Frank' }
    its(:element_id) { should_not be_nil }

    context 'and an id' do
      subject { graph.create_vertex use_id, :name => 'Steve' }
      it { subject[:name].should == 'Steve' }
      its('element_id.to_s') do
        if graph.respond_to? :id_prefix
          should == graph.id_prefix + use_id.to_s
        elsif not graph.features.ignoresSuppliedIds
          should == use_id.to_s
        end
      end

      context 'and mixins' do
        subject { graph.create_vertex use_id, Tackle::SimpleMixin, :name => 'John' }
        it { subject[:name].should == 'John' }
        its('element_id.to_s') do
          if graph.respond_to? :id_prefix
            should == graph.id_prefix + use_id.to_s
          elsif not graph.features.ignoresSuppliedIds
            should == use_id.to_s
          end
        end
        it_behaves_like 'a vertex with a mixin'
      end
    end
  end

  context 'with an id' do
    subject { graph.create_vertex use_id }
    its('element_id.to_s') do
      if graph.respond_to? :id_prefix
        should == graph.id_prefix + use_id.to_s
      elsif not graph.features.ignoresSuppliedIds
        should == use_id.to_s
      end
    end

    context 'and mixins' do
      subject { graph.create_vertex use_id, Tackle::SimpleMixin }
      its('element_id.to_s') do
        if graph.respond_to? :id_prefix
          should == graph.id_prefix + use_id.to_s
        elsif not graph.features.ignoresSuppliedIds
          should == use_id.to_s
        end
      end
      it_behaves_like 'a vertex with a mixin'
    end
  end

  context 'with mixins' do
    subject { graph.create_vertex Tackle::SimpleMixin }
    it_behaves_like 'a vertex with a mixin'
  end
end
# Edge creation between v0 and v1 with supplied ids, properties and mixins.
describe '#create_edge' do
  let(:use_id) { rand 1000000 }
  let(:from) { v0 }
  let(:to) { v1 }

  before do
    # Commit pending work first where the implementation requires it.
    c = example.metadata[:graph_commit]
    c.call if c
  end

  it 'should find the vertex' do
    graph.vertex(v0.element_id).should == v0
  end

  context 'existing' do
    it 'should raise an exception' do
      if not graph.features.ignoresSuppliedIds
        expect { graph.create_edge e0.element_id, from, to, :connects }.to raise_error(Pacer::ElementExists)
      end
    end
  end

  context 'with properties' do
    subject { graph.create_edge nil, from, to, :connects, :name => 'Frank' }
    it { subject[:name].should == 'Frank' }
    its(:label) { should == 'connects' }
    its(:element_id) { should_not be_nil }

    context 'and an id' do
      subject { graph.create_edge use_id, from, to, :connects, :name => 'Steve' }
      it { subject[:name].should == 'Steve' }
      its(:label) { should == 'connects' }
      its('element_id.to_i') { should == use_id unless graph.features.ignoresSuppliedIds }

      context 'and mixins' do
        subject { graph.create_edge use_id, from, to, :connects, Tackle::SimpleMixin, :name => 'John' }
        it { subject[:name].should == 'John' }
        its(:label) { should == 'connects' }
        its('element_id.to_i') { should == use_id unless graph.features.ignoresSuppliedIds }
        it_behaves_like 'an edge with a mixin'
      end
    end
  end

  context 'with an id' do
    subject { graph.create_edge use_id, from, to, :connects }
    its(:label) { should == 'connects' }
    its('element_id.to_i') { should == use_id unless graph.features.ignoresSuppliedIds }

    context 'and mixins' do
      subject { graph.create_edge use_id, from, to, :connects, Tackle::SimpleMixin }
      its(:label) { should == 'connects' }
      its('element_id.to_i') { should == use_id unless graph.features.ignoresSuppliedIds }
      it_behaves_like 'an edge with a mixin'
    end
  end

  context 'with mixins' do
    subject { graph.create_edge nil, from, to, :connects, Tackle::SimpleMixin }
    its(:label) { should == 'connects' }
    it_behaves_like 'an edge with a mixin'
  end
end
# Bulk jobs batch work in configurable chunks; 5000 is the default.
describe '#bulk_job_size' do
  subject { graph.bulk_job_size }
  describe 'default' do
    it { should == 5000 }
  end
  describe 'custom' do
    before { graph.bulk_job_size = 12 }
    it { should == 12 }
  end
end
# in_bulk_job? is only true while inside a route's bulk_job block.
describe '#in_bulk_job?' do
  subject { graph.in_bulk_job? }
  it { should be_false }
  context 'in bulk job' do
    around do |spec|
      graph.v[0].bulk_job do
        spec.call
      end
    end
    it { should be_true }
  end
end
# load_vertices silently drops nil and unknown ids instead of raising.
describe '#load_vertices' do
  context 'invalid' do
    subject { graph.load_vertices [v0.element_id, nil, v0.element_id, 'missing'] }
    it { should == [v0, v0] }
  end
  context 'valid' do
    subject { graph.load_vertices [v0.element_id, v1.element_id] }
    it { should == [v0, v1] }
  end
end
# load_edges likewise drops nil and unknown ids instead of raising.
describe '#load_edges' do
  before do
    # Commit pending work first where the implementation requires it.
    c = example.metadata[:graph_commit]
    c.call if c
  end
  it 'should only find valid edges' do
    graph.load_edges([e0.element_id.to_s, nil, e0.element_id, 'missing']).should == [e0, e0]
  end
end
# Manual index management: lazy creation via :create => true, lookup and
# dropping of edge and vertex indices (feature-gated per implementation).
describe '#index' do
  it 'should have no indices' do
    graph.indices.count.should == 0 if graph.features.supportsKeyIndices
  end

  context 'missing' do
    around { |spec| spec.run if graph.features.supportsIndices }
    subject { graph.index 'invalid' }
    it { should be_nil }

    context 'edge' do
      before do
        # Best-effort cleanup from previous runs.
        graph.drop_index 'missing_edge' rescue nil
        graph.index('missing_edge').should be_nil
      end
      subject { graph.index 'missing_edge', :edge, :create => true }
      its(:name) { should == 'missing_edge' }
      after do
        graph.transaction(nesting: true) do
          graph.drop_index 'missing_edge'
        end
      end
    end

    context 'vertex' do
      before do
        # Best-effort cleanup from previous runs.
        graph.drop_index 'missing_vertex' rescue nil
        graph.index('missing_vertex').should be_nil
      end
      subject { graph.index 'missing_vertex', :vertex, :create => true }
      its(:name) { should == 'missing_vertex' }
      after do
        graph.transaction(nesting: true) do
          graph.drop_index 'missing_vertex'
        end
      end
    end
  end
end
# A graph is its own #graph (routes delegate through this).
describe '#graph' do
  subject { graph.graph }
  it { should == graph }
end
# vertex_name is a settable display hook; reset afterwards to avoid leaking
# state into other examples.
describe '#vertex_name' do
  before { graph.vertex_name = :some_proc }
  subject { graph.vertex_name }
  it { should == :some_proc }
  after { graph.vertex_name = nil }
end
# edge_name mirrors vertex_name for edges.
describe '#edge_name' do
  before { graph.edge_name = :some_proc }
  subject { graph.edge_name }
  it { should == :some_proc }
  after { graph.edge_name = nil }
end
# GraphML import: populates an empty graph; refuses to clobber existing ids.
describe '#import' do
  it 'should load the data into an empty graph' do
    graph2.v.delete!
    graph2.v.count.should == 0
    Pacer::GraphML.import graph2, 'spec/data/pacer.graphml'
    # Fixture contains 7 vertices / 14 edges.
    graph2.v.count.should == 7
    graph2.e.count.should == 14
  end

  it 'should not load the data into a graph with conflicting vertex ids' do
    unless graph.features.ignoresSuppliedIds
      graph.create_vertex '0' unless graph.vertex '0'
      c = example.metadata[:graph_commit]
      c.call if c
      expect { Pacer::GraphML.import graph, 'spec/data/pacer.graphml' }.to raise_error(Pacer::ElementExists)
    end
  end
end
# GraphML round-trip: export this graph, wipe the second graph, re-import,
# and verify the element counts survive.
# (Removed a leftover debug `puts File.read ...` that dumped the whole
# exported file into the spec output on every run.)
describe '#export' do
  it 'should create a file that can be read back' do
    graph.v.count.should == 2
    graph.e.count.should == 2
    Pacer::GraphML.export graph, '/tmp/graph_mixin_spec_export.graphml'
    graph2.e.delete!
    graph2.v.delete!
    graph2.v.count.should == 0
    graph2.e.count.should == 0
    Pacer::GraphML.import graph2, '/tmp/graph_mixin_spec_export.graphml'
    graph2.v.count.should == 2
    graph2.e.count.should == 2
  end
end
# A fresh graph exposes no manual indices.
describe '#indices' do
  subject { graph.indices.to_a }
  it { should be_empty }
end
# element_type normalizes symbols, elements, other element_type results and
# index classes down to :vertex / :edge / :mixed / :object; anything else
# raises ArgumentError.
describe '#element_type' do
  context 'invalid' do
    it { expect { graph.element_type(:nothing) }.to raise_error(ArgumentError) }
  end
  context ':vertex' do
    subject { graph.element_type(:vertex) }
    it { should == :vertex }
  end
  context 'a vertex' do
    subject { graph.element_type(v0) }
    it { should == :vertex }
  end
  context ':edge' do
    subject { graph.element_type(:edge) }
    it { should == :edge }
  end
  context 'an edge' do
    subject { graph.element_type(e0) }
    it { should == :edge }
  end
  context ':mixed' do
    subject { graph.element_type(:mixed) }
    it { should == :mixed }
  end
  context ':object' do
    subject { graph.element_type(:object) }
    it { should == :object }
  end
  # Normalization must be idempotent.
  context 'from element_type' do
    context ':vertex' do
      subject { graph.element_type(graph.element_type :vertex) }
      it { should == :vertex }
    end
    context ':edge' do
      subject { graph.element_type(graph.element_type :edge) }
      it { should == :edge }
    end
    context ':mixed' do
      subject { graph.element_type(graph.element_type :mixed) }
      it { should == :mixed }
    end
    context ':object' do
      subject { graph.element_type(graph.element_type :object) }
      it { should == :object }
    end
  end
  context 'from index_class' do
    context ':vertex' do
      subject { graph.element_type(graph.index_class :vertex) }
      it { should == :vertex }
    end
  end
end
end
end
| 31.235426 | 112 | 0.583519 |
bff39e49809cb16fc152aee18cc22c22929dad71
| 997 |
require 'rails_helper'
# Request specs for session creation (login).
RSpec.describe 'Sessions', :type => :request do
  describe 'with correct credentials' do
    before {
      # Pin token generation so the "new token" expectation is deterministic.
      allow(SecureRandom).to receive(:hex).and_return('superRandom')
    }

    let(:password) { 'super-secret-123' }
    let(:user) { User.create(email: '[email protected]', password: password, time_zone: 'Berlin') }
    let(:endpoint) { '/sessions' }

    # NOTE(review): both examples remain skipped (xit). They previously
    # compared response.body (a String) against a Ruby Hash, which can never
    # be equal; compare against the serialized JSON instead. Also confirm the
    # `post` invocation matches this Rails version's signature (positional
    # body vs. keyword params) before re-enabling.
    xit 'responds with a newly generated access token' do
      post endpoint, { email: user.email, password: password }.to_json, headers: { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
      expect(response.body).to eq({ access_token: 'superRandom' }.to_json)
    end

    xit 'responds with an existing access token' do
      user.token = 'Foobarrr123'
      user.save!
      post endpoint, { email: user.email, password: password }.to_json, headers: { 'CONTENT_TYPE' => 'application/json', 'ACCEPT' => 'application/json' }
      expect(response.body).to eq({ access_token: 'Foobarrr123' }.to_json)
    end
  end
end
| 34.37931 | 152 | 0.668004 |
6a30b5497390fb74ad8566b83b8b8cbbecd97244
| 2,770 |
# Don't change this file!
# Configure your app in config/environment.rb and config/environments/*.rb
RAILS_ROOT = "#{File.dirname(__FILE__)}/.." unless defined?(RAILS_ROOT)
# Classic Rails 2.x boot: prefers a vendored Rails (vendor/rails) and falls
# back to the installed rails gem. Loaded from config/environment.rb.
module Rails
  class << self
    # Boot exactly once: run the preinitializer hook, then the chosen
    # boot strategy.
    def boot!
      unless booted?
        preinitialize
        pick_boot.run
      end
    end

    # Rails::Initializer only exists after a boot strategy has loaded it.
    def booted?
      defined? Rails::Initializer
    end

    def pick_boot
      (vendor_rails? ? VendorBoot : GemBoot).new
    end

    def vendor_rails?
      File.exist?("#{RAILS_ROOT}/vendor/rails")
    end

    # Run config/preinitializer.rb (if present) before anything else.
    def preinitialize
      load(preinitializer_path) if File.exist?(preinitializer_path)
    end

    def preinitializer_path
      "#{RAILS_ROOT}/config/preinitializer.rb"
    end
  end

  # Common boot behaviour; subclasses supply load_initializer.
  class Boot
    def run
      load_initializer
      Rails::Initializer.run(:set_load_path)
    end
  end

  # Boots from a vendored Rails checkout.
  class VendorBoot < Boot
    def load_initializer
      require "#{RAILS_ROOT}/vendor/rails/railties/lib/initializer"
      Rails::Initializer.run(:install_gem_spec_stubs)
    end
  end

  # Boots from the installed rails gem.
  class GemBoot < Boot
    def load_initializer
      self.class.load_rubygems
      load_rails_gem
      require 'initializer'
    end

    # Activate the rails gem, pinned to the configured version when set.
    def load_rails_gem
      if version = self.class.gem_version
        gem 'rails', version
      else
        gem 'rails'
      end
    rescue Gem::LoadError => load_error
      $stderr.puts %(Missing the Rails #{version} gem. Please `gem install -v=#{version} rails`, update your RAILS_GEM_VERSION setting in config/environment.rb for the Rails version you do have installed, or comment out RAILS_GEM_VERSION to use the latest version installed.)
      exit 1
    end

    class << self
      def rubygems_version
        Gem::RubyGemsVersion if defined? Gem::RubyGemsVersion
      end

      # Desired Rails version: constant, environment variable, or parsed
      # out of config/environment.rb.
      def gem_version
        if defined? RAILS_GEM_VERSION
          RAILS_GEM_VERSION
        elsif ENV.include?('RAILS_GEM_VERSION')
          ENV['RAILS_GEM_VERSION']
        else
          parse_gem_version(read_environment_rb)
        end
      end

      def load_rubygems
        # Fix: assign before the require so the rescue below can still report
        # the required version when RubyGems itself fails to load (it was
        # previously assigned after the require and interpolated as nil).
        min_version = '1.1.1'
        require 'rubygems'
        unless rubygems_version >= min_version
          $stderr.puts %Q(Rails requires RubyGems >= #{min_version} (you have #{rubygems_version}). Please `gem update --system` and try again.)
          exit 1
        end
      rescue LoadError
        $stderr.puts %Q(Rails requires RubyGems >= #{min_version}. Please install RubyGems and try again: http://rubygems.rubyforge.org)
        exit 1
      end

      # Extracts the version literal from a RAILS_GEM_VERSION assignment,
      # ignoring commented-out lines.
      def parse_gem_version(text)
        $1 if text =~ /^[^#]*RAILS_GEM_VERSION\s*=\s*["']([!~<>=]*\s*[\d.]+)["']/
      end

      private

      def read_environment_rb
        File.read("#{RAILS_ROOT}/config/environment.rb")
      end
    end
  end
end
# All that for this: actually boot the application.
Rails.boot!
| 25.181818 | 275 | 0.641516 |
f869609d43562e65bbf4f6ad31058d3a1180b478
| 1,182 |
#!/usr/bin/ruby
require 'mxx_ru/cpp'
require 'restinio/boost_helper.rb'
# Root Mxx_ru build target: forces C++14, sets global include paths and
# linker options, then aggregates the test/sample/benchmark sub-projects.
MxxRu::Cpp::composite_target( MxxRu::BUILD_ROOT ) {
  toolset.force_cpp14
  global_include_path "."
  global_include_path "clara" # It is necessary after merging
                              # PR#47: https://github.com/Stiffstream/restinio/pull/47

  if not $sanitizer_build
    if 'gcc' == toolset.name || 'clang' == toolset.name
      global_linker_option '-pthread'
      global_linker_option '-static-libstdc++'
      global_linker_option "-Wl,-rpath='$ORIGIN'"
    end

    # If there is local options file then use it.
    if FileTest.exist?( "local-build.rb" )
      required_prj "local-build.rb"
    else
      default_runtime_mode( MxxRu::Cpp::RUNTIME_RELEASE )
      MxxRu::enable_show_brief
      global_obj_placement MxxRu::Cpp::PrjAwareRuntimeSubdirObjPlacement.new(
        'target', MxxRu::Cpp::PrjAwareRuntimeSubdirObjPlacement::USE_COMPILER_ID )
    end
  end

  # MSVC builds need the detected Boost root added to the search paths.
  if "mswin" == toolset.tag( "target_os" ) && 'vc' == toolset.name && "" != RestinioBoostHelper.detect_boost_root
    RestinioBoostHelper.add_boost_root_path_msvc( self )
  end

  required_prj 'test/build_tests.rb'
  required_prj 'sample/build_samples.rb'
  required_prj 'benches/build_benches.rb'
}
| 29.55 | 112 | 0.742809 |
e97618107e8ed3370797938d19e325cb96d9f307
| 1,475 |
require 'spec_helper'
# Locking is not implemented in etcd v3.1.X
# Namespaced lock/unlock specs; skipped entirely on etcd < 3.2.0, which does
# not implement the lock service.
unless $instance.version < Gem::Version.new("3.2.0")
  describe Etcdv3::Namespace::Lock do
    let(:stub) { local_namespace_stub(Etcdv3::Namespace::Lock, 1, '/namespace/') }
    let(:lease_stub) { local_stub(Etcdv3::Lease, 1) }

    # NOTE: this was running duplicate tests against Etcdv3::Lock before, but it
    # doesn't work with Etcdv3::Namespace::Lock
    #
    # it_should_behave_like "a method with a GRPC timeout", described_class, :unlock, :unlock, 'foo'
    # it_should_behave_like "a method with a GRPC timeout", described_class, :lock, :lock, 'foo'

    describe '#lock' do
      it 'returns a response' do
        # Locks must be tied to a live lease.
        lease_id = lease_stub.lease_grant(10)['ID']
        expect(stub.lock('example1', lease_id)).to(
          be_an_instance_of(V3lockpb::LockResponse)
        )
      end

      it 'passes metadata correctly' do
        lease_id = lease_stub.lease_grant(10)['ID']
        stub = expect_metadata_passthrough_namespace(described_class, :lock, :lock, '/namespace/')
        stub.lock('example2', lease_id)
      end
    end

    describe '#unlock' do
      it 'returns a response' do
        expect(stub.unlock('example3')).to be_an_instance_of(V3lockpb::UnlockResponse)
      end

      it 'passes metadata correctly' do
        stub = expect_metadata_passthrough_namespace(described_class, :unlock, :unlock, '/namespace/')
        stub.unlock('example4')
      end
    end
  end
end
| 34.302326 | 102 | 0.667797 |
26534e96c1391d64161d26f0a5856c1aa05cf055
| 3,740 |
# Versioned (keg-only) Homebrew formula for OpenCV 3.x with contrib modules,
# Python 3 bindings, and both shared and static libraries installed.
class OpencvAT3 < Formula
  desc "Open source computer vision library"
  homepage "https://opencv.org/"
  url "https://github.com/opencv/opencv/archive/3.4.12.tar.gz"
  sha256 "c8919dfb5ead6be67534bf794cb0925534311f1cd5c6680f8164ad1813c88d13"
  license "BSD-3-Clause"

  bottle do
    sha256 "ddd575264b09a0c2082f37b8a07ce1782ef0b0e72d67245a6fbe177a3382938b" => :catalina
    sha256 "907fb90b36f3a4db48a278a7f06d258b778c29a614d074b42eac9de7db9e4e8c" => :mojave
    sha256 "4ea1d46fdcc64cf34583fd4a8f2d9eedbacd05b53cad79fc458701d857fbdd36" => :high_sierra
  end

  # Versioned formulae are never linked into the default prefix.
  keg_only :versioned_formula

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "ceres-solver"
  depends_on "eigen"
  depends_on "ffmpeg"
  depends_on "gflags"
  depends_on "glog"
  depends_on "jpeg"
  depends_on "libpng"
  depends_on "libtiff"
  depends_on "numpy"
  depends_on "openexr"
  depends_on "[email protected]"
  depends_on "tbb"

  # Extra (incl. non-free) modules, staged next to the main source tree.
  resource "contrib" do
    url "https://github.com/opencv/opencv_contrib/archive/3.4.12.tar.gz"
    sha256 "b207024589674dd2efc7c25740ef192ee4f3e0783e773e2d49a198c37e3e7570"
  end

  def install
    ENV.cxx11
    resource("contrib").stage buildpath/"opencv_contrib"

    # Reset PYTHONPATH, workaround for https://github.com/Homebrew/homebrew-science/pull/4885
    ENV.delete("PYTHONPATH")

    args = std_cmake_args + %W[
      -DCMAKE_OSX_DEPLOYMENT_TARGET=
      -DBUILD_JASPER=OFF
      -DBUILD_JPEG=ON
      -DBUILD_OPENEXR=OFF
      -DBUILD_PERF_TESTS=OFF
      -DBUILD_PNG=OFF
      -DBUILD_TESTS=OFF
      -DBUILD_TIFF=OFF
      -DBUILD_ZLIB=OFF
      -DBUILD_opencv_hdf=OFF
      -DBUILD_opencv_java=OFF
      -DBUILD_opencv_text=OFF
      -DOPENCV_ENABLE_NONFREE=ON
      -DOPENCV_EXTRA_MODULES_PATH=#{buildpath}/opencv_contrib/modules
      -DWITH_1394=OFF
      -DWITH_CUDA=OFF
      -DWITH_EIGEN=ON
      -DWITH_FFMPEG=ON
      -DWITH_GPHOTO2=OFF
      -DWITH_GSTREAMER=OFF
      -DWITH_JASPER=OFF
      -DWITH_OPENEXR=ON
      -DWITH_OPENGL=OFF
      -DWITH_QT=OFF
      -DWITH_TBB=ON
      -DWITH_VTK=OFF
      -DBUILD_opencv_python2=OFF
      -DBUILD_opencv_python3=ON
      -DPYTHON3_EXECUTABLE=#{Formula["[email protected]"].opt_bin}/python3
    ]

    # CPU dispatch is disabled so bottles also run on older machines.
    args << "-DENABLE_AVX=OFF" << "-DENABLE_AVX2=OFF"
    args << "-DENABLE_SSE41=OFF" << "-DENABLE_SSE42=OFF" unless MacOS.version.requires_sse42?

    mkdir "build" do
      # First pass: shared libraries.
      system "cmake", "..", *args
      # Scrub the shim paths that CMake baked into the embedded version string.
      if OS.mac?
        inreplace "modules/core/version_string.inc", "#{HOMEBREW_SHIMS_PATH}/mac/super/", ""
      else
        inreplace "modules/core/version_string.inc", "#{HOMEBREW_SHIMS_PATH}/linux/super/", ""
      end
      system "make"
      system "make", "install"

      # Second pass: static libraries only (installed manually below).
      system "make", "clean"
      system "cmake", "..", "-DBUILD_SHARED_LIBS=OFF", *args
      if OS.mac?
        inreplace "modules/core/version_string.inc", "#{HOMEBREW_SHIMS_PATH}/mac/super/", ""
      else
        inreplace "modules/core/version_string.inc", "#{HOMEBREW_SHIMS_PATH}/linux/super/", ""
      end
      system "make"
      lib.install Dir["lib/*.a"]
      lib.install Dir["3rdparty/**/*.a"]
    end
  end

  test do
    # C++ smoke test: compiled binary must report the formula version.
    (testpath/"test.cpp").write <<~EOS
      #include <opencv/cv.h>
      #include <iostream>

      int main() {
        std::cout << CV_VERSION << std::endl;
        return 0;
      }
    EOS
    system ENV.cxx, "test.cpp", "-I#{include}", "-L#{lib}", "-o", "test"
    assert_equal `./test`.strip, version.to_s

    # Python smoke test: cv2 bindings must import and report the version.
    py3_version = Language::Python.major_minor_version Formula["[email protected]"].opt_bin/"python3"
    ENV["PYTHONPATH"] = lib/"python#{py3_version}/site-packages"
    output = shell_output(Formula["[email protected]"].opt_bin/"python3 -c 'import cv2; print(cv2.__version__)'")
    assert_equal version.to_s, output.chomp
  end
end
| 31.428571 | 106 | 0.678342 |
f797a472c683939a2fcd05d63f39cc400117d216
| 1,465 |
module ActiveMerchant #:nodoc:
  module Shipping
    # Carrier-agnostic result of a shipment tracking query: overall status,
    # the chronological list of scan events, and origin/destination.
    class TrackingResponse < Response
      attr_reader :carrier                 # symbol
      attr_reader :carrier_name            # string
      attr_reader :status                  # symbol
      attr_reader :status_code             # string
      attr_reader :status_description      # string
      attr_reader :scheduled_delivery_date # time
      attr_reader :tracking_number         # string
      attr_reader :shipment_events         # array of ShipmentEvents in chronological order
      attr_reader :origin, :destination

      def initialize(success, message, params = {}, options = {})
        @carrier                 = options[:carrier].parameterize.to_sym
        @carrier_name            = options[:carrier]
        @status                  = options[:status]
        @status_code             = options[:status_code]
        @status_description      = options[:status_description]
        @scheduled_delivery_date = options[:scheduled_delivery_date]
        @tracking_number         = options[:tracking_number]
        @shipment_events         = Array(options[:shipment_events])
        @origin                  = options[:origin]
        @destination             = options[:destination]
        super
      end

      # Most recent scan event (events are kept in chronological order).
      def latest_event
        @shipment_events.last
      end

      def is_delivered?
        @status == :delivered
      end

      def has_exception?
        @status == :exception
      end

      alias_method :exception_event, :latest_event
      alias_method :delivered?,      :is_delivered?
      alias_method :exception?,      :has_exception?
    end
  end
end
| 31.170213 | 83 | 0.665529 |
394aa60d58f9b0f17a7a8dfe36d239ab1e79a384
| 239 |
# Creates the articles table: title, publication timestamp, body text, a
# free-form comment, plus non-null created_at/updated_at columns.
class CreateArticles < ActiveRecord::Migration
  def change
    create_table :articles do |t|
      t.string :title
      t.datetime :published_at
      t.text :body
      t.string :comment

      t.timestamps null: false
    end
  end
end
| 19.916667 | 46 | 0.656904 |
ed9757b17f6a089d508ae144ce032a680dbc62fe
| 1,404 |
require 'rubygems'
require 'sinatra'
require 'sinatra/reloader'
require 'sqlite3'
# Opens the SQLite database and configures rows to be returned as hashes.
# Fix: the method previously returned on its first line, so results_as_hash
# was never set and the remaining two lines were dead code.
def get_db
  db = SQLite3::Database.new 'base.db'
  db.results_as_hash = true
  db
end
# On application boot: make sure the Users (bookings) table exists.
configure do
  db = get_db
  db.execute ' CREATE TABLE IF NOT EXISTS
    "Users"
    (
    "Id" INTEGER,
    "Username" TEXT,
    "Phone" TEXT,
    "DateStamp" TEXT,
    "barber" TEXT,
    "color" TEXT,
    PRIMARY KEY("Id" AUTOINCREMENT) )'
  db.close
end
# Landing page.
get '/' do
  erb "Hello! <a href=\"https://github.com/bootstrap-ruby/sinatra-bootstrap\">Original</a> pattern has been modified for <a href=\"http://rubyschool.us/\">Ruby School</a>"
end

get '/about' do
  erb :about
end

# Booking form.
get '/visit' do
  erb :visit
end
# Handles the booking form: validates the three required fields, stores the
# booking, and echoes it back.
post '/visit' do
  @username = params[:username]
  @phone = params[:phone]
  @datetime = params[:datetime]
  @barber = params[:barber]
  @color = params[:color]

  # Per-field validation prompts (shown when the field is left blank).
  hh = { :username => 'Введите имя',
         :phone => 'Введите телефон',
         :datetime => 'Введите дату и время' }

  @error = hh.select { |key, _| params[key] == "" }.values.join(", ")

  if @error != ''
    return erb :visit
  end

  db = get_db
  # Fix: the bind array previously used @email/@option/@comment — instance
  # variables that are never assigned here — so DateStamp, barber and color
  # were always stored as NULL.
  db.execute 'INSERT INTO Users (Username, Phone, DateStamp, barber, color)
              VALUES (?, ?, ?, ?, ?)', [@username, @phone, @datetime, @barber, @color]
  db.close

  erb "OK, username is #{@username}, #{@phone}, #{@datetime}, #{@barber}, #{@color}"
end
# Listing of stored bookings (rendered by the showusers view).
get '/showusers' do
  erb :showusers
end
| 20.057143 | 173 | 0.626781 |
39621543d32e87a57a4bbdd0bb3eb109a79b0436
| 571 |
# Helpers mixed into request specs: auth-header construction and convenience
# accessors over the parsed (GraphQL) JSON response.
module RequestHelpers
  # Authorization header for the given user (defaults to the spec's +user+).
  def auth_headers(some_user = user)
    token = AuthToken.token(some_user)
    { 'Authorization' => token }
  end

  # The :errors array from the parsed response body.
  def errors
    json[:errors]
  end

  def first_error_message
    errors[0][:message]
  end

  # Response body parsed as JSON with symbolized keys.
  def json
    unless response
      raise 'Response is nil. Are you sure you made a request?'
    end
    JSON.parse(response.body, symbolize_names: true)
  end

  # Validates the query, then POSTs it to the GraphQL endpoint.
  def graphql_request(request, variables: {}, headers: nil)
    GraphqlUtils.validate!(request.to_s)
    payload = { query: request, variables: variables }
    post(graphql_path, params: payload, headers: headers)
  end
end
| 21.961538 | 90 | 0.711033 |
11307af0310cbe4048be1afeb3b406b45cf2f5e9
| 907 |
# Homebrew cask for Image2Icon (macOS icon generator).
cask 'image2icon' do
  version '2.7'
  sha256 '377399ee8f7fbb3fea16fa4da8bcd5cd6e9d7caf46d15e48212061eb627967ae'

  # sf-applications.s3.amazonaws.com was verified as official when first introduced to the cask
  url "https://sf-applications.s3.amazonaws.com/Image2Icon/app-releases/Image2icon#{version}.zip"
  name 'Image2Icon'
  homepage 'http://www.img2icnsapp.com/'

  depends_on macos: '>= :mavericks'

  app 'Image2Icon.app'

  # Leftover files removed by `brew uninstall --zap`.
  zap delete: [
                '~/Library/Caches/net.shinyfrog.image2icon',
                '~/Library/Preferences/net.shinyfrog.image2icon.plist',
                '~/Library/Containers/net.shinyfrog.image2icon',
                '~/Library/Containers/net.shinyfrog.image2icon.templateRenderer',
                '~/Library/Containers/net.shinyfrog.templateRenderer',
                '~/Library/Saved Application State/net.shinyfrog.image2icon.savedState',
              ]
end
| 39.434783 | 97 | 0.68688 |
28fc0fb8e3ba712771b696351cc0ab98ba8a7e57
| 2,362 |
# Copyright © 2011-2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
module NewsFeed
  # Scrapes the configured news page and extracts the latest posts using CSS
  # selectors stored in application Settings.
  class PageParser < NewsFeed::Base
    def initialize
      @post_selector  = Setting.get_value("news_feed_post_selector")
      @title_selector = Setting.get_value("news_feed_title_selector")
      @link_selector  = Setting.get_value("news_feed_link_selector")
      @date_selector  = Setting.get_value("news_feed_date_selector")
    end

    # Fetches and parses the feed page. NOTE(review): `open` with a URL and
    # :open_timeout presumably comes from open-uri required elsewhere — confirm.
    def get
      Nokogiri::HTML(open(NewsFeed::Base::BASE_URL, open_timeout: 5))
    end

    # Up to POST_LIMIT hashes with :title/:link/:date entries; returns []
    # when the page cannot be fetched within the timeout.
    def posts
      begin
        # Fix: `css` was called without a selector (raises ArgumentError);
        # the configured @post_selector was read in initialize but never used.
        self.get.css(@post_selector).take(NewsFeed::Base::POST_LIMIT).map do |article|
          {
            # NOTE(review): at_css returns a Nokogiri node (or nil), not its
            # text — confirm downstream consumers expect nodes.
            title: article.at_css(@title_selector),
            link: article.at_css(@link_selector),
            date: article.at_css(@date_selector)
          }
        end
      rescue Net::OpenTimeout
        []
      end
    end
  end
end
| 48.204082 | 146 | 0.742168 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.