hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
e285082b64b82573b8546528fcd1f4a8a1fb78bf | 869 | # frozen_string_literal: true
require 'feedjira'
require 'sanitize'
require_relative '../provider'
# Provider for Trello.
class Trello < Provider
STATUS_FEED_URL = 'https://trello.status.atlassian.com/history.atom'
def initialize
super
@icon = 'trello'
@name = 'Trello'
end
def latest_update
feed = Feedjira.parse(RestClient.get(STATUS_FEED_URL))
raise "Unable to retrieve Atom feed from #{STATUS_FEED_URL}" unless feed
return nil if feed.entries.empty?
latest = feed.entries.first
StatusFeedUpdate.new(id: latest.entry_id.partition('/').last,
timestamp: latest.published.to_datetime.to_s,
metadata: latest.title,
text: Sanitize.fragment(latest.content, whitespace_elements: WHITESPACE_ELEMENTS),
uri: latest.url)
end
end
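# Usage sketch: assumes RestClient, StatusFeedUpdate and WHITESPACE_ELEMENTS are
# supplied by the parent Provider class, since they are not defined in this file.
#
#   update = Trello.new.latest_update
#   puts "#{update.timestamp} #{update.metadata}" if update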
| 28.966667 | 107 | 0.660529 |
5d0f17985c99ac621c3a81f59828ae6b778c487e | 90 | # desc "Explaining what the task does"
# task :tabler_rails do
# # Task goes here
# end
| 18 | 38 | 0.688889 |
e8fc67acf0549463260eabff39146af11735b8f7 | 189 | require 'spec_helper'
require 'lib/gitlab/cycle_analytics/shared_stage_spec'
describe Gitlab::CycleAnalytics::CodeStage do
let(:stage_name) { :code }
it_behaves_like 'base stage'
end
| 21 | 54 | 0.78836 |
e840f1d4870351dd3a8bcf578379da06f4f4c253 | 8,509 | require 'zlib'
require 'rack/request'
require 'rack/response'
require 'rack/utils'
require 'time'
class GitHttp
class App
SERVICES = [
["POST", 'service_rpc', "(.*?)/git-upload-pack$", 'upload-pack'],
["POST", 'service_rpc', "(.*?)/git-receive-pack$", 'receive-pack'],
["GET", 'get_info_refs', "(.*?)/info/refs$"],
["GET", 'get_text_file', "(.*?)/HEAD$"],
["GET", 'get_text_file', "(.*?)/objects/info/alternates$"],
["GET", 'get_text_file', "(.*?)/objects/info/http-alternates$"],
["GET", 'get_info_packs', "(.*?)/objects/info/packs$"],
["GET", 'get_text_file', "(.*?)/objects/info/[^/]*$"],
["GET", 'get_loose_object', "(.*?)/objects/[0-9a-f]{2}/[0-9a-f]{38}$"],
["GET", 'get_pack_file', "(.*?)/objects/pack/pack-[0-9a-f]{40}\\.pack$"],
["GET", 'get_idx_file', "(.*?)/objects/pack/pack-[0-9a-f]{40}\\.idx$"],
]
def initialize(config = false)
set_config(config)
end
def set_config(config)
@config = config || {}
end
def set_config_setting(key, value)
@config[key] = value
end
def call(env)
@env = env
@req = Rack::Request.new(env)
return render_list_or_repos if @req.path_info == "" or @req.path_info == "/"
cmd, path, @reqfile, @rpc = match_routing
return render_method_not_allowed if cmd == 'not_allowed'
return render_not_found if !cmd
@dir = get_git_dir(path)
return render_not_found if !@dir
Dir.chdir(@dir) do
self.method(cmd).call()
end
end
# ---------------------------------
# actual command handling functions
# ---------------------------------
def service_rpc
return render_no_access if !has_access(@rpc, true)
input = read_body
@res = Rack::Response.new
@res.status = 200
@res["Content-Type"] = "application/x-git-%s-result" % @rpc
@res.finish do
command = git_command("#{@rpc} --stateless-rpc #{@dir}")
IO.popen(command, File::RDWR) do |pipe|
pipe.write(input)
while !pipe.eof?
block = pipe.read(8192) # 8 KB at a time
@res.write block # stream it to the client
end
end
end
end
def get_info_refs
service_name = get_service_type
if has_access(service_name)
cmd = git_command("#{service_name} --stateless-rpc --advertise-refs .")
refs = `#{cmd}`
@res = Rack::Response.new
@res.status = 200
@res["Content-Type"] = "application/x-git-%s-advertisement" % service_name
hdr_nocache
@res.write(pkt_write("# service=git-#{service_name}\n"))
@res.write(pkt_flush)
@res.write(refs)
@res.finish
else
dumb_info_refs
end
end
def dumb_info_refs
update_server_info
send_file(@reqfile, "text/plain; charset=utf-8") do
hdr_nocache
end
end
def get_info_packs
# objects/info/packs
send_file(@reqfile, "text/plain; charset=utf-8") do
hdr_nocache
end
end
def get_loose_object
send_file(@reqfile, "application/x-git-loose-object") do
hdr_cache_forever
end
end
def get_pack_file
send_file(@reqfile, "application/x-git-packed-objects") do
hdr_cache_forever
end
end
def get_idx_file
send_file(@reqfile, "application/x-git-packed-objects-toc") do
hdr_cache_forever
end
end
def get_text_file
send_file(@reqfile, "text/plain") do
hdr_nocache
end
end
# ------------------------
# logic helping functions
# ------------------------
F = ::File
# some of this borrowed from the Rack::File implementation
def send_file(reqfile, content_type)
reqfile = File.join(@dir, reqfile)
return render_not_found if !F.exists?(reqfile)
@res = Rack::Response.new
@res.status = 200
@res["Content-Type"] = content_type
@res["Last-Modified"] = F.mtime(reqfile).httpdate
yield
if size = F.size?(reqfile)
@res["Content-Length"] = size.to_s
@res.finish do
F.open(reqfile, "rb") do |file|
while part = file.read(8192)
@res.write part
end
end
end
else
body = [F.read(reqfile)]
size = Rack::Utils.bytesize(body.first)
@res["Content-Length"] = size
@res.write body
@res.finish
end
end
def get_git_dir(path)
root = @config[:project_root] || `pwd`
path = F.expand_path(File.join(root, path))
if File.exists?(path) # TODO: check is a valid git directory
return path
end
false
end
def get_service_type
service_type = @req.params['service']
return false if !service_type
return false if service_type[0, 4] != 'git-'
service_type.gsub('git-', '')
end
def match_routing
cmd = nil
path = nil
SERVICES.each do |method, handler, match, rpc|
if m = Regexp.new(match).match(@req.path_info)
return ['not_allowed'] if method != @req.request_method
cmd = handler
path = m[1]
file = @req.path_info.sub(path + '/', '')
return [cmd, path, file, rpc]
end
end
return nil
end
def has_access(rpc, check_content_type = false)
if check_content_type
return false if @req.content_type != "application/x-git-%s-request" % rpc
end
return false if !['upload-pack', 'receive-pack'].include? rpc
if rpc == 'receive-pack'
return @config[:receive_pack] if @config.include? :receive_pack
end
if rpc == 'upload-pack'
return @config[:upload_pack] if @config.include? :upload_pack
end
return get_config_setting(rpc)
end
def get_config_setting(service_name)
service_name = service_name.gsub('-', '')
setting = get_git_config("http.#{service_name}")
if service_name == 'uploadpack'
return setting != 'false'
else
return setting == 'true'
end
end
def get_git_config(config_name)
cmd = git_command("config #{config_name}")
`#{cmd}`.chomp
end
def read_body
if @env["HTTP_CONTENT_ENCODING"] =~ /gzip/
input = Zlib::GzipReader.new(@req.body).read
else
input = @req.body.read
end
end
def update_server_info
cmd = git_command("update-server-info")
`#{cmd}`
end
def git_command(command)
git_bin = @config[:git_path] || 'git'
raise "BAD GIT PATH: #{git_bin.inspect}" if `#{git_bin} --version`.chomp.empty?
command = "#{git_bin} #{command}"
command
end
# --------------------------------------
# HTTP error response handling functions
# --------------------------------------
PLAIN_TYPE = {"Content-Type" => "text/plain"}
def render_method_not_allowed
if @env['SERVER_PROTOCOL'] == "HTTP/1.1"
[405, PLAIN_TYPE, ["Method Not Allowed"]]
else
[400, PLAIN_TYPE, ["Bad Request"]]
end
end
def render_not_found
[404, PLAIN_TYPE, ["Not Found"]]
end
def render_no_access
[403, PLAIN_TYPE, ["Forbidden"]]
end
def render_list_or_repos
root = F.expand_path(@config[:project_root] || `pwd`)
repos = Dir[F.join(root,"/**/.git")].map { |repo|
repo_path = repo[root.size+1...-4].gsub(/\/$/,'')
repo_url = "#{@env['rack.url_scheme']}://#{@env['HTTP_HOST']}/#{repo_path}"
"git clone #{repo_url}"
}.join("\n")
[200, PLAIN_TYPE, ["REPOS (#{root}):\n#{repos}"]]
end
# ------------------------------
# packet-line handling functions
# ------------------------------
def pkt_flush
'0000'
end
def pkt_write(str)
(str.size + 4).to_s(16).rjust(4, '0') + str
end
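# Worked example: pkt_write("# service=git-upload-pack\n") prefixes the
# 26-byte payload with its total length including the 4-byte header
# (26 + 4 = 30 = 0x001e), returning "001e# service=git-upload-pack\n".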
# ------------------------
# header writing functions
# ------------------------
def hdr_nocache
@res["Expires"] = "Fri, 01 Jan 1980 00:00:00 GMT"
@res["Pragma"] = "no-cache"
@res["Cache-Control"] = "no-cache, max-age=0, must-revalidate"
end
def hdr_cache_forever
now = Time.now().to_i
@res["Date"] = now.to_s
@res["Expires"] = (now + 31536000).to_s;
@res["Cache-Control"] = "public, max-age=31536000";
end
end
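# Minimal rack-up sketch (config.ru); the require path and repository root are
# assumptions, not part of this file:
#
#   require_relative "git_http"
#   run GitHttp::App.new(:project_root => "/opt/git", :upload_pack => true)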
end | 26.757862 | 88 | 0.548126 |
f786081e9ae02146de97b4fb045e7fecc49dd22d | 8,584 | require 'forwardable'
module Laser
module Cutter
module Notching
class Shift < Struct.new(:delta, :direction, :dim_index)
def next_point_after point
p = point.clone
shift = []
shift[dim_index] = delta * direction
shift[(dim_index + 1) % 2] = 0
p.plus *shift
end
end
# Alternating iterator
class InfiniteIterator < Struct.new(:array)
attr_accessor :array, :next_index, :calls
def initialize(array)
self.array = array
self.calls = 0
self.next_index = 0
end
def next
item = self.array[next_index].clone
self.next_index += 1
self.next_index %= array.size
self.calls += 1
item = yield item, self.calls if block_given?
item
end
end
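# Worked example (hypothetical values): InfiniteIterator.new(%w[a b]).next
# returns "a", then "b", then wraps back to "a"; when a block is given, it
# receives the cloned item and the running call count, and its result is
# returned in place of the item.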
# One of the key "tricks" that this algorithm applies, is that it converts everything into
# pure set of lines in the end. It then tries to find all intersections of the lines so that
# we can remove duplicates. So any segment of any line that is covered by 2 lines or more is removed,
# cleared completely for an empty space. This turns out to be very useful indeed, because we can
# paint with wide brush strokes to get the carcass, and then fine tune it by adding or removing line
# segments. Some of the lines below are added to actually remove the lines that might have otherwise
# been there.
#
# This comes in especially handy when drawing corner boxes, which are deliberately made not to match the notch
# width, but to match the thickness of the material. The corner notches for these sides will therefore have
# length equal to the thickness + regular notch length.
class PathGenerator
extend ::Forwardable
%i(center_out thickness corners kerf kerf? notch_width first_notch_out? adjust_corners corners).each do |method_name|
def_delegator :@edge, method_name, method_name
end
attr_accessor :edge
# This class generates lines that zigzag between two lines: the outside line, and the
# inside line of a single edge. Edge class encapsulates both of them with additional
# properties.
def initialize(edge)
@edge = edge
end
# Calculates a notched path that flows between the outer edge of the box
# (outside_line) and inner (inside_line). Relative location of these lines
# also defines the direction and orientation of the box, and hence the notches.
#
# We always want to create a symmetric path that has a notch in the middle
# (for center_out = true) or dip in the middle (center_out = false)
def generate
shifts = define_shifts
vertices = []
lines = []
if corners
lines << corner_box_sides
end
point = starting_point
vertices << point
adjust_for_kerf(vertices,-1) if adjust_corners && !first_notch_out?
shifts.each do |shift|
point = shift.next_point_after point
vertices << point
end
adjust_for_kerf(vertices, 1) if adjust_corners && !first_notch_out?
lines << create_lines(vertices)
lines.flatten
end
def adjust_for_kerf(vertices, direction)
if kerf?
point = vertices.pop
point = corners ? point.plus(2 * direction * shift_vector(1)) : point
vertices << point
end
end
# These two boxes occupy the corners of the 3D box. They do not match
# in width to our notches because they are usually merged with them. Their
# size is equal to the thickness of the material (adjusted for kerf)
# It's just an aesthetic choice I guess.
def corner_box_sides
boxes = []
extra_lines = []
boxes << Geometry::Rect[edge.inside.p1.clone, edge.outside.p1.clone]
boxes << Geometry::Rect[edge.inside.p2.clone, edge.outside.p2.clone]
extra_lines << add_corners if adjust_corners && kerf?
sides = boxes.flatten.map(&:relocate!).map(&:sides)
sides << extra_lines if !extra_lines.empty?
sides.flatten
end
def shift_vector(index, dim_shift = 0)
shift = []
shift[(d_index_across + dim_shift) % 2] = 0
shift[(d_index_along + dim_shift) % 2] = kerf / 2.0 * edge.send("v#{index}".to_sym).[]((d_index_along + dim_shift) % 2)
Vector.[](*shift)
end
def starting_point
edge.inside.p1.clone # start
end
# 0 = X, 1 = Y
def d_index_along
(edge.inside.p1.x == edge.inside.p2.x) ? 1 : 0
end
def d_index_across
(d_index_along + 1) % 2
end
def direction_along
(edge.inside.p1.coords.[](d_index_along) < edge.inside.p2.coords.[](d_index_along)) ? 1 : -1
end
def direction_across
(edge.inside.p1.coords.[](d_index_across) < edge.outside.p1.coords.[](d_index_across)) ? 1 : -1
end
private
# Helper method to calculate dimensions of our corners.
def add_corners
k, direction, dim_index, edge_along, edge_across = if first_notch_out?
[2, -1, 1, :inside, :outside]
else
[-2, 1, 0, :outside, :inside]
end
v1 = direction * k * shift_vector(1, dim_index)
v2 = direction * k * shift_vector(2, dim_index)
r1 = define_corner_rect(:p1, v1, edge_along, edge_across)
r2 = define_corner_rect(:p2, v2, edge_along, edge_across)
lines = [r1, r2].map(&:sides).flatten
# Our clever algorithm automatically removes duplicate lines. These lines
# below are added to actually clear out this space and remove the existing
# lines that are already there.
lines << Geometry::Line[edge.inside.p1.plus(v1), edge.inside.p1.clone]
lines << Geometry::Line[edge.inside.p2.plus(v2), edge.inside.p2.clone]
lines
end
def define_corner_rect(point, delta, edge_along, edge_across)
p1 = edge.inside.send(point).plus(delta)
coords = []
coords[d_index_along] = edge.send(edge_along).send(point)[d_index_along]
coords[d_index_across] = edge.send(edge_across).send(point)[d_index_across]
p2 = Geometry::Point[*coords]
Geometry::Rect[p1, p2]
end
# This method has the bulk of the logic: we create the list of path deltas
# to be applied when we walk the edge next.
# @param [Object] shift
def define_shifts
along_iter = create_iterator_along
across_iter = create_iterator_across
shifts = []
inner = true # false when we are drawing outer notch, true when inner
if first_notch_out?
shifts << across_iter.next
inner = !inner
end
(1..edge.notch_count).to_a.each do |notch_number|
shifts << along_iter.next do |shift, index|
if inner && (notch_number > 1 && notch_number < edge.notch_count)
shift.delta -= kerf
elsif !inner
shift.delta += kerf
end
inner = !inner
shift
end
shifts << across_iter.next unless notch_number == edge.notch_count
end
shifts << across_iter.next if first_notch_out?
shifts
end
# As we draw notches, shifts define the 'delta' – movement from one point
# to the next. This method defines three types of movements we'll be doing:
# one alongside the edge, and two across (towards the box and outward from the box)
def create_iterator_along
InfiniteIterator.new([Shift.new(notch_width, direction_along, d_index_along)])
end
def create_iterator_across
InfiniteIterator.new([Shift.new(thickness, direction_across, d_index_across),
Shift.new(thickness, -direction_across, d_index_across)])
end
def create_lines(vertices)
lines = []
vertices.each_with_index do |v, i|
if v != vertices.last
lines << Geometry::Line.new(v, vertices[i+1])
end
end
lines.flatten
end
end
end
end
end
| 36.52766 | 129 | 0.598905 |
619669cf85dc3da1adc9c44568493c73fea46b3e | 89 | class TaskKeyResult < ActiveRecord::Base
belongs_to :task
belongs_to :key_result
end
| 17.8 | 40 | 0.797753 |
e956f8b7d1522415dec6c7ac20df96caadba6bb0 | 949 | # frozen_string_literal: true
class FastIgnore
class RuleGroup
def initialize(patterns, allow)
@matchers = Array(patterns).flat_map { |x| x.build_matchers(allow: allow) }.compact
@allow = allow
@allowed_recursive = { ::FastIgnore::Candidate.root.key => true }.compare_by_identity
end
def empty?
@matchers.empty? || @matchers.all?(&:empty?)
end
def weight
@matchers.sum(&:weight)
end
def freeze
@matchers.freeze
super
end
def allowed_recursive?(candidate)
@allowed_recursive.fetch(candidate.key) do
@allowed_recursive[candidate.key] =
allowed_recursive?(candidate.parent) &&
allowed_unrecursive?(candidate)
end
end
def allowed_unrecursive?(candidate)
@matchers.reverse_each do |matcher|
val = matcher.match?(candidate)
return val == :allow if val
end
not @allow
end
end
end
| 22.069767 | 91 | 0.636459 |
33105ea073566074eaea868d71e9b5f90751407a | 1,559 | module Arroyo
class Reader
def initialize(stream)
@stream = stream
end
# Public: Read at most `length` bytes from the stream, blocking only if it has no data immediately
# available. If the stream has any data available, this method does not block.
#
# length - the maximum number of bytes to read (optional; default is 16 KB)
#
# Returns String data from the stream.
# Raises EOFError if the end of the stream was previously reached.
def readpartial(length = 16.kilobytes, buffer = nil)
if chunk = @stream.readpartial(length)
# Hack: IO.copy_stream expects us to accept the second buffer arg, but
# HTTP::Connection#readpartial doesn't support it. Play make-believe.
if buffer.nil?
chunk
else
buffer.replace(chunk)
end
else
raise EOFError
end
end
# Public: Iterate over chunks of String data as they're read from the stream.
#
# length - the maximum number of bytes to read in each iteration (optional; default is 16 KB)
#
# Returns nothing.
def each(length = 16.kilobytes)
loop do
begin
chunk = readpartial(length)
rescue EOFError
break
end
yield chunk
end
end
# Public: Copy stream data to the given destination.
#
# destination - a File, IO, String path, or IO-like object responding to #write
#
# Returns nothing.
def copy_to(destination)
IO.copy_stream self, destination
end
end
end
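# Usage sketch (the connection object is an assumption; anything exposing an
# HTTP::Connection-style #readpartial works):
#
#   reader = Arroyo::Reader.new(connection)
#   reader.copy_to("download.bin") # streams chunks via IO.copy_stream and #readpartial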
| 28.345455 | 102 | 0.634381 |
b9fe937b206e3115c6768c88c4b5439649a9e04f | 2,907 | # rubocop:disable all
# This file is copied to spec/ when you run 'rails generate rspec:install'
require 'spec_helper'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../config/environment', __dir__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec', 'support', '**', '*.rb')].sort.each { |f| require f }
# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove these lines.
begin
ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => e
puts e.to_s.strip
exit 1
end
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# You can uncomment this line to turn off ActiveRecord support entirely.
# config.use_active_record = false
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, type: :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
end
| 44.045455 | 86 | 0.75129 |
b9719a54577fff141a6b3f79e16a169a246dfe27 | 483 | cask :v1 => 'vagrant' do
version '1.7.2'
sha256 '78d02afada2f066368bd0ce1883f900f89b6dc20f860463ce125e7cb295e347c'
# bintray.com is the official download host per the vendor homepage
url "https://dl.bintray.com/mitchellh/vagrant/vagrant_#{version}.dmg"
name 'Vagrant'
homepage 'http://www.vagrantup.com'
license :mit
pkg 'Vagrant.pkg'
uninstall :script => { :executable => 'uninstall.tool', :input => %w[Yes] },
:pkgutil => 'com.vagrant.vagrant'
end
| 30.1875 | 78 | 0.701863 |
18c34f6abd973719700be36d27239ba5df51caad | 708 | require "minitest_helper"
module Hermod
module Validators
describe RegularExpression do
subject do
RegularExpression.new(/\A[A-Z]{2} [0-9]{6} [A-D]\z/x)
end
it "allows values that match the pattern" do
subject.valid?("AB123456C", {}).must_equal true
end
it "allows blank values" do
subject.valid?("", {}).must_equal true
subject.valid?(nil, {}).must_equal true
end
it "raises an error for values that don't match the pattern" do
ex = proc { subject.valid?("fish", {}) }.must_raise InvalidInputError
ex.message.must_equal "\"fish\" does not match /\\A[A-Z]{2} [0-9]{6} [A-D]\\z/x"
end
end
end
end
| 27.230769 | 88 | 0.60452 |
18c2515396c0ced4cd706cb70fad7263f62deae0 | 5,706 | RSpec.describe StackMaster::Config do
subject(:loaded_config) { StackMaster::Config.load!('spec/fixtures/stack_master.yml') }
let(:base_dir) { File.expand_path('spec/fixtures') }
let(:myapp_vpc_definition) {
StackMaster::StackDefinition.new(
region: 'us-east-1',
region_alias: 'production',
stack_name: 'myapp-vpc',
template: 'myapp_vpc.json',
allowed_accounts: ["555555555"],
tags: { 'application' => 'my-awesome-blog', 'environment' => 'production' },
s3: { 'bucket' => 'my-bucket', 'region' => 'us-east-1' },
notification_arns: ['test_arn', 'test_arn_2'],
role_arn: 'test_service_role_arn2',
base_dir: base_dir,
stack_policy_file: 'my_policy.json',
additional_parameter_lookup_dirs: ['production']
)
}
let(:bad_yaml) { "a: b\n- c" }
describe ".load!" do
it "fails to load the config if no stack_master.yml in parent directories" do
expect { StackMaster::Config.load!('stack_master.yml') }.to raise_error Errno::ENOENT
end
it "raises exception on invalid yaml" do
begin
orig_dir = Dir.pwd
Dir.chdir './spec/fixtures/'
allow(File).to receive(:read).and_return(bad_yaml)
expect { StackMaster::Config.load!('stack_master.yml') }.to raise_error StackMaster::Config::ConfigParseError
ensure
Dir.chdir orig_dir
end
end
it "searches up the tree for stack master yaml" do
begin
orig_dir = Dir.pwd
Dir.chdir './spec/fixtures/templates'
expect(StackMaster::Config.load!('stack_master.yml')).to_not be_nil
ensure
Dir.chdir orig_dir
end
end
end
describe '#find_stack' do
it 'returns an object that can find stack definitions' do
stack = loaded_config.find_stack('us-east-1', 'myapp-vpc')
expect(stack).to eq(myapp_vpc_definition)
end
it 'can find things with underscores instead of hyphens' do
stack = loaded_config.find_stack('us_east_1', 'myapp_vpc')
expect(stack).to eq(myapp_vpc_definition)
end
end
describe '#filter' do
it 'returns a list of stack definitions' do
stack = loaded_config.filter('us-east-1', 'myapp-vpc')
expect(stack).to eq([myapp_vpc_definition])
end
it 'can filter by region only' do
stacks = loaded_config.filter('us-east-1')
expect(stacks.size).to eq 3
end
it 'can return all stack definitions with no filters' do
stacks = loaded_config.filter
expect(stacks.size).to eq 5
end
end
it 'exposes the base_dir' do
expect(loaded_config.base_dir).to eq base_dir
end
it 'loads stack defaults' do
expect(loaded_config.stack_defaults).to eq({
'allowed_accounts' => ["555555555"],
'tags' => { 'application' => 'my-awesome-blog' },
's3' => { 'bucket' => 'my-bucket', 'region' => 'us-east-1' }
})
end
it 'loads template compiler mappings' do
expect(loaded_config.template_compilers).to eq({
rb: :ruby_dsl,
json: :json,
yml: :yaml,
yaml: :yaml,
})
end
it 'loads region defaults' do
expect(loaded_config.region_defaults).to eq({
'us-east-1' => {
'tags' => { 'environment' => 'production' },
'role_arn' => 'test_service_role_arn',
'notification_arns' => ['test_arn'],
'stack_policy_file' => 'my_policy.json'
},
'ap-southeast-2' => {
'tags' => {'environment' => 'staging', 'test_override' => 1 },
'role_arn' => 'test_service_role_arn3',
'notification_arns' => ['test_arn_3'],
}
})
end
it 'loads region_aliases' do
expect(loaded_config.region_aliases).to eq(
'production' => 'us-east-1',
'staging' => 'ap-southeast-2'
)
end
it 'deep merges stack attributes' do
expect(loaded_config.find_stack('ap-southeast-2', 'myapp-vpc')).to eq(StackMaster::StackDefinition.new(
stack_name: 'myapp-vpc',
region: 'ap-southeast-2',
region_alias: 'staging',
allowed_accounts: ["555555555"],
tags: {
'application' => 'my-awesome-blog',
'environment' => 'staging',
'test_override' => 1
},
s3: { 'bucket' => 'my-bucket', 'region' => 'us-east-1' },
role_arn: 'test_service_role_arn4',
notification_arns: ['test_arn_3', 'test_arn_4'],
template: 'myapp_vpc.rb',
base_dir: base_dir,
additional_parameter_lookup_dirs: ['staging']
))
expect(loaded_config.find_stack('ap-southeast-2', 'myapp-web')).to eq(StackMaster::StackDefinition.new(
stack_name: 'myapp-web',
region: 'ap-southeast-2',
region_alias: 'staging',
allowed_accounts: ["1234567890", "9876543210"],
tags: {
'application' => 'my-awesome-blog',
'environment' => 'staging',
'test_override' => 2
},
s3: { 'bucket' => 'my-bucket', 'region' => 'us-east-1' },
role_arn: 'test_service_role_arn3',
notification_arns: ['test_arn_3'],
template: 'myapp_web',
base_dir: base_dir,
additional_parameter_lookup_dirs: ['staging']
))
end
it 'allows region aliases in region defaults' do
config = StackMaster::Config.new({'region_aliases' => { 'production' => 'us-east-1' }, 'region_defaults' => { 'production' => { 'secret_file' => 'production.yml.gpg' }}, 'stacks' => {}}, '/base')
expect(config.region_defaults).to eq('us-east-1' => { 'secret_file' => 'production.yml.gpg' })
end
end
| 34.581818 | 199 | 0.597967 |
115539460c51037bbbec69ac7c48778b49e5a0cc | 127 | class RemoveEstateFromBlocks < ActiveRecord::Migration[4.2]
def change
remove_column :blocks, :estate, :string
end
end
| 21.166667 | 59 | 0.755906 |
e9cf154942f7a72197a32996b42309db696d4a3c | 1,114 | Gem::Specification.new do |s|
s.name = %q{factory_girl_rails}
s.version = '4.2.1'
s.authors = ["Joe Ferris"]
s.email = %q{[email protected]}
s.homepage = "http://github.com/thoughtbot/factory_girl_rails"
s.summary = %q{factory_girl_rails provides integration between
factory_girl and rails 3}
s.description = %q{factory_girl_rails provides integration between
factory_girl and rails 3 (currently just automatic factory definition
loading)}
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- Appraisals {spec,features,gemfiles}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
s.add_runtime_dependency('railties', '>= 3.0.0')
s.add_runtime_dependency('factory_girl', '~> 4.2.0')
s.add_development_dependency('appraisal', '~> 0.5.0')
s.add_development_dependency('rake')
s.add_development_dependency('rspec', '~> 2.11.0')
s.add_development_dependency('cucumber', '~> 1.2.1')
s.add_development_dependency('aruba', '~> 0.5.1')
end
| 41.259259 | 87 | 0.670557 |
bbee10a67aae436e006b12ad09ab659f06237b21 | 224 | class AddHomeButtonHomingToWebAppConfigs < ActiveRecord::Migration[5.1]
def change
add_column :web_app_configs,
:home_button_homing,
:boolean,
default: false
end
end
| 22.4 | 71 | 0.620536 |
8713f882babbc6bc30931efca4ea87b350dc8752 | 409 | class MoveTwitterToUser < ActiveRecord::Migration[6.0]
def change
add_column :users, :twitter_username, :string
Player.where.not(twitter_username: [nil, '']).each do |player|
next if player.user_id.nil?
User.where(
id: player.user_id
).update_all(
twitter_username: player['twitter_username']
)
end
remove_column :players, :twitter_username
end
end
| 27.266667 | 66 | 0.672372 |
1c65ff5e42c366ef8c1f32df30001cd83703033b | 140 | FactoryBot.define do
factory :context_option_value, class: Spree::ContextOptionValue do
option_value {create(:option_value)}
end
end | 28 | 68 | 0.792857 |
d5b7772742988332a1e95538881e73ac1e800dc7 | 3,933 | module CoronavirusContentItemHelper
CORONAVIRUS_TAXON_PATH = "/coronavirus-taxons".freeze
def coronavirus_landing_page_content_item
load_content_item("coronavirus_landing_page.json")
end
def load_content_item(file_name)
json = File.read(
Rails.root.join("spec/fixtures/content_store/", file_name),
)
JSON.parse(json)
end
def business_content_item_fixture
load_content_item("business_support_page.json")
end
def education_content_item_fixture
load_content_item("coronavirus_education_page.json")
end
def stub_coronavirus_statistics
body = { data: [{ "date" => "2021-03-18",
"cumulativeVaccinations" => 25_000_000,
"hospitalAdmissions" => 1000,
"newPositiveTests" => 5000 }] }
stub_request(:get, /coronavirus.data.gov.uk/).to_return(status: 200, body: body.to_json)
end
def random_landing_page
GovukSchemas::RandomExample.for_schema(frontend_schema: "coronavirus_landing_page") do |item|
yield(item)
end
end
def coronavirus_content_item
random_landing_page do |item|
item.merge(coronavirus_landing_page_content_item)
end
end
def coronavirus_content_item_with_livestream_disabled
content_item = coronavirus_content_item
content_item["details"]["live_stream"].delete("show_live_stream")
content_item
end
def coronavirus_content_item_with_live_stream_time
content_item = coronavirus_content_item
content_item["details"]["live_stream"]["time"] = "5:00pm"
content_item
end
def content_item_with_ask_a_question_disabled
content_item = coronavirus_content_item
content_item["details"]["live_stream"]["ask_a_question_visible"] = false
content_item
end
def content_item_with_popular_questions_link_disabled
content_item = coronavirus_content_item
content_item["details"]["live_stream"]["popular_questions_link_visible"] = false
content_item
end
def coronavirus_content_item_with_risk_level_element_enabled
content_item = coronavirus_content_item
content_item["details"]["risk_level"]["show_risk_level_section"] = true
content_item
end
def business_content_item
random_landing_page do |item|
item.merge(business_content_item_fixture)
end
end
def random_taxon_page
GovukSchemas::RandomExample.for_schema(frontend_schema: "taxon") do |item|
yield(item) if block_given?
item["phase"] = "live"
item
end
end
def business_taxon_content_item
random_taxon_page do |item|
item["content_id"] = TaxonsRedirectionController::HUB_PAGE_FROM_CONTENT_ID.key("/coronavirus/business-support")
item
end
end
def business_subtaxon_content_item
stubbed_business_taxon = business_taxon_content_item.tap do |item|
item["links"] = {}
end
random_taxon_page do |item|
item["links"]["parent_taxons"] = [stubbed_business_taxon]
item
end
end
def other_subtaxon_item
random_linked_taxon = random_taxon_page do |item|
item["links"] = {}
end
random_taxon_page do |item|
item["links"]["parent_taxons"] = [random_linked_taxon]
item
end
end
def education_content_item
random_landing_page do |item|
item.merge(education_content_item_fixture)
end
end
def coronavirus_root_taxon_content_item
GovukSchemas::Example.find("taxon", example_name: "taxon").tap do |item|
item["base_path"] = "/coronavirus-taxon"
end
end
def coronavirus_taxon_one
GovukSchemas::Example.find("taxon", example_name: "taxon").tap do |item|
item["links"]["parent_taxons"] = [coronavirus_root_taxon_content_item]
item["links"]["ordered_related_items"] =
[
GovukSchemas::Example.find("guide", example_name: "guide"),
GovukSchemas::Example.find("news_article", example_name: "news_article"),
]
end
end
end
| 28.092857 | 117 | 0.720315 |
5d931e9245e4e649fdc78a3de9d2a0ce311db598 | 296 | # set @scammable before using these shared examples
describe 'Scammable with scams not loaded', :shared => true do
it 'should not attempt to save scams on save' do
@scammable.scam_names.each do |scam_name|
@scammable.should_not_receive(scam_name)
end
@scammable.save
end
end | 32.888889 | 62 | 0.736486 |
010ed5c4995c4d575662976db871faf2a1968a93 | 1,108 | module Itest5ch
class BoardListPage
include HttpMethods
BOARDS_URL = "http://itest.5ch.net/".freeze
# Get all boards
#
# @return [Hash<String, Array<Itest5ch::Board>>] key: category name, value: boards
def all
doc = Hpricot(get_html(BOARDS_URL))
doc.search("//div[@id='bbsmenu']//ul[@class='pure-menu-list']").
reject {|ul| ul["id"] == "history" }.each_with_object({}) do |ul, categories|
category_name = ul.at("/li[@class='pure-menu-item pure-menu-selected']").inner_text.strip
categories[category_name] = get_boards(ul)
end
end
private
def get_boards(ul)
ul.search("/li").select {|li| board_element?(li) }.each_with_object([]) do |li, boards|
url = URI.join(BOARDS_URL, li.at("/a")["href"]).to_s
name = li.inner_text.strip
boards << Board.new(url, name: name)
end
end
def board_element?(li)
return false unless li["class"].include?("pure-menu-item")
return false if li["class"].include?("pure-menu-selected")
true
end
end
end
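# Usage sketch (performs a live HTTP request; category and board names are
# whatever the site returns):
#
#   Itest5ch::BoardListPage.new.all.each do |category, boards|
#     puts "#{category}: #{boards.size} boards"
#   end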
| 28.410256 | 97 | 0.601986 |
21409356e00a8046af87c6ba45531d5a199f39a5 | 4,114 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
Logger.new(STDOUT).info 'Start Organisations seed'
100.times do
Organisation.create(
name: Faker::Company.name,
address: Faker::Address.street_address,
postcode: Faker::Address.zip_code,
email: Faker::Internet.email,
description: Faker::TvShows::DrWho.quote,
website: Faker::Internet.url,
telephone: Faker::PhoneNumber.phone_number,
latitude: rand(51.546702..51.6247775).round(7),
longitude: rand(-0.4476553..-0.2687842).round(7),
gmaps: Faker::Boolean.boolean,
donation_info: Faker::Lorem.paragraph,
publish_address: Faker::Boolean.boolean,
publish_phone: Faker::Boolean.boolean,
publish_email: Faker::Boolean.boolean,
type: 'Organisation',
non_profit: Faker::Boolean.boolean
)
end
organisations = Organisation.all
Logger.new(STDOUT).info 'Events seed'
organisations.each do |organisation|
event_day = Date.today + rand(30)
organisation.events.create(
title: Faker::Book.title,
description: Faker::Quote.robin,
start_date: event_day,
end_date: event_day + rand(1..5).hours,
latitude: organisation.latitude,
longitude: organisation.longitude
)
end
Logger.new(STDOUT).info 'Services Seed'
5.times do
Service.create(
contact_id: Faker::Internet.email,
name: Faker::Company.name,
description: Faker::Lorem.sentences,
email: Faker::Internet.email,
telephone: Faker::PhoneNumber.phone_number,
website: Faker::Internet.url,
latitude: rand(51.546702..51.6247775).round(7),
longitude: rand(-0.4476553..-0.2687842).round(7),
address: Faker::Address.street_address,
)
end
Logger.new(STDOUT).info 'Start Users seed'
User.create(
email: '[email protected]',
password: 'asdf1234',
password_confirmation: 'asdf1234',
confirmed_at: DateTime.now,
superadmin: true
)
User.create(
email: '[email protected]',
password: 'asdf1234',
password_confirmation: 'asdf1234',
confirmed_at: DateTime.now,
superadmin: true
)
120.times do |i|
user = User.where(email: "user#{i}@example.com").first_or_initialize
user.password = "asdf1234"
user.password_confirmation = "asdf1234"
user.confirmed_at = DateTime.now
user.organisation_id = Organisation.all.sample.id
user.save!
end
user_ids = User.pluck(:id)
Logger.new(STDOUT).info 'Proposed Organisation Edits seed'
organisations.each do |organisation|
organisation.edits.create(
name: Faker::Company.name,
address: Faker::Address.full_address,
postcode: Faker::Address.postcode,
email: Faker::Internet.email,
description: Faker::Superhero.descriptor,
website: Faker::Internet.url,
telephone: Faker::PhoneNumber.phone_number,
donation_info: Faker::Superhero.power,
user_id: user_ids.shuffle.first,
accepted: Faker::Boolean.boolean,
archived: Faker::Boolean.boolean
)
end
Logger.new(STDOUT).info 'Start VolunteerOps seed'
3.times do |n|
VolunteerOp.create(description: "This is a test#{n}", title: "Test#{n}", organisation_id: "#{1 + n}")
end
Logger.new(STDOUT).info 'Start features seed'
Feature.create(name: 'volunteer_ops_create')
Feature.activate('volunteer_ops_create')
Feature.create(name: 'volunteer_ops_list')
Feature.activate('volunteer_ops_list')
Feature.create(name: 'automated_propose_org')
Feature.activate('automated_propose_org')
Feature.create(name: 'search_input_bar_on_org_pages')
Feature.activate('search_input_bar_on_org_pages')
Feature.create(name: 'doit_volunteer_opportunities')
Feature.activate('doit_volunteer_opportunities')
Feature.create(name: 'reachskills_volunteer_opportunities')
Feature.activate('reachskills_volunteer_opportunities')
Feature.create(name: 'events')
Feature.activate('events')
Feature.create(name: 'services')
Logger.new(STDOUT).info 'Seed completed'
| 32.393701 | 103 | 0.739183 |
39300b5e8f02dfcdaff084867bdadfc20954d0b8 | 1,349 | class FavoritesController < ApplicationController
before_action :redirect_if_not_logged_in, only: [:index, :new, :create, :show, :edit, :update, :destroy]
before_action :set_favorite, only: [:show, :update, :edit, :destroy]
def new
@favorite = Favorite.new
end
def create
@favorite = current_user.favorites.build(favorite_params)
if @favorite.save
redirect_to favorite_path(@favorite)
else
render :new
end
end
def index
if current_user.favorites.empty?
flash[:message] = "You don't have any list yet why don't you make one"
redirect_to new_favorite_path
else
@favorites=current_user.favorites
end
end
def edit
end
def update
@favorite.update(favorite_params)
if @favorite.valid?
redirect_to favorite_path(@favorite)
else
flash[:message] = "Unable to update"
render :edit
end
end
def show
end
def destroy
@favorite.destroy
redirect_to favorites_path
end
private
def set_favorite
@favorite=Favorite.find_by(id: params[:id])
end
def favorite_params
params.require(:favorite).permit(:name, :user_id, post_ids: [])
end
end
| 22.864407 | 108 | 0.599703 |
39454099e2d8099c6e4696565986065915485318 | 9,082 | # coding: utf-8
module Fastlane
module Actions
module SharedValues
end
class UpdateProjectProvisioningAction < Action
ROOT_CERTIFICATE_URL = "https://www.apple.com/appleca/AppleIncRootCertificate.cer"
def self.run(params)
UI.message("You’re updating provisioning profiles directly in your project, but have you considered easier ways to do code signing?")
UI.message("https://docs.fastlane.tools/codesigning/GettingStarted/")
# assign folder from parameter or search for xcodeproj file
folder = params[:xcodeproj] || Dir["*.xcodeproj"].first
# validate folder
project_file_path = File.join(folder, "project.pbxproj")
UI.user_error!("Could not find path to project config '#{project_file_path}'. Pass the path to your project (not workspace)!") unless File.exist?(project_file_path)
# download certificate
unless File.exist?(params[:certificate])
UI.message("Downloading root certificate from (#{ROOT_CERTIFICATE_URL}) to path '#{params[:certificate]}'")
require 'open-uri'
File.open(params[:certificate], "w:ASCII-8BIT") do |file|
file.write(open(ROOT_CERTIFICATE_URL, "rb").read)
end
end
# parsing mobileprovision file
UI.message("Parsing mobile provisioning profile from '#{params[:profile]}'")
profile = File.read(params[:profile])
p7 = OpenSSL::PKCS7.new(profile)
store = OpenSSL::X509::Store.new
UI.user_error!("Could not find valid certificate at '#{params[:certificate]}'") unless File.size(params[:certificate]) > 0
cert = OpenSSL::X509::Certificate.new(File.read(params[:certificate]))
store.add_cert(cert)
p7.verify([cert], store)
data = Plist.parse_xml(p7.data)
target_filter = params[:target_filter] || params[:build_configuration_filter]
configuration = params[:build_configuration]
code_signing_identity = params[:code_signing_identity]
# manipulate project file
UI.success("Going to update project '#{folder}' with UUID")
require 'xcodeproj'
project = Xcodeproj::Project.open(folder)
project.targets.each do |target|
if !target_filter || target.name.match(target_filter) || (target.respond_to?(:product_type) && target.product_type.match(target_filter))
UI.success("Updating target #{target.name}...")
else
UI.important("Skipping target #{target.name} as it doesn't match the filter '#{target_filter}'")
next
end
target.build_configuration_list.build_configurations.each do |build_configuration|
config_name = build_configuration.name
if !configuration || config_name.match(configuration)
UI.success("Updating configuration #{config_name}...")
else
UI.important("Skipping configuration #{config_name} as it doesn't match the filter '#{configuration}'")
next
end
if code_signing_identity
codesign_build_settings_keys = build_configuration.build_settings.keys.select { |key| key.to_s.match(/CODE_SIGN_IDENTITY.*/) }
codesign_build_settings_keys.each do |setting|
build_configuration.build_settings[setting] = code_signing_identity
end
end
build_configuration.build_settings["PROVISIONING_PROFILE"] = data["UUID"]
build_configuration.build_settings["PROVISIONING_PROFILE_SPECIFIER"] = data["Name"]
end
end
project.save
# complete
UI.success("Successfully updated project settings in '#{params[:xcodeproj]}'")
end
def self.description
"Update projects code signing settings from your provisioning profile"
end
def self.details
[
"You should check out the code signing guide before using this action: https://docs.fastlane.tools/codesigning/getting-started/",
"This action retrieves a provisioning profile UUID from a provisioning profile (.mobileprovision) to set",
"up the xcode projects' code signing settings in *.xcodeproj/project.pbxproj",
"The `target_filter` value can be used to only update code signing for specified targets",
"The `build_configuration` value can be used to only update code signing for specified build configurations of the targets passing through the `target_filter`",
"Example Usage is the WatchKit Extension or WatchKit App, where you need separate provisioning profiles",
"Example: `update_project_provisioning(xcodeproj: \"..\", target_filter: \".*WatchKit App.*\")"
].join("\n")
end
def self.available_options
[
FastlaneCore::ConfigItem.new(key: :xcodeproj,
env_name: "FL_PROJECT_PROVISIONING_PROJECT_PATH",
description: "Path to your Xcode project",
optional: true,
verify_block: proc do |value|
UI.user_error!("Path to xcode project is invalid") unless File.exist?(value)
end),
FastlaneCore::ConfigItem.new(key: :profile,
env_name: "FL_PROJECT_PROVISIONING_PROFILE_FILE",
description: "Path to provisioning profile (.mobileprovision)",
default_value: Actions.lane_context[SharedValues::SIGH_PROFILE_PATH],
default_value_dynamic: true,
verify_block: proc do |value|
UI.user_error!("Path to provisioning profile is invalid") unless File.exist?(value)
end),
FastlaneCore::ConfigItem.new(key: :target_filter,
env_name: "FL_PROJECT_PROVISIONING_PROFILE_TARGET_FILTER",
description: "A filter for the target name. Use a standard regex",
optional: true,
is_string: false,
verify_block: proc do |value|
UI.user_error!("target_filter should be Regexp or String") unless [Regexp, String].any? { |type| value.kind_of?(type) }
end),
FastlaneCore::ConfigItem.new(key: :build_configuration_filter,
env_name: "FL_PROJECT_PROVISIONING_PROFILE_FILTER",
description: "Legacy option, use 'target_filter' instead",
optional: true),
FastlaneCore::ConfigItem.new(key: :build_configuration,
env_name: "FL_PROJECT_PROVISIONING_PROFILE_BUILD_CONFIGURATION",
description: "A filter for the build configuration name. Use a standard regex. Applied to all configurations if not specified",
optional: true,
is_string: false,
verify_block: proc do |value|
UI.user_error!("build_configuration should be Regexp or String") unless [Regexp, String].any? { |type| value.kind_of?(type) }
end),
FastlaneCore::ConfigItem.new(key: :certificate,
env_name: "FL_PROJECT_PROVISIONING_CERTIFICATE_PATH",
description: "Path to apple root certificate",
default_value: "/tmp/AppleIncRootCertificate.cer"),
FastlaneCore::ConfigItem.new(key: :code_signing_identity,
env_name: "FL_PROJECT_PROVISIONING_CODE_SIGN_IDENTITY",
description: "Code sign identity for build configuration",
optional: true)
]
end
def self.authors
["tobiasstrebitzer", "czechboy0"]
end
def self.is_supported?(platform)
[:ios, :mac].include?(platform)
end
def self.example_code
[
'update_project_provisioning(
xcodeproj: "Project.xcodeproj",
profile: "./watch_app_store.mobileprovision", # optional if you use sigh
target_filter: ".*WatchKit Extension.*", # matches name or type of a target
build_configuration: "Release",
code_signing_identity: "iPhone Development" # optionally specify the codesigning identity
)'
]
end
def self.category
:code_signing
end
end
end
end
| 51.897143 | 172 | 0.580929 |
113f8f95c579a0da7db090804f622801b819b1cb | 306 | class AddUserIdFieldToProducts < ActiveRecord::Migration
def up
add_column :spree_products, :user_id, :integer, :default => 0, :null => false
add_index "spree_products", ["user_id"], :name => "index_spree_orders_on_user_id"
end
def down
remove_column :spree_products, :user_id
end
end
| 27.818182 | 85 | 0.728758 |
0301a959a60c9be4ee19068ba5894148508acee8 | 4,093 | class Offlineimap < Formula
desc "Synchronizes emails between two repositories"
homepage "https://www.offlineimap.org/"
url "https://files.pythonhosted.org/packages/09/12/73db8d38fea8ec3536cbccb8286b46b426639aff7e166840fa5e68e889e2/offlineimap-7.3.4.tar.gz"
sha256 "5dbd7167b8729d87caa50bed63562868b6634b888348d9bc088a721530c82fef"
license "GPL-2.0-or-later"
head "https://github.com/OfflineIMAP/offlineimap.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "ddd56697d3c6e9caf9ce43cb18b4c8c5e2b71dda041363be7b3f02487700edd0"
sha256 cellar: :any_skip_relocation, big_sur: "022f1f1fb23f151854e050d510398d0c156d71fdb718ac32c5f7061152732b92"
sha256 cellar: :any_skip_relocation, catalina: "022f1f1fb23f151854e050d510398d0c156d71fdb718ac32c5f7061152732b92"
sha256 cellar: :any_skip_relocation, mojave: "8bad1b2782ecd2d85bb388c616d57ad98f10886384711dbf36447269d076f0d9"
end
depends_on :macos # Due to Python 2 (Will never support Python 3)
# https://github.com/OfflineIMAP/offlineimap/issues/616#issuecomment-491003691
uses_from_macos "libxml2"
uses_from_macos "libxslt"
resource "rfc6555" do
url "https://files.pythonhosted.org/packages/58/a8/1dfba2db1f744657065562386069e547eefea9432d3f520d4af5b5fabd28/rfc6555-0.0.0.tar.gz"
sha256 "191cbba0315b53654155321e56a93466f42cd0a474b4f341df4d03264dcb5217"
end
resource "selectors2" do
url "https://files.pythonhosted.org/packages/86/72/27ccb21c1ff9fa87e1ba45e38045722b4eff345ba61760224793560638f4/selectors2-2.0.2.tar.gz"
sha256 "1f1bbaac203a23fbc851dc1b5a6e92c50698cc8cefa5873eb5b89eef53d1d82b"
end
resource "six" do
url "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz"
sha256 "1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"
end
def install
ENV.delete("PYTHONPATH")
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
resources.each do |r|
r.stage do
system "python", *Language::Python.setup_install_args(libexec/"vendor")
end
end
# Remove hardcoded python2 that does not exist on high-sierra or mojave
inreplace "Makefile", "python2", "python"
inreplace "bin/offlineimap", "python2", "python"
etc.install "offlineimap.conf", "offlineimap.conf.minimal"
libexec.install "bin/offlineimap" => "offlineimap.py"
libexec.install "offlineimap"
(bin/"offlineimap").write_env_script(libexec/"offlineimap.py",
PYTHONPATH: ENV["PYTHONPATH"])
end
def caveats
<<~EOS
To get started, copy one of these configurations to ~/.offlineimaprc:
* minimal configuration:
cp -n #{etc}/offlineimap.conf.minimal ~/.offlineimaprc
* advanced configuration:
cp -n #{etc}/offlineimap.conf ~/.offlineimaprc
EOS
end
plist_options manual: "offlineimap"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>EnvironmentVariables</key>
<dict>
<key>PATH</key>
<string>/usr/bin:/bin:/usr/sbin:/sbin:#{HOMEBREW_PREFIX}/bin</string>
</dict>
<key>KeepAlive</key>
<false/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/offlineimap</string>
<string>-q</string>
<string>-u</string>
<string>basic</string>
</array>
<key>StartInterval</key>
<integer>300</integer>
<key>RunAtLoad</key>
<true />
<key>StandardErrorPath</key>
<string>/dev/null</string>
<key>StandardOutPath</key>
<string>/dev/null</string>
</dict>
</plist>
EOS
end
test do
system bin/"offlineimap", "--version"
end
end
| 37.550459 | 140 | 0.696555 |
7ab78b3a15fe3bea8a630c0dff03ccd848a86eef | 244 | class AddTransactionIdToSpreePaypalExpressCheckouts < SolidusSupport::Migration[4.2]
def change
add_column :spree_paypal_express_checkouts, :transaction_id, :string
add_index :spree_paypal_express_checkouts, :transaction_id
end
end
| 34.857143 | 84 | 0.831967 |
62ca14eb9983e3c00460cdc82748469972670330 | 451 | class AddPaymentStatusToOrderItem < ActiveRecord::Migration
class OrderItem < ActiveRecord::Base
has_one :order
end
class Order < ActiveRecord::Base
has_many :order_items
end
def up
add_column :order_items, :payment_status, :string, default: 'unpaid'
Order.all.each do |o|
o.order_items.update_all(payment_status: o.payment_status)
end
end
def down
remove_column :order_items, :payment_status
end
end
| 20.5 | 72 | 0.727273 |
bf96439602f73fe3cabd00a9f42cdfda3a138b62 | 3,444 | # frozen_string_literal: true
require File.join(File.expand_path('../../../../test', __dir__), 'test_helper')
module MesscadaApp
class TestCartonVerification < MiniTestWithHooks
include Crossbeams::Responses
include CartonFactory
include PalletFactory
include ProductionApp::ProductionRunFactory
include ProductionApp::ResourceFactory
include ProductionApp::ProductSetupFactory
include MasterfilesApp::FarmFactory
include MasterfilesApp::FruitFactory
include MasterfilesApp::PartyFactory
include MasterfilesApp::CalendarFactory
include MasterfilesApp::CommodityFactory
include MasterfilesApp::CultivarFactory
include MasterfilesApp::TargetMarketFactory
include MasterfilesApp::GeneralFactory
include MasterfilesApp::MarketingFactory
include MasterfilesApp::PackagingFactory
include MasterfilesApp::HRFactory
include MasterfilesApp::LocationFactory
include MasterfilesApp::DepotFactory
include MasterfilesApp::VesselFactory
include MasterfilesApp::PortFactory
include MasterfilesApp::QualityFactory
include MasterfilesApp::RmtContainerFactory
include RawMaterialsApp::RmtBinFactory
include RawMaterialsApp::RmtDeliveryFactory
include FinishedGoodsApp::LoadFactory
include FinishedGoodsApp::VoyageFactory
def test_carton_verification_scan_carton_label
carton_label_id = create_carton_label
start_pallets = DB[:pallets].count
scanned_number = carton_label_id
res = MesscadaApp::CartonVerification.call(current_user, scanned_number)
assert res.success, 'Should be able to verify carton'
res = MesscadaApp::CartonVerification.call(current_user, scanned_number)
assert res.success, 'Revalidation should return success'
end_pallets = DB[:pallets].count
assert_equal start_pallets, end_pallets, "Carton verification should not create a pallet: was #{start_pallets}, now: #{end_pallets}"
end
def test_carton_verification_scan_carton_label_and_create_pallet
carton_label_id = create_carton_label(carton_equals_pallet: true)
start_pallets = DB[:pallets].count
scanned_number = carton_label_id
res = MesscadaApp::CartonVerification.call(current_user, scanned_number)
assert res.success, 'Should be able to verify carton'
end_pallets = DB[:pallets].count
assert_equal start_pallets + 1, end_pallets, "Carton verification should create one pallet: was #{start_pallets}, now: #{end_pallets}"
end
def test_carton_verification_scan_pallet
pallet_id = create_pallet
pallet_number = DB[:pallets].where(id: pallet_id).get(:pallet_number)
create_carton_label(pallet_number: pallet_number)
scanned_number = pallet_number
res = MesscadaApp::CartonVerification.call(current_user, scanned_number)
assert res.success, 'Should be able to verify pallet'
res = MesscadaApp::CartonVerification.call(current_user, scanned_number)
assert res.success, 'Revalidation should return success'
end
def test_carton_verification_scan_pallet_fail
pallet_id = create_pallet
pallet_number = DB[:pallets].where(id: pallet_id).get(:pallet_number)
scanned_number = pallet_number
res = MesscadaApp::CartonVerification.call(current_user, scanned_number)
refute res.success, 'Should not be able to verify pallet, pallet number not on carton label'
end
end
end
| 40.046512 | 140 | 0.769454 |
391438327a3dd1004ddbc072d298961ec3abb21c | 570 | require 'rails_helper'
RSpec.describe Value, type: :model do
let!(:plan) {FactoryBot.create(:plan)}
context "Validation tests" do
it "Requires plan_id" do
value_test = Value.new(name: "ReallyNotNormalName").save
expect(value_test).to eq(false)
end
it "Requires name" do
value_test = Value.new(plan_id: plan.id).save
expect(value_test).to eq(false)
end
it "Should save successfully" do
value_test = Value.new(name: "ReallyNormalName", plan_id: plan.id).save
expect(value_test).to eq(true)
end
end
end
| 24.782609 | 77 | 0.673684 |
bb415e2f620cca65a74a8d83cb34fadf7be677d5 | 723 | Pod::Spec.new do |s|
s.name = "Nimble"
s.version = "0.2.0"
s.summary = "A Matcher Framework for Swift and Objective-C"
s.description = <<-DESC
Use Nimble to express the expected outcomes of Swift or Objective-C expressions. Inspired by Cedar.
DESC
s.homepage = "https://github.com/Quick/Nimble"
s.license = { :type => "Apache 2.0", :file => "LICENSE.md" }
s.author = "Quick Contributors"
s.ios.deployment_target = "8.0"
s.osx.deployment_target = "10.10"
s.source = { :git => "https://github.com/Quick/Nimble.git", :tag => "v0.2.0" }
s.source_files = "Nimble", "Nimble/**/*.{swift,h,m}"
s.framework = "XCTest"
end
| 40.166667 | 118 | 0.578147 |
acf0d59c6ab7d84549dbf1e7adf4b828659090bb | 264 | #
# Load ECS assets into RedisGraph
#
# Each method returns an array of Cypher queries
#
class AWSLoader::ECS < GraphDbLoader
def cluster
node = 'AWS_ECS_CLUSTER'
q = []
# cluster node
q.push(_upsert({ node: node, id: @name }))
q
end
end
| 15.529412 | 48 | 0.643939 |
1c5a5fb7f6db635174eeda475f73ace630cdd843 | 1,669 | require "language/node"
class GenerateJsonSchema < Formula
desc "Generate a JSON Schema from Sample JSON"
homepage "https://github.com/Nijikokun/generate-schema"
url "https://registry.npmjs.org/generate-schema/-/generate-schema-2.6.0.tgz"
sha256 "1ddbf91aab2d649108308d1de7af782d9270a086919edb706f48d0216d51374a"
head "https://github.com/Nijikokun/generate-schema.git"
bottle do
cellar :any_skip_relocation
sha256 "4d5a50f712bb6714564574d20cbd771e62ad1da6dcd58d9b7225822af0821d73" => :catalina
sha256 "e049d098796be43aa340eca884fa71ec90f4fbeda02031142f66752df005de97" => :mojave
sha256 "3461301c038b8bb6e15b8e183661976e95ea7b7e0659d57f0f21ea2c0eb4e67c" => :high_sierra
sha256 "a6ff075810774d44030a59a12032d302c64834d03c7aabeb32efb8dc86d276de" => :sierra
sha256 "5a5b34d8e233d9b75648c39f8edada5077c8f6c6466bd3358f3f661062ccbe83" => :el_capitan
sha256 "603a75d4ae2557499f867cfbb968bbc01006e65fc9e361fac84632d9558f6151" => :x86_64_linux
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
(testpath/"test.json").write <<~EOS
{
"id": 2,
"name": "An ice sculpture",
"price": 12.50,
"tags": ["cold", "ice"],
"dimensions": {
"length": 7.0,
"width": 12.0,
"height": 9.5
},
"warehouseLocation": {
"latitude": -78.75,
"longitude": 20.4
}
}
EOS
assert_match "schema.org", shell_output("#{bin}/generate-schema test.json", 1)
end
end
| 34.770833 | 94 | 0.682445 |
18a3bd320b59ab76df4798cd638f5f1d439610d4 | 494 | class P4merge < Cask
url 'http://filehost.perforce.com/perforce/r13.4/bin.macosx106x86_64/P4V.dmg'
homepage 'http://www.perforce.com/product/components/perforce-visual-merge-and-diff-tools'
version '2013.4'
sha256 '0e1d193a353226b5452ec4a309bccf222f8fc973661a1e575a8b7f2359db123c'
link 'p4merge.app'
caveats <<-EOS.undent
You can set up git to use p4merge as a merge tool by following the instructions available here:
https://gist.github.com/henrik242/1510148
EOS
end
| 41.166667 | 99 | 0.771255 |
bb9b32ba324159f8ee398119d21382b0d1192ece | 702 | class GitCredentialManager < Formula
desc "Stores credentials for Git on Visual Studio Online (VSO)"
homepage "https://java.visualstudio.com/Docs/tools/intro"
url "https://github.com/Microsoft/Git-Credential-Manager-for-Mac-and-Linux/releases/download/git-credential-manager-1.2.0/git-credential-manager-1.2.0.jar"
sha256 "537c066469f3a232818cd876c9787ecf323e8e7b0cfb1bff4028fbb2315e07fc"
bottle :unneeded
depends_on :java => "1.7+"
def install
libexec.install "git-credential-manager-#{version}.jar"
bin.write_jar_script libexec/"git-credential-manager-#{version}.jar", "git-credential-manager"
end
test do
system "#{bin}/git-credential-manager", "version"
end
end
| 35.1 | 157 | 0.760684 |
61c387a2d3f9640a80395d90e371644a9c73cdab | 313 | class CreateTwitterAccounts < ActiveRecord::Migration
def change
create_table :twitter_accounts do |t|
t.string :name_en
t.string :consumer_key
t.string :consumer_secret
t.string :access_token_key
t.string :access_token_secret
t.timestamps null: false
end
end
end
| 22.357143 | 53 | 0.702875 |
1d5527f8b579d1aa812a875ebbdd282da3b4e661 | 42 | require 'hemify/rails'
module Hemify
end
| 8.4 | 22 | 0.785714 |
016691f54925ac4780de138a8d9c1de8cb02566e | 1,051 | class Libmpdclient < Formula
desc "Library for MPD in the C, C++, and Objective-C languages"
homepage "https://www.musicpd.org/libs/libmpdclient/"
url "https://www.musicpd.org/download/libmpdclient/2/libmpdclient-2.14.tar.xz"
sha256 "0a84e2791bfe3077cf22ee1784c805d5bb550803dffe56a39aa3690a38061372"
revision 1
head "https://github.com/MusicPlayerDaemon/libmpdclient.git"
bottle do
cellar :any
sha256 "c3d3c81a4885afef4b0fa613859bbaadf14e755921637c27095192bceba7b57d" => :mojave
sha256 "a4b68fd2d553aad650263ddf646fc8a402df86f1341f4febee85a69e46916a2f" => :high_sierra
sha256 "4e1b4802e6fa4e958d78c03d3cc14f33fece909975a9c40fa83946c5fd2a30b1" => :sierra
sha256 "573291d299ec6ee87a40cd79374cd7697e784230774852cf2bab9e20fcc83b54" => :el_capitan
end
depends_on "doxygen" => :build
depends_on "meson-internal" => :build
depends_on "ninja" => :build
def install
system "meson", "--prefix=#{prefix}", ".", "output"
system "ninja", "-C", "output"
system "ninja", "-C", "output", "install"
end
end
| 38.925926 | 93 | 0.755471 |
e86c4c0f8bae2b2e1cb3f2b764db615f580647cb | 160 | if Assignment.table_exists?
Assignment.all.each do |ass|
if ass.status == 'open' and ass.deadline < Time.now
ass.set_deadline_job
end
end
end
| 20 | 55 | 0.6875 |
ab4d124dac2bf3ef1c4d4edc83e7bc3f46863851 | 949 | # frozen_string_literal: true
require 'geocoder/lookups/base'
require 'geocoder/results/abstract_api'
module Geocoder
module Lookup
class AbstractApi < Base
def name
'Abstract API'
end
def required_api_key_parts
['api_key']
end
def supported_protocols
[:https]
end
private # ---------------------------------------------------------------
def base_query_url(_query)
"#{protocol}://ipgeolocation.abstractapi.com/v1/?"
end
def query_url_params(query)
params = { api_key: configuration.api_key }
ip_address = query.sanitized_text
params[:ip_address] = ip_address if ip_address.is_a?(String) && ip_address.length.positive?
params.merge(super)
end
def results(query, _reverse = false)
if doc = fetch_data(query)
[doc]
else
[]
end
end
end
end
end
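# Illustrative usage sketch (not part of the lookup itself): the :abstract_api
# symbol is assumed to be derived from this class name by Geocoder's lookup
# registry, and the API key below is a placeholder.
#
#   Geocoder.configure(ip_lookup: :abstract_api, api_key: ENV["ABSTRACT_API_KEY"])
#   Geocoder.search("203.0.113.10").first # wraps the single doc returned by #results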
| 20.630435 | 99 | 0.562698 |
7ad867f944c6c4016c5e310ed4f6487a65ff4d08 | 1,119 | module Cryptoexchange::Exchanges
module Coingi
module Services
class Trades < Cryptoexchange::Services::Market
def fetch(market_pair)
output = super(ticker_url(market_pair))
adapt(output, market_pair)
end
def ticker_url(market_pair)
base = market_pair.base.downcase
target = market_pair.target.downcase
"#{Cryptoexchange::Exchanges::Coingi::Market::API_URL}/current/transactions/#{base}-#{target}/64"
end
def adapt(output, market_pair)
output.collect do |trade|
tr = Cryptoexchange::Models::Trade.new
tr.base = market_pair.base
tr.target = market_pair.target
tr.trade_id = trade['id']
tr.type = trade['type'] == 1 ? 'sell' : 'buy'
tr.price = trade['price']
tr.amount = trade['amount']
tr.timestamp = trade['timestamp'] / 1000
tr.payload = trade
tr.market = Coingi::Market::NAME
tr
end
end
end
end
end
end
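# Hedged usage sketch following the cryptoexchange gem's usual service pattern;
# the btc/usd pair is an assumption and may not actually be listed by this exchange.
#
#   pair = Cryptoexchange::Models::MarketPair.new(base: 'btc', target: 'usd', market: 'coingi')
#   trades = Cryptoexchange::Exchanges::Coingi::Services::Trades.new.fetch(pair)
#   trades.first.price # each element is the Cryptoexchange::Models::Trade built in #adapt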
| 31.971429 | 107 | 0.546917 |
f7e0007873f8d2a8c655813abed7aab454641721 | 2,266 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
describe "Kernel#p" do
before :all do
@rs_f, @rs_b, @rs_c = $/, $\, $,
end
after :each do
$/, $\, $, = @rs_f, @rs_b, @rs_c
end
it "is a private method" do
Kernel.should have_private_instance_method(:p)
end
# TODO: fix
it "flushes output if receiver is a File" do
filename = tmp("Kernel_p_flush") + $$.to_s
begin
File.open(filename, "w") do |f|
begin
old_stdout = $stdout
$stdout = f
p("abcde")
ensure
$stdout = old_stdout
end
File.open(filename) do |f2|
f2.read(7).should == "\"abcde\""
end
end
ensure
rm_r filename
end
end
it "prints obj.inspect followed by system record separator for each argument given" do
o = mock("Inspector Gadget")
o.should_receive(:inspect).any_number_of_times.and_return "Next time, Gadget, NEXT TIME!"
lambda { p(o) }.should output("Next time, Gadget, NEXT TIME!\n")
lambda { p(*[o]) }.should output("Next time, Gadget, NEXT TIME!\n")
lambda { p(*[o, o]) }.should output("Next time, Gadget, NEXT TIME!\nNext time, Gadget, NEXT TIME!\n")
lambda { p([o])}.should output("[#{o.inspect}]\n")
end
it "is not affected by setting $\\, $/ or $," do
o = mock("Inspector Gadget")
o.should_receive(:inspect).any_number_of_times.and_return "Next time, Gadget, NEXT TIME!"
$, = " *helicopter sound*\n"
lambda { p(o) }.should output_to_fd("Next time, Gadget, NEXT TIME!\n")
$\ = " *helicopter sound*\n"
lambda { p(o) }.should output_to_fd("Next time, Gadget, NEXT TIME!\n")
$/ = " *helicopter sound*\n"
lambda { p(o) }.should output_to_fd("Next time, Gadget, NEXT TIME!\n")
end
it "prints nothing if no argument is given" do
lambda { p }.should output("")
end
it "prints nothing if called splatting an empty Array" do
lambda { p(*[]) }.should output("")
end
=begin Not sure how to spec this, but wanted to note the behavior here
it "does not flush if receiver is not a TTY or a File" do
end
=end
end
describe "Kernel.p" do
it "needs to be reviewed for spec completeness"
end
| 28.325 | 105 | 0.617387 |
b9e7894e5c98f3d6e00fd38f41dbf9df7be8e5a7 | 721 | module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
class DateTime < Type::DateTime # :nodoc:
include Infinity
def cast_value(value)
if value.is_a?(::String)
case value
when 'infinity' then ::Float::INFINITY
when '-infinity' then -::Float::INFINITY
when / BC$/
astronomical_year = format("%04d", -value[/^\d+/].to_i + 1)
super(value.sub(/ BC$/, "").sub(/^\d+/, astronomical_year))
else
super
end
else
value
end
end
end
end
end
end
end
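# Rough illustration of the casts above. Building the type directly is an
# assumption made for the example; inside Rails the PostgreSQL adapter registers it.
#
#   type = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::DateTime.new
#   type.cast("infinity")  # => Float::INFINITY
#   type.cast("-infinity") # => -Float::INFINITY
#   # "... BC" timestamps are rewritten to astronomical years before normal parsing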
| 25.75 | 75 | 0.471567 |
bbae384d7a117bdc3c5e7095ffcdc4c5d9a093e5 | 549 | shared_examples_for 'will be skipped for this record' do |message|
it message.to_s do
view_context = setup_view_context_with_sandbox({})
button = described_class.new(view_context, {}, {'record' => @record}, {})
expect(button.visible?).to be_falsey
end
end
shared_examples_for 'will not be skipped for this record' do |message|
it message.to_s do
view_context = setup_view_context_with_sandbox({})
button = described_class.new(view_context, {}, {'record' => @record}, {})
expect(button.visible?).to be_truthy
end
end
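# Hedged usage sketch: the button class, factory call, and message text below are
# assumptions; the shared examples above are pulled into a spec like this:
#
#   describe SomeActionButton do
#     before { @record = FactoryBot.create(:record, archived: true) } # hypothetical setup
#     it_behaves_like 'will be skipped for this record', 'when the record is archived'
#   end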
| 34.3125 | 77 | 0.71949 |
87eae6fbd5224d1b8df4020c951b4a5d506588cc | 8,862 | describe "ls" do
describe "bug #1407" do
it "behaves as usual when a method of the same name exists." do
expect(
pry_eval("def ls; 5; end", "ls")
).to match(/self\.methods: /)
pry_eval("undef ls")
end
end
describe "below ceiling" do
it "should stop before Object by default" do
expect(pry_eval("cd Class.new{ def goo; end }.new", "ls")).not_to match(/Object/)
expect(pry_eval("cd Class.new{ def goo; end }", "ls -M")).not_to match(/Object/)
end
it "should include object if -v is given" do
expect(pry_eval("cd Class.new{ def goo; end }.new", "ls -m -v")).to match(/Object/)
expect(pry_eval("cd Class.new{ def goo; end }", "ls -vM")).to match(/Object/)
end
it "should include super-classes by default" do
expect(pry_eval(
"cd Class.new(Class.new{ def goo; end; public :goo }).new",
"ls")).to match(/goo/)
expect(pry_eval(
"cd Class.new(Class.new{ def goo; end; public :goo })",
"ls -M")).to match(/goo/)
end
it "should not include super-classes when -q is given" do
expect(pry_eval("cd Class.new(Class.new{ def goo; end }).new", "ls -q")).not_to match(/goo/)
expect(pry_eval("cd Class.new(Class.new{ def goo; end })", "ls -M -q")).not_to match(/goo/)
end
end
describe "help" do
it 'should show help with -h' do
expect(pry_eval("ls -h")).to match(/Usage: ls/)
end
end
describe "BasicObject" do
it "should work on BasicObject" do
expect(pry_eval("ls BasicObject.new")).to match(/BasicObject#methods:.*__send__/m)
end
it "should work on subclasses of BasicObject" do
expect(pry_eval(
"class LessBasic < BasicObject; def jaroussky; 5; end; end",
"ls LessBasic.new"
)).to match(/LessBasic#methods:.*jaroussky/m)
end
end
describe "immediates" do
# Ruby 2.4+
if 5.class.name == 'Integer'
it "should work on Integer" do
expect(pry_eval("ls 5")).to match(/Integer#methods:.*modulo/m)
end
else
it "should work on Fixnum" do
expect(pry_eval("ls 5")).to match(/Fixnum#methods:.*modulo/m)
end
end
end
describe "methods" do
it "should show public methods by default" do
output = pry_eval("ls Class.new{ def goo; end; public :goo }.new")
expect(output).to match(/methods: goo/)
end
it "should not show protected/private by default" do
expect(pry_eval("ls -M Class.new{ def goo; end; private :goo }")).not_to match(/goo/)
expect(pry_eval("ls Class.new{ def goo; end; protected :goo }.new")).not_to match(/goo/)
end
it "should show public methods with -p" do
expect(pry_eval("ls -p Class.new{ def goo; end }.new")).to match(/methods: goo/)
end
it "should show protected/private methods with -p" do
expect(pry_eval("ls -pM Class.new{ def goo; end; protected :goo }")).to match(/methods: goo/)
expect(pry_eval("ls -p Class.new{ def goo; end; private :goo }.new")).to match(/methods: goo/)
end
it "should work for objects with an overridden method method" do
require 'net/http'
# This doesn't actually touch the network, promise!
expect(pry_eval("ls Net::HTTP::Get.new('localhost')")).to match(/Net::HTTPGenericRequest#methods/)
end
it "should work for objects which instance_variables returns array of symbol but there is no Symbol#downcase" do
test_case = "class Object; alias :fg :instance_variables; def instance_variables; fg.map(&:to_sym); end end;"
normalize = "class Object; def instance_variables; fg; end end;"
test = lambda do
begin
pry_eval(test_case, "class GeFromulate2; @flurb=1.3; end", "cd GeFromulate2", "ls")
pry_eval(normalize)
rescue
pry_eval(normalize)
raise
end
end
expect(test).to_not raise_error
end
it "should show error message when instance is given with -M option" do
expect { pry_eval("ls -M String.new") }.to raise_error(Pry::CommandError, /-M only makes sense with a Module or a Class/)
end
it "should handle classes that (pathologically) define .ancestors" do
output = pry_eval("ls Class.new{ def self.ancestors; end; def hihi; end }")
expect(output).to match(/hihi/)
end
end
describe 'with -l' do
    it 'should find locals and sort by descending size' do
result = pry_eval(Object.new, "aa = 'asdf'; bb = 'xyz'", 'ls -l')
expect(result).not_to match(/=>/)
expect(result).not_to match(/0x\d{5}/)
expect(result).to match(/asdf.*xyz/m)
end
it 'should not list pry noise' do
expect(pry_eval('ls -l')).not_to match(/_(?:dir|file|ex|pry|out|in)_/)
end
end
describe "when inside Modules" do
it "should still work" do
expect(pry_eval(
"cd Module.new{ def foobie; end; public :foobie }",
"ls -M")).to match(/foobie/)
end
it "should work for ivars" do
expect(pry_eval(
"module StigmaT1sm; def foobie; @@gharble = 456; end; end",
"Object.new.tap{ |o| o.extend(StigmaT1sm) }.foobie",
"cd StigmaT1sm",
"ls -i")).to match(/@@gharble/)
end
it "should include instance methods by default" do
output = pry_eval(
"ls Module.new{ def shinanagarns; 4; end; public :shinanagarns }")
expect(output).to match(/shinanagarns/)
end
it "should behave normally when invoked on Module itself" do
expect(pry_eval("ls Module")).not_to match(/Pry/)
end
end
describe "constants" do
it "works on top-level" do
toplevel_consts = pry_eval('ls -c')
[/RUBY_PLATFORM/, /ARGF/, /STDOUT/].each do |const|
expect(toplevel_consts).to match(const)
end
end
it "should show constants defined on the current module" do
expect(pry_eval("class TempFoo1; BARGHL = 1; end", "ls TempFoo1")).to match(/BARGHL/)
end
it "should not show constants defined on parent modules by default" do
expect(pry_eval("class TempFoo2; LHGRAB = 1; end; class TempFoo3 < TempFoo2; BARGHL = 1; end", "ls TempFoo3")).not_to match(/LHGRAB/)
end
it "should show constants defined on ancestors with -v" do
expect(pry_eval("class TempFoo4; LHGRAB = 1; end; class TempFoo5 < TempFoo4; BARGHL = 1; end", "ls -v TempFoo5")).to match(/LHGRAB/)
end
it "should not autoload constants!" do
autoload :McflurgleTheThird, "/tmp/this-file-d000esnat-exist.rb"
expect { pry_eval("ls -c") }.to_not raise_error
end
it "should show constants for an object's class regardless of mixins" do
expect(pry_eval(
"cd Pry.new",
"extend Module.new",
"ls -c"
)).to match(/Method/)
end
end
describe "grep" do
it "should reduce the number of outputted things" do
expect(pry_eval("ls -c Object")).to match(/ArgumentError/)
expect(pry_eval("ls -c Object --grep Run")).not_to match(/ArgumentError/)
end
it "should still output matching things" do
expect(pry_eval("ls -c Object --grep Run")).to match(/RuntimeError/)
end
end
describe "when no arguments given" do
describe "when at the top-level" do
it "should show local variables" do
expect(pry_eval("ls")).to match(/_pry_/)
expect(pry_eval("arbitrar = 1", "ls")).to match(/arbitrar/)
end
end
describe "when in a class" do
it "should show constants" do
expect(pry_eval("class GeFromulate1; FOOTIFICATE=1.3; end", "cd GeFromulate1", "ls")).to match(/FOOTIFICATE/)
end
it "should show class variables" do
expect(pry_eval("class GeFromulate2; @@flurb=1.3; end", "cd GeFromulate2", "ls")).to match(/@@flurb/)
end
it "should show methods" do
expect(pry_eval("class GeFromulate3; def self.mooflight; end ; end", "cd GeFromulate3", "ls")).to match(/mooflight/)
end
end
describe "when in an object" do
it "should show methods" do
expect(pry_eval("cd Class.new{ def self.fooerise; end; self }", "ls")).to match(/fooerise/)
end
it "should show instance variables" do
expect(pry_eval("cd Class.new", "@alphooent = 1", "ls")).to match(/@alphooent/)
end
end
end
describe 'on java objects', skip: !Pry::Helpers::Platform.jruby? do
it 'should omit java-esque aliases by default' do
expect(pry_eval('ls java.lang.Thread.current_thread')).to match(/\bthread_group\b/)
expect(pry_eval('ls java.lang.Thread.current_thread')).not_to match(/\bgetThreadGroup\b/)
end
it 'should include java-esque aliases if requested' do
expect(pry_eval('ls java.lang.Thread.current_thread -J')).to match(/\bthread_group\b/)
expect(pry_eval('ls java.lang.Thread.current_thread -J')).to match(/\bgetThreadGroup\b/)
end
end
end
| 35.306773 | 139 | 0.633604 |
f74fb12e428fa85011b5e52823dff35ccb736705 | 5,855 | # frozen_string_literal: true
require 'rubygems/command'
require 'rubygems/remote_fetcher'
require 'rubygems/spec_fetcher'
require 'rubygems/local_remote_options'
class Gem::Commands::SourcesCommand < Gem::Command
include Gem::LocalRemoteOptions
def initialize
require 'fileutils'
super 'sources',
'Manage the sources and cache file RubyGems uses to search for gems'
add_option '-a', '--add SOURCE_URI', 'Add source' do |value, options|
options[:add] = value
end
add_option '-l', '--list', 'List sources' do |value, options|
options[:list] = value
end
add_option '-r', '--remove SOURCE_URI', 'Remove source' do |value, options|
options[:remove] = value
end
add_option '-c', '--clear-all',
'Remove all sources (clear the cache)' do |value, options|
options[:clear_all] = value
end
add_option '-u', '--update', 'Update source cache' do |value, options|
options[:update] = value
end
add_option '-f', '--[no-]force', "Do not show any confirmation prompts and behave as if 'yes' was always answered" do |value, options|
options[:force] = value
end
add_proxy_option
end
def add_source(source_uri) # :nodoc:
check_rubygems_https source_uri
source = Gem::Source.new source_uri
check_typo_squatting(source)
begin
if Gem.sources.include? source
say "source #{source_uri} already present in the cache"
else
source.load_specs :released
Gem.sources << source
Gem.configuration.write
say "#{source_uri} added to sources"
end
rescue URI::Error, ArgumentError
say "#{source_uri} is not a URI"
terminate_interaction 1
rescue Gem::RemoteFetcher::FetchError => e
say "Error fetching #{source_uri}:\n\t#{e.message}"
terminate_interaction 1
end
end
def check_typo_squatting(source)
if source.typo_squatting?("rubygems.org")
question = <<-QUESTION.chomp
#{source.uri.to_s} is too similar to https://rubygems.org
Do you want to add this source?
QUESTION
terminate_interaction 1 unless options[:force] || ask_yes_no(question)
end
end
def check_rubygems_https(source_uri) # :nodoc:
uri = URI source_uri
if uri.scheme and uri.scheme.downcase == 'http' and
uri.host.downcase == 'rubygems.org'
question = <<-QUESTION.chomp
https://rubygems.org is recommended for security over #{uri}
Do you want to add this insecure source?
QUESTION
terminate_interaction 1 unless options[:force] || ask_yes_no(question)
end
end
def clear_all # :nodoc:
path = Gem.spec_cache_dir
FileUtils.rm_rf path
unless File.exist? path
say "*** Removed specs cache ***"
else
unless File.writable? path
say "*** Unable to remove source cache (write protected) ***"
else
say "*** Unable to remove source cache ***"
end
terminate_interaction 1
end
end
def defaults_str # :nodoc:
'--list'
end
def description # :nodoc:
<<-EOF
RubyGems fetches gems from the sources you have configured (stored in your
~/.gemrc).
The default source is https://rubygems.org, but you may have other sources
configured. This guide will help you update your sources or configure
yourself to use your own gem server.
Without any arguments the sources lists your currently configured sources:
$ gem sources
*** CURRENT SOURCES ***
https://rubygems.org
This may list multiple sources or non-rubygems sources. You probably
configured them before or have an old `~/.gemrc`. If you have sources you
do not recognize you should remove them.
RubyGems has been configured to serve gems via the following URLs through
its history:
* http://gems.rubyforge.org (RubyGems 1.3.6 and earlier)
* https://rubygems.org/ (RubyGems 1.3.7 through 1.8.25)
* https://rubygems.org (RubyGems 2.0.1 and newer)
Since all of these sources point to the same set of gems you only need one
of them in your list. https://rubygems.org is recommended as it brings the
protections of an SSL connection to gem downloads.
To add a source use the --add argument:
$ gem sources --add https://rubygems.org
https://rubygems.org added to sources
RubyGems will check to see if gems can be installed from the source given
before it is added.
To remove a source use the --remove argument:
$ gem sources --remove https://rubygems.org/
https://rubygems.org/ removed from sources
EOF
end
def list # :nodoc:
say "*** CURRENT SOURCES ***"
say
Gem.sources.each do |src|
say src
end
end
def list? # :nodoc:
!(options[:add] ||
options[:clear_all] ||
options[:remove] ||
options[:update])
end
def execute
clear_all if options[:clear_all]
source_uri = options[:add]
add_source source_uri if source_uri
source_uri = options[:remove]
remove_source source_uri if source_uri
update if options[:update]
list if list?
end
def remove_source(source_uri) # :nodoc:
unless Gem.sources.include? source_uri
say "source #{source_uri} not present in cache"
else
Gem.sources.delete source_uri
Gem.configuration.write
say "#{source_uri} removed from sources"
end
end
def update # :nodoc:
Gem.sources.each_source do |src|
src.load_specs :released
src.load_specs :latest
end
say "source cache successfully updated"
end
def remove_cache_file(desc, path) # :nodoc:
FileUtils.rm_rf path
if not File.exist?(path)
say "*** Removed #{desc} source cache ***"
elsif not File.writable?(path)
say "*** Unable to remove #{desc} source cache (write protected) ***"
else
say "*** Unable to remove #{desc} source cache ***"
end
end
end
| 25.792952 | 138 | 0.67105 |
18af154c6b8660cbbd81b9485dc9527bf4141627 | 3,145 | require File.dirname(__FILE__) + "/spec_helper"
require 'httperf_result_parser'
require 'httperf_result'
SAMPLE_HTTPERF_OUTPUT = <<-EOS
Maximum connect burst length: 1
Total: connections 100 requests 99 replies 98 test-duration 10.082 s
Connection rate: 9.9 conn/s (100.8 ms/conn, <=3 concurrent connections)
Connection time [ms]: min 173.3 avg 180.0 max 205.1 median 178.5 stddev 5.6
Connection time [ms]: connect 89.8
Connection length [replies/conn]: 1.000
Request rate: 9.9 req/s (100.8 ms/req)
Request size [B]: 68.0
Reply rate [replies/s]: min 9.8 avg 9.9 max 10.0 stddev 0.1 (2 samples)
Reply time [ms]: response 90.1 transfer 0.0
Reply size [B]: header 287.0 content 438.0 footer 0.0 (total 725.0)
Reply status: 1xx=0 2xx=100 3xx=0 4xx=0 5xx=1
CPU time [s]: user 1.57 system 8.50 (user 15.6% system 84.3% total 99.8%)
Net I/O: 7.7 KB/s (0.1*10^6 bps)
Errors: total 0 client-timo 0 socket-timo 0 connrefused 0 connreset 0
Errors: fd-unavail 0 addrunavail 0 ftab-full 0 other 0
EOS
describe HanselCore::HttperfResultParser, "#score" do
before(:each) do
@parser = HanselCore::HttperfResultParser.new SAMPLE_HTTPERF_OUTPUT
end
describe "when calling parse" do
before(:each) do
@httperf_result = HanselCore::HttperfResult.new(
:rate => 10,
:server => 'www.example.com',
:port => 80,
:uri => '/',
:num_conns => 100
)
@parser.parse @httperf_result
end
it "should set the passed HttperfResult object" do
@httperf_result.class.name =~ /HttperfResult/
end
describe "the HttperfResult object should initialize the" do
it "rate to 10" do
@httperf_result.rate.should == 10
end
it "server to 'www.example.com'" do
@httperf_result.server.should == 'www.example.com'
end
it "port to 80" do
@httperf_result.port.should == 80
end
it "uri to '/'" do
@httperf_result.uri.should == '/'
end
it "num_conns to 100" do
@httperf_result.num_conns.should == 100
end
it "replies to 98" do
@httperf_result.replies.should == 98
end
it "connection_rate to 9.9" do
@httperf_result.connection_rate.should == 9.9
end
it "request_rate should to 9.9" do
@httperf_result.request_rate.should == 9.9
end
it "reply_time to 90.1" do
@httperf_result.reply_time.should == 90.1
end
it "net_io to 7.7" do
@httperf_result.net_io.should == 7.7
end
it "errors to 0" do
@httperf_result.errors.should == 0
end
it "status to 1" do
@httperf_result.status.should == 1
end
it "reply_rate_min to 9.8" do
@httperf_result.reply_rate_min.should == 9.8
end
it "the reply_rate_avg to 9.9" do
@httperf_result.reply_rate_avg.should == 9.9
end
it "the reply_rate_max to 10.0" do
@httperf_result.reply_rate_max.should == 10.0
end
it "the reply_rate_stddev to 0.1" do
@httperf_result.reply_rate_stddev.should == 0.1
end
end
end
end
| 26.428571 | 75 | 0.63434 |
2141171edecc8bbdec7c666e09f6af5b0cc2627a | 105 | class AddScoresValueIndex < ActiveRecord::Migration
def change
add_index :scores, :value
end
end
| 17.5 | 51 | 0.761905 |
d56c0a664bfd28068f040426fdcba8e8c563cac6 | 9,551 | require_relative '../../spec_helper'
require 'bigdecimal'
describe "BigDecimal" do
it "is not defined unless it is required" do
ruby_exe('puts Object.const_defined?(:BigDecimal)').should == "false\n"
end
end
describe "Kernel#BigDecimal" do
it "creates a new object of class BigDecimal" do
BigDecimal("3.14159").should be_kind_of(BigDecimal)
(0..9).each {|i|
BigDecimal("1#{i}").should == 10 + i
BigDecimal("-1#{i}").should == -10 - i
BigDecimal("1E#{i}").should == 10**i
BigDecimal("1000000E-#{i}").should == 10**(6-i).to_f
# ^ to_f to avoid Rational type
}
(1..9).each {|i|
BigDecimal("100.#{i}").to_s.should =~ /\A0\.100#{i}E3\z/i
BigDecimal("-100.#{i}").to_s.should =~ /\A-0\.100#{i}E3\z/i
}
end
it "BigDecimal(Rational) with bigger-than-double numerator" do
rational = 99999999999999999999/100r
rational.numerator.should > 2**64
BigDecimal(rational, 100).to_s.should == "0.99999999999999999999e18"
end
it "accepts significant digits >= given precision" do
suppress_warning do
BigDecimal("3.1415923", 10).precs[1].should >= 10
end
end
it "determines precision from initial value" do
pi_string = "3.14159265358979323846264338327950288419716939937510582097494459230781640628620899862803482534211706798214808651328230664709384460955058223172535940812848111745028410270193852110555964462294895493038196442881097566593014782083152134043"
suppress_warning {
BigDecimal(pi_string).precs[1]
}.should >= pi_string.size-1
end
it "ignores leading and trailing whitespace" do
BigDecimal(" \t\n \r1234\t\r\n ").should == BigDecimal("1234")
BigDecimal(" \t\n \rNaN \n").should.nan?
BigDecimal(" \t\n \rInfinity \n").infinite?.should == 1
BigDecimal(" \t\n \r-Infinity \n").infinite?.should == -1
end
it "coerces the value argument with #to_str" do
initial = mock("value")
initial.should_receive(:to_str).and_return("123")
BigDecimal(initial).should == BigDecimal("123")
end
ruby_version_is ""..."2.6" do
it "ignores trailing garbage" do
BigDecimal("123E45ruby").should == BigDecimal("123E45")
BigDecimal("123x45").should == BigDecimal("123")
BigDecimal("123.4%E5").should == BigDecimal("123.4")
BigDecimal("1E2E3E4E5E").should == BigDecimal("100")
end
end
ruby_version_is "2.6" do
it "does not ignores trailing garbage" do
-> { BigDecimal("123E45ruby") }.should raise_error(ArgumentError)
-> { BigDecimal("123x45") }.should raise_error(ArgumentError)
-> { BigDecimal("123.4%E5") }.should raise_error(ArgumentError)
-> { BigDecimal("1E2E3E4E5E") }.should raise_error(ArgumentError)
end
end
it "raises ArgumentError for invalid strings" do
-> { BigDecimal("ruby") }.should raise_error(ArgumentError)
-> { BigDecimal(" \t\n \r-\t\t\tInfinity \n") }.should raise_error(ArgumentError)
end
it "allows omitting the integer part" do
BigDecimal(".123").should == BigDecimal("0.123")
end
ruby_version_is ""..."2.6" do
it "allows for underscores in all parts" do
reference = BigDecimal("12345.67E89")
BigDecimal("12_345.67E89").should == reference
BigDecimal("1_2_3_4_5_._6____7_E89").should == reference
BigDecimal("12345_.67E_8__9_").should == reference
end
end
ruby_version_is "2.6" do
it "process underscores as Float()" do
reference = BigDecimal("12345.67E89")
BigDecimal("12_345.67E89").should == reference
-> { BigDecimal("1_2_3_4_5_._6____7_E89") }.should raise_error(ArgumentError)
-> { BigDecimal("12345_.67E_8__9_") }.should raise_error(ArgumentError)
end
end
it "accepts NaN and [+-]Infinity" do
BigDecimal("NaN").should.nan?
pos_inf = BigDecimal("Infinity")
pos_inf.should_not.finite?
pos_inf.should > 0
pos_inf.should == BigDecimal("+Infinity")
neg_inf = BigDecimal("-Infinity")
neg_inf.should_not.finite?
neg_inf.should < 0
end
ruby_version_is "2.6" do
describe "with exception: false" do
it "returns nil for invalid strings" do
BigDecimal("invalid", exception: false).should be_nil
BigDecimal("0invalid", exception: false).should be_nil
BigDecimal("invalid0", exception: false).should be_nil
BigDecimal("0.", exception: false).should be_nil
end
end
end
describe "accepts NaN and [+-]Infinity as Float values" do
it "works without an explicit precision" do
BigDecimal(Float::NAN).should.nan?
pos_inf = BigDecimal(Float::INFINITY)
pos_inf.should_not.finite?
pos_inf.should > 0
pos_inf.should == BigDecimal("+Infinity")
neg_inf = BigDecimal(-Float::INFINITY)
neg_inf.should_not.finite?
neg_inf.should < 0
end
it "works with an explicit precision" do
BigDecimal(Float::NAN, Float::DIG).should.nan?
pos_inf = BigDecimal(Float::INFINITY, Float::DIG)
pos_inf.should_not.finite?
pos_inf.should > 0
pos_inf.should == BigDecimal("+Infinity")
neg_inf = BigDecimal(-Float::INFINITY, Float::DIG)
neg_inf.should_not.finite?
neg_inf.should < 0
end
end
it "allows for [eEdD] as exponent separator" do
reference = BigDecimal("12345.67E89")
BigDecimal("12345.67e89").should == reference
BigDecimal("12345.67E89").should == reference
BigDecimal("12345.67d89").should == reference
BigDecimal("12345.67D89").should == reference
end
it "allows for varying signs" do
reference = BigDecimal("123.456E1")
BigDecimal("+123.456E1").should == reference
BigDecimal("-123.456E1").should == -reference
BigDecimal("123.456E+1").should == reference
BigDecimal("12345.6E-1").should == reference
BigDecimal("+123.456E+1").should == reference
BigDecimal("+12345.6E-1").should == reference
BigDecimal("-123.456E+1").should == -reference
BigDecimal("-12345.6E-1").should == -reference
end
it "raises ArgumentError when Float is used without precision" do
-> { BigDecimal(1.0) }.should raise_error(ArgumentError)
end
it "returns appropriate BigDecimal zero for signed zero" do
BigDecimal(-0.0, Float::DIG).sign.should == -1
BigDecimal(0.0, Float::DIG).sign.should == 1
end
it "pre-coerces long integers" do
BigDecimal(3).add(1 << 50, 3).should == BigDecimal('0.113e16')
end
it "does not call to_s when calling inspect" do
value = BigDecimal('44.44')
value.to_s.should == '0.4444e2'
value.inspect.should == '0.4444e2'
ruby_exe( <<-'EOF').should == "cheese 0.4444e2"
require 'bigdecimal'
module BigDecimalOverride
def to_s; "cheese"; end
end
BigDecimal.prepend BigDecimalOverride
value = BigDecimal('44.44')
print "#{value.to_s} #{value.inspect}"
EOF
end
describe "when interacting with Rational" do
before :each do
@a = BigDecimal('166.666666666')
@b = Rational(500, 3)
@c = @a - @b
end
# Check the input is as we understand it
it "has the LHS print as expected" do
@a.to_s.should == "0.166666666666e3"
@a.to_f.to_s.should == "166.666666666"
Float(@a).to_s.should == "166.666666666"
end
it "has the RHS print as expected" do
@b.to_s.should == "500/3"
@b.to_f.to_s.should == "166.66666666666666"
Float(@b).to_s.should == "166.66666666666666"
end
it "has the expected precision on the LHS" do
suppress_warning { @a.precs[0] }.should == 18
end
it "has the expected maximum precision on the LHS" do
suppress_warning { @a.precs[1] }.should == 27
end
it "produces the expected result when done via Float" do
(Float(@a) - Float(@b)).to_s.should == "-6.666596163995564e-10"
end
it "produces the expected result when done via to_f" do
(@a.to_f - @b.to_f).to_s.should == "-6.666596163995564e-10"
end
# Check underlying methods work as we understand
it "BigDecimal precision is the number of digits rounded up to a multiple of nine" do
1.upto(100) do |n|
b = BigDecimal('4' * n)
precs, _ = suppress_warning { b.precs }
(precs >= 9).should be_true
(precs >= n).should be_true
(precs % 9).should == 0
end
suppress_warning { BigDecimal('NaN').precs[0] }.should == 9
end
it "BigDecimal maximum precision is nine more than precision except for abnormals" do
1.upto(100) do |n|
b = BigDecimal('4' * n)
precs, max = suppress_warning { b.precs }
max.should == precs + 9
end
suppress_warning { BigDecimal('NaN').precs[1] }.should == 9
end
it "BigDecimal(Rational, 18) produces the result we expect" do
BigDecimal(@b, 18).to_s.should == "0.166666666666666667e3"
end
it "BigDecimal(Rational, BigDecimal.precs[0]) produces the result we expect" do
BigDecimal(@b, suppress_warning { @a.precs[0] }).to_s.should == "0.166666666666666667e3"
end
# Check the top-level expression works as we expect
it "produces a BigDecimal" do
@c.class.should == BigDecimal
end
it "produces the expected result" do
@c.should == BigDecimal("-0.666667e-9")
@c.to_s.should == "-0.666667e-9"
end
it "produces the correct class for other arithmetic operators" do
(@a + @b).class.should == BigDecimal
(@a * @b).class.should == BigDecimal
(@a / @b).class.should == BigDecimal
(@a % @b).class.should == BigDecimal
end
end
end
| 32.376271 | 253 | 0.653439 |
8788409ed86c9ed708169de8644d784e22f33204 | 9,133 | # RFM provides easy access to FileMaker Pro data. With it, Ruby scripts can
# perform finds, read records and fields, update data, and perform scripts using
# a simple ruby-like syntax.
#
# Author:: Geoff Coffey (mailto:[email protected])
# Copyright:: Copyright (c) 2007 Six Fried Rice, LLC and Mufaddal Khumri
# License:: See MIT-LICENSE for details
#
# RFM uses the FileMaker XML API, so it requires:
# - FileMaker Server 9.0 or later
# - or FileMaker Server Advanced 7.0 or later
#
# This documentation serves as a reference to the classes in the API. For more complete
# usage documentation, see the RFM home page at http://sixfriedrice.com/wp/products/rfm/
#
# = Quick Start
#
# Rfm is a Gem. As such, any ruby file that uses it, needs to have these two lines on top:
#
# require "rubygems"
# require "rfm"
#
# (If you don't have Rfm installed, use the +gem install rfm+ command to get it.)
#
# === Get a Server
#
# Everything in Rfm starts with the Server object. You create a Server object like this:
#
# myServer = Rfm::Server.new(
# :host => "yourhost",
# :account_name => "someone",
# :pasword => "secret"
# )
#
# The Server object supports many other options, which you'll find explained in its
# documentation.
#
# Note: The account name and password are optional. You can instead provide them on
# a per-database basis (using Database::account_name and Database::password). But
# it is convenient to do it here because you often have one set of credentials
# across all databases. Also, you must provide an account_name and password if you
# want to ask the server for a list of available databases.
#
# === Get a Database
#
# Once you have a Server object, you can use it to get a Database. For example, if your
# database is called "Customers", you get it like this:
#
# myDatabase = myServer["Customers"]
#
# If you need to supply account and password info specifically for this database
# (rather than doing it at the Server level), do this:
#
# myDatabase.account_name = "someone"
# myDatabase.password = "secret"
#
# *IMPORTANT NOTE:* The account name you use to access FileMaker must have the
# +fmxml+ extended privilege. In other words, edit its privilege set and turn on
# "Access via XML Web Publishing (fmxml)" in the Extended Privileges section
# at the bottom-left of the Edit Privilege Set window. If you don't do this,
# Rfm will report that it can't log in.
#
# === Get a Layout
#
# Every action you send to FileMaker always goes through a layout. This is how Rfm knows
# which table you want to work with, and which fields on that table you care about. This
# should feel pretty familiar now:
#
# myLayout = myDatabase["Details"]
#
# You might use layouts you already have, or make new layout just for Rfm. Just remember that
# if you delete a layout, or remove a field from a layout that your Rfm code uses, the
# code will stop working.
#
# === Putting it Together
#
# Usually you don't care much about the intermediate Database object (it's a gateway object,
# if you will). So it is often easiest to combine all the above steps like this:
#
# myLayout = myServer["Customers"]["Details"]
#
# === Performing Actions
#
# The Layout object can do a lot of things (see its documentation for a full list). But
# in general, it involves records. For instance, you can find records:
#
# result = myLayout.find({"First Name" => "Bill"})
#
# That code finds everybody whose first name is Bill. All the Layout methods return a
# ResultSet object. It contains the records, as well as metadata about the fields and
# portals on the layout. Usually you'll only concern yourself with the records (and you
# can read about the others in the ResultSet documentation).
#
# ResultSet is a subclass of Array, Ruby's built in array type. So you can treat it just
# like any other array:
#
# first_record = result[0]
# a_few_records = result[3,7]
# record_count = result.size
#
# But usually you'll want to loop through them all. Because this is an array, you can use
# code that is familiar to any Ruby whiz:
#
# result.each { |record|
# # do something with record here
# }
#
# === Working with Records
#
# The records in a ResultSet are actually Record objects. They hold the actual data from
# FileMaker. Record subclasses Hash, another built in Ruby type, so you can use them like
# this:
#
# full_name = record["First Name"] + ' ' + record["Last Name"]
# info.merge(record)
# record.each_value { |value| puts value }
# if record.value?("Bill") then puts "Bill is in there somewhere"
#
# The field name serves as the hash key, so these examples get fields called First Name and
# Last Name. (Note: Unlike a typical Ruby hash, Record objects are not case sensitive. You
# can say +record["first name"]+ or +record["FIRST NAME"]+ and it will still work.)
#
# A record object has the power to save changes to itself back to the database. For example:
#
# records.each { |record|
# record["First Name"] = record["First Name"].upcase
# record.save
# }
#
# That concise code converts the First Name field to all uppercase in every record in the
# ResultSet. Note that each time you call Record::save, if the record has been modified,
# Rfm has to send an action to FileMaker. A loop like the one above will be quite slow
# across many records. There is no fast way to update lots of records at once right now,
# although you might be able to accomplish it with a FileMaker script by passing a
# parameter.
#
# === Editing and Deleting Records
#
# Any time you edit or delete a record, you *must* provide the record's internal record
# id. This is not the value in any field. Rather, it is the ID FileMaker assigns to the
# record internally. So an edit or delete is almost always a two-step process:
#
# record = myLayout.find({"Customer ID" => "1234"})[0]
# myLayout.edit(record.record_id, {"First Name" => "Steve"})
#
# The code above first finds a Customer record. It then uses the Record::record_id method
# to discover that record's internal id. That id is passed to the Layout::edit method.
# The edit method also accepts a hash of record changes. In this case, we're changing
# the value in the First Name field to "Steve".
#
# Also, note the [0] on the end of the first line. A find _always_ returns a ResultSet.
# If there's only one record, it is still in an array. This array just happens to have only
# one element. The [0] pulls out that single record.
#
# To delete a record, you would do this instead:
#
# record = myLayout.find({"Customer ID" => "1234"})[0]
# myLayout.delete(record.record_id)
#
# Finally, the Layout::find method can also find a record using its internal id:
#
# record = myLayout.find(some_id)
#
# If the parameter you pass to Layout::find is not a hash, it is converted to a string
# and assumed to be a record id.
#
# === Performing Scripts
#
# Rfm can run a script in conjunction with any other action. For example, you might want
# to find a set of records, then run a script on them all. Or you may want to run a script
# when you delete a record. Here's how:
#
# myLayout.find({"First Name" => "Bill"}, {:post_script => "Process Sales"})
#
# This code finds every record with "Bill" in the First Name field, then runs the script
# called "Process Sales." You can control when the script actually runs, as explained in
# the documentation for Common Options for the Layout class.
#
# You can also pass a parameter to the script when it runs. Here's the deal:
#
# myLayout.find(
# {"First Name" => "Bill"},
# {:post_script => ["Process Sales", "all"]}
# )
#
# This time, the text value "all" is passed to the script as a script parameter.
#
# =Notes on Rfm with Ruby on Rails
#
# Rfm is a great fit for Rails. But it isn't ActiveRecord, so you need to do things
# a little differently.
#
# === Configuration
#
# To avoid having to reconfigure your Server object in every Rails action, you
# might add a configuration hash to the environment.rb. It can include all the
# options you need to connect to your server:
#
# RFM_CONFIG = {
# :host => "yourhost",
# :account_name => "someone",
# :password => "secret",
# :db => "Customers"
# }
#
# Then you can get a server concisely:
#
#   myServer = Server.new(RFM_CONFIG)
# myServer[RFM_CONFIG[:db]]["My Layout"]...
#
# You might even want to add code to your application.rb to centralize access
# to your various layouts.
#
# === Disable ActiveRecord
#
# If you're not using any SQL database in your Rails app, you'll quickly discover
# that Rails insists on a SQL database configuration anyway. This is easy to fix.
# Just turn off ActiveRecord. In the environment.rb, find the line that starts with
# +config.frameworks+. This is where you can disable the parts of Rails you're not
# using. Uncomment the line and make it look like this:
#
# config.frameworks -= [ :active_record ]
#
# Now Rails will no longer insist on a SQL database.
$: << File.expand_path(File.dirname(__FILE__))
require 'rfm_command'
require 'rfm_util'
require 'rfm_result'
require 'rfm_factory'
require 'rfm_error' | 39.366379 | 93 | 0.717398 |
ff796f770135c804645b674bdc361481f42cc364 | 4,053 | require 'spec_helper'
describe Spree::Price, :type => :model do
describe '#amount=' do
let(:price) { Spree::Price.new }
let(:amount) { '3,0A0' }
before do
price.amount = amount
end
    it 'is expected to equal the localized number' do
expect(price.amount).to eq(Spree::LocalizedNumber.parse(amount))
end
end
describe '#price' do
let(:price) { Spree::Price.new }
let(:amount) { 3000.00 }
context 'when amount is changed' do
before do
price.amount = amount
end
      it 'is expected to equal the price' do
expect(price.amount).to eq(price.price)
end
end
end
describe 'validations' do
let(:variant) { stub_model Spree::Variant }
subject { Spree::Price.new variant: variant, amount: amount }
context 'when the amount is nil' do
let(:amount) { nil }
it { is_expected.to be_valid }
end
context 'when the amount is less than 0' do
let(:amount) { -1 }
it 'has 1 error_on' do
expect(subject.error_on(:amount).size).to eq(1)
end
it 'populates errors' do
subject.valid?
expect(subject.errors.messages[:amount].first).to eq 'must be greater than or equal to 0'
end
end
context 'when the amount is greater than maximum amount' do
let(:amount) { Spree::Price::MAXIMUM_AMOUNT + 1 }
it 'has 1 error_on' do
expect(subject.error_on(:amount).size).to eq(1)
end
it 'populates errors' do
subject.valid?
expect(subject.errors.messages[:amount].first).to eq "must be less than or equal to #{Spree::Price::MAXIMUM_AMOUNT}"
end
end
context 'when the amount is between 0 and the maximum amount' do
let(:amount) { Spree::Price::MAXIMUM_AMOUNT }
it { is_expected.to be_valid }
end
end
describe '#price_including_vat_for(zone)' do
let(:variant) { stub_model Spree::Variant }
let(:default_zone) { Spree::Zone.new }
let(:zone) { Spree::Zone.new }
let(:amount) { 10 }
let(:tax_category) { Spree::TaxCategory.new }
let(:price) { Spree::Price.new variant: variant, amount: amount }
let(:price_options) { { tax_zone: zone } }
subject(:price_with_vat) { price.price_including_vat_for(price_options) }
context 'when called with a non-default zone' do
before do
allow(variant).to receive(:tax_category).and_return(tax_category)
expect(price).to receive(:default_zone).at_least(:once).and_return(default_zone)
allow(price).to receive(:apply_foreign_vat?).and_return(true)
allow(price).to receive(:included_tax_amount).with(tax_zone: default_zone, tax_category: tax_category) { 0.19 }
allow(price).to receive(:included_tax_amount).with(tax_zone: zone, tax_category: tax_category) { 0.25 }
end
it "returns the correct price including another VAT to two digits" do
expect(price_with_vat).to eq(10.50)
end
end
context 'when called from the default zone' do
before do
allow(variant).to receive(:tax_category).and_return(tax_category)
expect(price).to receive(:default_zone).at_least(:once).and_return(zone)
end
it "returns the correct price" do
expect(price).to receive(:price).and_call_original
expect(price_with_vat).to eq(10.00)
end
end
context 'when no default zone is set' do
before do
allow(variant).to receive(:tax_category).and_return(tax_category)
expect(price).to receive(:default_zone).at_least(:once).and_return(nil)
end
it "returns the correct price" do
expect(price).to receive(:price).and_call_original
expect(price.price_including_vat_for(tax_zone: zone)).to eq(10.00)
end
end
end
describe '#display_price_including_vat_for(zone)' do
subject { Spree::Price.new amount: 10 }
it 'calls #price_including_vat_for' do
expect(subject).to receive(:price_including_vat_for)
subject.display_price_including_vat_for(nil)
end
end
end
| 31.418605 | 124 | 0.657538 |
01ad12523d1e584897eeeb5a920e095e2459a495 | 864 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/clone', __FILE__)
describe "Array#dup" do
it_behaves_like :array_clone, :dup # FIX: no, clone and dup are not alike
it "does not copy frozen status from the original" do
a = [1, 2, 3, 4]
b = [1, 2, 3, 4]
a.freeze
aa = a.dup
bb = b.dup
aa.frozen?.should be_false
bb.frozen?.should be_false
end
it "does not copy singleton methods" do
a = [1, 2, 3, 4]
b = [1, 2, 3, 4]
def a.a_singleton_method; end
aa = a.dup
bb = b.dup
a.respond_to?(:a_singleton_method).should be_true
b.respond_to?(:a_singleton_method).should be_false
aa.respond_to?(:a_singleton_method).should be_false
bb.respond_to?(:a_singleton_method).should be_false
end
end
| 27 | 75 | 0.666667 |
080ab00d225c450c2275e17c157b81fea261e0b0 | 453 | # == Schema Information
#
# Table name: mentions
#
# id :integer not null, primary key
# mentioner_type :string(255)
# mentioner_id :string(255)
# mentionable_type :string(255)
# mentionable_id :integer
# created_at :datetime
#
# Indexes
#
# fk_mentionables (mentionable_id,mentionable_type)
# fk_mentions (mentioner_id,mentioner_type)
#
class Mention < Socialization::ActiveRecordStores::Mention
end
| 22.65 | 59 | 0.684327 |
114b3ab5ee67cf7eac3811dde5395054f930e974 | 3,866 | module SiteAnalyzer
require 'robotstxt'
require 'open-uri'
require 'timeout'
# Create site object with all scans
class Site
attr_reader :main_url, :pages, :pages_for_scan, :max_pages, :scanned_pages
def initialize(url, max_pages = 10, use_robot_txt = false)
Stringex::Localization.default_locale = :en
@main_url = url
@pages = []
@use_robot_txt = use_robot_txt
@scanned_pages = []
@pages_for_scan = []
@max_pages = max_pages - 1
@pages << Page.new(convert_to_valid(@main_url))
scan_site!
end
# check if page blocked by robot txt
def robot_txt_allowed?(url)
if @use_robot_txt
Robotstxt.allowed?(url, '*') rescue nil
else
true
end
end
    # scan pages: while the queue has URLs and the page limit allows, scan each one, queue the links it contains, and de-duplicate the queues
def scan_site!
add_pages_for_scan!
while @pages_for_scan.size > 0
page = convert_to_valid @pages_for_scan.pop
next unless page
@max_pages -= 1
add_page convert_to_valid(page)
return if @max_pages <= 0
add_pages_for_scan!
optimize_scan!
end
end
# add pages for scan array, also add bad pages to bad_pages array
def add_pages_for_scan!
@pages_for_scan = []
@bad_pages = []
@pages.each do |page|
@bad_pages << page.page_url unless page.page_a_tags
next unless page.page_a_tags
page.home_a.each do |link|
@pages_for_scan << link
end
end
end
# create Page and add to to site
def add_page(url)
unless robot_txt_allowed?(url)
@scanned_pages << url
return nil
end
page = Page.new(url)
@pages << page
@scanned_pages << url
end
# get all titles on site and return array of them
def all_titles
result = []
@pages.each do |page|
result << [page.page_url, page.all_titles] if page.page_a_tags
end
result
end
# get all meta description tags content and return it as array
def all_descriptions
result = []
@pages.each do |page|
result << [page.page_url, page.meta_desc_content] if page.page_a_tags
end
result
end
# get all h2 tags and return array of it
def all_h2
result = []
@pages.each do |page|
        result << [page.page_url, page.h2_text] if page.page_a_tags
end
result
end
# get all a tags and return array of it
def all_a
result = []
@pages.each do |page|
next unless page.page_a_tags
page.page_a_tags.compact.each do |tag|
tag[0] = '-' unless tag[0]
tag[1] = '-' unless tag[1]
tag[2] = '-' unless tag[2]
result << [page.page_url, tag[0], tag[1], tag[2]]
end
end
result.compact
end
# get all non HLU url and return array
def bad_urls
result = []
@pages.each do |page|
result << page.hlu
end
      result.compact
end
# get new array pages for scan and compact it
def optimize_scan!
@pages_for_scan = @pages_for_scan.compact.uniq
@scanned_pages = @scanned_pages.compact.uniq
@pages_for_scan -= @scanned_pages
end
# check url and try to convert it to valid, remove .jpg links, add scheme to url
def convert_to_valid(url)
return nil if url =~ /.jpg$/i
url.insert(0, @main_url.first(5)) if url.start_with? '//'
link = URI(url)
main_page = URI(@main_url)
if link && link.scheme && link.scheme.empty?
link.scheme = main_page.scheme
elsif link.nil?
return nil
end
if link.scheme =~ /^http/
request = link.to_s
else
request = nil
end
request
rescue
link
end
end
end
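# Minimal usage sketch (the URL and page limit are placeholders):
#
#   site = SiteAnalyzer::Site.new('http://example.com', 20)
#   site.all_titles       # => [[page_url, [titles]], ...]
#   site.all_descriptions # meta description content per scanned page
#   site.bad_urls         # URLs flagged by each page's #hlu check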
| 28.426471 | 121 | 0.600879 |
9198605260b2194ffb5b70ce1d77df967c78032d | 4,271 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
class Queries::WorkPackages::Columns::PropertyColumn < Queries::WorkPackages::Columns::WorkPackageColumn
def caption
WorkPackage.human_attribute_name(name)
end
class_attribute :property_columns
self.property_columns = {
id: {
sortable: "#{WorkPackage.table_name}.id",
groupable: false
},
project: {
association: 'project',
sortable: "name",
groupable: "#{WorkPackage.table_name}.project_id"
},
subject: {
sortable: "#{WorkPackage.table_name}.subject"
},
type: {
association: 'type',
sortable: "position",
groupable: "#{WorkPackage.table_name}.type_id"
},
parent: {
association: 'ancestors_relations',
default_order: 'asc',
sortable: false
},
status: {
association: 'status',
sortable: "position",
highlightable: true,
groupable: "#{WorkPackage.table_name}.status_id"
},
priority: {
association: 'priority',
sortable: "position",
default_order: 'desc',
highlightable: true,
groupable: "#{WorkPackage.table_name}.priority_id"
},
author: {
association: 'author',
sortable: %w(lastname firstname id),
groupable: "#{WorkPackage.table_name}.author_id"
},
assigned_to: {
association: 'assigned_to',
sortable: %w(lastname firstname id),
groupable: "#{WorkPackage.table_name}.assigned_to_id"
},
responsible: {
association: 'responsible',
sortable: %w(lastname firstname id),
groupable: "#{WorkPackage.table_name}.responsible_id"
},
updated_at: {
sortable: "#{WorkPackage.table_name}.updated_at",
default_order: 'desc'
},
category: {
association: 'category',
sortable: "name",
groupable: "#{WorkPackage.table_name}.category_id"
},
version: {
association: 'version',
sortable: [->(table_name = Version.table_name) { Versions::Scopes::OrderBySemverName.semver_sql(table_name) }, 'name'],
default_order: 'ASC',
null_handling: 'NULLS LAST',
groupable: "#{WorkPackage.table_name}.version_id"
},
start_date: {
sortable: "#{WorkPackage.table_name}.start_date",
null_handling: 'NULLS LAST'
},
due_date: {
highlightable: true,
sortable: "#{WorkPackage.table_name}.due_date",
null_handling: 'NULLS LAST'
},
estimated_hours: {
sortable: "#{WorkPackage.table_name}.estimated_hours",
summable: true
},
spent_hours: {
sortable: false,
summable: false
},
done_ratio: {
sortable: "#{WorkPackage.table_name}.done_ratio",
groupable: true,
if: ->(*) { !WorkPackage.done_ratio_disabled? }
},
created_at: {
sortable: "#{WorkPackage.table_name}.created_at",
default_order: 'desc'
}
}
def self.instances(_context = nil)
property_columns.map do |name, options|
next unless !options[:if] || options[:if].call
new(name, options.except(:if))
end.compact
end
end
| 30.29078 | 125 | 0.662374 |
1a9705116c94148d3ecc89259c9313fb9ebb85ba | 3,200 | class IgnitionGui4 < Formula
desc "Common libraries for robotics applications. GUI Library"
homepage "https://github.com/ignitionrobotics/ign-gui"
url "https://github.com/ignitionrobotics/ign-gui/archive/5d1428a3c90302daa73d5b94c14618fb4c25f7d4.tar.gz"
version "3.999.999~0~20200721~5d1428"
sha256 "12a6e69a90f546721fac3ff7e4a0e41705373e3f63508a82599052241573666a"
license "Apache-2.0"
head "https://github.com/ignitionrobotics/ign-gui", branch: "master"
depends_on "cmake" => [:build, :test]
depends_on "pkg-config" => [:build, :test]
depends_on "ignition-cmake2"
depends_on "ignition-common3"
depends_on "ignition-msgs6"
depends_on "ignition-plugin1"
depends_on "ignition-rendering4"
depends_on "ignition-transport9"
depends_on macos: :mojave # c++17
depends_on "qt"
depends_on "qwt"
depends_on "tinyxml2"
def install
ENV.m64
cmake_args = std_cmake_args
cmake_args << "-DQWT_WIN_INCLUDE_DIR=#{HOMEBREW_PREFIX}/lib/qwt.framework/Headers"
cmake_args << "-DQWT_WIN_LIBRARY_DIR=#{HOMEBREW_PREFIX}/lib/qwt.framework"
mkdir "build" do
system "cmake", "..", *cmake_args
system "make", "install"
end
end
test do
(testpath/"test.cpp").write <<-EOS
#include <iostream>
#ifndef Q_MOC_RUN
#include <ignition/gui/qt.h>
#include <ignition/gui/Application.hh>
#include <ignition/gui/MainWindow.hh>
#endif
//////////////////////////////////////////////////
int main(int _argc, char **_argv)
{
std::cout << "Hello, GUI!" << std::endl;
        // Increase verbosity so we see all messages
ignition::common::Console::SetVerbosity(4);
// Create app
ignition::gui::Application app(_argc, _argv);
// Load plugins / config
if (!app.LoadPlugin("Publisher"))
{
return 1;
}
// Customize main window
auto win = app.findChild<ignition::gui::MainWindow *>()->QuickWindow();
win->setProperty("title", "Hello Window!");
// Run window
// app.exec();
std::cout << "After run" << std::endl;
return 0;
}
EOS
(testpath/"CMakeLists.txt").write <<-EOS
cmake_minimum_required(VERSION 3.5 FATAL_ERROR)
find_package(ignition-gui4 QUIET REQUIRED)
add_executable(test_cmake test.cpp)
target_link_libraries(test_cmake ignition-gui4::ignition-gui4)
EOS
ENV.append_path "PKG_CONFIG_PATH", Formula["qt"].opt_lib/"pkgconfig"
system "pkg-config", "ignition-gui4"
cflags = `pkg-config --cflags ignition-gui4`.split(" ")
ldflags = `pkg-config --libs ignition-gui4`.split(" ")
system ENV.cc, "test.cpp",
*cflags,
*ldflags,
"-lc++",
"-o", "test"
ENV["IGN_PARTITION"] = rand((1 << 32) - 1).to_s
system "./test"
# test building with cmake
ENV.append_path "CMAKE_PREFIX_PATH", Formula["qt"].opt_prefix
mkdir "build" do
system "cmake", ".."
system "make"
system "./test_cmake"
end
# check for Xcode frameworks in bottle
cmd_not_grep_xcode = "! grep -rnI 'Applications[/]Xcode' #{prefix}"
system cmd_not_grep_xcode
end
end
| 30.47619 | 107 | 0.635938 |
bb9739da5c2acd1904f04683ea734188306076cd | 1,902 | require 'coding_dojo_2'
describe Potter do
it "costs nothing for an empty basket" do
subject.calculate([]).should == 0
end
it "should cost 8 for 1 book" do
subject.calculate([1]).should == 8
subject.calculate([2]).should == 8
subject.calculate([3]).should == 8
subject.calculate([4]).should == 8
subject.calculate([5]).should == 8
end
it "should discount nothing for identical books" do
subject.calculate([1, 1]).should == 8 * 2
subject.calculate([2, 2, 2]).should == 8 * 3
end
it "should discount 5% for 2 different books" do
subject.calculate([1, 2]).should == 8 * 2 * 0.95
end
it "should discount 10% for 3 different books" do
subject.calculate([1, 3, 5]).should == 8 * 3 * 0.9
end
it "should discount 20% for 4 different books" do
subject.calculate([1, 2, 3, 5]).should == 8 * 4 * 0.8
end
it "should discount 25% for 5 different books" do
subject.calculate([1, 2, 3, 4, 5]).should == 8 * 5 * 0.75
end
it "should discount 5% for 2 different books out of 3" do
subject.calculate([1, 1, 2]).should == 8 + (8 * 2 * 0.95)
end
  # [1, 1, 2, 2] -> [[1, 2], [1, 2]]
it "should discount 5% for 2 pairs of different books" do
subject.calculate([1, 1, 2, 2]).should == 2 * (8 * 2 * 0.95)
end
it "should discount 5% and 20% for 2 and 4 different books" do
subject.calculate([1, 1, 2, 3, 3, 4])
.should == (8 * 2 * 0.95) + (8 * 4 * 0.8)
end
it "should discount 25% and nothing for 5 and 1 different books" do
subject.calculate([1, 2, 2, 3, 4, 5])
.should == 8 + (8 * 5 * 0.75)
end
it "should be clever about grouping different books" do
subject.calculate([1, 1, 2, 2, 3, 3, 4, 5])
.should == 2 * (8 * 4 * 0.8)
subject.calculate([1, 1, 1, 1, 1 ,2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5])
.should == 3 * (8 * 5 * 0.75) + 2 * (8 * 4 * 0.8)
end
end
| 30.190476 | 92 | 0.57939 |
013038c772060b2351ec2f62c1300d3141d58319 | 148 | class AddRecommendedToRatings < ActiveRecord::Migration[5.2]
def change
add_column :ratings, :recommended, :boolean, default: false
end
end
| 24.666667 | 63 | 0.763514 |
d5ca72977ceda748541f1dba40beea6031895268 | 6,217 | # frozen_string_literal: true
require 'beaker-rspec/spec_helper'
require 'beaker-rspec/helpers/serverspec'
require 'beaker/puppet_install_helper'
require 'rspec/retry'
begin
require 'pry'
rescue LoadError # rubocop:disable Lint/HandleExceptions for optional loading
end
# This method allows a block to be passed in and if an exception is raised
# that matches the 'error_matcher' matcher, the block will wait a set number
# of seconds before retrying.
# Params:
# - max_retry_count - Max number of retries
# - retry_wait_interval_secs - Number of seconds to wait before retry
# - error_matcher - Matcher which the exception raised must match to allow retry
# Example Usage:
# retry_on_error_matching(3, 5, /OpenGPG Error/) do
# apply_manifest(pp, :catch_failures => true)
# end
def retry_on_error_matching(max_retry_count = 3, retry_wait_interval_secs = 5, error_matcher = nil)
try = 0
begin
try += 1
yield
rescue StandardError => e
raise unless try < max_retry_count && (error_matcher.nil? || e.message =~ error_matcher)
sleep retry_wait_interval_secs
retry
end
end
run_puppet_install_helper unless ENV['BEAKER_provision'] == 'no'
RSpec.configure do |c|
  # Add exclusive filter for Windows until all the Windows functionality is implemented
c.filter_run_excluding win_broken: true
# Project root
proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
# Readable test descriptions
c.formatter = :documentation
# show retry status in spec process
c.verbose_retry = true
# show exception that triggers a retry if verbose_retry is set to true
c.display_try_failure_messages = true
# Configure all nodes in nodeset
c.before :suite do
# Install module and dependencies
hosts.each do |host|
next unless not_controller(host)
copy_module_to(host, source: proj_root, module_name: 'docker')
# Due to RE-6764, running yum update renders the machine unable to install
# other software. Thus this workaround.
if fact_on(host, 'operatingsystem') == 'RedHat'
on(host, 'mv /etc/yum.repos.d/redhat.repo /etc/yum.repos.d/internal-mirror.repo')
on(host, 'rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm')
end
on(host, 'yum update -y -q') if fact_on(host, 'osfamily') == 'RedHat'
on host, puppet('module', 'install', 'puppetlabs-stdlib', '--version', '4.24.0'), acceptable_exit_codes: [0, 1]
on host, puppet('module', 'install', 'puppetlabs-apt', '--version', '4.4.1'), acceptable_exit_codes: [0, 1]
on host, puppet('module', 'install', 'puppetlabs-translate', '--version', '1.0.0'), acceptable_exit_codes: [0, 1]
on host, puppet('module', 'install', 'puppetlabs-powershell', '--version', '2.1.5'), acceptable_exit_codes: [0, 1]
on host, puppet('module', 'install', 'puppetlabs-reboot', '--version', '2.0.0'), acceptable_exit_codes: [0, 1]
# net-tools required for netstat utility being used by some tests
if fact_on(host, 'osfamily') == 'RedHat' && fact_on(host, 'operatingsystemmajrelease') == '7'
on(host, 'yum install -y net-tools device-mapper')
end
if fact_on(host, 'operatingsystem') == 'Debian'
on(host, 'apt-get install net-tools')
end
docker_compose_content_v3 = <<-EOS
version: "3.4"
x-images:
&default-image
alpine:3.8
services:
compose_test:
image: *default-image
command: /bin/sh -c "while true; do echo hello world; sleep 1; done"
EOS
docker_compose_override_v3 = <<-EOS
version: "3.4"
x-images:
&default-image
debian:stable-slim
services:
compose_test:
image: *default-image
command: /bin/sh -c "while true; do echo hello world; sleep 1; done"
EOS
docker_stack_override_v3 = <<-EOS
version: "3.4"
x-images:
&default-image
debian:stable-slim
services:
compose_test:
image: *default-image
command: /bin/sh -c "while true; do echo hello world; sleep 1; done"
EOS
docker_compose_content_v3_windows = <<-EOS
version: "3"
services:
compose_test:
image: hello-world:nanoserver
command: cmd.exe /C "ping 8.8.8.8 -t"
networks:
default:
external:
name: nat
EOS
docker_compose_override_v3_windows = <<-EOS
version: "3"
services:
compose_test:
image: hello-world:nanoserver-sac2016
command: cmd.exe /C "ping 8.8.8.8 -t"
networks:
default:
external:
name: nat
EOS
docker_stack_content_windows = <<-EOS
version: "3"
services:
compose_test:
image: hello-world:nanoserver
command: cmd.exe /C "ping 8.8.8.8 -t"
EOS
docker_stack_override_windows = <<-EOS
version: "3"
services:
compose_test:
image: hello-world:nanoserver-sac2016
EOS
if fact_on(host, 'osfamily') == 'windows'
create_remote_file(host, '/tmp/docker-compose-v3.yml', docker_compose_content_v3_windows)
create_remote_file(host, '/tmp/docker-stack.yml', docker_stack_content_windows)
create_remote_file(host, '/tmp/docker-compose-override-v3.yml', docker_compose_override_v3_windows)
create_remote_file(host, '/tmp/docker-stack-override.yml', docker_stack_override_windows)
else
create_remote_file(host, '/tmp/docker-compose-v3.yml', docker_compose_content_v3)
create_remote_file(host, '/tmp/docker-stack.yml', docker_compose_content_v3)
create_remote_file(host, '/tmp/docker-compose-override-v3.yml', docker_compose_override_v3)
create_remote_file(host, '/tmp/docker-stack-override.yml', docker_stack_override_v3)
end
next unless fact_on(host, 'osfamily') == 'windows'
win_host = only_host_with_role(hosts, 'default')
retry_on_error_matching(60, 5, %r{connection failure running}) do
@windows_ip = win_host.ip
end
apply_manifest_on(host, "class { 'docker': docker_ee => true, extra_parameters => '\"insecure-registries\": [ \"#{@windows_ip}:5000\" ]' }")
docker_path = '/cygdrive/c/Program Files/Docker'
host.add_env_var('PATH', docker_path)
host.add_env_var('TEMP', 'C:\Users\Administrator\AppData\Local\Temp')
puts 'Waiting for box to come online'
sleep 300
end
end
end
| 36.145349 | 146 | 0.695673 |
1da69659375a034bda490cf23f3dc708920b00b0 | 879 | # frozen_string_literal: true
module Stupidedi
module Versions
module FunctionalGroups
module FiftyTen
module SegmentDefs
s = Schema
e = ElementDefs
r = ElementReqs
HLH = s::SegmentDef.build(:HLH, "Health Information",
"To provide health information",
e::E1212.simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E65 .simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E81 .simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E81 .simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E352 .simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E1213.simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E352 .simple_use(r::Optional, s::RepeatCount.bounded(1)))
end
end
end
end
end
| 31.392857 | 72 | 0.59727 |
ab081b278991a156c9dc16167ec838bed5318ffb | 179 | class AddLastSeenIpAddressToEnhancedSecurityTokens < ActiveRecord::Migration
def change
add_column :users_enhanced_security_tokens, :last_seen_ip_address, :string
end
end
| 29.833333 | 78 | 0.843575 |
18227156f4e8404b612526fd02f600e196a072e0 | 6,960 | require "spec_helper"
require "parallel_tests/rspec/runner"
describe ParallelTests::RSpec::Runner do
test_tests_in_groups(ParallelTests::RSpec::Runner, 'spec', '_spec.rb')
describe :run_tests do
before do
File.stub!(:file?).with('script/spec').and_return false
File.stub!(:file?).with('spec/spec.opts').and_return false
File.stub!(:file?).with('spec/parallel_spec.opts').and_return false
File.stub!(:file?).with('.rspec_parallel').and_return false
ParallelTests.stub!(:bundler_enabled?).and_return false
end
def call(*args)
ParallelTests::RSpec::Runner.run_tests(*args)
end
def should_run_with(regex)
ParallelTests::Test::Runner.should_receive(:execute_command).with{|a,b,c,d| a =~ regex}
end
def should_not_run_with(regex)
ParallelTests::Test::Runner.should_receive(:execute_command).with{|a,b,c,d| a !~ regex}
end
it "runs command using nice when specifed" do
ParallelTests::Test::Runner.should_receive(:execute_command_and_capture_output).with{|a,b,c| b =~ %r{^nice rspec}}
call('xxx', 1, 22, :nice => true)
end
it "runs with color when called from cmdline" do
should_run_with %r{ --tty}
$stdout.should_receive(:tty?).and_return true
call('xxx', 1, 22, {})
end
it "runs without color when not called from cmdline" do
should_not_run_with %r{ --tty}
$stdout.should_receive(:tty?).and_return false
call('xxx', 1, 22, {})
end
it "runs with color for rspec 1 when called for the cmdline" do
File.should_receive(:file?).with('script/spec').and_return true
ParallelTests::Test::Runner.should_receive(:execute_command).with { |a, b, c, d| d[:env] == {"RSPEC_COLOR" => "1"} }
$stdout.should_receive(:tty?).and_return true
call('xxx', 1, 22, {})
end
it "runs without color for rspec 1 when not called for the cmdline" do
File.should_receive(:file?).with('script/spec').and_return true
ParallelTests::Test::Runner.should_receive(:execute_command).with { |a, b, c, d| d[:env] == {} }
$stdout.should_receive(:tty?).and_return false
call('xxx', 1, 22, {})
end
it "run bundle exec spec when on bundler rspec 1" do
File.stub!(:file?).with('script/spec').and_return false
ParallelTests.stub!(:bundler_enabled?).and_return true
ParallelTests::RSpec::Runner.stub!(:run).with("bundle show rspec-core").and_return "Could not find gem 'rspec-core' in bundler."
should_run_with %r{bundle exec spec}
call('xxx', 1, 22, {})
end
it "run bundle exec rspec when on bundler rspec 2" do
File.stub!(:file?).with('script/spec').and_return false
ParallelTests.stub!(:bundler_enabled?).and_return true
ParallelTests::RSpec::Runner.stub!(:run).with("bundle show rspec-core").and_return "/foo/bar/rspec-core-2.0.2"
should_run_with %r{bundle exec rspec}
call('xxx', 1, 22, {})
end
it "runs script/spec when script/spec can be found" do
File.should_receive(:file?).with('script/spec').and_return true
should_run_with %r{script/spec}
call('xxx' ,1, 22, {})
end
it "runs spec when script/spec cannot be found" do
File.stub!(:file?).with('script/spec').and_return false
should_not_run_with %r{ script/spec}
call('xxx', 1, 22, {})
end
it "uses bin/rspec when present" do
File.stub(:exists?).with('bin/rspec').and_return true
should_run_with %r{bin/rspec}
call('xxx', 1, 22, {})
end
it "uses no -O when no opts where found" do
File.stub!(:file?).with('spec/spec.opts').and_return false
should_not_run_with %r{spec/spec.opts}
call('xxx', 1, 22, {})
end
it "uses -O spec/spec.opts when found (with script/spec)" do
File.stub!(:file?).with('script/spec').and_return true
File.stub!(:file?).with('spec/spec.opts').and_return true
should_run_with %r{script/spec\s+-O spec/spec.opts}
call('xxx', 1, 22, {})
end
it "uses -O spec/parallel_spec.opts when found (with script/spec)" do
File.stub!(:file?).with('script/spec').and_return true
File.should_receive(:file?).with('spec/parallel_spec.opts').and_return true
should_run_with %r{script/spec\s+-O spec/parallel_spec.opts}
call('xxx', 1, 22, {})
end
it "uses -O .rspec_parallel when found (with script/spec)" do
File.stub!(:file?).with('script/spec').and_return true
File.should_receive(:file?).with('.rspec_parallel').and_return true
should_run_with %r{script/spec\s+-O .rspec_parallel}
call('xxx', 1, 22, {})
end
it "uses -O spec/parallel_spec.opts with rspec1" do
File.should_receive(:file?).with('spec/parallel_spec.opts').and_return true
ParallelTests.stub!(:bundler_enabled?).and_return true
ParallelTests::RSpec::Runner.stub!(:run).with("bundle show rspec-core").and_return "Could not find gem 'rspec-core'."
should_run_with %r{spec\s+-O spec/parallel_spec.opts}
call('xxx', 1, 22, {})
end
it "uses -O spec/parallel_spec.opts with rspec2" do
pending if RUBY_PLATFORM == "java" # FIXME not sure why, but fails on travis
File.should_receive(:file?).with('spec/parallel_spec.opts').and_return true
ParallelTests.stub!(:bundler_enabled?).and_return true
ParallelTests::RSpec::Runner.stub!(:run).with("bundle show rspec-core").and_return "/foo/bar/rspec-core-2.4.2"
should_run_with %r{rspec\s+--color --tty -O spec/parallel_spec.opts}
call('xxx', 1, 22, {})
end
it "uses options passed in" do
should_run_with %r{rspec -f n}
call('xxx', 1, 22, :test_options => '-f n')
end
it "returns the output" do
ParallelTests::RSpec::Runner.should_receive(:execute_command).and_return :x => 1
call('xxx', 1, 22, {}).should == {:x => 1}
end
end
describe :find_results do
def call(*args)
ParallelTests::RSpec::Runner.find_results(*args)
end
it "finds multiple results in spec output" do
output = "
....F...
..
failute fsddsfsd
...
ff.**..
0 examples, 0 failures, 0 pending
ff.**..
1 example, 1 failure, 1 pending
"
call(output).should == ['0 examples, 0 failures, 0 pending','1 example, 1 failure, 1 pending']
end
it "is robust against scrambeled output" do
output = "
....F...
..
failute fsddsfsd
...
ff.**..
0 exFampl*es, 0 failures, 0 pend.ing
ff.**..
1 exampF.les, 1 failures, 1 pend.ing
"
call(output).should == ['0 examples, 0 failures, 0 pending','1 examples, 1 failures, 1 pending']
end
end
describe ".find_tests" do
def call(*args)
ParallelTests::RSpec::Runner.send(:find_tests, *args)
end
it "doesn't find bakup files with the same name as test files" do
with_files(['a/x_spec.rb','a/x_spec.rb.bak']) do |root|
call(["#{root}/"]).should == [
"#{root}/a/x_spec.rb",
]
end
end
end
end
| 34.455446 | 134 | 0.648276 |
611b572d82cd0197cdfcdf56290c1f034b51cf7b | 288 | # frozen_string_literal: true
require 'test_helper'
class ViewRenderingTest < ActionDispatch::IntegrationTest
setup do
$view_engine_rendered = false
end
  test 'views in inactive engines are never rendered' do
visit '/greeting'
refute $view_engine_rendered
end
end
| 20.571429 | 59 | 0.763889 |
4ae275d8e5052439faccedc3092e7a76e5df0091 | 955 | # -*- encoding: utf-8 -*-
# stub: ethon 0.11.0 ruby lib
Gem::Specification.new do |s|
s.name = "ethon"
s.version = "0.11.0"
s.required_rubygems_version = Gem::Requirement.new(">= 1.3.6") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Hans Hasselberg"]
s.date = "2017-10-26"
s.description = "Very lightweight libcurl wrapper."
s.email = ["[email protected]"]
s.homepage = "https://github.com/typhoeus/ethon"
s.licenses = ["MIT"]
s.rubygems_version = "2.5.2.1"
s.summary = "Libcurl wrapper."
s.installed_by_version = "2.5.2.1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<ffi>, [">= 1.3.0"])
else
s.add_dependency(%q<ffi>, [">= 1.3.0"])
end
else
s.add_dependency(%q<ffi>, [">= 1.3.0"])
end
end
| 28.939394 | 109 | 0.641885 |
0307a49528ad8cd5084776ba284c0a03f8e7fc00 | 1,829 | class Flutter < Formula
require 'json'
desc "Homebrew shell for Flutter"
homepage "https://flutter.io"
stable do
url "https://github.com/flutter/flutter.git", :branch => "stable"
version "stable"
end
devel do
url "https://github.com/flutter/flutter.git", :branch => "master"
version "dev"
end
bottle :unneeded
def install
system "./bin/flutter"
allfiles = File.join(buildpath, "**", "{*,.*}")
mv Dir.glob(allfiles), Dir.glob(prefix), :force => true
end
def post_install
rm File.join(HOMEBREW_PREFIX, "bin", "flutter.bat")
chmod_R "+rwx", File.join(prefix, "bin")
end
def caveats
<<~EOS
Remove the proxy settings for command-line before you begin.
Run the following command to install stable channel:
brew install flutter
Run the following command to install dev channel:
brew install --devel flutter
      If you want to change channels, please run the following command:
brew uninstall --force flutter
rm -rf "$(brew --cache)/flutter--git"
brew install (--devel) flutter
If you're located in China, please follow:
https://github.com/flutter/flutter/wiki/Using-Flutter-in-China
      After installation, please set `PUB_HOSTED_URL` & `FLUTTER_STORAGE_BASE_URL`
      You may wish to add Flutter's bin directory to your PATH:
echo 'export PATH="/usr/local/opt/flutter/bin:$PATH"' >> ~/.zshrc
You can use the following command to show flutter version:
flutter --version
Run the following command to see if there are any platform dependencies you need to complete the setup:
flutter doctor
Run the following command to upgrade flutter:
brew reinstall (--devel) flutter
EOS
end
test do
system "false"
end
end
| 27.298507 | 109 | 0.662657 |
110e817fd58322823355cb6dd4b2e0f4c484b43c | 1,029 | describe ManageIQ::Providers::Google::CloudManager::Vm do
context "#is_available?" do
let(:ems) { FactoryBot.create(:ems_google) }
let(:vm) { FactoryBot.create(:vm_google, :ext_management_system => ems) }
let(:power_state_on) { "running" }
let(:power_state_suspended) { "pending" }
context("with :start") do
let(:state) { :start }
include_examples "Vm operation is available when not powered on"
end
context("with :stop") do
let(:state) { :stop }
include_examples "Vm operation is available when powered on"
end
context("with :shutdown_guest") do
let(:state) { :shutdown_guest }
include_examples "Vm operation is not available"
end
context("with :standby_guest") do
let(:state) { :standby_guest }
include_examples "Vm operation is not available"
end
context("with :reset") do
let(:state) { :reset }
include_examples "Vm operation is not available"
end
end
end
| 30.264706 | 96 | 0.626822 |
18b4275b3acfc2481d0659da131dca40c463057c | 7,687 | =begin
#Hydrogen Atom API
#The Hydrogen Atom API
OpenAPI spec version: 1.7.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.14
=end
require 'date'
module NucleusApi
# Questionnaire Object
class Questionnaire
attr_accessor :create_date
# decision_tree_id
attr_accessor :decision_tree_id
# description
attr_accessor :description
attr_accessor :id
# is_active
attr_accessor :is_active
# name
attr_accessor :name
attr_accessor :questions
attr_accessor :secondary_id
# type
attr_accessor :type
attr_accessor :update_date
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'create_date' => :'create_date',
:'decision_tree_id' => :'decision_tree_id',
:'description' => :'description',
:'id' => :'id',
:'is_active' => :'is_active',
:'name' => :'name',
:'questions' => :'questions',
:'secondary_id' => :'secondary_id',
:'type' => :'type',
:'update_date' => :'update_date'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'create_date' => :'DateTime',
:'decision_tree_id' => :'String',
:'description' => :'String',
:'id' => :'String',
:'is_active' => :'BOOLEAN',
:'name' => :'String',
:'questions' => :'Array<Question>',
:'secondary_id' => :'String',
:'type' => :'String',
:'update_date' => :'DateTime'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'create_date')
self.create_date = attributes[:'create_date']
end
if attributes.has_key?(:'decision_tree_id')
self.decision_tree_id = attributes[:'decision_tree_id']
end
if attributes.has_key?(:'description')
self.description = attributes[:'description']
end
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'is_active')
self.is_active = attributes[:'is_active']
end
if attributes.has_key?(:'name')
self.name = attributes[:'name']
end
if attributes.has_key?(:'questions')
if (value = attributes[:'questions']).is_a?(Array)
self.questions = value
end
end
if attributes.has_key?(:'secondary_id')
self.secondary_id = attributes[:'secondary_id']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
if attributes.has_key?(:'update_date')
self.update_date = attributes[:'update_date']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @name.nil?
invalid_properties.push('invalid value for "name", name cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @name.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
create_date == o.create_date &&
decision_tree_id == o.decision_tree_id &&
description == o.description &&
id == o.id &&
is_active == o.is_active &&
name == o.name &&
questions == o.questions &&
secondary_id == o.secondary_id &&
type == o.type &&
update_date == o.update_date
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[create_date, decision_tree_id, description, id, is_active, name, questions, secondary_id, type, update_date].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", attributes[self.class.attribute_map[key]])
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
value
when :Date
value
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NucleusApi.const_get(type).new
temp_model.build_from_hash(value)
end
    end

    # Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 27.851449 | 120 | 0.614284 |
012763f5de07d238df117d49dca96f5be97b0edc | 396 | # Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :phone do
number '650 851-1210'
number_type 'voice'
extension '200'
end
factory :phone_with_extra_whitespace, class: Phone do
country_prefix '33 '
number '650 851-1210 '
department ' Information '
extension '2000 '
vanity_number ' 800-FLY-AWAY '
end
end
| 22 | 68 | 0.699495 |
5d16268f82a773d4f166a35b11a85817c3b8f8a8 | 15,178 | # frozen_string_literal: true
require_relative "../spec_helper"
describe SpartanAPM::Persistence do
def sample_stats(app, action, additional_stats)
stats = []
(1..100).each do |value|
measure = SpartanAPM::Measure.new(app, action)
stats << measure
additional_stats.keys.each do |name|
measure.timers[name] = value.to_f / 1000.0
measure.counts[name] = (value / 10) + 1
end
end
measure = SpartanAPM::Measure.new(app, action)
stats << measure
additional_stats.each do |name, value|
measure.timers[name] = value.to_f / 1000.0
measure.counts[name] = (value / 10) + 1
end
stats.shuffle
end
describe "store!" do
it "should store aggregated statistics for both specific contexts and the root level contexts" do
time = Time.now
sample = (1..100).to_a.shuffle
bucket_1 = SpartanAPM.bucket(time - 120)
bucket_2 = SpartanAPM.bucket(time - 60)
bucket_3 = SpartanAPM.bucket(time)
bucket_1_web_1_measures = sample_stats("web", "context_1", "stat_1" => 500, "stat_2" => 200)
bucket_1_web_1_measures.last.capture_error(ArgumentError.new)
bucket_1_async_1_measures = sample_stats("async", "context_1", "stat_1" => 300, "stat_2" => 400)
bucket_2_web_1_measures = sample_stats("web", "context_1", "stat_1" => 120, "stat_2" => 220)
bucket_2_async_1_measures = sample_stats("async", "context_1", "stat_1" => 320, "stat_2" => 420)
bucket_3_web_1_measures = sample_stats("web", "context_1", "stat_1" => 140, "stat_2" => 240)
bucket_3_async_1_measures = sample_stats("async", "context_1", "stat_1" => 340, "stat_2" => 440)
bucket_1_web_2_measures = sample_stats("web", "context_2", "stat_1" => 160, "stat_2" => 260)
bucket_2_async_2_measures = sample_stats("async", "context_2", "stat_1" => 360, "stat_2" => 460)
bucket_1_web_1_other_host_measures = sample_stats("web", "context_1", "stat_1" => 170, "stat_2" => 270)
bucket_1_web_1_other_host_measures.last.capture_error(ArgumentError.new)
web = SpartanAPM::Persistence.new("web")
async = SpartanAPM::Persistence.new("async")
begin
allow(SpartanAPM).to receive(:host).and_return("testhost_1")
SpartanAPM::Persistence.store!(bucket_1, bucket_1_web_1_measures + bucket_1_async_1_measures + bucket_1_web_2_measures)
SpartanAPM::Persistence.store!(bucket_2, bucket_2_web_1_measures + bucket_2_async_1_measures + bucket_2_async_2_measures)
SpartanAPM::Persistence.store!(bucket_3, bucket_3_web_1_measures + bucket_3_async_1_measures)
allow(SpartanAPM).to receive(:host).and_return("testhost_2")
SpartanAPM::Persistence.store!(bucket_1, bucket_1_web_1_other_host_measures)
SpartanAPM::Persistence.store!(bucket_1, bucket_1_web_1_measures)
web_1_metrics = web.metrics([time - 120, time], action: "context_1")
web_2_metrics = web.metrics([time - 120, time], action: "context_2")
async_1_metrics = async.metrics([time - 120, time], action: "context_1")
async_2_metrics = async.metrics([time - 120, time], action: "context_2")
async_all_metrics = async.metrics([time - 120, time])
web_all_metrics = web.metrics([time - 120, time])
web_1_partial_metrics = web.metrics([time - 60, time], action: "context_1")
web_1_testhost_2_metrics = web.metrics([time - 120, time], action: "context_1", host: "testhost_2")
expect(web_1_metrics.collect(&:time)).to eq [Time.at(bucket_1 * 60), Time.at(bucket_2 * 60), Time.at(bucket_3 * 60)]
expect(web_1_metrics.collect(&:count)).to eq [303, 101, 101]
expect(web_1_metrics.collect(&:avg)).to eq [106, 103, 104]
expect(web_1_metrics.collect(&:p50)).to eq [102, 102, 102]
expect(web_1_metrics.collect(&:p90)).to eq [182, 182, 182]
expect(web_1_metrics.collect(&:p99)).to eq [200, 200, 200]
expect(web_1_metrics.collect(&:error_count)).to eq [3, 0, 0]
expect(web_1_metrics.collect { |m| m.component_request_time(:stat_1) }).to eq [54, 51, 51]
expect(web_1_metrics.collect { |m| m.component_request_time(:stat_2) }).to eq [52, 52, 52]
expect(web_2_metrics.collect(&:time)).to eq [Time.at(bucket_1 * 60)]
expect(web_2_metrics.collect(&:count)).to eq [101]
expect(async_1_metrics.collect(&:time)).to eq [Time.at(bucket_1 * 60), Time.at(bucket_2 * 60), Time.at(bucket_3 * 60)]
expect(async_1_metrics.collect(&:count)).to eq [101, 101, 101]
expect(async_2_metrics.collect(&:time)).to eq [Time.at(bucket_2 * 60)]
expect(async_2_metrics.collect(&:count)).to eq [101]
expect(async_all_metrics.collect(&:time)).to eq [Time.at(bucket_1 * 60), Time.at(bucket_2 * 60), Time.at(bucket_3 * 60)]
expect(async_all_metrics.collect(&:count)).to eq [101, 202, 101]
expect(async_all_metrics.collect(&:avg)).to eq [107, 108, 108]
expect(async_all_metrics.collect(&:p50)).to eq [102, 102, 102]
expect(async_all_metrics.collect(&:p90)).to eq [182, 182, 182]
expect(async_all_metrics.collect(&:p99)).to eq [200, 200, 200]
expect(web_all_metrics.collect(&:time)).to eq [Time.at(bucket_1 * 60), Time.at(bucket_2 * 60), Time.at(bucket_3 * 60)]
expect(web_all_metrics.collect(&:count)).to eq [404, 101, 101]
expect(web_all_metrics.collect(&:avg)).to eq [119, 103, 104]
expect(web_all_metrics.collect(&:p50)).to eq [102, 102, 102]
expect(web_all_metrics.collect(&:p90)).to eq [182, 182, 182]
expect(web_all_metrics.collect(&:p99)).to eq [200, 200, 200]
expect(web_1_partial_metrics.collect(&:time)).to eq [Time.at(bucket_2 * 60), Time.at(bucket_3 * 60)]
expect(web_1_partial_metrics.collect(&:count)).to eq [101, 101]
expect(web_1_testhost_2_metrics.collect(&:time)).to eq [Time.at(bucket_1 * 60)]
expect(web_1_testhost_2_metrics.collect(&:count)).to eq [202]
ensure
web.clear!([time - 120, time])
async.clear!([time - 120, time])
end
end
it "should estimate the counts if the measures are being sampled"
it "should store stats for the last hour", freeze_time: true do
app = SpartanAPM::Persistence.new("app")
base_time = Time.utc(2022, 2, 22, 12, 0)
app.delete_hourly_stats!
begin
61.times do |minute|
measure = SpartanAPM::Measure.new("app", "action")
measure.capture_time("app", 0.01 * (minute + 1))
measure.capture_time("database", 0.02 * (minute + 1))
measure.capture_error(ArgumentError.new) if minute % 4 == 0
bucket = SpartanAPM.bucket(base_time + (minute * 60))
SpartanAPM::Persistence.store!(bucket, [measure])
end
metrics = app.hourly_metrics(base_time)
expect(metrics.size).to eq 1
metric = metrics.first
expect(metric.time).to eq base_time
expect(metric.count).to eq 60
expect(metric.error_count).to eq 15
expect(metric.components).to eq({"app" => [305, 1.0], "database" => [610, 1.0]})
expect(metric.avg).to eq 915
expect(metric.p50).to eq 915
expect(metric.p90).to eq 915
expect(metric.p99).to eq 915
ensure
app.clear!([base_time, base_time + (61 * 60)])
end
end
it "should truncate actions when storing hourly stats", freeze_time: true do
app = SpartanAPM::Persistence.new("app")
base_time = Time.utc(2022, 2, 22, 12, 0)
app.delete_hourly_stats!
save_val = SpartanAPM.max_actions
begin
SpartanAPM.max_actions = 2
3.times do |i|
measure = SpartanAPM::Measure.new("app", "action#{i}")
measure.capture_time("app", 0.01 * (i + 1))
measure.capture_time("database", 0.02 * (i + 1))
bucket = SpartanAPM.bucket(base_time)
SpartanAPM::Persistence.store!(bucket, [measure])
end
SpartanAPM::Persistence.store!(SpartanAPM.bucket(base_time + (60 * 60)), [SpartanAPM::Measure.new("app", "action0")])
expect(app.metrics([base_time, base_time + (60 * 60)], action: "action0").size).to eq 0
expect(app.metrics([base_time, base_time + (60 * 60)], action: "action1").size).to eq 1
expect(app.metrics([base_time, base_time + (60 * 60)], action: "action2").size).to eq 1
expect(app.metrics([base_time, base_time + (60 * 60)]).size).to eq 1
ensure
SpartanAPM.max_actions = save_val
app.clear!([base_time, base_time + (61 * 60)])
end
end
it "should store stats for the last day", freeze_time: true do
app = SpartanAPM::Persistence.new("app")
base_time = Time.utc(2022, 2, 22, 0, 0)
app.delete_hourly_stats!
app.delete_daily_stats!
begin
25.times do |hour|
measure = SpartanAPM::Measure.new("app", "action")
measure.capture_time("app", 0.01 * (hour + 1))
measure.capture_time("database", 0.02 * (hour + 1))
measure.capture_error(ArgumentError.new) if hour % 4 == 0
bucket = SpartanAPM.bucket(base_time + (hour * 60 * 60))
SpartanAPM::Persistence.store!(bucket, [measure])
end
metrics = app.daily_metrics(base_time)
expect(metrics.size).to eq 1
metric = metrics.first
expect(metric.time).to eq base_time
expect(metric.count).to eq 24
expect(metric.error_count).to eq 6
expect(metric.components.collect { |k, vals| [k, vals.collect { |v| v.round(3) }] }).to eq({"app" => [125, 0.017], "database" => [250, 0.017]}.to_a)
expect(metric.avg).to eq 375
expect(metric.p50).to eq 375
expect(metric.p90).to eq 375
expect(metric.p99).to eq 375
ensure
app.clear!([base_time, base_time + (25 * 60 * 60)])
end
end
end
describe "hosts" do
it "should get a list of all hosts" do
measure_1 = SpartanAPM::Measure.new("app", "action_1")
measure_2 = SpartanAPM::Measure.new("app", "action_2")
measure_3 = SpartanAPM::Measure.new("app", "action_3")
measure_1.timers["test"] = 1.0
measure_2.timers["test"] = 2.0
measure_3.timers["test"] = 3.0
time = Time.now
begin
allow(SpartanAPM).to receive(:host).and_return("testhost_1")
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure_1])
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure_2])
allow(SpartanAPM).to receive(:host).and_return("testhost_2")
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure_3])
expect(SpartanAPM::Persistence.new("app").hosts(time)).to match_array(["testhost_1", "testhost_2"])
expect(SpartanAPM::Persistence.new("app").hosts(time, action: "action_1")).to match_array(["testhost_1"])
expect(SpartanAPM::Persistence.new("app").hosts(time, action: "action_3")).to match_array(["testhost_2"])
ensure
SpartanAPM::Persistence.new("app").clear!(time)
end
end
end
describe "actions" do
it "should get a list of all actions with the total time spent percentage", freeze_time: true do
measure_1 = SpartanAPM::Measure.new("app", "action_1")
measure_2 = SpartanAPM::Measure.new("app", "action_2")
measure_3 = SpartanAPM::Measure.new("app", "action_3")
measure_1.timers["test"] = 1.0
measure_2.timers["test"] = 2.0
measure_3.timers["test"] = 3.0
time = Time.now
begin
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure_1, measure_2, measure_2, measure_3])
actions = SpartanAPM::Persistence.new("app").actions([time, time])
expect(actions).to eq([["action_2", 0.5], ["action_3", 0.375], ["action_1", 0.125]])
actions = SpartanAPM::Persistence.new("app").actions([time, time], limit: 2)
expect(actions).to eq([["action_2", 0.5], ["action_3", 0.375]])
ensure
SpartanAPM::Persistence.new("app").clear!(time)
end
end
end
describe "average_process_count" do
it "should return the average number of processes reporting during a time range" do
time = Time.now
app = SpartanAPM::Persistence.new("app")
begin
measure = SpartanAPM::Measure.new("app", "action")
measure.timers["test"] = 2.0
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure, measure, measure])
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure, measure])
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure])
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time + 60), [measure])
allow(SpartanAPM).to receive(:host).and_return("testhost_2")
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time), [measure])
SpartanAPM::Persistence.store!(SpartanAPM.bucket(time + 60), [measure])
expect(app.average_process_count([time, time + 60])).to eq 3
expect(app.average_process_count([time, time + 120])).to eq 3
expect(app.average_process_count(time)).to eq 4
expect(app.average_process_count(time + 60)).to eq 2
expect(app.average_process_count(time + 120)).to eq 0
expect(app.average_process_count(time, host: "testhost_2")).to eq 1
ensure
app.clear!([time, time + 60])
app.clear!([time, time + 60])
end
end
end
describe "errors" do
it "should store aggregated error info" do
time = Time.now
bucket_1 = SpartanAPM.bucket(time)
bucket_2 = SpartanAPM.bucket(time + 60)
measures_1 = []
begin
raise ArgumentError.new("error other")
rescue => e
m = SpartanAPM::Measure.new("app", "action")
m.capture_error(e)
measures_1 << m
end
measures_1 << SpartanAPM::Measure.new("app", "action")
measures_2 = []
3.times do |i|
begin
if i.zero?
raise StandardError.new("error #{i}")
else
raise ArgumentError.new("error #{i}")
end
rescue => e
m = SpartanAPM::Measure.new("app", "action")
m.capture_error(e)
measures_2 << m
end
end
measures_2 << SpartanAPM::Measure.new("app", "action")
begin
SpartanAPM::Persistence.store!(bucket_1, measures_1)
SpartanAPM::Persistence.store!(bucket_2, measures_2)
errors = SpartanAPM::Persistence.new("app").errors([time, time + 60])
data = errors.collect { |e| [e.time, e.class_name, e.message, e.count, e.backtrace] }
expect(data).to match_array [
[SpartanAPM.bucket_time(bucket_1), "ArgumentError", "error other", 1, measures_1.first.error_backtrace],
[SpartanAPM.bucket_time(bucket_2), "StandardError", "error 0", 1, measures_2.first.error_backtrace],
[SpartanAPM.bucket_time(bucket_2), "ArgumentError", "error 1", 2, measures_2[1].error_backtrace]
]
expect(SpartanAPM::Persistence.new("app").errors([time, time]).collect(&:count)).to eq [1]
ensure
SpartanAPM::Persistence.new("app").clear!([time, time + 60])
end
end
end
end
| 47.579937 | 156 | 0.640598 |
01c2d5b37e292cd34f477a0cb8556069f8212ca1 | 279 | class User < ApplicationRecord
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
end
| 39.857143 | 71 | 0.763441 |
acd975e688c30a1b17b7947bef83c7946bc67b48 | 246 | # { a => b, c => d }
# ==>
# rb2py.OrderedDict(((a: b), (c: d)))
class HashNode
def real_gen
$pygen.call('OrderedDict', 'rb2py') {
# Make tuple around whole values
$pygen.paren {
gen_children
}
}
end
end
| 17.571429 | 41 | 0.51626 |
33f703ad703b110f11cd3d4288683f837d791427 | 7,350 | module EffectiveResourcesHelper
# effective_bootstrap
def effective_submit(form, options = {}, &block)
actions = (controller.respond_to?(:effective_resource) ? controller.class : find_effective_resource).submits
actions = actions.select { |k, v| v[:default] != true } if options.delete(:defaults) == false
actions = permitted_resource_actions(form.object, actions)
    submits = actions.map { |name, opts| form.save(name, opts.except(:action, :title, 'data-method')) }.join.html_safe
form.submit('', options) do
(block_given? ? capture(&block) : ''.html_safe) + submits
end
end
# effective_form_inputs
def simple_form_submit(form, options = {}, &block)
actions = (controller.respond_to?(:effective_resource) ? controller.class : find_effective_resource).submits
actions = permitted_resource_actions(form.object, actions)
submits = actions.map { |name, opts| form.button(:submit, name, opts.except(:action, :title, 'data-method')) }
# I think this is a bug. I can't override default button class when passing my own class: variable. it merges them.
if (btn_class = SimpleForm.button_class).present?
submits = submits.map { |submit| submit.sub(btn_class, '') }
end
submits = submits.join(' ').html_safe
wrapper_options = { class: 'form-actions' }.merge(options.delete(:wrapper_html) || {})
content_tag(:div, wrapper_options) do
(block_given? ? capture(&block) : ''.html_safe) + submits
end
end
def render_resource_buttons(resource, atts = {}, &block)
effective_resource = find_effective_resource
actions = (controller.respond_to?(:effective_resource) ? controller.class : effective_resource).buttons
actions = if resource.kind_of?(Class)
actions.select { |_, v| effective_resource.collection_get_actions.include?(v[:action]) }
elsif resource.respond_to?(:persisted?) && resource.persisted?
actions.select { |_, v| effective_resource.member_actions.include?(v[:action]) }
else
{}
end
render_resource_actions(resource, atts.merge(actions: actions), &block)
end
# Renders the effective/resource view partial for this resource
# resource is an ActiveRecord thing, an Array of ActiveRecord things, or nil
# Atts are everything else. Interesting ones include:
# partial: :dropleft|:glyphicons|string
# locals: {} render locals
# you can also pass all action names and true/false such as edit: true, show: false
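  # A minimal usage sketch (the resource and option values below are hypothetical, not taken from this gem's docs):
  #   render_resource_actions(@post, partial: :dropleft, edit: true, show: false)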
def render_resource_actions(resource, atts = {}, &block)
raise 'expected first argument to be an ActiveRecord::Base object or Array of objects' unless resource.kind_of?(ActiveRecord::Base) || resource.kind_of?(Class) || resource.kind_of?(Array)
raise 'expected attributes to be a Hash' unless atts.kind_of?(Hash)
btn_class = atts[:btn_class]
locals = atts[:locals] || {}
partial = atts[:partial]
spacer_template = locals[:spacer_template]
effective_resource = (atts[:effective_resource] || find_effective_resource)
namespace = atts[:namespace] || (effective_resource.namespace.to_sym if effective_resource.namespace)
# Assign actions
actions = if atts.key?(:actions) # We filter out any actions passed to us that aren't supported
available = effective_resource.actions # [:new, :edit, ...]
atts[:actions].inject({}) { |h, (commit, opts)| h[commit] = opts if available.include?(opts[:action]); h }
else
(resource.kind_of?(Class) ? effective_resource.resource_klass_actions : effective_resource.resource_actions)
end
# Filter out false and proc false
actions = actions.select { |_, v| atts[v[:action]].respond_to?(:call) ? Effective::ResourceExec.new(self, resource).instance_exec(&atts[v[:action]]) : (atts[v[:action]] != false) }
# Select Partial
partial = ['effective/resource/actions', partial.to_s].join('_') if partial.kind_of?(Symbol)
partial = (partial.presence || 'effective/resource/actions') + '.html'
# Assign Locals
locals = { resource: resource, effective_resource: effective_resource, namespace: namespace, actions: actions, btn_class: btn_class.to_s }.compact.merge(locals)
# Render
if resource.kind_of?(Array)
locals[:format_block] = block if block_given?
render(partial: partial, collection: resource, as: :resource, locals: locals.except(:resource), spacer_template: spacer_template)
elsif block_given?
render(partial, locals) { yield }
else
render(partial, locals)
end
end
# When called from /admin/things/new.html.haml this will render 'admin/things/form', or 'things/form', or 'thing/form'
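  # Hedged example call (resource and action names here are illustrative only):
  #   render_resource_form(@thing, action: :edit)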
def render_resource_form(resource, atts = {})
unless resource.kind_of?(ActiveRecord::Base) || resource.class.ancestors.include?(ActiveModel::Model)
raise 'expected first argument to be an ActiveRecord or ActiveModel object'
end
raise 'expected attributes to be a Hash' unless atts.kind_of?(Hash)
effective_resource = (atts.delete(:effective_resource) || find_effective_resource)
action = atts.delete(:action)
atts = { :namespace => (effective_resource.namespace.to_sym if effective_resource.namespace), effective_resource.name.to_sym => resource }.compact.merge(atts)
if lookup_context.template_exists?("form_#{action}", controller._prefixes, :partial)
render "form_#{action}", atts
elsif lookup_context.template_exists?('form', controller._prefixes, :partial)
render 'form', atts
elsif lookup_context.template_exists?('form', effective_resource.plural_name, :partial)
render "#{effective_resource.plural_name}/form", atts
elsif lookup_context.template_exists?('form', effective_resource.name, :partial)
render "#{effective_resource.name}/form", atts
else
render 'form', atts # Will raise the regular error
end
end
# Tableize attributes
# This is used by effective_orders, effective_logging, effective_trash and effective_mergery
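  # Assumed usage sketch (keys and values are made up for illustration):
  #   tableize_hash({ attributes: { name: 'Widget', price: 10 } }, table: 'table table-sm')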
def tableize_hash(obj, table: 'table', th: true, sub_table: 'table', sub_th: true, flatten: true)
case obj
when Hash
if flatten && obj[:attributes].kind_of?(Hash)
obj = obj[:attributes].merge(obj.except(:attributes))
end
content_tag(:table, class: table.presence) do
content_tag(:tbody) do
obj.map do |key, value|
content_tag(:tr, class: key.to_param) do
content_tag((th == true ? :th : :td), key) +
content_tag(:td) { tableize_hash(value, table: sub_table, th: sub_th, sub_table: sub_table, sub_th: sub_th, flatten: flatten) }
end
end.join.html_safe
end
end
when Array
obj.map { |value| tableize_hash(value, table: sub_table, th: sub_th, sub_table: sub_table, sub_th: sub_th, flatten: flatten) }.join('<br>')
when Symbol
":#{obj}"
when NilClass
'-'
else
obj.to_s.presence || '""'
end.html_safe
end
def format_resource_value(value)
@format_resource_tags ||= ActionView::Base.sanitized_allowed_tags.to_a + ['table', 'thead', 'tbody', 'tfoot', 'tr', 'td', 'th']
@format_resource_atts ||= ActionView::Base.sanitized_allowed_attributes.to_a + ['colspan', 'rowspan']
simple_format(sanitize(value.to_s, tags: @format_resource_tags, attributes: @format_resource_atts), {}, sanitize: false)
end
end
| 44.817073 | 191 | 0.700272 |
bf7e3263fb8d3c7e5c6ac3084d4aba218226c38e | 72 | module BookReleasesCliApp
end
require_relative '../config/environment'
| 14.4 | 40 | 0.833333 |
ffd69e045b07b2b01c744c93124b5166894fe563 | 480 | require 'spec_helper'
describe Plink::IntuitAccountToRemoveRecord do
it { should allow_mass_assignment_of(:intuit_account_id) }
it { should allow_mass_assignment_of(:users_institution_id) }
it { should allow_mass_assignment_of(:user_id) }
let(:valid_params) {
{
intuit_account_id: 3,
users_institution_id: 2,
user_id: 1
}
}
it 'can be persisted' do
Plink::IntuitAccountToRemoveRecord.create(valid_params).should be_persisted
end
end
| 24 | 79 | 0.7375 |
38d2ef14ab5bec985c4fbe0fac021b7026b64c0e | 1,589 | # -*- encoding: utf-8 -*-
# stub: html-pipeline 2.12.2 ruby lib
Gem::Specification.new do |s|
s.name = "html-pipeline"
s.version = "2.12.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Ryan Tomayko", "Jerry Cheung", "Garen J. Torikian"]
s.date = "2019-11-14"
s.description = "GitHub HTML processing filters and utilities"
s.email = ["[email protected]", "[email protected]", "[email protected]"]
s.homepage = "https://github.com/jch/html-pipeline"
s.licenses = ["MIT"]
s.post_install_message = "-------------------------------------------------\nThank you for installing html-pipeline!\nYou must bundle Filter gem dependencies.\nSee html-pipeline README.md for more details.\nhttps://github.com/jch/html-pipeline#dependencies\n-------------------------------------------------\n"
s.rubygems_version = "2.5.2.1"
s.summary = "Helpers for processing content through a chain of filters"
s.installed_by_version = "2.5.2.1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activesupport>, [">= 2"])
s.add_runtime_dependency(%q<nokogiri>, [">= 1.4"])
else
s.add_dependency(%q<activesupport>, [">= 2"])
s.add_dependency(%q<nokogiri>, [">= 1.4"])
end
else
s.add_dependency(%q<activesupport>, [">= 2"])
s.add_dependency(%q<nokogiri>, [">= 1.4"])
end
end
| 42.945946 | 312 | 0.637508 |
e8ea79efcfb6e8fc95e6c674b96f0012b6ce2aed | 29,797 | require 'shared/all_success_return_values'
shared_examples_for "connection" do
before :each do
ensure_node(zk, path, data)
end
after :each do
ensure_node(zk, path, data)
end
after :all do
logger.warn "running shared examples after :all"
with_open_zk(connection_string) do |z|
rm_rf(z, path)
end
end
# unfortunately, we can't test w/o exercising other parts of the driver, so
# if "set" is broken, this test will fail as well (but whaddyagonnado?)
describe :get do
describe :sync, :sync => true do
it_should_behave_like "all success return values"
before do
@rv = zk.get(:path => path)
end
it %[should return the data] do
@rv[:data].should == data
end
it %[should return a stat] do
@rv[:stat].should_not be_nil
@rv[:stat].should be_kind_of(Zookeeper::Stat)
end
end
describe :sync_watch, :sync => true do
it_should_behave_like "all success return values"
before do
@event = nil
@watcher = Zookeeper::Callbacks::WatcherCallback.new
@rv = zk.get(:path => path, :watcher => @watcher, :watcher_context => path)
end
it %[should return the data] do
@rv[:data].should == data
end
it %[should set a watcher on the node] do
# test the watcher by changing node data
zk.set(:path => path, :data => 'blah')[:rc].should be_zero
wait_until(1.0) { @watcher.completed? }
@watcher.path.should == path
@watcher.context.should == path
@watcher.should be_completed
@watcher.type.should == Zookeeper::ZOO_CHANGED_EVENT
end
end
describe :async, :async => true do
before do
@cb = Zookeeper::Callbacks::DataCallback.new
@rv = zk.get(:path => path, :callback => @cb, :callback_context => path)
wait_until(1.0) { @cb.completed? }
@cb.should be_completed
end
it_should_behave_like "all success return values"
it %[should have a return code of ZOK] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should have the stat object in the callback] do
@cb.stat.should_not be_nil
@cb.stat.should be_kind_of(Zookeeper::Stat)
end
it %[should have the data] do
@cb.data.should == data
end
end
describe :async_watch, :async => true, :method => :get, :watch => true do
it_should_behave_like "all success return values"
before do
logger.debug { "-----------------> MAKING ASYNC GET REQUEST WITH WATCH <--------------------" }
@cb = Zookeeper::Callbacks::DataCallback.new
@watcher = Zookeeper::Callbacks::WatcherCallback.new
@rv = zk.get(:path => path, :callback => @cb, :callback_context => path, :watcher => @watcher, :watcher_context => path)
wait_until(1.0) { @cb.completed? }
@cb.should be_completed
logger.debug { "-----------------> ASYNC GET REQUEST WITH WATCH COMPLETE <--------------------" }
end
it %[should have the stat object in the callback] do
@cb.stat.should_not be_nil
@cb.stat.should be_kind_of(Zookeeper::Stat)
end
it %[should have the data] do
@cb.data.should == data
end
it %[should have a return code of ZOK] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should set a watcher on the node] do
zk.set(:path => path, :data => 'blah')[:rc].should be_zero
wait_until(2) { @watcher.completed? }
@watcher.should be_completed
@watcher.path.should == path
@watcher.context.should == path
end
end
describe 'bad arguments' do
it %[should barf with a BadArguments error] do
lambda { zk.get(:bad_arg => 'what!?') }.should raise_error(Zookeeper::Exceptions::BadArguments)
end
end
end # get
describe :set do
before do
@new_data = "Four score and \007 years ago"
@stat = zk.stat(:path => path)[:stat]
end
describe :sync, :sync => true do
describe 'without version' do
it_should_behave_like "all success return values"
before do
@rv = zk.set(:path => path, :data => @new_data)
end
it %[should return the new stat] do
@rv[:stat].should_not be_nil
@rv[:stat].should be_kind_of(Zookeeper::Stat)
@rv[:stat].version.should > @stat.version
end
end
describe 'with current version' do
it_should_behave_like "all success return values"
before do
@rv = zk.set(:path => path, :data => @new_data, :version => @stat.version)
end
it %[should return the new stat] do
@rv[:stat].should_not be_nil
@rv[:stat].should be_kind_of(Zookeeper::Stat)
@rv[:stat].version.should > @stat.version
end
end
describe 'with outdated version' do
before do
# need to do a couple of sets to ramp up the version
3.times { |n| @stat = zk.set(:path => path, :data => "#{@new_data}#{n}")[:stat] }
@rv = zk.set(:path => path, :data => @new_data, :version => 0)
end
it %[should have a return code of ZBADVERSION] do
@rv[:rc].should == Zookeeper::ZBADVERSION
end
it %[should return a stat with !exists] do
@rv[:stat].exists.should be_false
end
end
describe 'error' do
it %[should barf if the data size is too large], :input_size => true do
large_data = '0' * (1024 ** 2)
lambda { zk.set(:path => path, :data => large_data) }.should raise_error(Zookeeper::Exceptions::DataTooLargeException)
end
end
end # sync
describe :async, :async => true do
before do
@cb = Zookeeper::Callbacks::StatCallback.new
end
describe 'without version' do
it_should_behave_like "all success return values"
before do
@rv = zk.set(:path => path, :data => @new_data, :callback => @cb, :callback_context => path)
wait_until(2) { @cb.completed? }
@cb.should be_completed
end
it %[should have the stat in the callback] do
@cb.stat.should_not be_nil
@cb.stat.version.should > @stat.version
end
it %[should have a return code of ZOK] do
@cb.return_code.should == Zookeeper::ZOK
end
end
describe 'with current version' do
it_should_behave_like "all success return values"
before do
@rv = zk.set(:path => path, :data => @new_data, :callback => @cb, :callback_context => path, :version => @stat.version)
wait_until(2) { @cb.completed? }
@cb.should be_completed
end
it %[should have the stat in the callback] do
@cb.stat.should_not be_nil
@cb.stat.version.should > @stat.version
end
it %[should have a return code of ZOK] do
@cb.return_code.should == Zookeeper::ZOK
end
end
describe 'with outdated version' do
before do
# need to do a couple of sets to ramp up the version
3.times { |n| @stat = zk.set(:path => path, :data => "#{@new_data}#{n}")[:stat] }
@rv = zk.set(:path => path, :data => @new_data, :callback => @cb, :callback_context => path, :version => 0)
wait_until(2) { @cb.completed? }
@cb.should be_completed
end
it %[should have a return code of ZBADVERSION] do
@cb.return_code.should == Zookeeper::ZBADVERSION
end
it %[should return a stat with !exists] do
@cb.stat.exists.should be_false
end
end
describe 'error' do
it %[should barf if the data size is too large], :input_size => true do
large_data = '0' * (1024 ** 2)
lambda { zk.set(:path => path, :data => large_data, :callback => @cb, :callback_context => path) }.should raise_error(Zookeeper::Exceptions::DataTooLargeException)
end
end
end # async
end # set
describe :add_auth do
it %[should return ZOK if everything goes swimingly] do
result = zk.add_auth(:scheme => 'digest', :cert => 'test_user:test_password')
rv = result[:rc]
# gahhh, this shouldn't be like this.... :P
rv = rv.respond_to?(:intValue) ? rv.intValue : rv
rv.should == Zookeeper::ZOK
end
end
describe :get_children do
before do
@children = %w[child0 child1 child2]
@children.each do |name|
zk.create(:path => "#{path}/#{name}", :data => name)
end
end
after do
@children.each do |name|
zk.delete(:path => "#{path}/#{name}")
end
end
describe :sync, :sync => true do
it_should_behave_like "all success return values"
before do
@rv = zk.get_children(:path => path)
end
it %[should have an array of names of the children] do
@rv[:children].should be_kind_of(Array)
@rv[:children].length.should == 3
@rv[:children].sort.should == @children.sort
end
# "Three shall be the number of the counting, and the number of the counting shall be 3"
it %[should have a stat object whose num_children is 3] do
@rv[:stat].should_not be_nil
@rv[:stat].should be_kind_of(Zookeeper::Stat)
@rv[:stat].num_children.should == 3
end
end
describe :sync_watch, :sync => true do
it_should_behave_like "all success return values"
before do
@addtl_child = 'child3'
@watcher = Zookeeper::Callbacks::WatcherCallback.new
@rv = zk.get_children(:path => path, :watcher => @watcher, :watcher_context => path)
end
after do
zk.delete(:path => "#{path}/#{@addtl_child}")
end
it %[should have an array of names of the children] do
@rv[:children].should be_kind_of(Array)
@rv[:children].length.should == 3
@rv[:children].sort.should == @children.sort
end
it %[should have a stat object whose num_children is 3] do
@rv[:stat].should_not be_nil
@rv[:stat].should be_kind_of(Zookeeper::Stat)
@rv[:stat].num_children.should == 3
end
it %[should set a watcher for children on the node] do
@watcher.should_not be_completed
zk.create(:path => "#{path}/#{@addtl_child}", :data => '')[:rc].should == Zookeeper::ZOK
wait_until { @watcher.completed? }
@watcher.should be_completed
@watcher.path.should == path
@watcher.context.should == path
@watcher.type.should == Zookeeper::ZOO_CHILD_EVENT
end
end
describe :async, :async => true do
it_should_behave_like "all success return values"
before do
@cb = Zookeeper::Callbacks::StringsCallback.new
@rv = zk.get_children(:path => path, :callback => @cb, :callback_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
it %[should succeed] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should return an array of children] do
@cb.children.should be_kind_of(Array)
@cb.children.length.should == 3
@cb.children.sort.should == @children.sort
end
it %[should have a stat object whose num_children is 3] do
@cb.stat.should_not be_nil
@cb.stat.should be_kind_of(Zookeeper::Stat)
@cb.stat.num_children.should == 3
end
end
describe :async_watch, :async => true do
it_should_behave_like "all success return values"
before do
@addtl_child = 'child3'
@watcher = Zookeeper::Callbacks::WatcherCallback.new
@cb = Zookeeper::Callbacks::StringsCallback.new
@rv = zk.get_children(:path => path, :watcher => @watcher, :watcher_context => path, :callback => @cb, :callback_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
after do
zk.delete(:path => "#{path}/#{@addtl_child}")
end
it %[should succeed] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should return an array of children] do
@cb.children.should be_kind_of(Array)
@cb.children.length.should == 3
@cb.children.sort.should == @children.sort
end
it %[should have a stat object whose num_children is 3] do
@cb.stat.should_not be_nil
@cb.stat.should be_kind_of(Zookeeper::Stat)
@cb.stat.num_children.should == 3
end
it %[should set a watcher for children on the node] do
@watcher.should_not be_completed
zk.create(:path => "#{path}/#{@addtl_child}", :data => '')[:rc].should == Zookeeper::ZOK
wait_until { @watcher.completed? }
@watcher.should be_completed
@watcher.path.should == path
@watcher.context.should == path
@watcher.type.should == Zookeeper::ZOO_CHILD_EVENT
end
end
end
# NOTE: the jruby version of stat on non-existent node will have a
# return_code of 0, but the C version will have a return_code of -101
describe :stat do
describe :sync, :sync => true do
it_should_behave_like "all success return values"
before do
@rv = zk.stat(:path => path)
end
it %[should have a stat object] do
@rv[:stat].should be_kind_of(Zookeeper::Stat)
end
end
describe :sync_watch, :sync => true do
it_should_behave_like "all success return values"
before do
@watcher = Zookeeper::Callbacks::WatcherCallback.new
@rv = zk.stat(:path => path, :watcher => @watcher, :watcher_context => path)
end
it %[should have a stat object] do
@rv[:stat].should be_kind_of(Zookeeper::Stat)
end
it %[should set a watcher for data changes on the node] do
@watcher.should_not be_completed
zk.set(:path => path, :data => 'skunk')[:rc].should == Zookeeper::ZOK
wait_until { @watcher.completed? }
@watcher.should be_completed
@watcher.path.should == path
@watcher.context.should == path
@watcher.type.should == Zookeeper::ZOO_CHANGED_EVENT
end
end
describe :async, :async => true do
it_should_behave_like "all success return values"
before do
@cb = Zookeeper::Callbacks::StatCallback.new
@rv = zk.stat(:path => path, :callback => @cb, :callback_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
it %[should succeed] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should have a stat object] do
@cb.stat.should be_kind_of(Zookeeper::Stat)
end
end
describe :async_watch, :async => true do
it_should_behave_like "all success return values"
before do
@addtl_child = 'child3'
@watcher = Zookeeper::Callbacks::WatcherCallback.new
@cb = Zookeeper::Callbacks::StatCallback.new
@rv = zk.stat(:path => path, :callback => @cb, :callback_context => path, :watcher => @watcher, :watcher_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
after do
zk.delete(:path => "#{path}/#{@addtl_child}")
end
it %[should succeed] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should have a stat object] do
@cb.stat.should be_kind_of(Zookeeper::Stat)
end
it %[should set a watcher for data changes on the node] do
@watcher.should_not be_completed
zk.set(:path => path, :data => 'skunk')[:rc].should == Zookeeper::ZOK
wait_until { @watcher.completed? }
@watcher.should be_completed
@watcher.path.should == path
@watcher.context.should == path
@watcher.type.should == Zookeeper::ZOO_CHANGED_EVENT
end
end
end # stat
describe :create do
before do
# remove the path set up by the global 'before' block
zk.delete(:path => path)
end
describe :sync, :sync => true do
describe 'error' do
it %[should barf if the data size is too large], :input_size => true do
large_data = '0' * (1024 ** 2)
lambda { zk.create(:path => path, :data => large_data) }.should raise_error(Zookeeper::Exceptions::DataTooLargeException)
end
end
describe :default_flags do
it_should_behave_like "all success return values"
before do
@rv = zk.create(:path => path)
end
it %[should return the path that was set] do
@rv[:path].should == path
end
it %[should have created a permanent node] do
st = zk.stat(:path => path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should == 0
end
end
describe :ephemeral do
it_should_behave_like "all success return values"
before do
@rv = zk.create(:path => path, :ephemeral => true)
end
it %[should return the path that was set] do
@rv[:path].should == path
end
it %[should have created a ephemeral node] do
st = zk.stat(:path => path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should_not be_zero
end
end
describe :sequence do
it_should_behave_like "all success return values"
before do
@orig_path = path
@rv = zk.create(:path => path, :sequence => true)
@s_path = @rv[:path] # make sure this gets cleaned up
end
after do
zk.delete(:path => @s_path)
end
it %[should return the path that was set] do
@rv[:path].should_not == @orig_path
end
it %[should have created a permanent node] do
st = zk.stat(:path => @s_path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should be_zero
end
end
describe :ephemeral_sequence do
it_should_behave_like "all success return values"
before do
@orig_path = path
@rv = zk.create(:path => path, :sequence => true, :ephemeral => true)
@s_path = @rv[:path] # make sure this gets cleaned up
end
after do
zk.delete(:path => @s_path)
end
it %[should return the path that was set] do
@rv[:path].should_not == @orig_path
end
it %[should have created an ephemeral node] do
st = zk.stat(:path => @s_path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should_not be_zero
end
end
describe :acl do
it %[should work] do
pending "need to write acl tests"
end
end
end
describe :async, :async => true do
before do
@cb = Zookeeper::Callbacks::StringCallback.new
end
describe :default_flags do
it_should_behave_like "all success return values"
before do
@rv = zk.create(:path => path, :callback => @cb, :callback_context => path)
wait_until(2) { @cb.completed? }
@cb.should be_completed
end
it %[should have a path] do
@cb.path.should_not be_nil
end
it %[should return the path that was set] do
@cb.path.should == path
end
it %[should have created a permanent node] do
st = zk.stat(:path => path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should == 0
end
end
describe 'error' do
it %[should barf if the data size is too large], :input_size => true do
large_data = '0' * (1024 ** 2)
lambda do
zk.create(:path => path, :data => large_data, :callback => @cb, :callback_context => path)
end.should raise_error(Zookeeper::Exceptions::DataTooLargeException)
end
end
describe :ephemeral do
it_should_behave_like "all success return values"
before do
@rv = zk.create(:path => path, :ephemeral => true, :callback => @cb, :callback_context => path)
wait_until(2) { @cb.completed? }
@cb.should be_completed
end
it %[should have a path] do
@cb.path.should_not be_nil
end
it %[should return the path that was set] do
@cb.path.should == path
end
it %[should have created a ephemeral node] do
st = zk.stat(:path => path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should_not be_zero
end
end
describe :sequence do
it_should_behave_like "all success return values"
before do
@orig_path = path
@rv = zk.create(:path => path, :sequence => true, :callback => @cb, :callback_context => path)
wait_until(2) { @cb.completed? }
@cb.should be_completed
@s_path = @cb.path
end
after do
zk.delete(:path => @s_path)
end
it %[should have a path] do
@cb.path.should_not be_nil
end
it %[should return the path that was set] do
@cb.path.should_not == @orig_path
end
it %[should have created a permanent node] do
st = zk.stat(:path => @s_path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should be_zero
end
end
describe :ephemeral_sequence do
it_should_behave_like "all success return values"
before do
@orig_path = path
@rv = zk.create(:path => path, :sequence => true, :ephemeral => true, :callback => @cb, :callback_context => path)
path = @rv[:path] # make sure this gets cleaned up
wait_until(2) { @cb.completed? }
@cb.should be_completed
@s_path = @cb.path
end
after do
zk.delete(:path => @s_path)
end
it %[should have a path] do
@cb.path.should_not be_nil
end
it %[should return the path that was set] do
@s_path.should_not == @orig_path
end
it %[should have created an ephemeral node] do
st = zk.stat(:path => @s_path)
st[:rc].should == Zookeeper::ZOK
st[:stat].ephemeral_owner.should_not be_zero
end
end # ephemeral_sequence
end # async
end # create
describe :delete do
describe :sync, :sync => true do
describe 'without version' do
it_should_behave_like "all success return values"
before do
zk.create(:path => path)
@rv = zk.delete(:path => path)
end
it %[should have deleted the node] do
zk.stat(:path => path)[:stat].exists.should be_false
end
end
describe 'with current version' do
it_should_behave_like "all success return values"
before do
zk.create(:path => path)
@stat = zk.stat(:path => path)[:stat]
@stat.exists.should be_true
@rv = zk.delete(:path => path, :version => @stat.version)
end
it %[should have deleted the node] do
zk.stat(:path => path)[:stat].exists.should be_false
end
end
describe 'with old version' do
before do
3.times { |n| @stat = zk.set(:path => path, :data => n.to_s)[:stat] }
@rv = zk.delete(:path => path, :version => 0)
end
it %[should have a return code of ZBADVERSION] do
@rv[:rc].should == Zookeeper::ZBADVERSION
end
end
end # sync
describe :async, :async => true do
before do
@cb = Zookeeper::Callbacks::VoidCallback.new
end
describe 'without version' do
it_should_behave_like "all success return values"
before do
@rv = zk.delete(:path => path, :callback => @cb, :callback_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
it %[should have a success return_code] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should have deleted the node] do
zk.stat(:path => path)[:stat].exists.should be_false
end
end
describe 'with current version' do
it_should_behave_like "all success return values"
before do
@stat = zk.stat(:path => path)[:stat]
@rv = zk.delete(:path => path, :version => @stat.version, :callback => @cb, :callback_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
it %[should have a success return_code] do
@cb.return_code.should == Zookeeper::ZOK
end
it %[should have deleted the node] do
zk.stat(:path => path)[:stat].exists.should be_false
end
end
describe 'with old version' do
before do
3.times { |n| @stat = zk.set(:path => path, :data => n.to_s)[:stat] }
@rv = zk.delete(:path => path, :version => 0, :callback => @cb, :callback_context => path)
wait_until { @cb.completed? }
@cb.should be_completed
end
it %[should have a return code of ZBADVERSION] do
@cb.return_code.should == Zookeeper::ZBADVERSION
end
end
end # async
end # delete
describe :get_acl do
describe :sync, :sync => true do
it_should_behave_like "all success return values"
before do
@rv = zk.get_acl(:path => path)
end
it %[should return a stat for the path] do
@rv[:stat].should be_kind_of(Zookeeper::Stat)
end
it %[should return the acls] do
acls = @rv[:acl]
acls.should be_kind_of(Array)
h = acls.first
h.should be_kind_of(Hash)
h[:perms].should == Zookeeper::ZOO_PERM_ALL
h[:id][:scheme].should == 'world'
h[:id][:id].should == 'anyone'
end
end
describe :async, :async => true do
it_should_behave_like "all success return values"
before do
@cb = Zookeeper::Callbacks::ACLCallback.new
@rv = zk.get_acl(:path => path, :callback => @cb, :callback_context => path)
wait_until(2) { @cb.completed? }
@cb.should be_completed
end
it %[should return a stat for the path] do
@cb.stat.should be_kind_of(Zookeeper::Stat)
end
it %[should return the acls] do
acls = @cb.acl
acls.should be_kind_of(Array)
acl = acls.first
acl.should be_kind_of(Zookeeper::ACLs::ACL)
acl.perms.should == Zookeeper::ZOO_PERM_ALL
acl.id.scheme.should == 'world'
acl.id.id.should == 'anyone'
end
end
end
describe :set_acl do
before do
@perms = 5
@new_acl = [Zookeeper::ACLs::ACL.new(:perms => @perms, :id => Zookeeper::Constants::ZOO_ANYONE_ID_UNSAFE)]
pending("No idea how to set ACLs")
end
describe :sync, :sync => true do
it_should_behave_like "all success return values"
before do
@rv = zk.set_acl(:path => path, :acl => @new_acl)
end
end
end
describe :session_id do
it %[should return the session_id as a Fixnum] do
zk.session_id.should be_kind_of(Integer)
end
end
describe :session_passwd do
it %[should return the session passwd as a String] do
zk.session_passwd.should be_kind_of(String)
end
end
describe :sync, :sync => true do
describe :success do
it_should_behave_like "all success return values"
before do
@cb = Zookeeper::Callbacks::StringCallback.new
@rv = zk.sync(:path => path, :callback => @cb)
        wait_until(2) { @cb.completed? }
@cb.should be_completed
end
end
describe :errors do
it %[should barf with BadArguments if :callback is not given] do
lambda { zk.sync(:path => path) }.should raise_error(Zookeeper::Exceptions::BadArguments)
end
end
end
describe :event_dispatch_thread? do
it %[should return true when called on the event dispatching thread] do
@result = nil
cb = lambda do |hash|
@result = zk.event_dispatch_thread?
end
@rv = zk.sync(:path => path, :callback => cb)
wait_until(2) { @result == true }.should be_true
end
it %[should return false when not on the event dispatching thread] do
zk.event_dispatch_thread?.should_not be_true
end
end
describe :close do
describe 'from the event dispatch thread' do
it %[should not deadlock] do
evil_cb = lambda do |*|
logger.debug { "calling close event_dispatch_thread? #{zk.event_dispatch_thread?}" }
zk.close
end
zk.stat(:path => path, :callback => evil_cb)
wait_until { zk.closed? }
zk.should be_closed
end
end
end
unless defined?(::JRUBY_VERSION)
describe 'fork protection' do
it %[should raise an InheritedConnectionError if the current Process.pid is different from the one that created the client] do
pid = Process.pid
begin
Process.stub(:pid => -1)
lambda { zk.stat(:path => path) }.should raise_error(Zookeeper::Exceptions::InheritedConnectionError)
ensure
# ensure we reset this, only want it to fail during the test
Process.stub(:pid => pid)
end
end
end
end
end
| 28.405148 | 173 | 0.58224 |
21d234a50c442fd2e0f3289bcf7245baf8310dfa | 1,787 | class UsersController < ApplicationController
def add_routine
current_user.routines << Routine.find(params[:user_id])
current_user.save
redirect_to user_path(current_user)
end
def show
@user = current_user
end
def routines
@routines = Routine.order_my_routines(current_user)
render 'routines/index'
end
def tasks
@tasks = current_user.tasks.uniq
render 'tasks/index'
end
def new
@user = User.new
end
def create
@user = User.create(user_params)
    if [email protected]?
flash[:messages] = @user.errors.full_messages
render 'new'
else
session[:user_id] = @user.id
redirect_to user_path(@user)
end
end
def edit
@user = current_user
end
def update
if !params[:user][:routine_to_delete].nil?
current_user.routines.delete(Routine.find(params[:user][:routine_to_delete]))
current_user.save
redirect_to user_path(current_user)
else
current_user.update(user_params)
      if !current_user.valid?
flash[:messages] = current_user.errors.full_messages
redirect_to edit_user_path(current_user)
else
current_user.routines.clear
params[:user][:routine_ids].each do |routine|
if !routine.empty?
current_user.routines << Routine.find(routine)
end
end
current_user.save
redirect_to user_path(current_user)
end
end
end
def destroy
    user = User.find(params[:id])
    if current_user == user
      user.destroy
      session.clear
      redirect_to logout_path
    else
      redirect_to user_path(user)
    end
end
def user_params
params.require(:user).permit(:name, :username, :email, :password, :password_confirmation)
end
end
| 22.061728 | 93 | 0.663123 |
6ade159ef3744e6476aae4bf21434f4804b4ddf9 | 1,376 | # Copyright 2011-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
module AWS
class DynamoDB
class PrimaryKeyElement
attr_reader :name
attr_reader :type
ATTRIBUTE_TYPES = {
"S" => :string,
"N" => :number,
"B" => :binary,
}
def initialize(hash)
@name = hash[:name] || hash["AttributeName"]
@type = hash[:type] || ATTRIBUTE_TYPES[hash["AttributeType"]]
end
def self.from_description(description)
(name, type, *extra) = description.to_a.flatten
raise(ArgumentError,
"key element may contain only one name/type pair") unless
extra.empty?
raise ArgumentError, "unsupported type #{type.inspect}" unless
ATTRIBUTE_TYPES.values.include?(type.to_sym)
new(:name => name.to_s, :type => type)
end
end
end
end
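
# Illustrative usage of the key element wrapper defined above (values are
# made up for the example; nothing here is part of the public SDK surface):
#
#   element = AWS::DynamoDB::PrimaryKeyElement.new(:name => "id", :type => :string)
#   element.name # => "id"
#   element.type # => :string
#
#   # from_description accepts exactly one name/type pair and validates the type
#   hash_key = AWS::DynamoDB::PrimaryKeyElement.from_description(:user_id => :number)
#   hash_key.type # => :number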
| 28.081633 | 78 | 0.65189 |
f86b31764a039170edbab003cd159e1466c0caf6 | 535 | module Babeltrace2
BT_LOGGING_LEVEL_TRACE = 1
private_constant :BT_LOGGING_LEVEL_TRACE
BT_LOGGING_LEVEL_DEBUG = 2
private_constant :BT_LOGGING_LEVEL_DEBUG
BT_LOGGING_LEVEL_INFO = 3
private_constant :BT_LOGGING_LEVEL_INFO
BT_LOGGING_LEVEL_WARNING = 4
private_constant :BT_LOGGING_LEVEL_WARNING
BT_LOGGING_LEVEL_ERROR = 5
private_constant :BT_LOGGING_LEVEL_ERROR
BT_LOGGING_LEVEL_FATAL = 6
private_constant :BT_LOGGING_LEVEL_FATAL
BT_LOGGING_LEVEL_NONE = 0xff
private_constant :BT_LOGGING_LEVEL_NONE
end
| 26.75 | 44 | 0.841121 |
3395b06767c65bbdd396d9f7d3b9d66fdbb1d974 | 360 | cask 'openkey' do
version '1.2.3'
sha256 '803e1f96232b40740a43c235748dea6ac292473b777b4b4bcb7ba8198af416fb'
url "https://github.com/tuyenvm/OpenKey/releases/download/v#{version}/OpenKey#{version}.dmg"
appcast 'https://github.com/tuyenvm/OpenKey/releases.atom'
name 'OpenKey'
homepage 'https://github.com/tuyenvm/OpenKey/'
app 'OpenKey.app'
end
| 30 | 94 | 0.763889 |
08b664de2d75b4dc1a90d15c0620a359a8c1b66b | 199 | class API::V1::SizesController < ApiController
# GET /sizes
def index
sizes = Size.select("id, name, icon, volume").all.where(published: true)
render json: sizes, status: 200
end
end
| 19.9 | 76 | 0.683417 |
6a48db5a5da37a5eda5c7b411c1309814efe94e9 | 3,675 | require 'json'
class Product
attr_accessor :name
attr_accessor :price
attr_accessor :on_stock
def initialize (name, price, on_stock)
@name = name
@price = price
@on_stock = on_stock
end
def Add_Attribute(attr, value)
self.class.class_eval { attr_accessor attr }
send("#{attr}=", value)
end
def self.create_product(parent, prod)
pr = Product.new(prod[0], prod[1]['price'], prod[1]['on-stock'])
pr.Add_Attribute("category", parent.name)
prod[1].each { |e|
if e[0] != "price" && e[0] != "on-stock" then
pr.Add_Attribute(e[0], e[1])
end
}
return pr
end
def list_attributes
self.instance_variables.map do |attribute|
{ attribute => self.instance_variable_get(attribute) }
end
end
def print
result = ""
root = Category.new("category", nil, Category.hash)
c = root.category_list(self)
self.category = c
attr = list_attributes
attr.each do |a|
prop = a.keys[0][1..-1]
val = a.values[0]
result += "#{prop} = #{val}\r\n"
end
result
end
end
class Category
attr_accessor :name
attr_accessor :siblings
attr_accessor :products
@@hash = {}
def initialize(name, jsonFile = nil, hash = nil)
@name = name
@siblings = []
@products = []
if !jsonFile.nil?
@@hash = load(name, jsonFile, hash)
build_from_hash(@@hash, self)
elsif !hash.nil?
build_from_hash(hash, self)
end
end
def self.hash
h = @@hash
return h
end
def build_from_hash(input, parent)
input.each { |item|
if item[1].key?("price") then
pr = Product.create_product(parent, item)
parent.products << pr
else
cat = Category.new(item[0])
parent.siblings << cat
build_from_hash(item[1], cat)
end
}
end
def list_attributes
self.instance_variables.map do |attribute|
{ attribute => self.instance_variable_get(attribute) }
end
end
def get_sorted_products
products = get_products(self)
products = products.sort_by { |prd| prd.name }
prd = ""
products.each do |pr|
prd += pr.print + "============================\r\n"
end
return prd
end
def category_list(item, root = nil)
if root.nil?
root = Category.new("category", nil, @@hash)
return root.category_list(item, root)
end
if root.products.select { |prd| prd.name == item.name }.length == 1
return item.name
end
if root.siblings.size == 0
return ""
end
root.siblings.each do |e|
s = category_list(item, e)
if s.length > 0
return e.name + '/' + s
end
end
return ""
end
def get_sorted_product_names
products = get_products(self)
products = products.sort_by { |prd| prd.name }
result = ""
products.each { |prd|
if result.length == 0
result = prd.name
else
result += ', ' + prd.name
end
}
return result
end
def get_products(catalog = self)
if catalog == nil
return []
end
if catalog.products.length > 0
return catalog.products
end
result = []
catalog.siblings.each { |item|
n = get_products(item)
if n.length > 0
result += n
end
}
return result
end
private
def load(name, jsonFile, hash)
if hash.nil?
file = File.read(jsonFile)
hash = JSON.parse(file)
end
hash = hash[name]
return hash
end
end
| 20.646067 | 72 | 0.557551 |
6239e2b992ba8728eefff0c22c1c698ad5efdc21 | 2,852 | # frozen_string_literal: true
require 'time'
RSpec.describe MiniTarball::HeaderWriter do
describe "#write" do
let(:io) { StringIO.new.binmode }
subject { MiniTarball::HeaderWriter.new(io) }
let!(:default_options) do
{
mode: 0644,
mtime: Time.parse("2021-02-15T20:11:34Z"),
uname: "discourse",
gname: "www-data",
uid: 1001,
gid: 33
}
end
it "correctly outputs header for small file" do
header = MiniTarball::Header.new(name: "small_file", size: 536_870_913, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/small_file_header"))
end
it "correctly outputs header for large file" do
header = MiniTarball::Header.new(name: "large_file", size: 10_737_418_241, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/large_file_header"))
end
it "correctly outputs header for file with long name" do
header = MiniTarball::Header.new(name: "this_is_an_extremely_long_file_name_with_many_underscores_and_" \
"lots_of_ascii_characters_in_it_and_will_be_used_to_test_gnu_tar.txt", size: 4, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/long_filename_header"))
end
it "correctly outputs header for file with Unicode name" do
header = MiniTarball::Header.new(name: "这是一个测试.txt", size: 4, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/unicode_filename_header"))
end
it "correctly outputs header for file with long Unicode name" do
header = MiniTarball::Header.new(
name: "这是一个很长的中文句子,用于测试我们的实现在计算文件名长度时是否使用字节大小.txt",
size: 4,
**default_options
)
subject.write(header)
expect(io.string).to eq(fixture("headers/long_unicode_filename_header"))
end
it "correctly outputs header for file stored in short path" do
header = MiniTarball::Header.new(name: "this/is/a/short/path/test.txt", size: 4, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/short_path_header"))
end
it "correctly outputs header for file stored in long path" do
header = MiniTarball::Header.new(name: "this/is/a/very/long/path/with/lots/of/sub/directories/to/test/" \
"how/gnu/tar/behaves/when/files/are/stored/in/a/very/long/path/test.txt", size: 4, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/long_path_header"))
end
it "ignores file type bitfields" do
header = MiniTarball::Header.new(name: "small_file", size: 536_870_913, mode: 0100664, **default_options)
subject.write(header)
expect(io.string).to eq(fixture("headers/small_file_header"))
end
end
end
| 37.526316 | 111 | 0.686536 |
395bb73a5b45acdb8708b67bec8e9e4be3598209 | 4,685 | # frozen_string_literal: true
##
# Report Sidekiq::Stats to prometheus on a defined interval
#
# Global Metrics reporting requires Sidekiq::Enterprise as it uses the leader
# election functionality to ensure that the global metrics are only reported by
# one worker.
#
# @see https://github.com/mperham/sidekiq/wiki/Ent-Leader-Election
# @see https://github.com/mperham/sidekiq/blob/master/lib/sidekiq/api.rb
class SidekiqPrometheus::PeriodicMetrics
# @return [Boolean] When +true+ will stop the reporting loop.
attr_accessor :done
# @return [Integer] Interval in seconds to record metrics. Default: [SidekiqPrometheus.periodic_reporting_interval]
attr_reader :interval
attr_reader :senate, :sidekiq_stats, :sidekiq_queue
GLOBAL_STATS = %i[failed processed retry_size dead_size scheduled_size workers_size].freeze
GC_STATS = {
counters: %i[major_gc_count minor_gc_count total_allocated_objects],
gauges: %i[heap_live_slots heap_free_slots],
}.freeze
REDIS_STATS = %w[connected_clients used_memory used_memory_peak].freeze
##
# Instance of SidekiqPrometheus::PeriodicMetrics
# @return [SidekiqPrometheus:PeriodicMetrics]
def self.reporter
@reporter ||= new
end
##
# @param interval [Integer] Interval in seconds to record metrics.
# @param sidekiq_stats [Sidekiq::Stats]
# @param senate [#leader?] Sidekiq::Senate
def initialize(interval: SidekiqPrometheus.periodic_reporting_interval, sidekiq_stats: Sidekiq::Stats, sidekiq_queue: Sidekiq::Queue, senate: nil)
self.done = false
@interval = interval
@sidekiq_stats = sidekiq_stats
@sidekiq_queue = sidekiq_queue
@senate = if senate.nil?
if Object.const_defined?('Sidekiq::Senate')
Sidekiq::Senate
else
Senate
end
else
senate
end
end
##
  # Start the periodic metric reporter
def start
Sidekiq.logger.info('SidekiqPrometheus: Starting periodic metrics reporting')
@thread = Thread.new(&method(:run))
end
##
  # Stop the periodic metric reporter
def stop
self.done = true
end
##
# Record GC and RSS metrics
def report_gc_metrics
stats = GC.stat
GC_STATS[:counters].each do |stat|
SidekiqPrometheus["sidekiq_#{stat}"]&.increment(labels: {}, by: stats[stat])
end
GC_STATS[:gauges].each do |stat|
SidekiqPrometheus["sidekiq_#{stat}"]&.set(stats[stat], labels: {})
end
SidekiqPrometheus[:sidekiq_rss]&.set(rss, labels: {})
end
##
# Records Sidekiq global metrics
def report_global_metrics
current_stats = sidekiq_stats.new
GLOBAL_STATS.each do |stat|
SidekiqPrometheus["sidekiq_#{stat}"]&.set(current_stats.send(stat), labels: {})
end
sidekiq_queue.all.each do |queue|
SidekiqPrometheus[:sidekiq_enqueued]&.set(queue.size, labels: { queue: queue.name })
SidekiqPrometheus[:sidekiq_queue_latency]&.observe(queue.latency, labels: { queue: queue.name })
end
end
##
# Records metrics from Redis
def report_redis_metrics
redis_info = begin
Sidekiq.redis_info
rescue Redis::BaseConnectionError
nil
end
return if redis_info.nil?
REDIS_STATS.each do |stat|
SidekiqPrometheus["sidekiq_redis_#{stat}"]&.set(redis_info[stat].to_i, labels: {})
end
db_stats = redis_info.select { |k, _v| k.match(/^db/) }
db_stats.each do |db, stat|
label = { database: db }
values = stat.scan(/\d+/)
SidekiqPrometheus[:sidekiq_redis_keys]&.set(values[0].to_i, labels: label)
SidekiqPrometheus[:sidekiq_redis_expires]&.set(values[1].to_i, labels: label)
end
end
##
# Fetch rss from proc filesystem
# @see https://github.com/discourse/prometheus_exporter/blob/v0.3.3/lib/prometheus_exporter/instrumentation/process.rb#L39-L42
def rss
pid = Process.pid
@pagesize ||= `getconf PAGESIZE`.to_i rescue 4096
File.read("/proc/#{pid}/statm").split(' ')[1].to_i * @pagesize rescue 0
end
##
# Report metrics and sleep for @interval seconds in a loop.
# Runs until @done is true
def run
until done
begin
report_global_metrics if SidekiqPrometheus.global_metrics_enabled? && senate.leader?
report_redis_metrics if SidekiqPrometheus.global_metrics_enabled? && senate.leader?
report_gc_metrics if SidekiqPrometheus.gc_metrics_enabled?
rescue StandardError => e
Sidekiq.logger.error e
ensure
sleep interval
end
end
end
##
# Fake Senate class to guard against undefined constant errors.
# @private
class Senate
def leader?
false
end
end
end
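
# Hypothetical wiring sketch (not part of this class): starting and stopping the
# reporter from Sidekiq's server lifecycle hooks. `Sidekiq.configure_server` and
# `config.on` are standard Sidekiq APIs; whether the surrounding gem already
# registers these hooks for you is an assumption worth checking.
#
#   Sidekiq.configure_server do |config|
#     config.on(:startup)  { SidekiqPrometheus::PeriodicMetrics.reporter.start }
#     config.on(:shutdown) { SidekiqPrometheus::PeriodicMetrics.reporter.stop }
#   end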
| 30.032051 | 148 | 0.690928 |
6a41e10c4e8dd60de36a7ebd39ba5755a8afa666 | 5,499 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.13-SNAPSHOT
=end
require 'date'
module DocuSign_eSign
class TemplateDocumentVisibilityList
#
attr_accessor :document_visibility
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'document_visibility' => :'documentVisibility'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'document_visibility' => :'Array<DocumentVisibility>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'documentVisibility')
if (value = attributes[:'documentVisibility']).is_a?(Array)
self.document_visibility = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
document_visibility == o.document_visibility
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[document_visibility].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = DocuSign_eSign.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
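
# Minimal round-trip sketch for this generated model (values are invented;
# DocuSign_eSign::DocumentVisibility is another generated model in the same gem):
#
#   list = DocuSign_eSign::TemplateDocumentVisibilityList.new(
#     :documentVisibility => [DocuSign_eSign::DocumentVisibility.new]
#   )
#   list.document_visibility # => [#<DocuSign_eSign::DocumentVisibility ...>]
#   hash = list.to_hash      # => { :documentVisibility => [...] }
#   copy = DocuSign_eSign::TemplateDocumentVisibilityList.new.build_from_hash(hash)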
| 29.25 | 123 | 0.629023 |
6a399bf6c9b321e15605d0b5d5c37eeec0c97767 | 94 | module StatusPage
class Engine < ::Rails::Engine
isolate_namespace StatusPage
end
end
| 15.666667 | 32 | 0.755319 |
bb52c8f49e33d5bc16846c6e794312e3cbf91ad1 | 355 | require 'heroku-true-relic'
module HerokuTrueRelic
class Rails2
def self.add_middleware
if defined?(::Rails.configuration) && ::Rails.configuration.respond_to?(:middleware)
::Rails.configuration.middleware.insert_after 'Rack::Lock', HerokuTrueRelic::QueueTimeLogger
end
end
end
end
HerokuTrueRelic::Rails2.add_middleware
| 25.357143 | 100 | 0.746479 |
1c5db19d3bc2c85eec89d8d25aa816c53f6a1ad6 | 294 | # frozen_string_literal: true
require 'dry-struct'
module LoveLetterApplication
module Results
module Nodes
class CardPlayedNode < Dry::Struct
attribute :player_id, ::Types::Strict::Integer
attribute :card_id, ::Types::Strict::Integer
end
end
end
end
| 18.375 | 54 | 0.687075 |
ac65c34b0d24ab5f527f6047a894f4a7534913f4 | 6,053 | # frozen_string_literal: true
require "bundler/settings"
RSpec.describe Bundler::Env do
let(:git_proxy_stub) { Bundler::Source::Git::GitProxy.new(nil, nil, nil) }
describe "#report" do
it "prints the environment" do
out = described_class.report
expect(out).to include("Environment")
expect(out).to include(Bundler::VERSION)
expect(out).to include(Gem::VERSION)
expect(out).to include(described_class.send(:ruby_version))
expect(out).to include(described_class.send(:git_version))
expect(out).to include(OpenSSL::OPENSSL_VERSION)
end
describe "rubygems paths" do
it "prints gem home" do
with_clear_paths("GEM_HOME", "/a/b/c") do
out = described_class.report
expect(out).to include("Gem Home /a/b/c")
end
end
it "prints gem path" do
with_clear_paths("GEM_PATH", "/a/b/c#{File::PATH_SEPARATOR}d/e/f") do
out = described_class.report
expect(out).to include("Gem Path /a/b/c#{File::PATH_SEPARATOR}d/e/f")
end
end
it "prints user home" do
skip "needs to use a valid HOME" if Gem.win_platform? && RUBY_VERSION < "2.6.0"
with_clear_paths("HOME", "/a/b/c") do
out = described_class.report
expect(out).to include("User Home /a/b/c")
end
end
it "prints user path" do
skip "needs to use a valid HOME" if Gem.win_platform? && RUBY_VERSION < "2.6.0"
with_clear_paths("HOME", "/a/b/c") do
allow(File).to receive(:exist?)
allow(File).to receive(:exist?).with("/a/b/c/.gem").and_return(true)
out = described_class.report
expect(out).to include("User Path /a/b/c/.gem")
end
end
it "prints bin dir" do
with_clear_paths("GEM_HOME", "/a/b/c") do
out = described_class.report
expect(out).to include("Bin Dir /a/b/c/bin")
end
end
private
def with_clear_paths(env_var, env_value)
old_env_var = ENV[env_var]
ENV[env_var] = env_value
Gem.clear_paths
yield
ensure
ENV[env_var] = old_env_var
end
end
context "when there is a Gemfile and a lockfile and print_gemfile is true" do
before do
gemfile "gem 'rack', '1.0.0'"
lockfile <<-L
GEM
remote: #{file_uri_for(gem_repo1)}/
specs:
rack (1.0.0)
DEPENDENCIES
rack
BUNDLED WITH
1.10.0
L
allow(Bundler::SharedHelpers).to receive(:find_gemfile).and_return(bundled_app_gemfile)
end
let(:output) { described_class.report(:print_gemfile => true) }
it "prints the Gemfile" do
expect(output).to include("Gemfile")
expect(output).to include("'rack', '1.0.0'")
end
it "prints the lockfile" do
expect(output).to include("Gemfile.lock")
expect(output).to include("rack (1.0.0)")
end
end
context "when there no Gemfile and print_gemfile is true" do
let(:output) { described_class.report(:print_gemfile => true) }
it "prints the environment" do
expect(output).to start_with("## Environment")
end
end
context "when Gemfile contains a gemspec and print_gemspecs is true" do
let(:gemspec) do
strip_whitespace(<<-GEMSPEC)
Gem::Specification.new do |gem|
gem.name = "foo"
gem.author = "Fumofu"
end
GEMSPEC
end
before do
gemfile("gemspec")
File.open(bundled_app.join("foo.gemspec"), "wb") do |f|
f.write(gemspec)
end
allow(Bundler::SharedHelpers).to receive(:find_gemfile).and_return(bundled_app_gemfile)
end
it "prints the gemspec" do
output = described_class.report(:print_gemspecs => true)
expect(output).to include("foo.gemspec")
expect(output).to include(gemspec)
end
end
context "when eval_gemfile is used" do
it "prints all gemfiles" do
create_file bundled_app("other/Gemfile-other"), "gem 'rack'"
create_file bundled_app("other/Gemfile"), "eval_gemfile 'Gemfile-other'"
create_file bundled_app("Gemfile-alt"), <<-G
source "#{file_uri_for(gem_repo1)}"
eval_gemfile "other/Gemfile"
G
gemfile "eval_gemfile #{bundled_app("Gemfile-alt").to_s.dump}"
allow(Bundler::SharedHelpers).to receive(:find_gemfile).and_return(bundled_app_gemfile)
allow(Bundler::SharedHelpers).to receive(:pwd).and_return(bundled_app)
output = described_class.report(:print_gemspecs => true)
expect(output).to include(strip_whitespace(<<-ENV))
## Gemfile
### Gemfile
```ruby
eval_gemfile #{bundled_app("Gemfile-alt").to_s.dump}
```
### Gemfile-alt
```ruby
source "#{file_uri_for(gem_repo1)}"
eval_gemfile "other/Gemfile"
```
### other/Gemfile
```ruby
eval_gemfile 'Gemfile-other'
```
### other/Gemfile-other
```ruby
gem 'rack'
```
### Gemfile.lock
```
<No #{bundled_app_lock} found>
```
ENV
end
end
context "when the git version is OS specific" do
it "includes OS specific information with the version number" do
expect(git_proxy_stub).to receive(:git).with("--version").
and_return("git version 1.2.3 (Apple Git-BS)")
expect(Bundler::Source::Git::GitProxy).to receive(:new).and_return(git_proxy_stub)
expect(described_class.report).to include("Git 1.2.3 (Apple Git-BS)")
end
end
end
describe ".version_of" do
let(:parsed_version) { described_class.send(:version_of, "ruby") }
it "strips version of new line characters" do
expect(parsed_version).to_not end_with("\n")
end
end
end
| 28.551887 | 95 | 0.589295 |
387791bad1979389b4630d85f5273b5b51bfc1b3 | 69 | json.extract! @news, :id, :title, :content, :created_at, :updated_at
| 34.5 | 68 | 0.710145 |
f74c5b60f987e782438f85cee14be1d2fd79bda7 | 15,607 | require 'fastercsv'
class SubmissionsController < ApplicationController
include SubmissionsHelper
before_filter :authorize_only_for_admin, :except => [:populate_file_manager, :browse,
:index, :file_manager, :update_files,
:download, :populate_submissions_table, :collect_and_begin_grading,
:manually_collect_and_begin_grading, :repo_browser, :populate_repo_browser]
before_filter :authorize_for_ta_and_admin, :only => [:browse, :index, :populate_submissions_table, :collect_and_begin_grading,
                                                       :manually_collect_and_begin_grading, :repo_browser, :populate_repo_browser]
def repo_browser
@grouping = Grouping.find(params[:id])
@assignment = @grouping.assignment
@path = params[:path] || '/'
@previous_path = File.split(@path).first
@repository_name = @grouping.group.repository_name
begin
if !params[:revision_timestamp].nil?
@revision_number = @grouping.group.repo.get_revision_by_timestamp(Time.parse(params[:revision_timestamp])).revision_number
else
@revision_number = params[:revision_number] || @grouping.group.repo.get_latest_revision.revision_number
end
@revision_timestamp = @grouping.group.repo.get_latest_revision.timestamp
rescue Exception => e
flash[:error] = e.message
end
end
def populate_repo_browser
@grouping = Grouping.find(params[:id])
@assignment = @grouping.assignment
@path = params[:path] || '/'
@revision_number = params[:revision_number]
@previous_path = File.split(@path).first
repo = @grouping.group.repo
begin
@revision = repo.get_revision(params[:revision_number].to_i)
@directories = @revision.directories_at_path(File.join(@assignment.repository_folder, @path))
@files = @revision.files_at_path(File.join(@assignment.repository_folder, @path))
rescue Exception => @find_revision_error
render :"repo_browser/find_revision_error"
return
end
@table_rows = {}
@files.sort.each do |file_name, file|
@table_rows[file.id] = construct_repo_browser_table_row(file_name, file)
end
@directories.sort.each do |directory_name, directory|
@table_rows[directory.id] = construct_repo_browser_directory_table_row(directory_name, directory)
end
render :"repo_browser/populate_repo_browser"
end
def file_manager
@assignment = Assignment.find(params[:id])
@grouping = current_user.accepted_grouping_for(@assignment.id)
if @grouping.nil?
redirect_to :controller => 'assignments', :action => 'student_interface', :id => params[:id]
return
end
user_group = @grouping.group
@path = params[:path] || '/'
repo = user_group.repo
@revision = repo.get_latest_revision
@files = @revision.files_at_path(File.join(@assignment.repository_folder, @path))
@missing_assignment_files = []
@assignment.assignment_files.each do |assignment_file|
if [email protected]_exists?(File.join(@assignment.repository_folder,
assignment_file.filename))
@missing_assignment_files.push(assignment_file)
end
end
end
def populate_file_manager
@assignment = Assignment.find(params[:id])
@grouping = current_user.accepted_grouping_for(@assignment.id)
user_group = @grouping.group
revision_number= params[:revision_number]
@path = params[:path] || '/'
@previous_path = File.split(@path).first
repo = user_group.repo
if revision_number.nil?
@revision = repo.get_latest_revision
else
@revision = repo.get_revision(revision_number.to_i)
end
@directories = @revision.directories_at_path(File.join(@assignment.repository_folder, @path))
@files = @revision.files_at_path(File.join(@assignment.repository_folder, @path))
@table_rows = {}
@files.sort.each do |file_name, file|
@table_rows[file.id] = construct_file_manager_table_row(file_name, file)
end
if @grouping.group.repository_external_commits_only?
@directories.sort.each do |directory_name, directory|
@table_rows[directory.id] = construct_file_manager_dir_table_row(directory_name, directory)
end
end
render :file_manager_populate
end
def manually_collect_and_begin_grading
grouping = Grouping.find(params[:id])
assignment = grouping.assignment
revision_number = params[:current_revision_number].to_i
new_submission = Submission.create_by_revision_number(grouping, revision_number)
new_submission = assignment.submission_rule.apply_submission_rule(new_submission)
result = new_submission.result
redirect_to :controller => 'results', :action => 'edit', :id => result.id
end
def collect_and_begin_grading
assignment = Assignment.find(params[:id])
grouping = Grouping.find(params[:grouping_id])
if !assignment.submission_rule.can_collect_now?
flash[:error] = "Could not collect submission for group #{grouping.group.group_name} - the collection date has not been reached yet."
else
time = assignment.submission_rule.calculate_collection_time.localtime
# Create a new Submission by timestamp.
# A Result is automatically attached to this Submission, thanks to some callback
# logic inside the Submission model
new_submission = Submission.create_by_timestamp(grouping, time)
# Apply the SubmissionRule
new_submission = assignment.submission_rule.apply_submission_rule(new_submission)
result = new_submission.result
redirect_to :controller => 'results', :action => 'edit', :id => result.id
return
end
redirect_to :action => 'browse', :id => assignment.id
end
def populate_submissions_table
assignment = Assignment.find(params[:id], :include => [{:groupings => [{:student_memberships => :user, :ta_memberships => :user}, :accepted_students, :group, {:submissions => :result}]}, {:submission_rule => :periods}])
@details = params[:details]
# If the current user is a TA, then we need to get the Groupings
# that are assigned for them to mark. If they're an Admin, then
# we need to give them a list of all Groupings for this Assignment.
if current_user.ta?
groupings = []
assignment.ta_memberships.find_all_by_user_id(current_user.id).each do |membership|
groupings.push(membership.grouping)
end
elsif current_user.admin?
groupings = assignment.groupings
end
@table_rows = {}
groupings.each do |grouping|
@table_rows[grouping.id] = construct_submissions_table_row(grouping, assignment)
end
render :submission_table_populate
end
def browse
@assignment = Assignment.find(params[:id])
@details = params[:details]
end
def index
@assignments = Assignment.all(:order => :id)
render :index, :layout => 'sidebar'
end
# controller handles transactional submission of files
def update_files
assignment_id = params[:id]
assignment = Assignment.find(assignment_id)
path = params[:path] || '/'
grouping = current_user.accepted_grouping_for(assignment_id)
if !grouping.is_valid?
redirect_to :action => :file_manager, :id => assignment_id
return
end
repo = grouping.group.repo
assignment_folder = File.join(assignment.repository_folder, path)
# Get the revision numbers for the files that we've seen - these
# values will be the "expected revision numbers" that we'll provide
# to the transaction to ensure that we don't overwrite a file that's
# been revised since the user last saw it.
file_revisions = params[:file_revisions].nil? ? [] : params[:file_revisions]
# The files that will be replaced - just give an empty array
# if params[:replace_files] is nil
replace_files = params[:replace_files].nil? ? {} : params[:replace_files]
# The files that will be deleted
delete_files = params[:delete_files].nil? ? {} : params[:delete_files]
# The files that will be added
new_files = params[:new_files].nil? ? {} : params[:new_files]
# Create transaction, setting the author. Timestamp is implicit.
txn = repo.get_transaction(current_user.user_name)
begin
# delete files marked for deletion
delete_files.keys.each do |filename|
txn.remove(File.join(assignment_folder, filename), file_revisions[filename])
end
# Replace files
replace_files.each do |filename, file_object|
txn.replace(File.join(assignment_folder, filename), file_object.read, file_object.content_type, file_revisions[filename])
end
# Add new files
new_files.each do |file_object|
# sanitize_file_name in SubmissionsHelper
if file_object.original_filename.nil?
raise "Invalid file name on submitted file"
end
txn.add(File.join(assignment_folder, sanitize_file_name(file_object.original_filename)), file_object.read, file_object.content_type)
end
# finish transaction
if !txn.has_jobs?
flash[:transaction_warning] = "No actions were detected in the last submit. Nothing was changed."
redirect_to :action => "file_manager", :id => assignment_id
return
end
if !repo.commit(txn)
flash[:update_conflicts] = txn.conflicts
end
# Are we past collection time?
if assignment.submission_rule.can_collect_now?
flash[:commit_notice] = assignment.submission_rule.commit_after_collection_message(grouping)
end
redirect_to :action => "file_manager", :id => assignment_id
rescue Exception => e
flash[:commit_error] = e.message
redirect_to :action => "file_manager", :id => assignment_id
end
end
def download
@assignment = Assignment.find(params[:id])
# find_appropriate_grouping can be found in SubmissionsHelper
@grouping = find_appropriate_grouping(@assignment.id, params)
revision_number = params[:revision_number]
path = params[:path] || '/'
repo = @grouping.group.repo
if revision_number.nil?
@revision = repo.get_latest_revision
else
@revision = repo.get_revision(revision_number.to_i)
end
begin
file = @revision.files_at_path(File.join(@assignment.repository_folder, path))[params[:file_name]]
file_contents = repo.download_as_string(file)
rescue Exception => e
render :text => "Could not download #{params[:file_name]}: #{e.message}. File may be missing."
return
end
if SubmissionFile.is_binary?(file_contents)
# If the file appears to be binary, send it as a download
send_data file_contents, :disposition => 'attachment', :filename => params[:file_name]
else
# Otherwise, blast it out to the screen
render :text => file_contents, :layout => 'sanitized_html'
end
end
def update_submissions
return unless request.post?
if params[:groupings].nil?
flash[:release_results] = "Select a group"
else
if params[:release_results]
flash[:release_errors] = []
params[:groupings].each do |grouping_id|
grouping = Grouping.find(grouping_id)
if !grouping.has_submission?
# TODO: Neaten this up...
flash[:release_errors].push("#{grouping.group.group_name} had no submission")
next
end
submission = grouping.get_submission_used
if !submission.has_result?
# TODO: Neaten this up...
flash[:release_errors].push("#{grouping.group.group_name} had no result")
next
end
if submission.result.marking_state != Result::MARKING_STATES[:complete]
flash[:release_errors].push(I18n.t("marking_state.not_complete", :group_name => grouping.group.group_name))
next
end
if flash[:release_errors].nil? or flash[:release_errors].size == 0
flash[:release_errors] = nil
end
submission.result.released_to_students = true
submission.result.save
end
elsif params[:unrelease_results]
params[:groupings].each do |g|
grouping = Grouping.find(g)
grouping.get_submission_used.result.unrelease_results
end
end
end
redirect_to :action => 'browse', :id => params[:id]
if !params[:groupings].nil?
grouping = Grouping.find(params[:groupings].first)
grouping.assignment.set_results_average
end
end
def unrelease
return unless request.post?
if params[:groupings].nil?
flash[:release_results] = "Select a group"
else
params[:groupings].each do |g|
g.unrelease_results
end
end
redirect_to :action => 'browse', :id => params[:id]
end
def download_simple_csv_report
assignment = Assignment.find(params[:id])
students = Student.all
csv_string = FasterCSV.generate do |csv|
students.each do |student|
final_result = []
final_result.push(student.user_name)
grouping = student.accepted_grouping_for(assignment.id)
if grouping.nil? || !grouping.has_submission?
final_result.push('')
else
submission = grouping.get_submission_used
final_result.push(submission.result.total_mark)
end
csv << final_result
end
end
send_data csv_string, :disposition => 'attachment', :type => 'application/vnd.ms-excel', :filename => "#{assignment.short_identifier} simple report.csv"
end
def download_detailed_csv_report
assignment = Assignment.find(params[:id])
students = Student.all
rubric_criteria = assignment.rubric_criteria
csv_string = FasterCSV.generate do |csv|
students.each do |student|
final_result = []
final_result.push(student.user_name)
grouping = student.accepted_grouping_for(assignment.id)
if grouping.nil? || !grouping.has_submission?
final_result.push('')
rubric_criteria.each do |rubric_criterion|
final_result.push('')
final_result.push(rubric_criterion.weight)
end
final_result.push('')
final_result.push('')
final_result.push(0)
else
submission = grouping.get_submission_used
final_result.push(submission.result.total_mark)
rubric_criteria.each do |rubric_criterion|
mark = submission.result.marks.find_by_rubric_criterion_id(rubric_criterion.id)
if mark.nil?
final_result.push('')
else
final_result.push(mark.mark || '')
end
final_result.push(rubric_criterion.weight)
end
final_result.push(submission.result.get_total_extra_points)
final_result.push(submission.result.get_total_extra_percentage)
membership = grouping.student_memberships.find_by_user_id(student.id)
grace_period_deductions = student.grace_period_deductions.find_by_membership_id(membership.id)
final_result.push(grace_period_deductions || 0)
end
csv << final_result
end
end
send_data csv_string, :disposition => 'attachment', :type => 'application/vnd.ms-excel', :filename => "#{assignment.short_identifier} detailed report.csv"
end
# See Assignment.get_svn_commands for details
def download_svn_export_commands
assignment = Assignment.find(params[:id])
string = assignment.get_svn_commands
send_data string, :disposition => 'attachment', :type => 'text/plain', :filename => "#{assignment.short_identifier}_svn_exports"
end
end
| 37.973236 | 223 | 0.689691 |
f7d96a2518647d62036efe034fdc29c84031bdd1 | 117 | module Api
class PxeImagesController < BaseController
include Subcollections::CustomizationTemplates
end
end
| 19.5 | 50 | 0.820513 |
1ae5ef62cbe7ac77e18e38f219602f93a02d613f | 594 | Pod::Spec.new do |s|
s.name = "GCMFormTableView"
s.version = "0.2.5"
s.summary = "Form building framework."
s.homepage = "https://github.com/gamechanger/GCMFormTableView"
s.author = { "Jerry Hsu" => "[email protected]" }
s.source = { :git => "https://github.com/gamechanger/GCMFormTableView.git", :tag => "0.2.5" }
s.source_files = "GCMFormTableView/GCMFormTableView/*.{h,m}"
  s.license      = { :type => 'MIT', :file => 'LICENSE' }
s.requires_arc = true
s.ios.deployment_target = '6.0'
s.dependency 'GCMCocoaExtensions', '~> 0.2.0'
end
| 42.428571 | 101 | 0.619529 |
38d83b11d16a42b45d562fdff38d3012f059231a | 702 | require_relative 'template_creator'
require_relative 'pdf_config_creator'
module Bookbinder
module Subnav
class SubnavGeneratorFactory
def initialize(fs, output_locations)
@fs = fs
@output_locations = output_locations
end
def produce(json_generator)
SubnavGenerator.new(json_generator, template_creator, pdf_config_creator, output_locations)
end
attr_reader :fs, :output_locations
private
def template_creator
@template_creator ||= TemplateCreator.new(fs, output_locations)
end
def pdf_config_creator
@pdf_config_creator ||= PdfConfigCreator.new(fs, output_locations)
end
end
end
end
| 23.4 | 99 | 0.709402 |
627e9cd2546ea09ff4b0b7081ea5a1c81961487d | 3,126 | require 'xmlrpc/client'
require 'cipisek/response'
require 'cipisek/response/client_get'
require 'cipisek/response/campaigns_list'
require 'cipisek/response/groups_list'
require 'cipisek/response/groups_stats'
require 'cipisek/response/keywords_list'
require 'cipisek/response/keywords_stats'
# :(
XMLRPC::Config.module_eval {remove_const(:ENABLE_NIL_PARSER)}
XMLRPC::Config.const_set(:ENABLE_NIL_PARSER, true)
module Cipisek
class Error < Exception; end;
class Client
attr_accessor :client, :logged, :session, :login, :password
def initialize(login, password, params = {})
self.client = XMLRPC::Client.new3(default_connect_params.merge(params))
self.login = login
self.password = password
end
def connect
return if logged?
response = client.call("client.login", self.login, self.password)
self.session = response["session"]
end
def call(method, klass = Cipisek::Response, *args)
connect unless logged?
args[0] ||= {}
args[0].merge!(session: self.session)
response_hash = self.client.call(method, *args)
response = klass.new(response_hash)
self.session = response.session if response.session
if response.status == 200
response
else
raise Cipisek::Error.new("#{response.status} #{response.status_message}\n #{response.diagnostics}")
end
end
def client_get
self.call('client.get', Cipisek::Response::ClientGet)
end
def campaigns_list(user_id = nil)
self.call('campaigns.list', Cipisek::Response::CampaignsList, userId: user_id)
end
def groups_list(user_id = nil, campaign_ids = [])
self.call('groups.list', Cipisek::Response::GroupsList, {userId: user_id}, {campaignIds: campaign_ids})
end
def groups_stats(user_id = nil, group_ids = [], date_from = nil, date_to = nil, granularity = nil)
date_from = XMLRPC::DateTime.new(date_from.year, date_from.month, date_from.day, 0, 0, 0)
date_to = XMLRPC::DateTime.new(date_to.year, date_to.month, date_to.day, 0, 0, 0)
self.call('groups.stats', Cipisek::Response::GroupsStats, {userId: user_id}, group_ids, {dateFrom: date_from, dateTo: date_to, granularity: granularity})
end
def keywords_list(user_id = nil, group_ids = [], only_id = false)
self.call('keywords.list', Cipisek::Response::KeywordsList, {userId: user_id}, {groupIds: group_ids}, {onlyId: only_id})
end
def keywords_stats(user_id = nil, keyword_ids = [], date_from = nil, date_to = nil, granularity = 'daily')
date_from = XMLRPC::DateTime.new(date_from.year, date_from.month, date_from.day, 0, 0, 0)
date_to = XMLRPC::DateTime.new(date_to.year, date_to.month, date_to.day, 0, 0, 0)
self.call('keywords.stats', Cipisek::Response::KeywordsStats, {userId: user_id}, keyword_ids, {dateFrom: date_from, dateTo: date_to, granularity: granularity})
end
def logged?
!!self.session
end
def default_connect_params
{
host: 'api.sklik.cz',
path: '/cipisek/RPC2',
port: 443,
use_ssl: true
}
end
end
end
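
# Example session (credentials and IDs are placeholders; method names and
# signatures match the wrappers defined above):
#
#   require 'date'
#
#   client = Cipisek::Client.new('[email protected]', 'secret')
#   client.connect                    # logs in and stores the session token
#   client.client_get                 # => Cipisek::Response::ClientGet
#   client.campaigns_list             # campaigns of the authenticated user
#   client.keywords_stats(nil, [123_456],
#                         Date.new(2014, 1, 1), Date.new(2014, 1, 31))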
| 34.351648 | 165 | 0.683621 |
62d412de8c6ad6244b1dffae2909df2f8b37492f | 51,757 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module ClouduseraccountsBeta
# Cloud User Accounts API
#
# API for the Google Cloud User Accounts service.
#
# @example
# require 'google/apis/clouduseraccounts_beta'
#
# Clouduseraccounts = Google::Apis::ClouduseraccountsBeta # Alias the module
# service = Clouduseraccounts::CloudUserAccountsService.new
#
# @see https://cloud.google.com/compute/docs/access/user-accounts/api/latest/
class CloudUserAccountsService < Google::Apis::Core::BaseService
# @return [String]
# API key. Your API key identifies your project and provides you with API access,
# quota, and reports. Required unless you provide an OAuth 2.0 token.
attr_accessor :key
# @return [String]
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
attr_accessor :quota_user
# @return [String]
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
attr_accessor :user_ip
def initialize
super('https://www.googleapis.com/', 'clouduseraccounts/beta/projects/')
end
# Deletes the specified operation resource.
# @param [String] project
# Project ID for this request.
# @param [String] operation
# Name of the Operations resource to delete.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def delete_global_accounts_operation(project, operation, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:delete, '{project}/global/operations/{operation}', options)
command.params['project'] = project unless project.nil?
command.params['operation'] = operation unless operation.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Retrieves the specified operation resource.
# @param [String] project
# Project ID for this request.
# @param [String] operation
# Name of the Operations resource to return.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_global_accounts_operation(project, operation, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:get, '{project}/global/operations/{operation}', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['operation'] = operation unless operation.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Retrieves the list of operation resources contained within the specified
# project.
# @param [String] project
# Project ID for this request.
# @param [String] filter
# Sets a filter expression for filtering listed resources, in the form filter=`
# expression`. Your `expression` must be in the format: FIELD_NAME
# COMPARISON_STRING LITERAL_STRING.
# The FIELD_NAME is the name of the field you want to compare. Only atomic field
# types are supported (string, number, boolean). The COMPARISON_STRING must be
# either eq (equals) or ne (not equals). The LITERAL_STRING is the string value
# to filter to. The literal value must be valid for the type of field (string,
# number, boolean). For string fields, the literal value is interpreted as a
# regular expression using RE2 syntax. The literal value must match the entire
# field.
# For example, filter=name ne example-instance.
# @param [Fixnum] max_results
# Maximum count of results to be returned.
# @param [String] order_by
# Sorts list results by a certain order. By default, results are returned in
# alphanumerical order based on the resource name.
# You can also sort results in descending order based on the creation timestamp
# using orderBy="creationTimestamp desc". This sorts results based on the
# creationTimestamp field in reverse chronological order (newest result first).
# Use this to sort resources like operations so that the newest operation is
# returned first.
# Currently, only sorting by name or creationTimestamp desc is supported.
# @param [String] page_token
# Specifies a page token to use. Use this parameter if you want to list the next
# page of results. Set pageToken to the nextPageToken returned by a previous
# list request.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::OperationList] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::OperationList]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_global_accounts_operations(project, filter: nil, max_results: nil, order_by: nil, page_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:get, '{project}/global/operations', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::OperationList::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::OperationList
command.params['project'] = project unless project.nil?
command.query['filter'] = filter unless filter.nil?
command.query['maxResults'] = max_results unless max_results.nil?
command.query['orderBy'] = order_by unless order_by.nil?
command.query['pageToken'] = page_token unless page_token.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
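        # A usage sketch for the list call above (the project ID and filter are
        # placeholders, and #items / #name accessors are assumptions in line
        # with the other generated resources in this gem):
        #
        #   service = Clouduseraccounts::CloudUserAccountsService.new
        #   ops = service.list_global_accounts_operations('my-project',
        #                                                 filter: 'status eq DONE',
        #                                                 max_results: 10)
        #   (ops.items || []).each { |op| puts op.name }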
# Adds users to the specified group.
# @param [String] project
# Project ID for this request.
# @param [String] group_name
# Name of the group for this request.
# @param [Google::Apis::ClouduseraccountsBeta::GroupsAddMemberRequest] groups_add_member_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def add_group_member(project, group_name, groups_add_member_request_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/global/groups/{groupName}/addMember', options)
command.request_representation = Google::Apis::ClouduseraccountsBeta::GroupsAddMemberRequest::Representation
command.request_object = groups_add_member_request_object
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['groupName'] = group_name unless group_name.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Deletes the specified Group resource.
# @param [String] project
# Project ID for this request.
# @param [String] group_name
# Name of the Group resource to delete.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def delete_group(project, group_name, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:delete, '{project}/global/groups/{groupName}', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['groupName'] = group_name unless group_name.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Returns the specified Group resource.
# @param [String] project
# Project ID for this request.
# @param [String] group_name
# Name of the Group resource to return.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Group] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Group]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_group(project, group_name, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:get, '{project}/global/groups/{groupName}', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::Group::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Group
command.params['project'] = project unless project.nil?
command.params['groupName'] = group_name unless group_name.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Creates a Group resource in the specified project using the data included in
# the request.
# @param [String] project
# Project ID for this request.
# @param [Google::Apis::ClouduseraccountsBeta::Group] group_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def insert_group(project, group_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/global/groups', options)
command.request_representation = Google::Apis::ClouduseraccountsBeta::Group::Representation
command.request_object = group_object
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
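        # A usage sketch for creating a group (the Group fields #name and
        # #description are assumptions about the generated model; the project ID
        # is a placeholder):
        #
        #   group = Clouduseraccounts::Group.new(name: 'admins', description: 'Admin users')
        #   operation = service.insert_group('my-project', group)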
# Retrieves the list of groups contained within the specified project.
# @param [String] project
# Project ID for this request.
# @param [String] filter
# Sets a filter expression for filtering listed resources, in the form filter=`
# expression`. Your `expression` must be in the format: FIELD_NAME
# COMPARISON_STRING LITERAL_STRING.
# The FIELD_NAME is the name of the field you want to compare. Only atomic field
# types are supported (string, number, boolean). The COMPARISON_STRING must be
# either eq (equals) or ne (not equals). The LITERAL_STRING is the string value
# to filter to. The literal value must be valid for the type of field (string,
# number, boolean). For string fields, the literal value is interpreted as a
# regular expression using RE2 syntax. The literal value must match the entire
# field.
# For example, filter=name ne example-instance.
# @param [Fixnum] max_results
# Maximum count of results to be returned.
# @param [String] order_by
# Sorts list results by a certain order. By default, results are returned in
# alphanumerical order based on the resource name.
# You can also sort results in descending order based on the creation timestamp
# using orderBy="creationTimestamp desc". This sorts results based on the
# creationTimestamp field in reverse chronological order (newest result first).
# Use this to sort resources like operations so that the newest operation is
# returned first.
# Currently, only sorting by name or creationTimestamp desc is supported.
# @param [String] page_token
# Specifies a page token to use. Use this parameter if you want to list the next
# page of results. Set pageToken to the nextPageToken returned by a previous
# list request.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::GroupList] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::GroupList]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_groups(project, filter: nil, max_results: nil, order_by: nil, page_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:get, '{project}/global/groups', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::GroupList::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::GroupList
command.params['project'] = project unless project.nil?
command.query['filter'] = filter unless filter.nil?
command.query['maxResults'] = max_results unless max_results.nil?
command.query['orderBy'] = order_by unless order_by.nil?
command.query['pageToken'] = page_token unless page_token.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Removes users from the specified group.
# @param [String] project
# Project ID for this request.
# @param [String] group_name
# Name of the group for this request.
# @param [Google::Apis::ClouduseraccountsBeta::GroupsRemoveMemberRequest] groups_remove_member_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def remove_group_member(project, group_name, groups_remove_member_request_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/global/groups/{groupName}/removeMember', options)
command.request_representation = Google::Apis::ClouduseraccountsBeta::GroupsRemoveMemberRequest::Representation
command.request_object = groups_remove_member_request_object
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['groupName'] = group_name unless group_name.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Returns a list of authorized public keys for a specific user account.
# @param [String] project
# Project ID for this request.
# @param [String] zone
# Name of the zone for this request.
# @param [String] user
# The user account for which you want to get a list of authorized public keys.
# @param [String] instance
# The fully-qualified URL of the virtual machine requesting the view.
# @param [Boolean] login
# Whether the view was requested as part of a user-initiated login.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::LinuxGetAuthorizedKeysViewResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::LinuxGetAuthorizedKeysViewResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_linux_authorized_keys_view(project, zone, user, instance, login: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/zones/{zone}/authorizedKeysView/{user}', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::LinuxGetAuthorizedKeysViewResponse::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::LinuxGetAuthorizedKeysViewResponse
command.params['project'] = project unless project.nil?
command.params['zone'] = zone unless zone.nil?
command.params['user'] = user unless user.nil?
command.query['instance'] = instance unless instance.nil?
command.query['login'] = login unless login.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Retrieves a list of user accounts for an instance within a specific project.
# @param [String] project
# Project ID for this request.
# @param [String] zone
# Name of the zone for this request.
# @param [String] instance
# The fully-qualified URL of the virtual machine requesting the views.
# @param [String] filter
# Sets a filter expression for filtering listed resources, in the form filter=`
# expression`. Your `expression` must be in the format: FIELD_NAME
# COMPARISON_STRING LITERAL_STRING.
# The FIELD_NAME is the name of the field you want to compare. Only atomic field
# types are supported (string, number, boolean). The COMPARISON_STRING must be
# either eq (equals) or ne (not equals). The LITERAL_STRING is the string value
# to filter to. The literal value must be valid for the type of field (string,
# number, boolean). For string fields, the literal value is interpreted as a
# regular expression using RE2 syntax. The literal value must match the entire
# field.
# For example, filter=name ne example-instance.
# @param [Fixnum] max_results
# Maximum count of results to be returned.
# @param [String] order_by
# Sorts list results by a certain order. By default, results are returned in
# alphanumerical order based on the resource name.
# You can also sort results in descending order based on the creation timestamp
# using orderBy="creationTimestamp desc". This sorts results based on the
# creationTimestamp field in reverse chronological order (newest result first).
# Use this to sort resources like operations so that the newest operation is
# returned first.
# Currently, only sorting by name or creationTimestamp desc is supported.
# @param [String] page_token
# Specifies a page token to use. Use this parameter if you want to list the next
# page of results. Set pageToken to the nextPageToken returned by a previous
# list request.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::LinuxGetLinuxAccountViewsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::LinuxGetLinuxAccountViewsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_linux_linux_account_views(project, zone, instance, filter: nil, max_results: nil, order_by: nil, page_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/zones/{zone}/linuxAccountViews', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::LinuxGetLinuxAccountViewsResponse::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::LinuxGetLinuxAccountViewsResponse
command.params['project'] = project unless project.nil?
command.params['zone'] = zone unless zone.nil?
command.query['filter'] = filter unless filter.nil?
command.query['instance'] = instance unless instance.nil?
command.query['maxResults'] = max_results unless max_results.nil?
command.query['orderBy'] = order_by unless order_by.nil?
command.query['pageToken'] = page_token unless page_token.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Adds a public key to the specified User resource with the data included in the
# request.
# @param [String] project
# Project ID for this request.
# @param [String] user
# Name of the user for this request.
# @param [Google::Apis::ClouduseraccountsBeta::PublicKey] public_key_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def add_user_public_key(project, user, public_key_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/global/users/{user}/addPublicKey', options)
command.request_representation = Google::Apis::ClouduseraccountsBeta::PublicKey::Representation
command.request_object = public_key_object
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['user'] = user unless user.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
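        # A usage sketch for adding an SSH public key to a user (the PublicKey
        # field #key is an assumption about the generated model; values are
        # placeholders):
        #
        #   public_key = Clouduseraccounts::PublicKey.new(
        #     key: File.read(File.expand_path('~/.ssh/id_rsa.pub')))
        #   service.add_user_public_key('my-project', 'alice', public_key)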
# Deletes the specified User resource.
# @param [String] project
# Project ID for this request.
# @param [String] user
# Name of the user resource to delete.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def delete_user(project, user, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:delete, '{project}/global/users/{user}', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['user'] = user unless user.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Returns the specified User resource.
# @param [String] project
# Project ID for this request.
# @param [String] user
# Name of the user resource to return.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::User] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::User]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_user(project, user, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:get, '{project}/global/users/{user}', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::User::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::User
command.params['project'] = project unless project.nil?
command.params['user'] = user unless user.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Creates a User resource in the specified project using the data included in
# the request.
# @param [String] project
# Project ID for this request.
# @param [Google::Apis::ClouduseraccountsBeta::User] user_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def insert_user(project, user_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/global/users', options)
command.request_representation = Google::Apis::ClouduseraccountsBeta::User::Representation
command.request_object = user_object
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
# Retrieves a list of users contained within the specified project.
# @param [String] project
# Project ID for this request.
# @param [String] filter
# Sets a filter expression for filtering listed resources, in the form filter=`
# expression`. Your `expression` must be in the format: FIELD_NAME
# COMPARISON_STRING LITERAL_STRING.
# The FIELD_NAME is the name of the field you want to compare. Only atomic field
# types are supported (string, number, boolean). The COMPARISON_STRING must be
# either eq (equals) or ne (not equals). The LITERAL_STRING is the string value
# to filter to. The literal value must be valid for the type of field (string,
# number, boolean). For string fields, the literal value is interpreted as a
# regular expression using RE2 syntax. The literal value must match the entire
# field.
# For example, filter=name ne example-instance.
# @param [Fixnum] max_results
# Maximum count of results to be returned.
# @param [String] order_by
# Sorts list results by a certain order. By default, results are returned in
# alphanumerical order based on the resource name.
# You can also sort results in descending order based on the creation timestamp
# using orderBy="creationTimestamp desc". This sorts results based on the
# creationTimestamp field in reverse chronological order (newest result first).
# Use this to sort resources like operations so that the newest operation is
# returned first.
# Currently, only sorting by name or creationTimestamp desc is supported.
# @param [String] page_token
# Specifies a page token to use. Use this parameter if you want to list the next
# page of results. Set pageToken to the nextPageToken returned by a previous
# list request.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::UserList] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::UserList]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_users(project, filter: nil, max_results: nil, order_by: nil, page_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:get, '{project}/global/users', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::UserList::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::UserList
command.params['project'] = project unless project.nil?
command.query['filter'] = filter unless filter.nil?
command.query['maxResults'] = max_results unless max_results.nil?
command.query['orderBy'] = order_by unless order_by.nil?
command.query['pageToken'] = page_token unless page_token.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
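        # A usage sketch for listing users (project ID and filter are
        # placeholders; #items on UserList is an assumption in line with the
        # other generated list resources):
        #
        #   users = service.list_users('my-project', filter: 'name ne example-user')
        #   (users.items || []).each { |u| puts u.name }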
# Removes the specified public key from the user.
# @param [String] project
# Project ID for this request.
# @param [String] user
# Name of the user for this request.
# @param [String] fingerprint
# The fingerprint of the public key to delete. Public keys are identified by
# their fingerprint, which is defined by RFC4716 to be the MD5 digest of the
# public key.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# Overrides userIp if both are provided.
# @param [String] user_ip
# IP address of the site where the request originates. Use this if you want to
# enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::ClouduseraccountsBeta::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::ClouduseraccountsBeta::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def remove_user_public_key(project, user, fingerprint, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
command = make_simple_command(:post, '{project}/global/users/{user}/removePublicKey', options)
command.response_representation = Google::Apis::ClouduseraccountsBeta::Operation::Representation
command.response_class = Google::Apis::ClouduseraccountsBeta::Operation
command.params['project'] = project unless project.nil?
command.params['user'] = user unless user.nil?
command.query['fingerprint'] = fingerprint unless fingerprint.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
execute_or_queue_command(command, &block)
end
protected
def apply_command_defaults(command)
command.query['key'] = key unless key.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
command.query['userIp'] = user_ip unless user_ip.nil?
end
end
end
end
end
| 60.534503 | 195 | 0.668798 |
bb3c66cbb609290f0c0a214bf5e6186929ebaf58 | 312 | ENV['SINATRA_ENV'] ||= "development"
require 'bundler/setup'
Bundler.require(:default, ENV['SINATRA_ENV'])
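# The SQLite database file is chosen per environment, e.g. db/development.sqlite
# when SINATRA_ENV is left at its default.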
ActiveRecord::Base.establish_connection(
:adapter => "sqlite3",
:database => "db/#{ENV['SINATRA_ENV']}.sqlite"
)
require 'date'
require './app/controllers/application_controller'
require_all 'app'
| 20.8 | 50 | 0.727564 |
5d55834093fb271c6644ae6f7338228d7731e38b | 4,585 | require 'nokogiri'
module DocxTemplater
class TemplateProcessor
attr_reader :data, :escape_html, :skip_unmatched
# data is expected to be a hash of symbols => string or arrays of hashes.
def initialize(data, escape_html = true, skip_unmatched: false)
@data = data
@escape_html = escape_html
@skip_unmatched = skip_unmatched
end
def render(document)
document.force_encoding(Encoding::UTF_8) if document.respond_to?(:force_encoding)
data.each do |key, value|
case value
when Array
document = enter_multiple_values(document, key, data[key])
document.gsub!("#SUM:#{key.to_s.upcase}#", value.count.to_s)
when TrueClass, FalseClass
document = enter_boolean_values(document, value, key)
else
document.gsub!("$#{key.to_s.upcase}$", safe(value))
end
end
document
end
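    # A minimal usage sketch (placeholder keys, not taken from a real template):
    #
    #   data = { client_name: 'ACME', invoices: [{ number: '1' }, { number: '2' }] }
    #   xml  = DocxTemplater::TemplateProcessor.new(data).render(document_xml)
    #
    # Scalars replace $CLIENT_NAME$, the :invoices array expands the rows
    # between #BEGIN_ROW:INVOICES# and #END_ROW:INVOICES# (filling $EACH:NUMBER$
    # in each copy), #SUM:INVOICES# becomes the element count, and boolean
    # values resolve #IF:KEY# / #ELSE:KEY# / #ENDIF:KEY# sections.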
private
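    # Resolves #IF:KEY# / #ELSE:KEY# / #ENDIF:KEY# markers: keeps the branch
    # matching the boolean value, drops the other branch, then strips the
    # markers themselves.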
    def enter_boolean_values(doc, value, key)
else_condition_present = doc.match("#ELSE:#{key.to_s.upcase}#")
if value
doc.gsub!(/\#ELSE:#{key.to_s.upcase}\#.*?\#ENDIF:#{key.to_s.upcase}\#/m, '') if else_condition_present
else
doc.gsub!(/\#IF:#{key.to_s.upcase}\#.*?\#ELSE:#{key.to_s.upcase}\#/m, '') if else_condition_present
end
doc.gsub!(/\#(ENDIF|ELSE|IF):#{key.to_s.upcase}\#/, '')
doc
end
def safe(text)
if escape_html
        text.to_s.gsub('&', '&amp;').gsub('>', '&gt;').gsub('<', '&lt;')
else
text.to_s
end
end
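    # Duplicates the table rows between #BEGIN_ROW:KEY# and #END_ROW:KEY# once
    # per element of `values`, filling $EACH:FIELD$ placeholders (plus nested
    # arrays and booleans) from each element, then unlinks the marker rows.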
    def enter_multiple_values(xml, key, values)
xml = Nokogiri::XML(xml)
begin_row = "#BEGIN_ROW:#{key.to_s.upcase}#"
end_row = "#END_ROW:#{key.to_s.upcase}#"
begin_row_template = xml.xpath("//w:tr[contains(., '#{begin_row}')]", xml.root.namespaces).first
end_row_template = xml.xpath("//w:tr[contains(., '#{end_row}')]", xml.root.namespaces).first
DocxTemplater.log("begin_row_template: #{begin_row_template}")
DocxTemplater.log("end_row_template: #{end_row_template}")
unless begin_row_template && end_row_template
return as_result(xml) if @skip_unmatched
raise "unmatched template markers: #{begin_row} nil: #{begin_row_template.nil?}, #{end_row} nil: #{end_row_template.nil?}. This could be because word broke up tags with it's own xml entries. See README."
end
row_templates = []
row = begin_row_template.next_sibling
while row != end_row_template
row_templates.unshift(row)
row = row.next_sibling
end
DocxTemplater.log("row_templates: (#{row_templates.count}) #{row_templates.map(&:to_s).inspect}")
# for each data, reversed so they come out in the right order
values.reverse_each do |data|
DocxTemplater.log("each_data: #{data.inspect}")
rt = row_templates.map(&:dup)
each_data = {}
data.each do |k, v|
if v.is_a? Array
doc = Nokogiri::XML::Document.new
root = doc.create_element 'pseudo_root', xml.root.namespaces
root.inner_html = rt.reverse.map{|x| x.to_xml}.join
q = enter_multiple_values root.to_xml, k, v
rt = xml.parse(q).reverse
else
each_data[k] = v
end
end
# dup so we have new nodes to append
rt.map(&:dup).each do |new_row|
DocxTemplater.log(" new_row: #{new_row}")
innards = new_row.inner_html
matches = innards.scan(/\$EACH:([^\$]+)\$/)
          each_data.each do |data_key, data_value|
            if data_value.is_a?(TrueClass) || data_value.is_a?(FalseClass)
              innards = enter_boolean_values(innards, data_value, data_key)
            end
          end
unless matches.empty?
DocxTemplater.log(" matches: #{matches.inspect}")
matches.map(&:first).each do |each_key|
DocxTemplater.log(" each_key: #{each_key}")
innards.gsub!("$EACH:#{each_key}$", safe(each_data[each_key.downcase.to_sym]))
end
end
# change all the internals of the new node, even if we did not template
new_row.inner_html = innards
# DocxTemplater::log("new_row new innards: #{new_row.inner_html}")
begin_row_template.add_next_sibling(new_row)
end
end
(row_templates + [begin_row_template, end_row_template]).each(&:unlink)
as_result xml
end
    def as_result(xml)
if xml.root.name == 'pseudo_root'
xml.root.inner_html
else
xml.to_s
end
end
end
end
| 34.473684 | 211 | 0.606761 |
38028a742efb831ab2f6d46af11ce00d6f3967d7 | 20,541 | # encoding: utf-8
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
require 'azure_event_grid'
module Azure::EventGrid::Profiles::Latest
module Models
ContainerRegistryArtifactEventTarget = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryArtifactEventTarget
StorageBlobCreatedEventData = Azure::EventGrid::V2018_01_01::Models::StorageBlobCreatedEventData
ContainerRegistryArtifactEventData = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryArtifactEventData
EventHubCaptureFileCreatedEventData = Azure::EventGrid::V2018_01_01::Models::EventHubCaptureFileCreatedEventData
ServiceBusActiveMessagesAvailableWithNoListenersEventData = Azure::EventGrid::V2018_01_01::Models::ServiceBusActiveMessagesAvailableWithNoListenersEventData
ResourceWriteFailureData = Azure::EventGrid::V2018_01_01::Models::ResourceWriteFailureData
ServiceBusDeadletterMessagesAvailableWithNoListenersEventData = Azure::EventGrid::V2018_01_01::Models::ServiceBusDeadletterMessagesAvailableWithNoListenersEventData
ResourceDeleteSuccessData = Azure::EventGrid::V2018_01_01::Models::ResourceDeleteSuccessData
MediaJobStateChangeEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobStateChangeEventData
ResourceDeleteCancelData = Azure::EventGrid::V2018_01_01::Models::ResourceDeleteCancelData
MediaJobErrorDetail = Azure::EventGrid::V2018_01_01::Models::MediaJobErrorDetail
ResourceActionFailureData = Azure::EventGrid::V2018_01_01::Models::ResourceActionFailureData
MediaJobError = Azure::EventGrid::V2018_01_01::Models::MediaJobError
EventGridEvent = Azure::EventGrid::V2018_01_01::Models::EventGridEvent
MediaJobOutput = Azure::EventGrid::V2018_01_01::Models::MediaJobOutput
SubscriptionValidationResponse = Azure::EventGrid::V2018_01_01::Models::SubscriptionValidationResponse
StorageBlobDeletedEventData = Azure::EventGrid::V2018_01_01::Models::StorageBlobDeletedEventData
ContainerRegistryEventRequest = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventRequest
MediaLiveEventEncoderConnectedEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventEncoderConnectedEventData
ContainerRegistryEventActor = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventActor
MediaLiveEventEncoderDisconnectedEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventEncoderDisconnectedEventData
ContainerRegistryEventSource = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventSource
DeviceTwinMetadata = Azure::EventGrid::V2018_01_01::Models::DeviceTwinMetadata
ContainerRegistryEventData = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventData
DeviceTwinInfoProperties = Azure::EventGrid::V2018_01_01::Models::DeviceTwinInfoProperties
MediaLiveEventTrackDiscontinuityDetectedEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventTrackDiscontinuityDetectedEventData
DeviceTwinInfo = Azure::EventGrid::V2018_01_01::Models::DeviceTwinInfo
ResourceWriteSuccessData = Azure::EventGrid::V2018_01_01::Models::ResourceWriteSuccessData
DeviceConnectionStateEventInfo = Azure::EventGrid::V2018_01_01::Models::DeviceConnectionStateEventInfo
ResourceWriteCancelData = Azure::EventGrid::V2018_01_01::Models::ResourceWriteCancelData
DeviceTelemetryEventProperties = Azure::EventGrid::V2018_01_01::Models::DeviceTelemetryEventProperties
ResourceDeleteFailureData = Azure::EventGrid::V2018_01_01::Models::ResourceDeleteFailureData
MediaLiveEventIncomingVideoStreamsOutOfSyncEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingVideoStreamsOutOfSyncEventData
ResourceActionSuccessData = Azure::EventGrid::V2018_01_01::Models::ResourceActionSuccessData
MediaLiveEventIngestHeartbeatEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIngestHeartbeatEventData
ResourceActionCancelData = Azure::EventGrid::V2018_01_01::Models::ResourceActionCancelData
MediaLiveEventConnectionRejectedEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventConnectionRejectedEventData
SubscriptionValidationEventData = Azure::EventGrid::V2018_01_01::Models::SubscriptionValidationEventData
DeviceTwinProperties = Azure::EventGrid::V2018_01_01::Models::DeviceTwinProperties
SubscriptionDeletedEventData = Azure::EventGrid::V2018_01_01::Models::SubscriptionDeletedEventData
DeviceLifeCycleEventProperties = Azure::EventGrid::V2018_01_01::Models::DeviceLifeCycleEventProperties
MediaLiveEventIncomingStreamsOutOfSyncEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingStreamsOutOfSyncEventData
MediaJobOutputProgressEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputProgressEventData
ContainerRegistryEventTarget = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventTarget
MediaJobOutputStateChangeEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputStateChangeEventData
DeviceTwinInfoX509Thumbprint = Azure::EventGrid::V2018_01_01::Models::DeviceTwinInfoX509Thumbprint
AppConfigurationKeyValueDeletedEventData = Azure::EventGrid::V2018_01_01::Models::AppConfigurationKeyValueDeletedEventData
MediaLiveEventIncomingDataChunkDroppedEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingDataChunkDroppedEventData
AppConfigurationKeyValueModifiedEventData = Azure::EventGrid::V2018_01_01::Models::AppConfigurationKeyValueModifiedEventData
DeviceConnectionStateEventProperties = Azure::EventGrid::V2018_01_01::Models::DeviceConnectionStateEventProperties
MapsGeofenceEventProperties = Azure::EventGrid::V2018_01_01::Models::MapsGeofenceEventProperties
MediaLiveEventIncomingStreamReceivedEventData = Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingStreamReceivedEventData
MapsGeofenceGeometry = Azure::EventGrid::V2018_01_01::Models::MapsGeofenceGeometry
IotHubDeviceCreatedEventData = Azure::EventGrid::V2018_01_01::Models::IotHubDeviceCreatedEventData
IotHubDeviceDeletedEventData = Azure::EventGrid::V2018_01_01::Models::IotHubDeviceDeletedEventData
IotHubDeviceConnectedEventData = Azure::EventGrid::V2018_01_01::Models::IotHubDeviceConnectedEventData
IotHubDeviceDisconnectedEventData = Azure::EventGrid::V2018_01_01::Models::IotHubDeviceDisconnectedEventData
IotHubDeviceTelemetryEventData = Azure::EventGrid::V2018_01_01::Models::IotHubDeviceTelemetryEventData
ContainerRegistryImagePushedEventData = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryImagePushedEventData
ContainerRegistryImageDeletedEventData = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryImageDeletedEventData
ContainerRegistryChartPushedEventData = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryChartPushedEventData
ContainerRegistryChartDeletedEventData = Azure::EventGrid::V2018_01_01::Models::ContainerRegistryChartDeletedEventData
MediaJobOutputAsset = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputAsset
MediaJobScheduledEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobScheduledEventData
MediaJobProcessingEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobProcessingEventData
MediaJobCancelingEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobCancelingEventData
MediaJobFinishedEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobFinishedEventData
MediaJobCanceledEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobCanceledEventData
MediaJobErroredEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobErroredEventData
MediaJobOutputCanceledEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputCanceledEventData
MediaJobOutputCancelingEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputCancelingEventData
MediaJobOutputErroredEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputErroredEventData
MediaJobOutputFinishedEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputFinishedEventData
MediaJobOutputProcessingEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputProcessingEventData
MediaJobOutputScheduledEventData = Azure::EventGrid::V2018_01_01::Models::MediaJobOutputScheduledEventData
MapsGeofenceEnteredEventData = Azure::EventGrid::V2018_01_01::Models::MapsGeofenceEnteredEventData
MapsGeofenceExitedEventData = Azure::EventGrid::V2018_01_01::Models::MapsGeofenceExitedEventData
MapsGeofenceResultEventData = Azure::EventGrid::V2018_01_01::Models::MapsGeofenceResultEventData
MediaJobState = Azure::EventGrid::V2018_01_01::Models::MediaJobState
MediaJobErrorCode = Azure::EventGrid::V2018_01_01::Models::MediaJobErrorCode
MediaJobErrorCategory = Azure::EventGrid::V2018_01_01::Models::MediaJobErrorCategory
MediaJobRetry = Azure::EventGrid::V2018_01_01::Models::MediaJobRetry
end
#
# EventGridDataClass
#
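# Wraps the versioned (2018-01-01) EventGridClient: unknown methods are
# forwarded to the underlying client via method_missing, telemetry is tagged
# with the profile name, and #model_classes exposes snake_case accessors for
# the versioned model types.
#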
class EventGridDataClass
attr_reader :configurable, :base_url, :options, :model_classes
def initialize(options = {})
if options.is_a?(Hash) && options.length == 0
@options = setup_default_options
else
@options = options
end
reset!(options)
@configurable = self
@base_url = options[:base_url].nil? ? nil : options[:base_url]
@options = options[:options].nil? ? nil : options[:options]
@client_0 = Azure::EventGrid::V2018_01_01::EventGridClient.new(configurable.credentials, options)
if @client_0.respond_to?(:subscription_id)
@client_0.subscription_id = configurable.subscription_id
end
add_telemetry(@client_0)
@model_classes = ModelClasses.new
end
def add_telemetry(client)
profile_information = 'Profiles/Latest/EventGrid'
client.add_user_agent_information(profile_information)
end
def method_missing(method, *args)
if @client_0.respond_to?(method)
@client_0.send(method, *args)
else
super
end
end
end
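#
# ModelClasses
#
# Maps snake_case accessor methods onto the versioned model constants, e.g.
# #event_grid_event returns Azure::EventGrid::V2018_01_01::Models::EventGridEvent.
#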
class ModelClasses
def container_registry_artifact_event_target
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryArtifactEventTarget
end
def storage_blob_created_event_data
Azure::EventGrid::V2018_01_01::Models::StorageBlobCreatedEventData
end
def container_registry_artifact_event_data
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryArtifactEventData
end
def event_hub_capture_file_created_event_data
Azure::EventGrid::V2018_01_01::Models::EventHubCaptureFileCreatedEventData
end
def service_bus_active_messages_available_with_no_listeners_event_data
Azure::EventGrid::V2018_01_01::Models::ServiceBusActiveMessagesAvailableWithNoListenersEventData
end
def resource_write_failure_data
Azure::EventGrid::V2018_01_01::Models::ResourceWriteFailureData
end
def service_bus_deadletter_messages_available_with_no_listeners_event_data
Azure::EventGrid::V2018_01_01::Models::ServiceBusDeadletterMessagesAvailableWithNoListenersEventData
end
def resource_delete_success_data
Azure::EventGrid::V2018_01_01::Models::ResourceDeleteSuccessData
end
def media_job_state_change_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobStateChangeEventData
end
def resource_delete_cancel_data
Azure::EventGrid::V2018_01_01::Models::ResourceDeleteCancelData
end
def media_job_error_detail
Azure::EventGrid::V2018_01_01::Models::MediaJobErrorDetail
end
def resource_action_failure_data
Azure::EventGrid::V2018_01_01::Models::ResourceActionFailureData
end
def media_job_error
Azure::EventGrid::V2018_01_01::Models::MediaJobError
end
def event_grid_event
Azure::EventGrid::V2018_01_01::Models::EventGridEvent
end
def media_job_output
Azure::EventGrid::V2018_01_01::Models::MediaJobOutput
end
def subscription_validation_response
Azure::EventGrid::V2018_01_01::Models::SubscriptionValidationResponse
end
def storage_blob_deleted_event_data
Azure::EventGrid::V2018_01_01::Models::StorageBlobDeletedEventData
end
def container_registry_event_request
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventRequest
end
def media_live_event_encoder_connected_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventEncoderConnectedEventData
end
def container_registry_event_actor
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventActor
end
def media_live_event_encoder_disconnected_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventEncoderDisconnectedEventData
end
def container_registry_event_source
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventSource
end
def device_twin_metadata
Azure::EventGrid::V2018_01_01::Models::DeviceTwinMetadata
end
def container_registry_event_data
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventData
end
def device_twin_info_properties
Azure::EventGrid::V2018_01_01::Models::DeviceTwinInfoProperties
end
def media_live_event_track_discontinuity_detected_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventTrackDiscontinuityDetectedEventData
end
def device_twin_info
Azure::EventGrid::V2018_01_01::Models::DeviceTwinInfo
end
def resource_write_success_data
Azure::EventGrid::V2018_01_01::Models::ResourceWriteSuccessData
end
def device_connection_state_event_info
Azure::EventGrid::V2018_01_01::Models::DeviceConnectionStateEventInfo
end
def resource_write_cancel_data
Azure::EventGrid::V2018_01_01::Models::ResourceWriteCancelData
end
def device_telemetry_event_properties
Azure::EventGrid::V2018_01_01::Models::DeviceTelemetryEventProperties
end
def resource_delete_failure_data
Azure::EventGrid::V2018_01_01::Models::ResourceDeleteFailureData
end
def media_live_event_incoming_video_streams_out_of_sync_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingVideoStreamsOutOfSyncEventData
end
def resource_action_success_data
Azure::EventGrid::V2018_01_01::Models::ResourceActionSuccessData
end
def media_live_event_ingest_heartbeat_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIngestHeartbeatEventData
end
def resource_action_cancel_data
Azure::EventGrid::V2018_01_01::Models::ResourceActionCancelData
end
def media_live_event_connection_rejected_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventConnectionRejectedEventData
end
def subscription_validation_event_data
Azure::EventGrid::V2018_01_01::Models::SubscriptionValidationEventData
end
def device_twin_properties
Azure::EventGrid::V2018_01_01::Models::DeviceTwinProperties
end
def subscription_deleted_event_data
Azure::EventGrid::V2018_01_01::Models::SubscriptionDeletedEventData
end
def device_life_cycle_event_properties
Azure::EventGrid::V2018_01_01::Models::DeviceLifeCycleEventProperties
end
def media_live_event_incoming_streams_out_of_sync_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingStreamsOutOfSyncEventData
end
def media_job_output_progress_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputProgressEventData
end
def container_registry_event_target
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryEventTarget
end
def media_job_output_state_change_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputStateChangeEventData
end
def device_twin_info_x509_thumbprint
Azure::EventGrid::V2018_01_01::Models::DeviceTwinInfoX509Thumbprint
end
def app_configuration_key_value_deleted_event_data
Azure::EventGrid::V2018_01_01::Models::AppConfigurationKeyValueDeletedEventData
end
def media_live_event_incoming_data_chunk_dropped_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingDataChunkDroppedEventData
end
def app_configuration_key_value_modified_event_data
Azure::EventGrid::V2018_01_01::Models::AppConfigurationKeyValueModifiedEventData
end
def device_connection_state_event_properties
Azure::EventGrid::V2018_01_01::Models::DeviceConnectionStateEventProperties
end
def maps_geofence_event_properties
Azure::EventGrid::V2018_01_01::Models::MapsGeofenceEventProperties
end
def media_live_event_incoming_stream_received_event_data
Azure::EventGrid::V2018_01_01::Models::MediaLiveEventIncomingStreamReceivedEventData
end
def maps_geofence_geometry
Azure::EventGrid::V2018_01_01::Models::MapsGeofenceGeometry
end
def iot_hub_device_created_event_data
Azure::EventGrid::V2018_01_01::Models::IotHubDeviceCreatedEventData
end
def iot_hub_device_deleted_event_data
Azure::EventGrid::V2018_01_01::Models::IotHubDeviceDeletedEventData
end
def iot_hub_device_connected_event_data
Azure::EventGrid::V2018_01_01::Models::IotHubDeviceConnectedEventData
end
def iot_hub_device_disconnected_event_data
Azure::EventGrid::V2018_01_01::Models::IotHubDeviceDisconnectedEventData
end
def iot_hub_device_telemetry_event_data
Azure::EventGrid::V2018_01_01::Models::IotHubDeviceTelemetryEventData
end
def container_registry_image_pushed_event_data
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryImagePushedEventData
end
def container_registry_image_deleted_event_data
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryImageDeletedEventData
end
def container_registry_chart_pushed_event_data
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryChartPushedEventData
end
def container_registry_chart_deleted_event_data
Azure::EventGrid::V2018_01_01::Models::ContainerRegistryChartDeletedEventData
end
def media_job_output_asset
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputAsset
end
def media_job_scheduled_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobScheduledEventData
end
def media_job_processing_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobProcessingEventData
end
def media_job_canceling_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobCancelingEventData
end
def media_job_finished_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobFinishedEventData
end
def media_job_canceled_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobCanceledEventData
end
def media_job_errored_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobErroredEventData
end
def media_job_output_canceled_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputCanceledEventData
end
def media_job_output_canceling_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputCancelingEventData
end
def media_job_output_errored_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputErroredEventData
end
def media_job_output_finished_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputFinishedEventData
end
def media_job_output_processing_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputProcessingEventData
end
def media_job_output_scheduled_event_data
Azure::EventGrid::V2018_01_01::Models::MediaJobOutputScheduledEventData
end
def maps_geofence_entered_event_data
Azure::EventGrid::V2018_01_01::Models::MapsGeofenceEnteredEventData
end
def maps_geofence_exited_event_data
Azure::EventGrid::V2018_01_01::Models::MapsGeofenceExitedEventData
end
def maps_geofence_result_event_data
Azure::EventGrid::V2018_01_01::Models::MapsGeofenceResultEventData
end
def media_job_state
Azure::EventGrid::V2018_01_01::Models::MediaJobState
end
def media_job_error_code
Azure::EventGrid::V2018_01_01::Models::MediaJobErrorCode
end
def media_job_error_category
Azure::EventGrid::V2018_01_01::Models::MediaJobErrorCategory
end
def media_job_retry
Azure::EventGrid::V2018_01_01::Models::MediaJobRetry
end
end
end
# frozen_string_literal: true
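
# Minimal Encoding stub for an mruby target: instances only carry a name, and
# the default external/internal encodings are pinned to UTF-8 because mruby
# strings do not track encodings.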
class Encoding
class CompatibilityError < StandardError; end
def initialize(name)
@name = name
end
ASCII_8BIT = new('ASCII-8BIT')
BINARY = ASCII_8BIT
US_ASCII = new('US-ASCII')
ASCII = US_ASCII
EUC_JP = new('EUC-JP')
IBM437 = new('IBM437')
ISO_8859_1 = new('ISO-8859-1')
Shift_JIS = new('Shift_JIS')
SHIFT_JIS = Shift_JIS
UTF_8 = new('UTF-8')
def self.default_external
UTF_8
end
def self.default_external=(_enc)
UTF_8
end
def self.default_internal
UTF_8
end
def self.default_internal=(_enc)
UTF_8
end
def self.find(string)
new(string)
end
attr_reader :name
def ascii_compatible?
true
end
def dummy?
# UTF-8, the only encoding this stub really models, is not a dummy encoding.
false
end
def inspect
"#<#{self.class}:#{@name}>"
end
def names
[name]
end
def replicate(name)
self.class.new(name)
end
def to_s
name
end
end
class String
include Comparable
def self.try_convert(obj = nil)
raise ArgumentError if obj.nil?
return obj if obj.is_a?(String)
str = obj.to_str
return nil if str.nil?
raise TypeError unless str.is_a?(String)
str
rescue NoMethodError
nil
end
def %(other)
if other.is_a?(Array)
sprintf(self, *other) # rubocop:disable Style/FormatString
else
sprintf(self, other) # rubocop:disable Style/FormatString
end
end
def +@
return dup if frozen?
self
end
def -@
return self if frozen?
dup.freeze
end
def <<(obj)
raise TypeError if obj.nil?
obj = obj.chr if obj.is_a?(Integer)
self[0..-1] = "#{self}#{obj}"
self
end
alias concat <<
def =~(other)
return other.match(self)&.begin(0) if other.is_a?(Regexp)
raise TypeError, "type mismatch: #{other.class} given" if other.is_a?(String)
return other =~ self if other.respond_to?(:=~)
nil
end
alias __old_element_reference []
def [](*args)
raise ArgumentError, 'wrong number of arguments (given 0, expected 1..2)' if args.empty? || args.length > 2
element =
if (regexp = args[0]).is_a?(Regexp)
capture = args.fetch(1, 0)
capture =
begin
capture.to_int
rescue NoMethodError
capture
end
regexp.match(self)&.[](capture)
elsif args.length == 1
index, = *args
index =
begin
index.to_int
rescue NoMethodError
index
end
__old_element_reference(index)
else
index, length = *args
index =
begin
index.to_int
rescue NoMethodError
index
end
length =
begin
length.to_int
rescue NoMethodError
length
end
__old_element_reference(index, length)
end
return nil if element.nil?
if self.class == String
element
else
self.class.new(element)
end
end
alias slice []
alias __old_element_assignment []=
def []=(*args)
return __old_element_assignment(*args) unless args[0].is_a?(Regexp) # rubocop:disable Lint/ReturnInVoidContext
*args, replace = *args
regexp = args[0]
capture = args.fetch(1, 0)
match = regexp.match(self)
return if match.nil?
self[match.begin(capture)...match.end(capture)] = replace
end
def ascii_only?
bytes.length == length
end
def b
# mruby has no Encoding, so there is no difference between an ASCII_8BIT
# String and a UTF-8 String.
dup
end
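# Illustrative behavior for #byteslice (assuming MRI-compatible semantics on
# ASCII input):
#
# "hello".byteslice(1, 3) #=> "ell"
# "hello".byteslice(-2) #=> "l"
# "hello".byteslice(1..3) #=> "ell"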
def byteslice(*args)
if args[0].is_a?(Integer)
position, len = *args
len = 1 if len.nil?
position = length + position if position.negative?
slice = bytes[position...position + len]
slice.pack('c*')
elsif args.length == 1 && args[0].is_a?(Range)
range, = *args
position = range.begin
len = range.size
slice = bytes[position...position + len]
slice.pack('c*')
else
raise ArgumentError
end
end
def casecmp(str)
str = String.try_convert(str)
return nil if str.nil?
downcase <=> str.downcase
end
def casecmp?(str)
casecmp(str)&.zero? == true
end
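# Illustrative behavior for #center (extra padding goes to the right):
#
# "hi".center(7, '-') #=> "--hi---"
# "hello".center(4) #=> "hello"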
def center(width, padstr = ' ')
return self if length >= width
left_pad = (width - length) / 2
left_pad = (padstr * left_pad)[0...left_pad]
right_pad = (width - length) / 2 + (width - length) % 2
right_pad = (padstr * right_pad)[0...right_pad]
"#{left_pad}#{self}#{right_pad}"
end
def chars
if block_given?
split('').each do |char|
yield char
end
self
else
split('')
end
end
def chr
dup[0]
end
def clear
self[0..-1] = ''
end
def codepoints
each_codepoint.to_a
end
def count
raise NotImplementedError
end
def crypt(_salt)
raise NotImplementedError
end
def delete(*args)
args.inject(self) { |string, pattern| string.tr(pattern, '') }
end
def delete!(*args)
replaced = delete(*args)
self[0..-1] = replaced unless self == replaced
end
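# Illustrative behavior for #delete_prefix / #delete_suffix:
#
# "hello".delete_prefix('he') #=> "llo"
# "hello".delete_suffix('lo') #=> "hel"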
def delete_prefix(prefix)
raise TypeError, "no implicit conversion of #{prefix.class} into String" unless prefix.is_a?(String)
return self[prefix.length..-1] if start_with?(prefix)
dup
end
def delete_prefix!(prefix)
replaced = delete_prefix(prefix)
self[0..-1] = replaced unless self == replaced
end
def delete_suffix(suffix)
raise TypeError, "no implicit conversion of #{suffix.class} into String" unless suffix.is_a?(String)
return self[0...-suffix.length] if end_with?(suffix)
dup
end
def delete_suffix!(prefix)
replaced = delete_suffix(prefix)
self[0..-1] = replaced unless self == replaced
end
def dump
raise NotImplementedError
end
def each_byte(&block)
return to_enum(:each_byte, &block) unless block
bytes = self.bytes
pos = 0
while pos < bytes.size
block.call(bytes[pos])
pos += 1
end
self
end
def each_codepoint
return to_enum(:each_codepoint) unless block_given?
split('').each do |c|
yield c.ord
end
end
def each_grapheme_cluster
raise NotImplementedError
end
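# Line iteration; an empty separator switches to paragraph mode, where runs of
# blank lines delimit chunks. Illustrative behavior:
#
# "a\nb\nc".each_line.to_a #=> ["a\n", "b\n", "c"]
# "a\n\n\nb".each_line('').to_a #=> ["a\n\n\n", "b"]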
def each_line(separator = $/, getline_args = nil) # rubocop:disable Style/SpecialGlobalVars
return to_enum(:each_line, separator, getline_args) unless block_given?
if separator.nil?
yield self
return self
end
raise TypeError if separator.is_a?(Symbol)
raise TypeError if (separator = String.try_convert(separator)).nil?
paragraph_mode = false
if separator.empty?
paragraph_mode = true
separator = "\n\n"
end
start = 0
string = dup
self_len = length
sep_len = separator.length
should_yield_subclass_instances = self.class != String
while (pointer = string.index(separator, start))
pointer += sep_len
pointer += 1 while paragraph_mode && string[pointer] == "\n"
if should_yield_subclass_instances
yield self.class.new(string[start, pointer - start])
else
yield string[start, pointer - start]
end
start = pointer
end
return self if start == self_len
if should_yield_subclass_instances
yield self.class.new(string[start, self_len - start])
else
yield string[start, self_len - start]
end
self
end
def encode(*_args)
# mruby does not support encoding, all Strings are UTF-8. This method is a
# NOOP and is here for compatibility.
dup
end
def encode!(*_args)
# mruby does not support encoding, all Strings are UTF-8. This method is a
# NOOP and is here for compatibility.
self
end
def encoding
# mruby does not support encoding, all Strings are UTF-8. This method is a
# stub and is here for compatibility.
Encoding::UTF_8
end
def end_with?(*suffixes)
suffixes.each do |suffix|
return true if self[-suffix.length..-1] == suffix
end
false
end
def force_encoding(*_args)
# mruby does not support encoding, all Strings are UTF-8. This method is a
# NOOP and is here for compatibility.
self
end
def getbyte(index)
bytes[index]
end
def grapheme_clusters
each_grapheme_cluster.to_a
end
# TODO: Support backrefs
#
# "hello".gsub(/([aeiou])/, '<\1>') #=> "h<e>ll<o>"
# "hello".gsub(/(?<foo>[aeiou])/, '{\k<foo>}') #=> "h{e}ll{o}"
def gsub(pattern, replacement = nil)
return to_enum(:gsub, pattern) if replacement.nil? && !block_given?
replace =
if replacement.nil?
->(old) { (yield old).to_s }
elsif replacement.is_a?(Hash)
->(old) { replacement[old].to_s }
else
->(_old) { replacement.to_s }
end
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
match = pattern.match(self)
return dup if match.nil?
buf = ''
remainder = dup
until match.nil? || remainder.empty?
buf << remainder[0..match.begin(0) - 1] if match.begin(0).positive?
buf << replace.call(match[0])
remainder = remainder[match.end(0)..-1]
if match.begin(0) == match.end(0) && !remainder.empty?
# zero-width match: copy the next character through so it is not dropped
buf << remainder[0]
remainder = remainder[1..-1]
end
match = pattern.match(remainder)
end
buf << remainder
end
def gsub!(pattern, replacement = nil, &blk)
replaced = gsub(pattern, replacement, &blk)
self[0..-1] = replaced unless self == replaced
self
end
def hex
raise NotImplementedError
end
def insert(index, other_str)
return self << other_str if index == -1
index += 1 if index.negative?
self[index, 0] = other_str
self
end
def lines(*args)
each_line(*args).to_a
end
def ljust(integer, padstr = ' ')
raise ArgumentError, 'zero width padding' if padstr == ''
return self if integer <= length
pad_repetitions = (integer / padstr.length).ceil
padding = (padstr * pad_repetitions)[0...(integer - length)]
"#{self}#{padding}"
end
def lstrip
strip_pointer = 0
string_end = length - 1
strip_pointer += 1 while strip_pointer <= string_end && " \f\n\r\t\v".include?(self[strip_pointer])
return '' if strip_pointer > string_end
dup[strip_pointer..string_end]
end
def lstrip!
replaced = lstrip
self[0..-1] = replaced unless self == replaced
end
def match(pattern, pos = 0)
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
pattern.match(self[pos..-1])
end
def match?(pattern, pos = 0)
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
# TODO: Don't set $~ and other Regexp globals
pattern.match?(self[pos..-1])
end
def next
raise NotImplementedError
end
alias succ next
def next!
raise NotImplementedError
end
alias succ! next!
def oct
raise NotImplementedError
end
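# Illustrative behavior for #partition:
#
# "hello world".partition(' ') #=> ["hello", " ", "world"]
# "hello".partition('x') #=> ["hello", "", ""]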
def partition(pattern)
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
match = pattern.match(self)
return [dup, '', ''] if match.nil?
[match.pre_match, match[0], match.post_match]
end
def prepend(*args)
insert(0, args.join(''))
end
def rjust(integer, padstr = ' ')
raise ArgumentError, 'zero width padding' if padstr == ''
return self if integer <= length
pad_repetitions = (integer / padstr.length).ceil
padding = (padstr * pad_repetitions)[0...(integer - length)]
"#{padding}#{self}"
end
def rpartition(pattern)
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
_ = pattern
raise NotImplementedError
end
def rstrip
strip_pointer = length - 1
string_start = 0
strip_pointer -= 1 while strip_pointer >= string_start && " \f\n\r\t\v".include?(self[strip_pointer])
return '' if strip_pointer.negative?
dup[string_start..strip_pointer]
end
def rstrip!
replaced = rstrip
self[0..-1] = replaced unless self == replaced
end
def scrub
# TODO: This is a stub. Implement scrub correctly.
self
end
def scrub!
# TODO: This is a stub. Implement scrub! correctly.
self
end
def setbyte(index, integer)
slice = bytes
slice[index] = integer
self[0..-1] = slice.pack('c*')
end
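# Illustrative behavior for #split:
#
# "a,b,c".split(',') #=> ["a", "b", "c"]
# "a,b,c".split(',', 2) #=> ["a", "b,c"]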
def split(pattern, limit = nil)
parts = []
return parts if self == ''
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
if pattern.source == ''
length.times do |i|
yield self[i].dup if block_given?
parts << self[i].dup
end
return parts
end
remainder = dup
match = pattern.match(remainder)
if limit&.positive?
until match.nil? || remainder.nil? || parts.length >= limit - 1
parts << remainder[0...match.begin(0)]
remainder = remainder[match.end(0)..-1]
remainder = remainder[1..-1] if match.begin(0) == match.end(0)
match = nil
match = pattern.match(remainder) unless remainder.nil?
end
parts << remainder unless remainder.nil?
else
until match.nil? || remainder.nil?
parts << remainder[0...match.begin(0)]
remainder = remainder[match.end(0)..-1]
remainder = remainder[1..-1] if match.begin(0) == match.end(0)
match = nil
match = pattern.match(remainder) unless remainder.nil?
end
parts << remainder unless remainder.nil?
if limit&.negative? && -limit > parts.length
(-limit - parts.length).times do
parts << ''
end
end
end
parts.each { |part| yield part } if block_given?
parts
end
def squeeze(*_args)
raise NotImplementedError
end
def start_with?(*prefixes)
prefixes.each do |prefix|
return true if self[0...prefix.length] == prefix
end
false
end
def strip
result = lstrip
result = self if result.nil?
result.rstrip
end
def strip!
replaced = strip
self[0..-1] = replaced unless self == replaced
end
def sub(pattern, replacement = nil)
return to_enum(:sub, pattern) if replacement.nil? && !block_given?
replace =
if replacement.nil?
->(old) { (yield old).to_s }
elsif replacement.is_a?(Hash)
->(old) { replacement[old].to_s }
else
->(_old) { replacement.to_s }
end
pattern = Regexp.compile(Regexp.escape(pattern)) if pattern.is_a?(String)
match = pattern.match(self)
return dup if match.nil?
buf = ''
remainder = dup
buf << remainder[0..match.begin(0) - 1] if match.begin(0).positive?
buf << replace.call(match[0])
remainder = remainder[match.end(0)..-1]
buf << remainder
buf
end
def sub!(pattern, replacement = nil, &blk)
replaced = sub(pattern, replacement, &blk)
self[0..-1] = replaced unless self == replaced
end
def sum
raise NotImplementedError
end
def swapcase(*_args)
raise NotImplementedError
end
def swapcase!(*_args)
raise NotImplementedError
end
def to_c
raise NotImplementedError
end
def to_r
raise NotImplementedError
end
def to_str
dup
end
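# Illustrative behavior for #tr (no character ranges or backslash escapes yet,
# per the TODOs below):
#
# "hello".tr('el', 'ip') #=> "hippo"
# "hello".tr('el', 'i') #=> "hiiio" (a short to_str is padded with its last character)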
def tr(from_str, to_str)
# TODO: Support character ranges c1-c2
# TODO: Support backslash escapes
to_str = to_str.rjust(from_str.length, to_str[-1]) if to_str.length.positive?
gsub(Regexp.compile("[#{from_str}]")) do |char|
to_str[from_str.index(char)] || ''
end
end
def tr!(from_str, to_str)
raise 'frozen string' if frozen?
replaced = tr(from_str, to_str)
self[0..-1] = replaced unless self == replaced
end
def tr_s(_from_str, _to_str)
# TODO: Support character ranges c1-c2
# TODO: Support backslash escapes
raise NotImplementedError
end
def tr_s!(_from_str, _to_str)
raise 'frozen string' if frozen?
# TODO: Support character ranges c1-c2
# TODO: Support backslash escapes
raise NotImplementedError
end
def undump
raise NotImplementedError
end
def unicode_normalize(_form = :nfc)
raise NotImplementedError
end
def unicode_normalize!(_form = :nfc)
raise NotImplementedError
end
def unicode_normalized?(_form = :nfc)
raise NotImplementedError
end
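# Illustrative behavior for #upto:
#
# 'a'.upto('e').to_a #=> ["a", "b", "c", "d", "e"]
# '08'.upto('11').to_a #=> ["08", "09", "10", "11"]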
def upto(max, exclusive = false, &block)
return to_enum(:upto, max, exclusive) unless block
raise TypeError, "no implicit conversion of #{max.class} into String" unless max.is_a?(String)
len = length
maxlen = max.length
# single character
if len == 1 && maxlen == 1
c = ord
e = max.ord
while c <= e
break if exclusive && c == e
yield c.chr
c += 1
end
return self
end
# both edges are all digits
bi = to_i(10)
ei = max.to_i(10)
if (bi.positive? || self == '0' * len) && (ei.positive? || max == '0' * maxlen)
while bi <= ei
break if exclusive && bi == ei
s = bi.to_s
s = s.rjust(len, '0') if s.length < len
yield s
bi += 1
end
return self
end
bs = self
loop do
n = (bs <=> max)
break if n.positive?
break if exclusive && n.zero?
yield bs
break if n.zero?
bs = bs.succ
end
self
end
def valid_encoding?
# mruby does not support encoding, all Strings are UTF-8. This method is a
# NOOP and is here for compatibility.
true
end
end
# frozen_string_literal: true
# A PitchClassSet represents a pitch-class set or pitch collection.
# See also: PitchSet, PitchClass
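#
# Illustrative usage (assuming HeadMusic::PitchClass.get resolves note
# spellings such as 'C', 'E', 'G' to equal pitch-class objects):
#
#   HeadMusic::PitchClassSet.new(%w[C E G]).trichord?   #=> true
#   HeadMusic::PitchClassSet.new(%w[C E G C]).size      #=> 3 (duplicates removed)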
class HeadMusic::PitchClassSet
attr_reader :pitch_classes
delegate :empty?, to: :pitch_classes
alias empty_set? empty?
def initialize(identifiers)
@pitch_classes = identifiers.map { |identifier| HeadMusic::PitchClass.get(identifier) }.uniq.sort
end
def inspect
pitch_classes.map(&:to_s).join(' ')
end
def to_s
pitch_classes.map(&:to_s).join(' ')
end
def ==(other)
pitch_classes == other.pitch_classes
end
def equivalent?(other)
pitch_classes.sort == other.pitch_classes.sort
end
def size
@size ||= pitch_classes.length
end
def monochord?
pitch_classes.length == 1
end
alias monad? monochord?
def dichord?
pitch_classes.length == 2
end
alias dyad? dichord?
def trichord?
pitch_classes.length == 3
end
def tetrachord?
pitch_classes.length == 4
end
def pentachord?
pitch_classes.length == 5
end
def hexachord?
pitch_classes.length == 6
end
def heptachord?
pitch_classes.length == 7
end
def octachord?
pitch_classes.length == 8
end
def nonachord?
pitch_classes.length == 9
end
def decachord?
pitch_classes.length == 10
end
def undecachord?
pitch_classes.length == 11
end
def dodecachord?
pitch_classes.length == 12
end
end