hexsha (string, 40 chars) | size (int64, 2–1.01M) | content (string, 2–1.01M chars) | avg_line_length (float64, 1.5–100) | max_line_length (int64, 2–1k) | alphanum_fraction (float64, 0.25–1)
---|---|---|---|---|---|
8759b70ff0d17944444e3235eed8244e9f49baa3
| 234 |
# -*- coding: binary -*-
require 'net/ssh/transport/hmac/md5'
module Net::SSH::Transport::HMAC
# The MD5-96 HMAC algorithm. This returns only the first 12 bytes of
# the digest.
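# An illustrative check, assuming the standard Net::SSH HMAC interface
# (instance created with a key, #digest applied to data): the resulting
# MAC is 12 bytes long, e.g. MD5_96.new("secret").digest("payload").bytesize #=> 12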
class MD5_96 < MD5
mac_length 12
end
end
| 18 | 70 | 0.683761 |
f7fa5903b737b6d3c55f6ad6b15408aa01f88754
| 12,739 |
require 'spec_helper'
require 'pdk/module/build'
require 'pathspec'
describe PDK::Module::Build do
subject { described_class.new(initialize_options) }
let(:initialize_options) { {} }
let(:root_dir) { Gem.win_platform? ? 'C:/' : '/' }
shared_context 'with mock metadata' do
let(:mock_metadata) { PDK::Module::Metadata.new('name' => 'my-module') }
before(:each) do
allow(PDK::Module::Metadata).to receive(:from_file).with(anything).and_return(mock_metadata)
end
end
describe '.invoke' do
it 'creates a new PDK::Module::Build instance and calls #build' do
build_double = instance_double(described_class, build: true)
expect(described_class).to receive(:new).with(module_dir: 'test').and_return(build_double)
expect(build_double).to receive(:build)
described_class.invoke(module_dir: 'test')
end
end
describe '#initialize' do
before(:each) do
allow(Dir).to receive(:pwd).and_return(pwd)
end
let(:pwd) { File.join(root_dir, 'path', 'to', 'module') }
context 'by default' do
it 'uses the current working directory as the module directory' do
is_expected.to have_attributes(module_dir: pwd)
end
it 'places the built packages in the pkg directory in the module' do
is_expected.to have_attributes(target_dir: File.join(pwd, 'pkg'))
end
end
context 'if module_dir has been customised' do
let(:initialize_options) do
{
module_dir: File.join(root_dir, 'some', 'other', 'module'),
}
end
it 'uses the provided path as the module directory' do
is_expected.to have_attributes(module_dir: initialize_options[:module_dir])
end
it 'places the built packages in the pkg directory in the module' do
is_expected.to have_attributes(target_dir: File.join(initialize_options[:module_dir], 'pkg'))
end
end
context 'if target_dir has been customised' do
let(:initialize_options) do
{
:'target-dir' => File.join(root_dir, 'tmp'),
}
end
it 'uses the current working directory as the module directory' do
is_expected.to have_attributes(module_dir: pwd)
end
it 'places the built packages in the provided path' do
is_expected.to have_attributes(target_dir: initialize_options[:'target-dir'])
end
end
context 'if both module_dir and target_dir have been customised' do
let(:initialize_options) do
{
:'target-dir' => File.join(root_dir, 'var', 'cache'),
module_dir: File.join(root_dir, 'tmp', 'git', 'my-module'),
}
end
it 'uses the provided module_dir path as the module directory' do
is_expected.to have_attributes(module_dir: initialize_options[:module_dir])
end
it 'places the built packages in the provided target_dir path' do
is_expected.to have_attributes(target_dir: initialize_options[:'target-dir'])
end
end
end
describe '#metadata' do
subject { described_class.new.metadata }
include_context 'with mock metadata'
it { is_expected.to be_a(Hash) }
it { is_expected.to include('name' => 'my-module', 'version' => '0.1.0') }
end
describe '#release_name' do
subject { described_class.new.release_name }
include_context 'with mock metadata'
it { is_expected.to eq('my-module-0.1.0') }
end
describe '#package_file' do
subject { described_class.new(:'target-dir' => target_dir).package_file }
let(:target_dir) { File.join(root_dir, 'tmp') }
include_context 'with mock metadata'
it { is_expected.to eq(File.join(target_dir, 'my-module-0.1.0.tar.gz')) }
end
describe '#build_dir' do
subject { described_class.new(:'target-dir' => target_dir).build_dir }
let(:target_dir) { File.join(root_dir, 'tmp') }
include_context 'with mock metadata'
it { is_expected.to eq(File.join(target_dir, 'my-module-0.1.0')) }
end
describe '#stage_module_in_build_dir' do
let(:instance) { described_class.new(module_dir: module_dir) }
let(:module_dir) { File.join(root_dir, 'tmp', 'my-module') }
before(:each) do
allow(instance).to receive(:ignored_files).and_return(PathSpec.new("/spec/\n"))
allow(Find).to receive(:find).with(module_dir).and_yield(found_file)
end
after(:each) do
instance.stage_module_in_build_dir
end
context 'when it finds a non-ignored path' do
let(:found_file) { File.join(module_dir, 'metadata.json') }
it 'stages the path into the build directory' do
expect(instance).to receive(:stage_path).with(found_file)
end
end
context 'when it finds an ignored path' do
let(:found_file) { File.join(module_dir, 'spec', 'spec_helper.rb') }
it 'does not stage the path' do
expect(Find).to receive(:prune)
expect(instance).not_to receive(:stage_path).with(found_file)
end
end
context 'when it finds the module directory itself' do
let(:found_file) { module_dir }
it 'does not stage the path' do
expect(instance).not_to receive(:stage_path).with(module_dir)
end
end
end
describe '#stage_path' do
let(:instance) { described_class.new(module_dir: module_dir) }
let(:module_dir) { File.join(root_dir, 'tmp', 'my-module') }
let(:path_to_stage) { File.join(module_dir, 'test') }
let(:path_in_build_dir) { File.join(module_dir, 'pkg', release_name, 'test') }
let(:release_name) { 'my-module-0.0.1' }
before(:each) do
allow(instance).to receive(:release_name).and_return(release_name)
end
context 'when the path is a directory' do
before(:each) do
allow(PDK::Util::Filesystem).to receive(:directory?).with(path_to_stage).and_return(true)
allow(PDK::Util::Filesystem).to receive(:stat).with(path_to_stage).and_return(instance_double(File::Stat, mode: 0o100755))
end
it 'creates the directory in the build directory' do
expect(PDK::Util::Filesystem).to receive(:mkdir_p).with(path_in_build_dir, mode: 0o100755)
instance.stage_path(path_to_stage)
end
end
context 'when the path is a symlink' do
before(:each) do
allow(PDK::Util::Filesystem).to receive(:directory?).with(path_to_stage).and_return(false)
allow(PDK::Util::Filesystem).to receive(:symlink?).with(path_to_stage).and_return(true)
end
it 'warns the user about the symlink and skips over it' do
expect(instance).to receive(:warn_symlink).with(path_to_stage)
expect(PDK::Util::Filesystem).not_to receive(:mkdir_p).with(any_args)
expect(PDK::Util::Filesystem).not_to receive(:cp).with(any_args)
instance.stage_path(path_to_stage)
end
end
context 'when the path is a regular file' do
before(:each) do
allow(PDK::Util::Filesystem).to receive(:directory?).with(path_to_stage).and_return(false)
allow(PDK::Util::Filesystem).to receive(:symlink?).with(path_to_stage).and_return(false)
end
it 'copies the file into the build directory, preserving the permissions' do
expect(PDK::Util::Filesystem).to receive(:cp).with(path_to_stage, path_in_build_dir, preserve: true)
instance.stage_path(path_to_stage)
end
context 'when the path is too long' do
let(:path_to_stage) { File.join(*['thing'] * 30) }
it 'exits with an error' do
expect {
instance.stage_path(path_to_stage)
}.to raise_error(PDK::CLI::ExitWithError)
end
end
end
end
describe '#validate_ustar_path!' do
subject(:instance) { described_class.new }
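# The path fixtures below exercise the POSIX ustar tar header limits that
# #validate_ustar_path! appears to enforce: a path may be at most 256
# characters overall, and anything longer than 100 characters must split on
# a '/' into a prefix of at most 155 characters and a name of at most 100.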
good_paths = [
File.join('a' * 155, 'b' * 100),
File.join('a' * 151, *['qwer'] * 19, 'bla'),
File.join('/', 'a' * 49, 'b' * 50),
File.join('a' * 49, "#{'b' * 50}x"),
File.join("#{'a' * 49}x", 'b' * 50),
]
bad_paths = {
File.join('a' * 152, 'b' * 11, 'c' * 93) => %r{longer than 256}i,
File.join('a' * 152, 'b' * 10, 'c' * 92) => %r{could not be split}i,
File.join('a' * 162, 'b' * 10) => %r{could not be split}i,
File.join('a' * 10, 'b' * 110) => %r{could not be split}i,
'a' * 114 => %r{could not be split}i,
}
good_paths.each do |path|
context "when checking '#{path}'" do
it 'does not raise an error' do
expect { instance.validate_ustar_path!(path) }.not_to raise_error
end
end
end
bad_paths.each do |path, err|
context "when checking '#{path}'" do
it 'raises an ArgumentError' do
expect { instance.validate_ustar_path!(path) }.to raise_error(ArgumentError, err)
end
end
end
end
describe '#ignored_path?' do
let(:instance) { described_class.new(module_dir: module_dir) }
let(:ignore_patterns) do
[
'/vendor/',
'foo',
]
end
let(:module_dir) { File.join(root_dir, 'tmp', 'my-module') }
before(:each) do
allow(instance).to receive(:ignored_files).and_return(PathSpec.new(ignore_patterns.join("\n")))
end
it 'returns false for paths not matched by the patterns' do
expect(instance.ignored_path?(File.join(module_dir, 'bar'))).to be_falsey
end
it 'returns true for paths matched by the patterns' do
expect(instance.ignored_path?(File.join(module_dir, 'foo'))).to be_truthy
end
it 'returns true for children of ignored parent directories' do
expect(instance.ignored_path?(File.join(module_dir, 'vendor', 'test'))).to be_truthy
end
end
describe '#ignore_file' do
subject { described_class.new(module_dir: module_dir).ignore_file }
let(:module_dir) { File.join(root_dir, 'tmp', 'my-module') }
let(:possible_files) do
[
'.pdkignore',
'.pmtignore',
'.gitignore',
]
end
let(:available_files) { [] }
before(:each) do
available_files.each do |file|
file_path = File.join(module_dir, file)
allow(PDK::Util::Filesystem).to receive(:file?).with(file_path).and_return(true)
allow(PDK::Util::Filesystem).to receive(:readable?).with(file_path).and_return(true)
end
(possible_files - available_files).each do |file|
file_path = File.join(module_dir, file)
allow(PDK::Util::Filesystem).to receive(:file?).with(file_path).and_return(false)
allow(PDK::Util::Filesystem).to receive(:readable?).with(file_path).and_return(false)
end
end
context 'when none of the possible ignore files are present' do
it { is_expected.to be_nil }
end
context 'when .gitignore is present' do
let(:available_files) { ['.gitignore'] }
it 'returns the path to the .gitignore file' do
is_expected.to eq(File.join(module_dir, '.gitignore'))
end
context 'and .pmtignore is present' do
let(:available_files) { ['.gitignore', '.pmtignore'] }
it 'returns the path to the .pmtignore file' do
is_expected.to eq(File.join(module_dir, '.pmtignore'))
end
context 'and .pdkignore is present' do
let(:available_files) { possible_files }
it 'returns the path to the .pdkignore file' do
is_expected.to eq(File.join(module_dir, '.pdkignore'))
end
end
end
end
end
describe '#ignored_files' do
subject { instance.ignored_files }
let(:module_dir) { File.join(root_dir, 'tmp', 'my-module') }
let(:instance) { described_class.new(module_dir: module_dir) }
before(:each) do
allow(File).to receive(:realdirpath) { |path| path }
end
context 'when no ignore file is present in the module' do
before(:each) do
allow(instance).to receive(:ignore_file).and_return(nil)
end
it 'returns a PathSpec object with the target dir' do
is_expected.to be_a(PathSpec)
is_expected.not_to be_empty
is_expected.to match('pkg/')
end
end
context 'when an ignore file is present in the module' do
before(:each) do
ignore_file_path = File.join(module_dir, '.pdkignore')
ignore_file_content = "/vendor/\n"
allow(instance).to receive(:ignore_file).and_return(ignore_file_path)
allow(PDK::Util::Filesystem).to receive(:read_file).with(ignore_file_path, anything).and_return(ignore_file_content)
end
it 'returns a PathSpec object populated by the ignore file' do
is_expected.to be_a(PathSpec)
is_expected.to have_attributes(specs: array_including(an_instance_of(PathSpec::GitIgnoreSpec)))
end
end
end
end
| 32.497449 | 130 | 0.644556 |
8727ab6f6345e079ed11d2975713101528e8814b
| 247 |
require 'rails_helper'
RSpec.describe "ManMessages", type: :request do
describe "GET /man_messages" do
it "works! (now write some real specs)" do
get man_messages_path
expect(response).to have_http_status(200)
end
end
end
| 22.454545 | 47 | 0.708502 |
110801a503e50bb2b99e4e4b5267912d863edc13
| 6,570 |
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# The request object for updating the resource action details.
class Optimizer::Models::UpdateResourceActionDetails
STATUS_ENUM = [
STATUS_PENDING = 'PENDING'.freeze,
STATUS_DISMISSED = 'DISMISSED'.freeze,
STATUS_POSTPONED = 'POSTPONED'.freeze,
STATUS_IMPLEMENTED = 'IMPLEMENTED'.freeze
].freeze
# **[Required]** The status of the resource action.
# @return [String]
attr_reader :status
# The date and time the current status will change. The format is defined by RFC3339.
#
# For example, \"The current `postponed` status of the resource action will end and change to `pending` on this
# date and time.\"
#
# @return [DateTime]
attr_accessor :time_status_end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'status': :'status',
'time_status_end': :'timeStatusEnd'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'status': :'String',
'time_status_end': :'DateTime'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :status The value to assign to the {#status} property
# @option attributes [DateTime] :time_status_end The value to assign to the {#time_status_end} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.status = attributes[:'status'] if attributes[:'status']
self.time_status_end = attributes[:'timeStatusEnd'] if attributes[:'timeStatusEnd']
raise 'You cannot provide both :timeStatusEnd and :time_status_end' if attributes.key?(:'timeStatusEnd') && attributes.key?(:'time_status_end')
self.time_status_end = attributes[:'time_status_end'] if attributes[:'time_status_end']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
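# Illustrative construction (snake_case and camelCase keys are both accepted,
# but not both for the same attribute):
# UpdateResourceActionDetails.new(
# status: 'POSTPONED',
# time_status_end: DateTime.parse('2021-06-01T00:00:00Z')
# )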
# Custom attribute writer method checking allowed values (enum).
# @param [Object] status Object to be assigned
def status=(status)
raise "Invalid value for 'status': this must be one of the values in STATUS_ENUM." if status && !STATUS_ENUM.include?(status)
@status = status
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
status == other.status &&
time_status_end == other.time_status_end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[status, time_status_end].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 35.706522 | 245 | 0.674429 |
abff4aa60748999aa2ec2c3fa05a1e292b4e2ccf
| 3,152 |
require 'test/unit'
require 'helper'
class TestSearchBiomodel < Test::Unit::TestCase
def setup
@instance = SysMODB::SearchBiomodel.instance
end
def test_find_all_models
puts "\n\n ---------------------find all models---------------------\n\n"
response = []
i=0
response = @instance.get_all_models
response.each { |x| puts "Response #{i+=1}: #{x[:key].to_s} " }
response.each { |x| assert_not_nil x[:key] }
assert_not_nil response
end
def test_search_by_chebiid
puts "\n\n ---------------------search by chebiid---------------------\n\n"
response = []
i = 0
response = @instance.search_by_chebiid("CHEBI:15422")
response.each { |x| puts "Response #{i+=1}: #{x} " }
response.each { |x| assert_not_nil x }
assert_not_nil response
end
def test_search_by_incorrect_chebiid
puts "\n\n ---------------------search by incorrect chebiid---------------------\n\n"
response = []
i = 0
response = @instance.search_by_chebiid("THIS IS NOT A VALID CHEBIID!!!!!<P></P>")
response.each { |x| puts "Response #{i+=1}: #{x} " }
response.each { |x| assert_not_nil x }
assert_not_nil response
end
def test_search_by_name
puts "\n\n ---------------------model ID by name---------------------\n\n"
response = []
i = 0
response = @instance.search_by_name("Kolomeisky2003") #_MyosinV_Processivity")
response.each { |x| puts "Response #{i+=1}: #{x} " }
response.each { |x| assert_not_nil x }
assert_not_nil response
end
def test_search_by_incorrect_name
puts "\n\n ---------------------model ID by incorrect name---------------------\n\n"
response = []
i = 0
response = @instance.search_by_name("THIS IS NOT A VALID NAME!!!!!<P></P>") #_MyosinV_Processivity")
response.each { |x| puts "Response #{i+=1}: #{x} " }
response.each { |x| assert_not_nil x }
assert_not_nil response
end
def test_get_model_name_by_id
puts "\n\n ---------------------model name by ID---------------------\n\n"
response = @instance.get_model_name_by_id("BIOMD0000000190")
puts response
assert_not_nil response
end
def test_get_model_name_by_incorrect_id
puts "\n\n ---------------------model name by incorrect ID---------------------\n\n"
response = @instance.get_model_name_by_id("THIS IS NOT A VALID ID!!!!!<P></P>")
puts response
assert_not_nil response
end
def test_search_id_by_person
puts "\n\n ---------------------model ID by person---------------------\n\n"
response = []
i=0
response = @instance.search_by_person("Jim")
response.each { |x| puts "Response #{i+=1}: #{x} " }
response.each { |x| assert_not_nil x }
assert_not_nil response
end
def test_search_by_incorrect_person
puts "\n\n ---------------------model ID by incorrect person---------------------\n\n"
response = []
i=0
response = @instance.search_by_person("THIS IS NOT A VALID PERSONS NAME!!!!!<P></P>")
response.each { |x| puts "Response #{i+=1}: #{x} " }
response.each { |x| assert_not_nil x }
assert_not_nil response
end
end
| 32.163265 | 104 | 0.572335 |
1a41d6961b7b32630721f085dabcc694f6dbce86
| 223 |
module Furnace::AVM2::ABC
class AS3IfGt < ControlTransferOpcode
instruction 0x17
write_barrier :memory
body do
int24 :jump_offset
end
consume 2
produce 0
conditional true
end
end
| 14.866667 | 39 | 0.668161 |
1a0c767025282e038a72a3696c24fa78615bf918
| 1,966 |
#--------------------------------------------------------------------
#
# Author: Martin Corino
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the R2CORBA LICENSE which is
# included with this program.
#
# Copyright (c) Remedy IT Expertise BV
#--------------------------------------------------------------------
require 'optparse'
OPTIONS = {
:use_implement => false,
:orb_debuglevel => 0,
:serverport => 9999
}
ARGV.options do |opts|
script_name = File.basename($0)
opts.banner = "Usage: ruby #{script_name} [options]"
opts.separator ""
opts.on("--p PORT",
"Set server endpoint port.",
"Default: 3456") { |v| OPTIONS[:serverport]=v }
opts.on("--d LVL",
"Set ORBDebugLevel value.",
"Default: 0") { |v| OPTIONS[:orb_debuglevel]=v }
opts.on("--use-implement",
"Load IDL through CORBA.implement() instead of precompiled code.",
"Default: off") { |v| OPTIONS[:use_implement]=v }
opts.separator ""
opts.on("-h", "--help",
"Show this help message.") { puts opts; exit }
opts.parse!
end
if OPTIONS[:use_implement]
require 'corba'
require 'corba/poa' # to be able to test for IORTable
CORBA.implement('Test.idl', OPTIONS)
else
require 'TestC.rb'
end
begin STDERR.puts 'Not supported on this platform'; exit(0); end unless defined?(IORTable)
orb = CORBA.ORB_init(["-ORBDebugLevel", OPTIONS[:orb_debuglevel]], 'myORB')
obj = orb.string_to_object("corbaloc:iiop:1.2@localhost:#{OPTIONS[:serverport]}/Hello")
hello_obj = Test::Hello._narrow(obj)
the_string = hello_obj.get_string()
puts "servant Hello returned <#{the_string}>"
obj = orb.string_to_object("corbaloc:iiop:1.2@localhost:#{OPTIONS[:serverport]}/Hello2")
hello_obj = Test::Hello._narrow(obj)
the_string = hello_obj.get_string()
puts "servant Hello2 returned <#{the_string}>"
hello_obj.shutdown()
orb.destroy()
| 26.213333 | 90 | 0.616989 |
ed030fc0ab36e10325808dc689be7d4c2ff238b2
| 3,246 |
# frozen_string_literal: true
require_relative '../../spec_helper'
require_relative '../../../lib/rley/syntax/terminal'
require_relative '../../../lib/rley/syntax/non_terminal'
require_relative '../../../lib/rley/syntax/symbol_seq'
# Load the class under test
require_relative '../../../lib/rley/syntax/production'
module Rley # Open this namespace to avoid module qualifier prefixes
module Syntax # Open this namespace to avoid module qualifier prefixes
describe Production do
let(:sentence) { NonTerminal.new('Sentence') }
let(:np) { NonTerminal.new('NP') }
let(:vp) { NonTerminal.new('VP') }
let(:sequence) { [np, vp] }
# Default instantiation rule
subject { Production.new(sentence, sequence) }
context 'Initialization:' do
it 'should be created with a non-terminal and a symbol sequence' do
expect { Production.new(sentence, sequence) }.not_to raise_error
end
it 'should complain when its rhs is nil' do
err = StandardError
msg_prefix = 'Right side of a production of the kind '
msg_suffix = "'Sentence' => ... is nil."
msg = msg_prefix + msg_suffix
expect { Production.new(sentence, nil) }.to raise_error(err, msg)
end
it 'should know its lhs' do
expect(subject.lhs).to eq(sentence)
expect(subject.head).to eq(sentence)
end
it 'should know its rhs' do
expect(subject.rhs).to eq(sequence)
expect(subject.body).to eq(sequence)
end
it 'should be free from constraints at start' do
expect(subject.constraints).to be_empty
end
it 'should know whether its rhs is empty' do
expect(subject).not_to be_empty
instance = Production.new(sentence, [])
expect(instance).to be_empty
end
it 'should be anonymous at creation' do
expect(subject.name).to be_nil
end
it 'should complain if its lhs is not a non-terminal' do
err = StandardError
msg_prefix = 'Left side of production must be a non-terminal symbol'
msg_suffix = ", found a #{String} instead."
msg = msg_prefix + msg_suffix
expect { Production.new('wrong', sequence) }.to raise_error(err, msg)
end
end # context
context 'Provided services:' do
it 'should accept a name (i)' do
a_name = 'nominem'
subject.name = a_name
expect(subject.name).to eq(a_name)
end
it 'should accept a name (ii)' do
a_name = 'nominem'
subject.as(a_name)
expect(subject.name).to eq(a_name)
end
it 'should provide human-readable representation of itself' do
subject.name = 'some name'
prefix = /^#<Rley::Syntax::Production:\d+ @name="some name"/
expect(subject.inspect).to match(prefix)
pattern = /@lhs=Sentence @rhs=#<Rley::Syntax::SymbolSeq/
expect(subject.inspect).to match(pattern)
suffix = /> @generative=>$/
expect(subject.inspect).to match(suffix)
end
end # context
end # describe
end # module
end # module
# End of file
| 33.122449 | 79 | 0.609673 |
1cb958a28aba1b839a3207440f1f46e6e03cd4df
| 2,798 |
#!/usr/bin/env ruby
# -*- coding: utf-8 -*-
#######################################################################
#
# Example of using the Excel::Writer::XLSX module to create worksheet panes.
#
# reverse(c), May 2001, John McNamara, [email protected]
# convert to ruby by Hideo NAKAMURA, [email protected]
#
require 'write_xlsx'
# Create a new workbook called panes.xlsx and add some worksheets
workbook = WriteXLSX.new('panes.xlsx')
worksheet1 = workbook.add_worksheet('Panes 1')
worksheet2 = workbook.add_worksheet('Panes 2')
worksheet3 = workbook.add_worksheet('Panes 3')
worksheet4 = workbook.add_worksheet('Panes 4')
# Freeze panes
worksheet1.freeze_panes(1, 0) # 1 row
worksheet2.freeze_panes(0, 1) # 1 column
worksheet3.freeze_panes(1, 1) # 1 row and column
# Split panes.
# The divisions must be specified in terms of row and column dimensions.
# The default row height is 15 and the default column width is 8.43
#
worksheet4.split_panes(15, 8.43) # 1 row and column
#######################################################################
#
# Set up some formatting and text to highlight the panes
#
header = workbook.add_format(
:align => 'center',
:valign => 'vcenter',
:fg_color => 0x2A
)
center = workbook.add_format(:align => 'center')
#######################################################################
#
# Sheet 1
#
worksheet1.set_column('A:I', 16)
worksheet1.set_row(0, 20)
worksheet1.set_selection('C3')
(0..8).each { |i| worksheet1.write(0, i, 'Scroll down', header) }
(1..100).each do |i|
(0..8).each {|j| worksheet1.write(i, j, i + 1, center)}
end
#######################################################################
#
# Sheet 2
#
worksheet2.set_column('A:A', 16)
worksheet2.set_selection('C3')
(0..49).each do |i|
worksheet2.set_row(i, 15)
worksheet2.write(i, 0, 'Scroll right', header)
end
(0..49).each do |i|
(1..25).each {|j| worksheet2.write(i, j, j, center)}
end
#######################################################################
#
# Sheet 3
#
worksheet3.set_column('A:Z', 16)
worksheet3.set_selection('C3')
worksheet3.write(0, 0, '', header)
(1..25).each {|i| worksheet3.write(0, i, 'Scroll down', header)}
(1..49).each {|i| worksheet3.write(i, 0, 'Scroll right', header)}
(1..49).each do |i|
(1..25).each {|j| worksheet3.write(i, j, j, center)}
end
#######################################################################
#
# Sheet 4
#
worksheet4.set_selection('C3')
(1..25).each {|i| worksheet4.write(0, i, 'Scroll', center)}
(1..49).each {|i| worksheet4.write(i, 0, 'Scroll', center)}
(1..49).each do |i|
(1..25).each {|j| worksheet4.write(i, j, j, center)}
end
workbook.close
| 25.907407 | 76 | 0.54396 |
ed30aa44a267c7d4dd84f6b5eba6bb8714b6689b
| 34 |
module API::V1::ReviewsHelper
end
| 11.333333 | 29 | 0.794118 |
b9c9f8e5f35c86300a6c4e30c111ec0f6cb7099b
| 176 |
class AddEndDateToLeases < ActiveRecord::Migration
def self.up
add_column :leases, :end_date, :date
end
def self.down
remove_column :leases, :end_date
end
end
| 17.6 | 50 | 0.727273 |
ff8993369ec9e4266c79977734e815de0a9689fb
| 133 |
def first_pos(str)
to_return = {}
str.split.each_with_index do |word, index|
to_return[word] ||= index
end
to_return
end
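# Illustrative behaviour: each whitespace-separated word maps to the index of
# its first occurrence, e.g. first_pos("a b a c") #=> {"a"=>0, "b"=>1, "c"=>3}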
| 16.625 | 44 | 0.684211 |
1a8fdd463c54aa937aea5641af6ff9da667518cc
| 394 |
cask "nocturnal" do
version "0.3"
sha256 "a66c59daa1d1c59e5403aee4eb868a3967f1bdb4d90033fa3ee692bffd7db0b9"
url "https://github.com/HarshilShah/Nocturnal/releases/download/#{version}/Nocturnal.zip"
name "Nocturnal"
desc "Simple app to toggle dark mode with one click"
homepage "https://github.com/HarshilShah/Nocturnal"
depends_on macos: ">= :mojave"
app "Nocturnal.app"
end
| 28.142857 | 91 | 0.763959 |
e2f12357efe6e937ab4f0968a4b25413f7239dc9
| 1,046 |
# frozen_string_literal: true
# Takes care of all graphql queries
class GraphqlController < ApplicationController
def execute
variables = ensure_hash(params[:variables])
query = params[:query]
operation_name = params[:operationName]
context = {
# Query context goes here, for example:
# current_user: current_user,
}
opts = { variables: variables,
context: context,
operation_name: operation_name }
result = ReportFactorySchema.execute(query, opts)
render json: result
end
private
# Handle form data, JSON body, or a blank value
def ensure_hash(ambiguous_param)
case ambiguous_param
when String
format_to_hash(ambiguous_param)
when Hash, ActionController::Parameters
ambiguous_param
when nil
{}
else
raise ArgumentError, "Unexpected parameter: #{ambiguous_param}"
end
end
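# Illustrative examples of the coercion above:
# ensure_hash('{"a": 1}') #=> { "a" => 1 }
# ensure_hash(nil) #=> {}
# ensure_hash({ a: 1 }) #=> { a: 1 }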
def format_to_hash(ambiguous_param)
return {} unless ambiguous_param.present?
ensure_hash(JSON.parse(ambiguous_param))
end
end
| 24.904762 | 69 | 0.696941 |
e89ac5b0026ac30720eb1b50671aab1e05a3d0c0
| 8,472 |
# frozen_string_literal: true
class ScheduleRule < ApplicationRecord
belongs_to :product
# oracle has a maximum table name length of 30, so we have to abbreviate it down
has_and_belongs_to_many :product_access_groups, join_table: "product_access_schedule_rules"
attr_accessor :unavailable # virtual attribute
validates_presence_of :product_id
validates_numericality_of :discount_percent, greater_than_or_equal_to: 0, less_than: 100
validates_numericality_of :start_hour, :end_hour, only_integer: true, greater_than_or_equal_to: 0, less_than_or_equal_to: 24
validates_numericality_of :start_min, :end_min, only_integer: true, greater_than_or_equal_to: 0, less_than: 60
validate :at_least_one_day_selected, :end_time_is_after_start_time, :end_time_is_valid, :no_overlap_with_existing_rules, :no_conflict_with_existing_reservation
def self.available_to_user(user)
where(product_users: { user_id: user.id })
.joins(product: :product_users).
# product doesn't have any restrictions at all, or has one that matches the product_user
where("(not EXISTS (SELECT * FROM product_access_schedule_rules WHERE product_access_schedule_rules.schedule_rule_id = schedule_rules.id)
OR (exists (select * from product_access_schedule_rules
where product_access_schedule_rules.product_access_group_id = product_users.product_access_group_id
and product_access_schedule_rules.schedule_rule_id = schedule_rules.id)))")
end
def self.unavailable_for_date(product, day)
rules = where(product_id: product.id)
rules = unavailable(rules)
rules = rules.select { |rule| rule.on_day?(day) }
rules.each_with_object([]) do |rule, reservations|
reservations << Reservation.new(
product: product,
reserve_start_at: day.change(hour: rule.start_hour, min: rule.start_min),
reserve_end_at: day.change(hour: rule.end_hour, min: rule.end_min),
blackout: true,
)
end
end
# Use this on an ActiveRecord::Relation. Is every minute within the range covered
# by one of the rules?
def self.cover?(start_at, end_at = start_at)
rule_set = all.to_a
# Time Ranges aren't iterable, so fake it by creating an array of each minute
# between the two times. If start_at == end_at, the result will be one element.
minutes = (end_at - start_at) / 60
each_minute_in_range = 0.upto(minutes).collect { |n| start_at.advance(minutes: n) }
each_minute_in_range.all? do |time|
rule_set.any? { |rule| rule.cover? time }
end
end
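# Illustrative example, assuming a relation holding a single Mon-Fri
# 09:00-17:00 rule and `monday_10am`/`monday_6pm` Time objects:
# rules.cover?(monday_10am) #=> true
# rules.cover?(monday_10am, monday_6pm) #=> false (everything after 17:00 falls outside the rule)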
# Returns a single array of calendar objects representing the set of schedule_rules
def self.as_calendar_objects(schedule_rules, options = {})
ScheduleRuleCalendarPresenter.to_json(schedule_rules, options)
end
def at_least_one_day_selected
errors.add(:base, "Please select at least one day") unless
on_sun? || on_mon? || on_tue? || on_wed? || on_thu? || on_fri? || on_sat?
end
def end_time_is_after_start_time
return if start_hour.nil? || end_hour.nil? || start_min.nil? || end_min.nil?
errors.add(:base, "End time must be after start time") if (end_hour < start_hour) || (end_hour == start_hour && end_min <= start_min)
end
def end_time_is_valid
if end_hour == 24 && end_min.to_i != 0
errors.add(:base, "End time is invalid")
end
end
def no_overlap_with_existing_rules
return if product.blank?
rules = product.schedule_rules.reject { |r| r.id == id } # select all rules except self
Date::ABBR_DAYNAMES.each do |day|
# skip unless this rule occurs on this day
next unless send("on_#{day.downcase}?")
# check all existing rules for this day
rules.select { |r| r.send("on_#{day.downcase}?") }.each do |rule|
next if start_time_int == rule.end_time_int || end_time_int == rule.start_time_int # start and end times may touch
if start_time_int.between?(rule.start_time_int, rule.end_time_int) ||
end_time_int.between?(rule.start_time_int, rule.end_time_int) ||
(start_time_int < rule.start_time_int && end_time_int > rule.end_time_int)
# overlap
errors.add(:base, "This rule conflicts with an existing rule on #{day}")
end
end
end
end
def no_conflict_with_existing_reservation
# TODO: implement me
true
end
def days_string
days = []
Date::ABBR_DAYNAMES.each do |day|
days << day if send("on_#{day.downcase}?")
end
days.join ", "
end
def start_time_int
start_hour * 100 + start_min
end
# multiplying by 100 means 8:00 is 800 -- it's time on a clock face minus the formatting and meridian
def end_time_int
end_hour * 100 + end_min
end
def start_time
"#{start_hour}:#{sprintf '%02d', start_min}"
end
def end_time
"#{end_hour}:#{sprintf '%02d', end_min}"
end
def on_day?(datetime)
public_send(%(on_#{datetime.strftime('%a').downcase}?))
end
def cover?(dt)
return false unless on_day?(dt)
dt_int = dt.hour * 100 + dt.min
dt_int >= start_time_int && dt_int <= end_time_int
end
# Build weekly calendar hashes
# Returns an array of hashes. A Mon-Fri 9-5 rule would return 5 hashes, one for
# each day.
def as_calendar_objects(options = {})
ScheduleRuleCalendarPresenter.new(self, options).to_json
end
def discount_for(start_at, end_at)
percent_overlap(start_at, end_at) * discount_percent.to_f
end
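# Illustrative example: with a 09:00-17:00 rule and a discount_percent of 25,
# discount_for(8am, noon) overlaps 180 of the 240 requested minutes, so it
# returns 0.75 * 25.0 #=> 18.75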
# Inverts a set of rules into another set of rules representing the times the
# product is unavailable.
#
# Example:
# Input: A set of rules representing every day, available from 9-noon and 1-5.
# Output: A set of rules for each day, midnight-9, noon-1, and 5-midnight
def self.unavailable(rules)
# rules is always a collection
rules = Array(rules)
not_rules = []
# group rules by day, sort by start_hour
Date::ABBR_DAYNAMES.each do |day|
day_rules = rules.select { |rule| rule.send("on_#{day.downcase}?") }.sort_by(&:start_hour)
if day_rules.empty?
# build entire day not rule
not_rule = ScheduleRule.new("on_#{day.downcase}" => true, :start_hour => 0, :start_min => 0, :end_hour => 24, :end_min => 0,
:unavailable => true)
not_rule.freeze
not_rules.push(not_rule)
next
end
# build not available rules as contiguous blocks between existing rules
not_start_hour = 0
not_start_min = 0
day_rules.each do |day_rule|
if day_rule.start_hour == not_start_hour && day_rule.start_min == not_start_min
# adjust not times, but don't build a not rule
not_start_hour = day_rule.end_hour
not_start_min = day_rule.end_min
next
end
not_rule = ScheduleRule.new("on_#{day.downcase}" => true, :unavailable => true)
not_rule.start_hour = not_start_hour
not_rule.start_min = not_start_min
not_rule.end_hour = day_rule.start_hour
not_rule.end_min = day_rule.start_min
not_start_hour = day_rule.end_hour
not_start_min = day_rule.end_min
not_rule.freeze
not_rules.push(not_rule)
end
next if not_start_hour == 24 && not_start_min == 0
# build not rule for last part of day
not_rule = ScheduleRule.new("on_#{day.downcase}" => true, :unavailable => true)
not_rule.start_hour = not_start_hour
not_rule.start_min = not_start_min
not_rule.end_hour = 24
not_rule.end_min = 0
not_rule.freeze
not_rules.push(not_rule)
end
not_rules
end
# If we're at, say, 4:00, return 3. If we're at 4:01, return 4.
def hour_floor
end_min == 0 ? end_hour - 1 : end_hour
end
private
def percent_overlap(start_at, end_at)
# Strip off seconds
start_at = start_at.change(sec: 0)
end_at = end_at.change(sec: 0)
return 0 unless end_at > start_at
total_mins = TimeRange.new(start_at, end_at).duration_mins
minutes_overlap(start_at, end_at).fdiv total_mins
end
def minutes_overlap(start_at, end_at)
overlap_mins = 0
# TODO: rewrite to be more efficient; don't iterate over every minute
while start_at < end_at
if start_at.hour * 100 + start_at.min >= start_time_int && start_at.hour * 100 + start_at.min < end_time_int && on_day?(start_at)
overlap_mins += 1
end
start_at += 60
end
overlap_mins
end
end
| 35.008264 | 161 | 0.687795 |
fffeece1ab224fe08bad62683783498c0dea609f
| 1,173 |
require 'spec_helper'
describe UtopianSolrizer do
@@solr_options = { read_timeout: 120, open_timeout: 120, url: 'http://localhost:8983/solr/utopian' }
it "has solrize_post" do
post = UtopianRuby::UtopianRubyAPI.get_post_obj('yuxi','utopian-api-ruby-client')
response = UtopianSolrizer.solrize_post(post, @@solr_options)
expect(response['responseHeader']['status']).to eq(0)
end
it "has query" do
params = { :q => "*:*" }
response = UtopianSolrizer.query(@@solr_options, params)
expect(response['responseHeader']['status']).to eq(0)
#response["response"]["docs"].each do |d|
# puts d["id"]
# puts d["author"]
#end
end
it "has exist method" do
id = '0'
expect(UtopianSolrizer.exist(@@solr_options, id)).to eq(false)
end
it "has solrize_posts_by_criterias method" do
expect(UtopianSolrizer.solrize_posts_by_criterias({"limit":1,"type":"development"}, @@solr_options, nil)).to be >= 0
end
it "has solrize_posts_within_minutes method" do
expect(UtopianSolrizer.solrize_posts_within_minutes({"limit":1,"status":"reviewed","type":"development"}, @@solr_options, nil, 60*2)).to be >= 0
end
end
| 32.583333 | 148 | 0.686275 |
286036367887d6828afdbbb39077c5926fa05647
| 274 |
require 'rails_helper'
include CategoryTree::Engine.routes.url_helpers
RSpec.describe 'category_tree/categories/show', :type => :view do
before(:each) do
@category = assign(:category, create(:category))
end
it 'renders attributes in <p>' do
render
end
end
| 21.076923 | 65 | 0.722628 |
1d775b663e5b1d9a5d37bd239eb6fa37c83aa923
| 389 |
require 'base_kde_formula'
class Cervisia < BaseKdeFormula
homepage 'http://www.kde.org/'
url 'http://download.kde.org/stable/4.11.4/src/cervisia-4.11.4.tar.xz'
sha1 '84eea219a74ac252257d76363f8470b106d6fdcf'
devel do
url 'http://download.kde.org/stable/4.12.0/src/cervisia-4.12.0.tar.xz'
sha1 'c4609985634c38d1533a68d3f905c785ff780942'
end
depends_on 'kdelibs'
end
| 25.933333 | 74 | 0.748072 |
1d4b1bb84f94035f2e420769e86764d9e94a0e7c
| 2,559 |
require 'rubygems'
require 'mixlib/cli'
require 'rest_client'
require 'json'
class Veslo
include Mixlib::CLI
SUPPORTED_METHODS = ["put", "get"]
SUPPORTED_RESOURCES = ["configurations"]
attr_accessor :server
option :server_url,
:short => "-s SERVER",
:long => "--server SERVER",
:description => "The Noah server to work against"
def run!(*arguments)
argv = parse_options(arguments)
@server = RestClient::Resource.new(config[:server_url], :headers => {:accept => "application/octet"})
parse_commands(argv)
send(:"resource_#{@method}_cli")
end
def self.client(server)
client = self.new
client.server = RestClient::Resource.new(server, :headers => {:accept => "application/octet"})
client
end
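# Positional arguments are parsed as RESOURCE METHOD NAME [FILE], so an
# illustrative invocation would look like:
# veslo -s http://noah.example:9292 configurations get myapp
# veslo -s http://noah.example:9292 configurations put myapp ./myapp.json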
def parse_commands(commands)
raise ArgumentError.new("Not the right ammount of arguments") unless (3..4).include?(commands.size)
@resource = commands.shift
@method = commands.shift
@name = commands.shift
@file = commands.shift
validate_input
end
def validate_input
raise NotImplementedError.new("method #{@method} not supported") unless SUPPORTED_METHODS.include?(@method)
raise NotImplementedError.new("resource #{@resource} not supported") unless SUPPORTED_RESOURCES.include?(@resource)
end
def get(resource, name)
@resource = resource
@name = name
resource_get
end
def resource_get
@server["#{@resource}/#{@name}"].get
end
def resource_get_cli
result = resource_get
$stdout.puts result.to_str
return 0
rescue RestClient::ExceptionWithResponse => e
case e.response.code
when 404
$stderr.puts("Requested resource not found")
return 1
else
$stderr.puts("Request failed with status: #{e.response.code}")
return 2
end
end
def put(resource, name, data)
@resource = resource
@name = name
resource_put(data)
end
def resource_put(data)
@server["#{@resource}/#{@name}"].put(data)
end
def delete(resource, name, data)
@resource = resource
@name = name
resource_delete(data)
end
def resource_delete(data)
@server["#{@resource}/#{@name}"].delete(data)
end
def resource_put_cli
raise NotImplementedError, "No STDIN yet" unless @file
file_content = File.open(@file, 'r').read
put_data = "{\"format\":\"app/octet\", \"body\":#{file_content.to_json}}"
result = resource_put(put_data)
$stdout.puts "Config uploaded"
return 0
rescue Errno::ENOENT
$stderr.puts "File not found: #{@file}"
return 3
end
end
| 25.088235 | 119 | 0.671356 |
ac1eb44417a991cbfa1677e2522fc7c849586714
| 99 |
# frozen_string_literal: true
class PlanSerializer < ActiveModel::Serializer
attributes :id
end
| 16.5 | 46 | 0.808081 |
edd1d032d2f5e98de6bed6e88f6b7e3e37d3f906
| 139,689 |
# frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/http_checksum.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/json_rpc.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:costexplorer)
module Aws::CostExplorer
# An API client for CostExplorer. To construct a client, you need to configure a `:region` and `:credentials`.
#
# client = Aws::CostExplorer::Client.new(
# region: region_name,
# credentials: credentials,
# # ...
# )
#
# For details on configuring region and credentials see
# the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html).
#
# See {#initialize} for a full list of supported configuration options.
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :costexplorer
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::HttpChecksum)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::JsonRpc)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentials` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :adaptive_retry_wait_to_fill (true)
# Used only in `adaptive` retry mode. When true, the request will sleep
# until there is sufficient client side capacity to retry the request.
# When false, the request will raise a `RetryCapacityNotAvailableError` and will
# not retry instead of sleeping.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [String] :client_side_monitoring_host ("127.0.0.1")
# Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client
# side monitoring agent is running on, where client metrics will be published via UDP.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :correct_clock_skew (true)
# Used only in `standard` and adaptive retry modes. Specifies whether to apply
# a clock skew correction and retry requests with skewed client clocks.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test or custom endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [Integer] :max_attempts (3)
# An integer representing the maximum number attempts that will be made for
# a single request, including the initial attempt. For example,
# setting this value to 5 will result in a request being retried up to
# 4 times. Used in `standard` and `adaptive` retry modes.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Proc] :retry_backoff
# A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay.
# This option is only used in the `legacy` retry mode.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function. This option
# is only used in the `legacy` retry mode.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function.
# Some predefined functions can be referenced by name - :none, :equal, :full,
# otherwise a Proc that takes and returns a number. This option is only used
# in the `legacy` retry mode.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors, auth errors,
# endpoint discovery, and errors from expired credentials.
# This option is only used in the `legacy` retry mode.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit)
# used by the default backoff function. This option is only used in the
# `legacy` retry mode.
#
# @option options [String] :retry_mode ("legacy")
# Specifies which retry algorithm to use. Values are:
#
# * `legacy` - The pre-existing retry behavior. This is default value if
# no retry mode is provided.
#
# * `standard` - A standardized set of retry rules across the AWS SDKs.
# This includes support for retry quotas, which limit the number of
# unsuccessful retries a client can make.
#
# * `adaptive` - An experimental retry mode that includes all the
# functionality of `standard` mode along with automatic client side
# throttling. This is a provisional mode that may change behavior
# in the future.
#
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :simple_json (false)
# Disables request parameter conversion, validation, and formatting.
# Also disable response data type conversions. This option is useful
# when you want to ensure the highest level of performance by
# avoiding overhead of walking request parameters and response data
# structures.
#
# When `:simple_json` is enabled, the request parameters hash must
# be formatted exactly as the DynamoDB API expects.
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set per-request on the session.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
# `:ssl_ca_bundle` or `:ssl_ca_directory` the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
# not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the
# system default will be used if available.
#
def initialize(*args)
super
end
# @!group API Operations
# Creates a new Cost Category with the requested name and rules.
#
# @option params [required, String] :name
# The unique name of the Cost Category.
#
# @option params [required, String] :rule_version
# The rule schema version in this particular Cost Category.
#
# @option params [required, Array<Types::CostCategoryRule>] :rules
# The Cost Category rules used to categorize costs. For more
# information, see [CostCategoryRule][1].
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_CostCategoryRule.html
#
# @return [Types::CreateCostCategoryDefinitionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateCostCategoryDefinitionResponse#cost_category_arn #cost_category_arn} => String
# * {Types::CreateCostCategoryDefinitionResponse#effective_start #effective_start} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_cost_category_definition({
# name: "CostCategoryName", # required
# rule_version: "CostCategoryExpression.v1", # required, accepts CostCategoryExpression.v1
# rules: [ # required
# {
# value: "CostCategoryValue", # required
# rule: { # required
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# },
# ],
# })
#
# @example Response structure
#
# resp.cost_category_arn #=> String
# resp.effective_start #=> String
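#
# @example Hypothetical usage: split costs by linked account
#
#   # A minimal sketch, not generated from the API model; the category name
#   # and account ID are illustrative placeholders.
#   resp = client.create_cost_category_definition({
#     name: "Teams",
#     rule_version: "CostCategoryExpression.v1",
#     rules: [
#       {
#         value: "Platform",
#         rule: {
#           dimensions: { key: "LINKED_ACCOUNT", values: ["111122223333"] },
#         },
#       },
#     ],
#   })
#   resp.cost_category_arn #=> String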
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/CreateCostCategoryDefinition AWS API Documentation
#
# @overload create_cost_category_definition(params = {})
# @param [Hash] params ({})
def create_cost_category_definition(params = {}, options = {})
req = build_request(:create_cost_category_definition, params)
req.send_request(options)
end
# Deletes a Cost Category. Expenses from this month going forward will
# no longer be categorized with this Cost Category.
#
# @option params [required, String] :cost_category_arn
# The unique identifier for your Cost Category.
#
# @return [Types::DeleteCostCategoryDefinitionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteCostCategoryDefinitionResponse#cost_category_arn #cost_category_arn} => String
# * {Types::DeleteCostCategoryDefinitionResponse#effective_end #effective_end} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_cost_category_definition({
# cost_category_arn: "Arn", # required
# })
#
# @example Response structure
#
# resp.cost_category_arn #=> String
# resp.effective_end #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/DeleteCostCategoryDefinition AWS API Documentation
#
# @overload delete_cost_category_definition(params = {})
# @param [Hash] params ({})
def delete_cost_category_definition(params = {}, options = {})
req = build_request(:delete_cost_category_definition, params)
req.send_request(options)
end
# Returns the name, ARN, rules, definition, and effective dates of a
# Cost Category that's defined in the account.
#
# You have the option to use `EffectiveOn` to return a Cost Category
# that is active on a specific date. If there is no `EffectiveOn`
# specified, you'll see a Cost Category that is effective on the
# current date. If the Cost Category is still effective, `EffectiveEnd`
# is omitted from the response.
#
# @option params [required, String] :cost_category_arn
# The unique identifier for your Cost Category.
#
# @option params [String] :effective_on
# The date when the Cost Category was effective.
#
# @return [Types::DescribeCostCategoryDefinitionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeCostCategoryDefinitionResponse#cost_category #cost_category} => Types::CostCategory
#
# @example Request syntax with placeholder values
#
# resp = client.describe_cost_category_definition({
# cost_category_arn: "Arn", # required
# effective_on: "ZonedDateTime",
# })
#
# @example Response structure
#
# resp.cost_category.cost_category_arn #=> String
# resp.cost_category.effective_start #=> String
# resp.cost_category.effective_end #=> String
# resp.cost_category.name #=> String
# resp.cost_category.rule_version #=> String, one of "CostCategoryExpression.v1"
# resp.cost_category.rules #=> Array
# resp.cost_category.rules[0].value #=> String
# resp.cost_category.rules[0].rule.or #=> Array
# resp.cost_category.rules[0].rule.or[0] #=> Types::Expression
# resp.cost_category.rules[0].rule.and #=> Array
# resp.cost_category.rules[0].rule.and[0] #=> Types::Expression
# resp.cost_category.rules[0].rule.not #=> Types::Expression
# resp.cost_category.rules[0].rule.dimensions.key #=> String, one of "AZ", "INSTANCE_TYPE", "LINKED_ACCOUNT", "LINKED_ACCOUNT_NAME", "OPERATION", "PURCHASE_TYPE", "REGION", "SERVICE", "SERVICE_CODE", "USAGE_TYPE", "USAGE_TYPE_GROUP", "RECORD_TYPE", "OPERATING_SYSTEM", "TENANCY", "SCOPE", "PLATFORM", "SUBSCRIPTION_ID", "LEGAL_ENTITY_NAME", "DEPLOYMENT_OPTION", "DATABASE_ENGINE", "CACHE_ENGINE", "INSTANCE_TYPE_FAMILY", "BILLING_ENTITY", "RESERVATION_ID", "RESOURCE_ID", "RIGHTSIZING_TYPE", "SAVINGS_PLANS_TYPE", "SAVINGS_PLAN_ARN", "PAYMENT_OPTION"
# resp.cost_category.rules[0].rule.dimensions.values #=> Array
# resp.cost_category.rules[0].rule.dimensions.values[0] #=> String
# resp.cost_category.rules[0].rule.dimensions.match_options #=> Array
# resp.cost_category.rules[0].rule.dimensions.match_options[0] #=> String, one of "EQUALS", "STARTS_WITH", "ENDS_WITH", "CONTAINS", "CASE_SENSITIVE", "CASE_INSENSITIVE"
# resp.cost_category.rules[0].rule.tags.key #=> String
# resp.cost_category.rules[0].rule.tags.values #=> Array
# resp.cost_category.rules[0].rule.tags.values[0] #=> String
# resp.cost_category.rules[0].rule.tags.match_options #=> Array
# resp.cost_category.rules[0].rule.tags.match_options[0] #=> String, one of "EQUALS", "STARTS_WITH", "ENDS_WITH", "CONTAINS", "CASE_SENSITIVE", "CASE_INSENSITIVE"
# resp.cost_category.rules[0].rule.cost_categories.key #=> String
# resp.cost_category.rules[0].rule.cost_categories.values #=> Array
# resp.cost_category.rules[0].rule.cost_categories.values[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/DescribeCostCategoryDefinition AWS API Documentation
#
# @overload describe_cost_category_definition(params = {})
# @param [Hash] params ({})
def describe_cost_category_definition(params = {}, options = {})
req = build_request(:describe_cost_category_definition, params)
req.send_request(options)
end
# Retrieves cost and usage metrics for your account. You can specify
# which cost and usage-related metric, such as `BlendedCosts` or
# `UsageQuantity`, you want the request to return. You can also
# filter and group your data by various dimensions, such as `SERVICE` or
# `AZ`, in a specific time range. For a complete list of valid
# dimensions, see the [GetDimensionValues][1] operation. Master accounts
# in an organization in AWS Organizations have access to all member
# accounts.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_GetDimensionValues.html
#
# @option params [required, Types::DateInterval] :time_period
# Sets the start and end dates for retrieving AWS costs. The start date
# is inclusive, but the end date is exclusive. For example, if `start`
# is `2017-01-01` and `end` is `2017-05-01`, then the cost and usage
# data is retrieved from `2017-01-01` up to and including `2017-04-30`
# but not including `2017-05-01`.
#
# @option params [String] :granularity
#   Sets the AWS cost granularity to `MONTHLY`, `DAILY`, or `HOURLY`. If
#   `Granularity` isn't set, the response object doesn't include
#   `Granularity`.
#
# @option params [Types::Expression] :filter
# Filters AWS costs by different dimensions. For example, you can
# specify `SERVICE` and `LINKED_ACCOUNT` and get the costs that are
# associated with that account's usage of that service. You can nest
# `Expression` objects to define any combination of dimension filters.
# For more information, see [Expression][1].
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @option params [Array<String>] :metrics
# Which metrics are returned in the query. For more information about
# blended and unblended rates, see [Why does the "blended" annotation
# appear on some line items in my bill?][1].
#
# Valid values are `AmortizedCost`, `BlendedCost`, `NetAmortizedCost`,
# `NetUnblendedCost`, `NormalizedUsageAmount`, `UnblendedCost`, and
# `UsageQuantity`.
#
# <note markdown="1"> If you return the `UsageQuantity` metric, the service aggregates all
# usage numbers without taking into account the units. For example, if
# you aggregate `usageQuantity` across all of Amazon EC2, the results
# aren't meaningful because Amazon EC2 compute hours and data transfer
# are measured in different units (for example, hours vs. GB). To get
# more meaningful `UsageQuantity` metrics, filter by `UsageType` or
# `UsageTypeGroups`.
#
# </note>
#
# `Metrics` is required for `GetCostAndUsage` requests.
#
#
#
# [1]: http://aws.amazon.com/premiumsupport/knowledge-center/blended-rates-intro/
#
# @option params [Array<Types::GroupDefinition>] :group_by
#   You can group AWS costs using up to two different groups:
#   dimensions, tag keys, or both.
#
# When you group by tag key, you get all tag values, including empty
# strings.
#
# Valid values are `AZ`, `INSTANCE_TYPE`, `LEGAL_ENTITY_NAME`,
# `LINKED_ACCOUNT`, `OPERATION`, `PLATFORM`, `PURCHASE_TYPE`, `SERVICE`,
# `TAGS`, `TENANCY`, `RECORD_TYPE`, and `USAGE_TYPE`.
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. AWS provides the token
# when the response from a previous call has more results than the
# maximum page size.
#
# @return [Types::GetCostAndUsageResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCostAndUsageResponse#next_page_token #next_page_token} => String
# * {Types::GetCostAndUsageResponse#group_definitions #group_definitions} => Array<Types::GroupDefinition>
# * {Types::GetCostAndUsageResponse#results_by_time #results_by_time} => Array<Types::ResultByTime>
#
# @example Request syntax with placeholder values
#
# resp = client.get_cost_and_usage({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# granularity: "DAILY", # accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# metrics: ["MetricName"],
# group_by: [
# {
# type: "DIMENSION", # accepts DIMENSION, TAG, COST_CATEGORY
# key: "GroupDefinitionKey",
# },
# ],
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.next_page_token #=> String
# resp.group_definitions #=> Array
# resp.group_definitions[0].type #=> String, one of "DIMENSION", "TAG", "COST_CATEGORY"
# resp.group_definitions[0].key #=> String
# resp.results_by_time #=> Array
# resp.results_by_time[0].time_period.start #=> String
# resp.results_by_time[0].time_period.end #=> String
# resp.results_by_time[0].total #=> Hash
# resp.results_by_time[0].total["MetricName"].amount #=> String
# resp.results_by_time[0].total["MetricName"].unit #=> String
# resp.results_by_time[0].groups #=> Array
# resp.results_by_time[0].groups[0].keys #=> Array
# resp.results_by_time[0].groups[0].keys[0] #=> String
# resp.results_by_time[0].groups[0].metrics #=> Hash
# resp.results_by_time[0].groups[0].metrics["MetricName"].amount #=> String
# resp.results_by_time[0].groups[0].metrics["MetricName"].unit #=> String
# resp.results_by_time[0].estimated #=> Boolean
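#
# @example Hypothetical usage: last month's unblended cost per service
#
#   # A minimal sketch, not generated from the API model; the dates are
#   # illustrative. `metrics` is required, as noted above.
#   resp = client.get_cost_and_usage({
#     time_period: { start: "2019-11-01", end: "2019-12-01" },
#     granularity: "MONTHLY",
#     metrics: ["UnblendedCost"],
#     group_by: [{ type: "DIMENSION", key: "SERVICE" }],
#   })
#   resp.results_by_time.each do |result|
#     result.groups.each do |group|
#       puts "#{group.keys.first}: #{group.metrics['UnblendedCost'].amount}"
#     end
#   end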
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetCostAndUsage AWS API Documentation
#
# @overload get_cost_and_usage(params = {})
# @param [Hash] params ({})
def get_cost_and_usage(params = {}, options = {})
req = build_request(:get_cost_and_usage, params)
req.send_request(options)
end
# Retrieves cost and usage metrics with resources for your account. You
# can specify which cost and usage-related metric, such as
# `BlendedCosts` or `UsageQuantity`, you want the request to return.
# You can also filter and group your data by various dimensions,
# such as `SERVICE` or `AZ`, in a specific time range. For a complete
# list of valid dimensions, see the [GetDimensionValues][1] operation.
# Master accounts in an organization in AWS Organizations have access to
# all member accounts. This API is currently available for the Amazon
# Elastic Compute Cloud – Compute service only.
#
# <note markdown="1"> This is an opt-in only feature. You can enable this feature from the
# Cost Explorer Settings page. For information on how to access the
# Settings page, see [Controlling Access for Cost Explorer][2] in the
# *AWS Billing and Cost Management User Guide*.
#
# </note>
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_GetDimensionValues.html
# [2]: https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/ce-access.html
#
# @option params [required, Types::DateInterval] :time_period
# Sets the start and end dates for retrieving Amazon Web Services costs.
# The range must be within the last 14 days (the start date cannot be
# earlier than 14 days ago). The start date is inclusive, but the end
# date is exclusive. For example, if `start` is `2017-01-01` and `end`
# is `2017-05-01`, then the cost and usage data is retrieved from
# `2017-01-01` up to and including `2017-04-30` but not including
# `2017-05-01`.
#
# @option params [String] :granularity
# Sets the AWS cost granularity to `MONTHLY`, `DAILY`, or `HOURLY`. If
#   `Granularity` isn't set, the response object doesn't include
#   `Granularity`.
#
# @option params [Types::Expression] :filter
# Filters Amazon Web Services costs by different dimensions. For
# example, you can specify `SERVICE` and `LINKED_ACCOUNT` and get the
# costs that are associated with that account's usage of that service.
# You can nest `Expression` objects to define any combination of
# dimension filters. For more information, see [Expression][1].
#
# The `GetCostAndUsageWithResources` operation requires that you either
# group by or filter by a `ResourceId`.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @option params [Array<String>] :metrics
# Which metrics are returned in the query. For more information about
# blended and unblended rates, see [Why does the "blended" annotation
# appear on some line items in my bill?][1].
#
# Valid values are `AmortizedCost`, `BlendedCost`, `NetAmortizedCost`,
# `NetUnblendedCost`, `NormalizedUsageAmount`, `UnblendedCost`, and
# `UsageQuantity`.
#
# <note markdown="1"> If you return the `UsageQuantity` metric, the service aggregates all
# usage numbers without taking the units into account. For example, if
# you aggregate `usageQuantity` across all of Amazon EC2, the results
# aren't meaningful because Amazon EC2 compute hours and data transfer
# are measured in different units (for example, hours vs. GB). To get
# more meaningful `UsageQuantity` metrics, filter by `UsageType` or
# `UsageTypeGroups`.
#
# </note>
#
# `Metrics` is required for `GetCostAndUsageWithResources` requests.
#
#
#
# [1]: http://aws.amazon.com/premiumsupport/knowledge-center/blended-rates-intro/
#
# @option params [Array<Types::GroupDefinition>] :group_by
# You can group Amazon Web Services costs using up to two different
#   groups: dimensions, tag keys, or both.
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. AWS provides the token
# when the response from a previous call has more results than the
# maximum page size.
#
# @return [Types::GetCostAndUsageWithResourcesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCostAndUsageWithResourcesResponse#next_page_token #next_page_token} => String
# * {Types::GetCostAndUsageWithResourcesResponse#group_definitions #group_definitions} => Array<Types::GroupDefinition>
# * {Types::GetCostAndUsageWithResourcesResponse#results_by_time #results_by_time} => Array<Types::ResultByTime>
#
# @example Request syntax with placeholder values
#
# resp = client.get_cost_and_usage_with_resources({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# granularity: "DAILY", # accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# metrics: ["MetricName"],
# group_by: [
# {
# type: "DIMENSION", # accepts DIMENSION, TAG, COST_CATEGORY
# key: "GroupDefinitionKey",
# },
# ],
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.next_page_token #=> String
# resp.group_definitions #=> Array
# resp.group_definitions[0].type #=> String, one of "DIMENSION", "TAG", "COST_CATEGORY"
# resp.group_definitions[0].key #=> String
# resp.results_by_time #=> Array
# resp.results_by_time[0].time_period.start #=> String
# resp.results_by_time[0].time_period.end #=> String
# resp.results_by_time[0].total #=> Hash
# resp.results_by_time[0].total["MetricName"].amount #=> String
# resp.results_by_time[0].total["MetricName"].unit #=> String
# resp.results_by_time[0].groups #=> Array
# resp.results_by_time[0].groups[0].keys #=> Array
# resp.results_by_time[0].groups[0].keys[0] #=> String
# resp.results_by_time[0].groups[0].metrics #=> Hash
# resp.results_by_time[0].groups[0].metrics["MetricName"].amount #=> String
# resp.results_by_time[0].groups[0].metrics["MetricName"].unit #=> String
# resp.results_by_time[0].estimated #=> Boolean
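#
# @example Hypothetical usage: daily EC2 cost per resource for the past week
#
#   # A minimal sketch, not generated from the API model; the dates are
#   # illustrative and must fall within the last 14 days. The SERVICE filter
#   # combined with a RESOURCE_ID group satisfies the requirement described
#   # above.
#   resp = client.get_cost_and_usage_with_resources({
#     time_period: { start: "2019-12-01", end: "2019-12-08" },
#     granularity: "DAILY",
#     metrics: ["UnblendedCost"],
#     filter: {
#       dimensions: {
#         key: "SERVICE",
#         values: ["Amazon Elastic Compute Cloud - Compute"],
#       },
#     },
#     group_by: [{ type: "DIMENSION", key: "RESOURCE_ID" }],
#   })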
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetCostAndUsageWithResources AWS API Documentation
#
# @overload get_cost_and_usage_with_resources(params = {})
# @param [Hash] params ({})
def get_cost_and_usage_with_resources(params = {}, options = {})
req = build_request(:get_cost_and_usage_with_resources, params)
req.send_request(options)
end
# Retrieves a forecast for how much Amazon Web Services predicts that
# you will spend over the forecast time period that you select, based on
# your past costs.
#
# @option params [required, Types::DateInterval] :time_period
# The period of time that you want the forecast to cover.
#
# @option params [required, String] :metric
# Which metric Cost Explorer uses to create your forecast. For more
# information about blended and unblended rates, see [Why does the
# "blended" annotation appear on some line items in my bill?][1].
#
# Valid values for a `GetCostForecast` call are the following:
#
# * AMORTIZED\_COST
#
# * BLENDED\_COST
#
# * NET\_AMORTIZED\_COST
#
# * NET\_UNBLENDED\_COST
#
# * UNBLENDED\_COST
#
#
#
# [1]: http://aws.amazon.com/premiumsupport/knowledge-center/blended-rates-intro/
#
# @option params [required, String] :granularity
# How granular you want the forecast to be. You can get 3 months of
# `DAILY` forecasts or 12 months of `MONTHLY` forecasts.
#
# The `GetCostForecast` operation supports only `DAILY` and `MONTHLY`
# granularities.
#
# @option params [Types::Expression] :filter
# The filters that you want to use to filter your forecast. Cost
# Explorer API supports all of the Cost Explorer filters.
#
# @option params [Integer] :prediction_interval_level
# Cost Explorer always returns the mean forecast as a single point. You
# can request a prediction interval around the mean by specifying a
# confidence level. The higher the confidence level, the more confident
# Cost Explorer is about the actual value falling in the prediction
# interval. Higher confidence levels result in wider prediction
# intervals.
#
# @return [Types::GetCostForecastResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCostForecastResponse#total #total} => Types::MetricValue
# * {Types::GetCostForecastResponse#forecast_results_by_time #forecast_results_by_time} => Array<Types::ForecastResult>
#
# @example Request syntax with placeholder values
#
# resp = client.get_cost_forecast({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# metric: "BLENDED_COST", # required, accepts BLENDED_COST, UNBLENDED_COST, AMORTIZED_COST, NET_UNBLENDED_COST, NET_AMORTIZED_COST, USAGE_QUANTITY, NORMALIZED_USAGE_AMOUNT
# granularity: "DAILY", # required, accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# prediction_interval_level: 1,
# })
#
# @example Response structure
#
# resp.total.amount #=> String
# resp.total.unit #=> String
# resp.forecast_results_by_time #=> Array
# resp.forecast_results_by_time[0].time_period.start #=> String
# resp.forecast_results_by_time[0].time_period.end #=> String
# resp.forecast_results_by_time[0].mean_value #=> String
# resp.forecast_results_by_time[0].prediction_interval_lower_bound #=> String
# resp.forecast_results_by_time[0].prediction_interval_upper_bound #=> String
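#
# @example Hypothetical usage: quarterly cost forecast with an 80% interval
#
#   # A minimal sketch, not generated from the API model; the dates and the
#   # prediction interval level are illustrative.
#   resp = client.get_cost_forecast({
#     time_period: { start: "2020-01-01", end: "2020-04-01" },
#     metric: "UNBLENDED_COST",
#     granularity: "MONTHLY",
#     prediction_interval_level: 80,
#   })
#   resp.total.amount #=> String, the mean forecast for the whole period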
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetCostForecast AWS API Documentation
#
# @overload get_cost_forecast(params = {})
# @param [Hash] params ({})
def get_cost_forecast(params = {}, options = {})
req = build_request(:get_cost_forecast, params)
req.send_request(options)
end
# Retrieves all available filter values for a specified filter over a
# period of time. You can search the dimension values for an arbitrary
# string.
#
# @option params [String] :search_string
# The value that you want to search the filter values for.
#
# @option params [required, Types::DateInterval] :time_period
# The start and end dates for retrieving the dimension values. The start
# date is inclusive, but the end date is exclusive. For example, if
# `start` is `2017-01-01` and `end` is `2017-05-01`, then the cost and
# usage data is retrieved from `2017-01-01` up to and including
# `2017-04-30` but not including `2017-05-01`.
#
# @option params [required, String] :dimension
# The name of the dimension. Each `Dimension` is available for a
# different `Context`. For more information, see `Context`.
#
# @option params [String] :context
# The context for the call to `GetDimensionValues`. This can be
#   `RESERVATIONS`, `COST_AND_USAGE`, or `SAVINGS_PLANS`. The default value is
# `COST_AND_USAGE`. If the context is set to `RESERVATIONS`, the
# resulting dimension values can be used in the
# `GetReservationUtilization` operation. If the context is set to
# `COST_AND_USAGE`, the resulting dimension values can be used in the
# `GetCostAndUsage` operation.
#
# If you set the context to `COST_AND_USAGE`, you can use the following
# dimensions for searching:
#
# * AZ - The Availability Zone. An example is `us-east-1a`.
#
# * DATABASE\_ENGINE - The Amazon Relational Database Service database.
# Examples are Aurora or MySQL.
#
# * INSTANCE\_TYPE - The type of Amazon EC2 instance. An example is
# `m4.xlarge`.
#
# * LEGAL\_ENTITY\_NAME - The name of the organization that sells you
# AWS services, such as Amazon Web Services.
#
# * LINKED\_ACCOUNT - The description in the attribute map that includes
# the full name of the member account. The value field contains the
# AWS ID of the member account.
#
# * OPERATING\_SYSTEM - The operating system. Examples are Windows or
# Linux.
#
# * OPERATION - The action performed. Examples include `RunInstance` and
# `CreateBucket`.
#
# * PLATFORM - The Amazon EC2 operating system. Examples are Windows or
# Linux.
#
# * PURCHASE\_TYPE - The reservation type of the purchase to which this
# usage is related. Examples include On-Demand Instances and Standard
# Reserved Instances.
#
# * SERVICE - The AWS service such as Amazon DynamoDB.
#
# * USAGE\_TYPE - The type of usage. An example is
# DataTransfer-In-Bytes. The response for the `GetDimensionValues`
# operation includes a unit attribute. Examples include GB and Hrs.
#
# * USAGE\_TYPE\_GROUP - The grouping of common usage types. An example
# is Amazon EC2: CloudWatch – Alarms. The response for this operation
# includes a unit attribute.
#
# * RECORD\_TYPE - The different types of charges such as RI fees, usage
# costs, tax refunds, and credits.
#
# * RESOURCE\_ID - The unique identifier of the resource. ResourceId is
# an opt-in feature only available for last 14 days for EC2-Compute
# Service.
#
# If you set the context to `RESERVATIONS`, you can use the following
# dimensions for searching:
#
# * AZ - The Availability Zone. An example is `us-east-1a`.
#
# * CACHE\_ENGINE - The Amazon ElastiCache operating system. Examples
# are Windows or Linux.
#
# * DEPLOYMENT\_OPTION - The scope of Amazon Relational Database Service
# deployments. Valid values are `SingleAZ` and `MultiAZ`.
#
# * INSTANCE\_TYPE - The type of Amazon EC2 instance. An example is
# `m4.xlarge`.
#
# * LINKED\_ACCOUNT - The description in the attribute map that includes
# the full name of the member account. The value field contains the
# AWS ID of the member account.
#
# * PLATFORM - The Amazon EC2 operating system. Examples are Windows or
# Linux.
#
# * REGION - The AWS Region.
#
# * SCOPE (Utilization only) - The scope of a Reserved Instance (RI).
# Values are regional or a single Availability Zone.
#
# * TAG (Coverage only) - The tags that are associated with a Reserved
# Instance (RI).
#
# * TENANCY - The tenancy of a resource. Examples are shared or
# dedicated.
#
# If you set the context to `SAVINGS_PLANS`, you can use the following
# dimensions for searching:
#
# * SAVINGS\_PLANS\_TYPE - Type of Savings Plans (EC2 Instance or
# Compute)
#
# * PAYMENT\_OPTION - Payment option for the given Savings Plans (for
# example, All Upfront)
#
# * REGION - The AWS Region.
#
# * INSTANCE\_TYPE\_FAMILY - The family of instances (For example, `m5`)
#
# * LINKED\_ACCOUNT - The description in the attribute map that includes
# the full name of the member account. The value field contains the
# AWS ID of the member account.
#
# * SAVINGS\_PLAN\_ARN - The unique identifier for your Savings Plan
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. AWS provides the token
# when the response from a previous call has more results than the
# maximum page size.
#
# @return [Types::GetDimensionValuesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetDimensionValuesResponse#dimension_values #dimension_values} => Array<Types::DimensionValuesWithAttributes>
# * {Types::GetDimensionValuesResponse#return_size #return_size} => Integer
# * {Types::GetDimensionValuesResponse#total_size #total_size} => Integer
# * {Types::GetDimensionValuesResponse#next_page_token #next_page_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_dimension_values({
# search_string: "SearchString",
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# dimension: "AZ", # required, accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# context: "COST_AND_USAGE", # accepts COST_AND_USAGE, RESERVATIONS, SAVINGS_PLANS
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.dimension_values #=> Array
# resp.dimension_values[0].value #=> String
# resp.dimension_values[0].attributes #=> Hash
# resp.dimension_values[0].attributes["AttributeType"] #=> <Hash,Array,String,Numeric,Boolean,IO,Set,nil>
# resp.return_size #=> Integer
# resp.total_size #=> Integer
# resp.next_page_token #=> String
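#
# @example Hypothetical usage: list the services active in a period
#
#   # A minimal sketch, not generated from the API model; the dates are
#   # illustrative.
#   resp = client.get_dimension_values({
#     time_period: { start: "2019-11-01", end: "2019-12-01" },
#     dimension: "SERVICE",
#     context: "COST_AND_USAGE",
#   })
#   resp.dimension_values.map(&:value) #=> Array of service name Strings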
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetDimensionValues AWS API Documentation
#
# @overload get_dimension_values(params = {})
# @param [Hash] params ({})
def get_dimension_values(params = {}, options = {})
req = build_request(:get_dimension_values, params)
req.send_request(options)
end
# Retrieves the reservation coverage for your account. This enables you
# to see how much of your Amazon Elastic Compute Cloud, Amazon
# ElastiCache, Amazon Relational Database Service, or Amazon Redshift
# usage is covered by a reservation. An organization's master account
# can see the coverage of the associated member accounts. This supports
# dimensions, Cost Categories, and nested expressions. For any time
# period, you can filter data about reservation usage by the following
# dimensions:
#
# * AZ
#
# * CACHE\_ENGINE
#
# * DATABASE\_ENGINE
#
# * DEPLOYMENT\_OPTION
#
# * INSTANCE\_TYPE
#
# * LINKED\_ACCOUNT
#
# * OPERATING\_SYSTEM
#
# * PLATFORM
#
# * REGION
#
# * SERVICE
#
# * TAG
#
# * TENANCY
#
# To determine valid values for a dimension, use the
# `GetDimensionValues` operation.
#
# @option params [required, Types::DateInterval] :time_period
# The start and end dates of the period that you want to retrieve data
# about reservation coverage for. You can retrieve data for a maximum of
# 13 months: the last 12 months and the current month. The start date is
# inclusive, but the end date is exclusive. For example, if `start` is
# `2017-01-01` and `end` is `2017-05-01`, then the cost and usage data
# is retrieved from `2017-01-01` up to and including `2017-04-30` but
# not including `2017-05-01`.
#
# @option params [Array<Types::GroupDefinition>] :group_by
# You can group the data by the following attributes:
#
# * AZ
#
# * CACHE\_ENGINE
#
# * DATABASE\_ENGINE
#
# * DEPLOYMENT\_OPTION
#
# * INSTANCE\_TYPE
#
# * LINKED\_ACCOUNT
#
# * OPERATING\_SYSTEM
#
# * PLATFORM
#
# * REGION
#
# * TENANCY
#
# @option params [String] :granularity
# The granularity of the AWS cost data for the reservation. Valid values
# are `MONTHLY` and `DAILY`.
#
# If `GroupBy` is set, `Granularity` can't be set. If `Granularity`
#   isn't set, the response object doesn't include `Granularity`.
#
# The `GetReservationCoverage` operation supports only `DAILY` and
# `MONTHLY` granularities.
#
# @option params [Types::Expression] :filter
#   Filters coverage data by dimensions. You can filter by the
# following dimensions:
#
# * AZ
#
# * CACHE\_ENGINE
#
# * DATABASE\_ENGINE
#
# * DEPLOYMENT\_OPTION
#
# * INSTANCE\_TYPE
#
# * LINKED\_ACCOUNT
#
# * OPERATING\_SYSTEM
#
# * PLATFORM
#
# * REGION
#
# * SERVICE
#
# * TAG
#
# * TENANCY
#
# `GetReservationCoverage` uses the same [Expression][1] object as the
# other operations, but only `AND` is supported among each dimension.
# You can nest only one level deep. If there are multiple values for a
# dimension, they are OR'd together.
#
# If you don't provide a `SERVICE` filter, Cost Explorer defaults to
# EC2.
#
#   Cost Categories are also supported.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @option params [Array<String>] :metrics
# The measurement that you want your reservation coverage reported in.
#
# Valid values are `Hour`, `Unit`, and `Cost`. You can use multiple
# values in a request.
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. AWS provides the token
# when the response from a previous call has more results than the
# maximum page size.
#
# @return [Types::GetReservationCoverageResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetReservationCoverageResponse#coverages_by_time #coverages_by_time} => Array<Types::CoverageByTime>
# * {Types::GetReservationCoverageResponse#total #total} => Types::Coverage
# * {Types::GetReservationCoverageResponse#next_page_token #next_page_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_reservation_coverage({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# group_by: [
# {
# type: "DIMENSION", # accepts DIMENSION, TAG, COST_CATEGORY
# key: "GroupDefinitionKey",
# },
# ],
# granularity: "DAILY", # accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# metrics: ["MetricName"],
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.coverages_by_time #=> Array
# resp.coverages_by_time[0].time_period.start #=> String
# resp.coverages_by_time[0].time_period.end #=> String
# resp.coverages_by_time[0].groups #=> Array
# resp.coverages_by_time[0].groups[0].attributes #=> Hash
# resp.coverages_by_time[0].groups[0].attributes["AttributeType"] #=> <Hash,Array,String,Numeric,Boolean,IO,Set,nil>
# resp.coverages_by_time[0].groups[0].coverage.coverage_hours.on_demand_hours #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_hours.reserved_hours #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_hours.total_running_hours #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_hours.coverage_hours_percentage #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_normalized_units.on_demand_normalized_units #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_normalized_units.reserved_normalized_units #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_normalized_units.total_running_normalized_units #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_normalized_units.coverage_normalized_units_percentage #=> String
# resp.coverages_by_time[0].groups[0].coverage.coverage_cost.on_demand_cost #=> String
# resp.coverages_by_time[0].total.coverage_hours.on_demand_hours #=> String
# resp.coverages_by_time[0].total.coverage_hours.reserved_hours #=> String
# resp.coverages_by_time[0].total.coverage_hours.total_running_hours #=> String
# resp.coverages_by_time[0].total.coverage_hours.coverage_hours_percentage #=> String
# resp.coverages_by_time[0].total.coverage_normalized_units.on_demand_normalized_units #=> String
# resp.coverages_by_time[0].total.coverage_normalized_units.reserved_normalized_units #=> String
# resp.coverages_by_time[0].total.coverage_normalized_units.total_running_normalized_units #=> String
# resp.coverages_by_time[0].total.coverage_normalized_units.coverage_normalized_units_percentage #=> String
# resp.coverages_by_time[0].total.coverage_cost.on_demand_cost #=> String
# resp.total.coverage_hours.on_demand_hours #=> String
# resp.total.coverage_hours.reserved_hours #=> String
# resp.total.coverage_hours.total_running_hours #=> String
# resp.total.coverage_hours.coverage_hours_percentage #=> String
# resp.total.coverage_normalized_units.on_demand_normalized_units #=> String
# resp.total.coverage_normalized_units.reserved_normalized_units #=> String
# resp.total.coverage_normalized_units.total_running_normalized_units #=> String
# resp.total.coverage_normalized_units.coverage_normalized_units_percentage #=> String
# resp.total.coverage_cost.on_demand_cost #=> String
# resp.next_page_token #=> String
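#
# @example Hypothetical usage: monthly EC2 coverage hours for a quarter
#
#   # A minimal sketch, not generated from the API model; the dates are
#   # illustrative. With no SERVICE filter, coverage defaults to EC2, as
#   # noted above.
#   resp = client.get_reservation_coverage({
#     time_period: { start: "2019-09-01", end: "2019-12-01" },
#     granularity: "MONTHLY",
#     metrics: ["Hour"],
#   })
#   resp.total.coverage_hours.coverage_hours_percentage #=> String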
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetReservationCoverage AWS API Documentation
#
# @overload get_reservation_coverage(params = {})
# @param [Hash] params ({})
def get_reservation_coverage(params = {}, options = {})
req = build_request(:get_reservation_coverage, params)
req.send_request(options)
end
# Gets recommendations for which reservations to purchase. These
# recommendations could help you reduce your costs. Reservations provide
# a discounted hourly rate (up to 75%) compared to On-Demand pricing.
#
# AWS generates your recommendations by identifying your On-Demand usage
# during a specific time period and collecting your usage into
# categories that are eligible for a reservation. After AWS has these
# categories, it simulates every combination of reservations in each
# category of usage to identify the best number of each type of RI to
# purchase to maximize your estimated savings.
#
# For example, AWS automatically aggregates your Amazon EC2 Linux,
# shared tenancy, and c4 family usage in the US West (Oregon) Region and
# recommends that you buy size-flexible regional reservations to apply
# to the c4 family usage. AWS recommends the smallest size instance in
# an instance family. This makes it easier to purchase a size-flexible
# RI. AWS also shows the equal number of normalized units so that you
# can purchase any instance size that you want. For this example, your
# RI recommendation would be for `c4.large` because that is the smallest
# size instance in the c4 instance family.
#
# @option params [String] :account_id
# The account ID that is associated with the recommendation.
#
# @option params [required, String] :service
# The specific service that you want recommendations for.
#
# @option params [String] :account_scope
# The account scope that you want your recommendations for. Amazon Web
# Services calculates recommendations including the payer account and
# linked accounts if the value is set to `PAYER`. If the value is
# `LINKED`, recommendations are calculated for individual linked
# accounts only.
#
# @option params [String] :lookback_period_in_days
# The number of previous days that you want AWS to consider when it
# calculates your recommendations.
#
# @option params [String] :term_in_years
# The reservation term that you want recommendations for.
#
# @option params [String] :payment_option
# The reservation purchase option that you want recommendations for.
#
# @option params [Types::ServiceSpecification] :service_specification
# The hardware specifications for the service instances that you want
# recommendations for, such as standard or convertible Amazon EC2
# instances.
#
# @option params [Integer] :page_size
# The number of recommendations that you want returned in a single
# response object.
#
# @option params [String] :next_page_token
# The pagination token that indicates the next set of results that you
# want to retrieve.
#
# @return [Types::GetReservationPurchaseRecommendationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetReservationPurchaseRecommendationResponse#metadata #metadata} => Types::ReservationPurchaseRecommendationMetadata
# * {Types::GetReservationPurchaseRecommendationResponse#recommendations #recommendations} => Array<Types::ReservationPurchaseRecommendation>
# * {Types::GetReservationPurchaseRecommendationResponse#next_page_token #next_page_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_reservation_purchase_recommendation({
# account_id: "GenericString",
# service: "GenericString", # required
# account_scope: "PAYER", # accepts PAYER, LINKED
# lookback_period_in_days: "SEVEN_DAYS", # accepts SEVEN_DAYS, THIRTY_DAYS, SIXTY_DAYS
# term_in_years: "ONE_YEAR", # accepts ONE_YEAR, THREE_YEARS
# payment_option: "NO_UPFRONT", # accepts NO_UPFRONT, PARTIAL_UPFRONT, ALL_UPFRONT, LIGHT_UTILIZATION, MEDIUM_UTILIZATION, HEAVY_UTILIZATION
# service_specification: {
# ec2_specification: {
# offering_class: "STANDARD", # accepts STANDARD, CONVERTIBLE
# },
# },
# page_size: 1,
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.metadata.recommendation_id #=> String
# resp.metadata.generation_timestamp #=> String
# resp.recommendations #=> Array
# resp.recommendations[0].account_scope #=> String, one of "PAYER", "LINKED"
# resp.recommendations[0].lookback_period_in_days #=> String, one of "SEVEN_DAYS", "THIRTY_DAYS", "SIXTY_DAYS"
# resp.recommendations[0].term_in_years #=> String, one of "ONE_YEAR", "THREE_YEARS"
# resp.recommendations[0].payment_option #=> String, one of "NO_UPFRONT", "PARTIAL_UPFRONT", "ALL_UPFRONT", "LIGHT_UTILIZATION", "MEDIUM_UTILIZATION", "HEAVY_UTILIZATION"
# resp.recommendations[0].service_specification.ec2_specification.offering_class #=> String, one of "STANDARD", "CONVERTIBLE"
# resp.recommendations[0].recommendation_details #=> Array
# resp.recommendations[0].recommendation_details[0].account_id #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.family #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.instance_type #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.region #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.availability_zone #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.platform #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.tenancy #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.current_generation #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.ec2_instance_details.size_flex_eligible #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.family #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.instance_type #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.region #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.database_engine #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.database_edition #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.deployment_option #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.license_model #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.current_generation #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.rds_instance_details.size_flex_eligible #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.redshift_instance_details.family #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.redshift_instance_details.node_type #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.redshift_instance_details.region #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.redshift_instance_details.current_generation #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.redshift_instance_details.size_flex_eligible #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.elasticache_instance_details.family #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.elasticache_instance_details.node_type #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.elasticache_instance_details.region #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.elasticache_instance_details.product_description #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.elasticache_instance_details.current_generation #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.elasticache_instance_details.size_flex_eligible #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.es_instance_details.instance_class #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.es_instance_details.instance_size #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.es_instance_details.region #=> String
# resp.recommendations[0].recommendation_details[0].instance_details.es_instance_details.current_generation #=> Boolean
# resp.recommendations[0].recommendation_details[0].instance_details.es_instance_details.size_flex_eligible #=> Boolean
# resp.recommendations[0].recommendation_details[0].recommended_number_of_instances_to_purchase #=> String
# resp.recommendations[0].recommendation_details[0].recommended_normalized_units_to_purchase #=> String
# resp.recommendations[0].recommendation_details[0].minimum_number_of_instances_used_per_hour #=> String
# resp.recommendations[0].recommendation_details[0].minimum_normalized_units_used_per_hour #=> String
# resp.recommendations[0].recommendation_details[0].maximum_number_of_instances_used_per_hour #=> String
# resp.recommendations[0].recommendation_details[0].maximum_normalized_units_used_per_hour #=> String
# resp.recommendations[0].recommendation_details[0].average_number_of_instances_used_per_hour #=> String
# resp.recommendations[0].recommendation_details[0].average_normalized_units_used_per_hour #=> String
# resp.recommendations[0].recommendation_details[0].average_utilization #=> String
# resp.recommendations[0].recommendation_details[0].estimated_break_even_in_months #=> String
# resp.recommendations[0].recommendation_details[0].currency_code #=> String
# resp.recommendations[0].recommendation_details[0].estimated_monthly_savings_amount #=> String
# resp.recommendations[0].recommendation_details[0].estimated_monthly_savings_percentage #=> String
# resp.recommendations[0].recommendation_details[0].estimated_monthly_on_demand_cost #=> String
# resp.recommendations[0].recommendation_details[0].estimated_reservation_cost_for_lookback_period #=> String
# resp.recommendations[0].recommendation_details[0].upfront_cost #=> String
# resp.recommendations[0].recommendation_details[0].recurring_standard_monthly_cost #=> String
# resp.recommendations[0].recommendation_summary.total_estimated_monthly_savings_amount #=> String
# resp.recommendations[0].recommendation_summary.total_estimated_monthly_savings_percentage #=> String
# resp.recommendations[0].recommendation_summary.currency_code #=> String
# resp.next_page_token #=> String
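#
# @example Hypothetical usage: one-year, no-upfront EC2 RI recommendations
#
#   # A minimal sketch, not generated from the API model; the service name
#   # string follows Cost Explorer's naming convention and is illustrative.
#   resp = client.get_reservation_purchase_recommendation({
#     service: "Amazon Elastic Compute Cloud - Compute",
#     term_in_years: "ONE_YEAR",
#     payment_option: "NO_UPFRONT",
#     lookback_period_in_days: "THIRTY_DAYS",
#   })
#   resp.recommendations.each do |rec|
#     rec.recommendation_details.each do |detail|
#       puts detail.recommended_number_of_instances_to_purchase
#     end
#   end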
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetReservationPurchaseRecommendation AWS API Documentation
#
# @overload get_reservation_purchase_recommendation(params = {})
# @param [Hash] params ({})
def get_reservation_purchase_recommendation(params = {}, options = {})
req = build_request(:get_reservation_purchase_recommendation, params)
req.send_request(options)
end
# Retrieves the reservation utilization for your account. Master
# accounts in an organization have access to member accounts. You can
# filter data by dimensions in a time period. You can use
# `GetDimensionValues` to determine the possible dimension values.
# Currently, you can group only by `SUBSCRIPTION_ID`.
#
# @option params [required, Types::DateInterval] :time_period
# Sets the start and end dates for retrieving RI utilization. The start
# date is inclusive, but the end date is exclusive. For example, if
# `start` is `2017-01-01` and `end` is `2017-05-01`, then the cost and
# usage data is retrieved from `2017-01-01` up to and including
# `2017-04-30` but not including `2017-05-01`.
#
# @option params [Array<Types::GroupDefinition>] :group_by
# Groups only by `SUBSCRIPTION_ID`. Metadata is included.
#
# @option params [String] :granularity
# If `GroupBy` is set, `Granularity` can't be set. If `Granularity`
#   isn't set, the response object doesn't include `Granularity`. If
#   neither `GroupBy` nor `Granularity` is set,
#   `GetReservationUtilization` defaults to `DAILY`.
#
# The `GetReservationUtilization` operation supports only `DAILY` and
# `MONTHLY` granularities.
#
# @option params [Types::Expression] :filter
# Filters utilization data by dimensions. You can filter by the
# following dimensions:
#
# * AZ
#
# * CACHE\_ENGINE
#
# * DEPLOYMENT\_OPTION
#
# * INSTANCE\_TYPE
#
# * LINKED\_ACCOUNT
#
# * OPERATING\_SYSTEM
#
# * PLATFORM
#
# * REGION
#
# * SERVICE
#
# * SCOPE
#
# * TENANCY
#
# `GetReservationUtilization` uses the same [Expression][1] object as
# the other operations, but only `AND` is supported among each
# dimension, and nesting is supported up to only one level deep. If
# there are multiple values for a dimension, they are OR'd together.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. AWS provides the token
# when the response from a previous call has more results than the
# maximum page size.
#
# @return [Types::GetReservationUtilizationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetReservationUtilizationResponse#utilizations_by_time #utilizations_by_time} => Array<Types::UtilizationByTime>
# * {Types::GetReservationUtilizationResponse#total #total} => Types::ReservationAggregates
# * {Types::GetReservationUtilizationResponse#next_page_token #next_page_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_reservation_utilization({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# group_by: [
# {
# type: "DIMENSION", # accepts DIMENSION, TAG, COST_CATEGORY
# key: "GroupDefinitionKey",
# },
# ],
# granularity: "DAILY", # accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.utilizations_by_time #=> Array
# resp.utilizations_by_time[0].time_period.start #=> String
# resp.utilizations_by_time[0].time_period.end #=> String
# resp.utilizations_by_time[0].groups #=> Array
# resp.utilizations_by_time[0].groups[0].key #=> String
# resp.utilizations_by_time[0].groups[0].value #=> String
# resp.utilizations_by_time[0].groups[0].attributes #=> Hash
# resp.utilizations_by_time[0].groups[0].attributes["AttributeType"] #=> <Hash,Array,String,Numeric,Boolean,IO,Set,nil>
# resp.utilizations_by_time[0].groups[0].utilization.utilization_percentage #=> String
# resp.utilizations_by_time[0].groups[0].utilization.utilization_percentage_in_units #=> String
# resp.utilizations_by_time[0].groups[0].utilization.purchased_hours #=> String
# resp.utilizations_by_time[0].groups[0].utilization.purchased_units #=> String
# resp.utilizations_by_time[0].groups[0].utilization.total_actual_hours #=> String
# resp.utilizations_by_time[0].groups[0].utilization.total_actual_units #=> String
# resp.utilizations_by_time[0].groups[0].utilization.unused_hours #=> String
# resp.utilizations_by_time[0].groups[0].utilization.unused_units #=> String
# resp.utilizations_by_time[0].groups[0].utilization.on_demand_cost_of_ri_hours_used #=> String
# resp.utilizations_by_time[0].groups[0].utilization.net_ri_savings #=> String
# resp.utilizations_by_time[0].groups[0].utilization.total_potential_ri_savings #=> String
# resp.utilizations_by_time[0].groups[0].utilization.amortized_upfront_fee #=> String
# resp.utilizations_by_time[0].groups[0].utilization.amortized_recurring_fee #=> String
# resp.utilizations_by_time[0].groups[0].utilization.total_amortized_fee #=> String
# resp.utilizations_by_time[0].total.utilization_percentage #=> String
# resp.utilizations_by_time[0].total.utilization_percentage_in_units #=> String
# resp.utilizations_by_time[0].total.purchased_hours #=> String
# resp.utilizations_by_time[0].total.purchased_units #=> String
# resp.utilizations_by_time[0].total.total_actual_hours #=> String
# resp.utilizations_by_time[0].total.total_actual_units #=> String
# resp.utilizations_by_time[0].total.unused_hours #=> String
# resp.utilizations_by_time[0].total.unused_units #=> String
# resp.utilizations_by_time[0].total.on_demand_cost_of_ri_hours_used #=> String
# resp.utilizations_by_time[0].total.net_ri_savings #=> String
# resp.utilizations_by_time[0].total.total_potential_ri_savings #=> String
# resp.utilizations_by_time[0].total.amortized_upfront_fee #=> String
# resp.utilizations_by_time[0].total.amortized_recurring_fee #=> String
# resp.utilizations_by_time[0].total.total_amortized_fee #=> String
# resp.total.utilization_percentage #=> String
# resp.total.utilization_percentage_in_units #=> String
# resp.total.purchased_hours #=> String
# resp.total.purchased_units #=> String
# resp.total.total_actual_hours #=> String
# resp.total.total_actual_units #=> String
# resp.total.unused_hours #=> String
# resp.total.unused_units #=> String
# resp.total.on_demand_cost_of_ri_hours_used #=> String
# resp.total.net_ri_savings #=> String
# resp.total.total_potential_ri_savings #=> String
# resp.total.amortized_upfront_fee #=> String
# resp.total.amortized_recurring_fee #=> String
# resp.total.total_amortized_fee #=> String
# resp.next_page_token #=> String
    #
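    # The lines below are an editor-added, illustrative sketch rather than
    # part of the generated reference. They assume `client` is an
    # already-configured `Aws::CostExplorer::Client` and read only fields
    # listed in the response structure above.
    #
    # @example Iterating the utilization buckets (illustrative sketch)
    #
    #   resp = client.get_reservation_utilization(
    #     time_period: { start: "2019-01-01", end: "2019-02-01" }
    #   )
    #   resp.utilizations_by_time.each do |bucket|
    #     # Each bucket covers one granularity period and carries aggregate totals.
    #     puts "#{bucket.time_period.start}: #{bucket.total.utilization_percentage}% utilized"
    #   end
    #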
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetReservationUtilization AWS API Documentation
#
# @overload get_reservation_utilization(params = {})
# @param [Hash] params ({})
def get_reservation_utilization(params = {}, options = {})
req = build_request(:get_reservation_utilization, params)
req.send_request(options)
end
    # Creates recommendations that help you save cost by identifying idle
# and underutilized Amazon EC2 instances.
#
# Recommendations are generated to either downsize or terminate
# instances, along with providing savings detail and metrics. For
# details on calculation and function, see [Optimizing Your Cost with
# Rightsizing Recommendations][1].
#
#
#
# [1]: https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/ce-what-is.html
#
# @option params [Types::Expression] :filter
# Use `Expression` to filter by cost or by usage. There are two
# patterns:
#
# * Simple dimension values - You can set the dimension name and values
# for the filters that you plan to use. For example, you can filter
# for `REGION==us-east-1 OR REGION==us-west-1`. The `Expression` for
# that looks like this:
#
# `\{ "Dimensions": \{ "Key": "REGION", "Values": [ "us-east-1",
    #   "us-west-1" ] \} \}`
#
    #   The list of dimension values is OR'd together to retrieve cost or
    #   usage data. In this SDK, `Expression` and `DimensionValues` objects
    #   are expressed as nested hashes, as shown in the request syntax
    #   example below.
#
# * Compound dimension values with logical operations - You can use
# multiple `Expression` types and the logical operators `AND/OR/NOT`
# to create a list of one or more `Expression` objects. This allows
# you to filter on more advanced options. For example, you can filter
# on `((REGION == us-east-1 OR REGION == us-west-1) OR (TAG.Type ==
# Type1)) AND (USAGE_TYPE != DataTransfer)`. The `Expression` for that
# looks like this:
#
# `\{ "And": [ \{"Or": [ \{"Dimensions": \{ "Key": "REGION", "Values":
# [ "us-east-1", "us-west-1" ] \}\}, \{"Tags": \{ "Key": "TagName",
# "Values": ["Value1"] \} \} ]\}, \{"Not": \{"Dimensions": \{ "Key":
# "USAGE_TYPE", "Values": ["DataTransfer"] \}\}\} ] \} `
#
# <note markdown="1"> Because each `Expression` can have only one operator, the service
# returns an error if more than one is specified. The following
# example shows an `Expression` object that creates an error.
#
# </note>
#
# ` \{ "And": [ ... ], "DimensionValues": \{ "Dimension":
# "USAGE_TYPE", "Values": [ "DataTransfer" ] \} \} `
#
    # <note markdown="1"> For the `GetRightsizingRecommendation` action, a combination of OR and
    # NOT is not supported. OR isn't supported between different dimensions,
    # or between dimensions and tags, and NOT operators aren't supported.
    # Dimensions are also limited to `LINKED_ACCOUNT`, `REGION`, or
    # `RIGHTSIZING_TYPE`.
#
# </note>
#
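    # As an editor-added illustration (not part of the generated reference):
    # given the restrictions in the note above, a filter for this operation
    # is typically a single `dimensions` hash written in this SDK's
    # snake_case request syntax, for example:
    #
    #   filter = {
    #     dimensions: {
    #       key: "REGION",
    #       values: ["us-east-1", "us-west-1"]
    #     }
    #   }
    #   # Values within one dimension are OR'd together by the service; pass
    #   # the hash as the :filter option of this operation.
    #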
# @option params [Types::RightsizingRecommendationConfiguration] :configuration
# Enables you to customize recommendations across two attributes. You
# can choose to view recommendations for instances within the same
# instance families or across different instance families. You can also
# choose to view your estimated savings associated with recommendations
# with consideration of existing Savings Plans or RI benefits, or
    # neither.
#
# @option params [required, String] :service
# The specific service that you want recommendations for. The only valid
# value for `GetRightsizingRecommendation` is "`AmazonEC2`".
#
# @option params [Integer] :page_size
# The number of recommendations that you want returned in a single
# response object.
#
# @option params [String] :next_page_token
# The pagination token that indicates the next set of results that you
# want to retrieve.
#
# @return [Types::GetRightsizingRecommendationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetRightsizingRecommendationResponse#metadata #metadata} => Types::RightsizingRecommendationMetadata
# * {Types::GetRightsizingRecommendationResponse#summary #summary} => Types::RightsizingRecommendationSummary
# * {Types::GetRightsizingRecommendationResponse#rightsizing_recommendations #rightsizing_recommendations} => Array<Types::RightsizingRecommendation>
# * {Types::GetRightsizingRecommendationResponse#next_page_token #next_page_token} => String
# * {Types::GetRightsizingRecommendationResponse#configuration #configuration} => Types::RightsizingRecommendationConfiguration
#
# @example Request syntax with placeholder values
#
# resp = client.get_rightsizing_recommendation({
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# configuration: {
# recommendation_target: "SAME_INSTANCE_FAMILY", # required, accepts SAME_INSTANCE_FAMILY, CROSS_INSTANCE_FAMILY
# benefits_considered: false, # required
# },
# service: "GenericString", # required
# page_size: 1,
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.metadata.recommendation_id #=> String
# resp.metadata.generation_timestamp #=> String
# resp.metadata.lookback_period_in_days #=> String, one of "SEVEN_DAYS", "THIRTY_DAYS", "SIXTY_DAYS"
# resp.summary.total_recommendation_count #=> String
# resp.summary.estimated_total_monthly_savings_amount #=> String
# resp.summary.savings_currency_code #=> String
# resp.summary.savings_percentage #=> String
# resp.rightsizing_recommendations #=> Array
# resp.rightsizing_recommendations[0].account_id #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_id #=> String
# resp.rightsizing_recommendations[0].current_instance.tags #=> Array
# resp.rightsizing_recommendations[0].current_instance.tags[0].key #=> String
# resp.rightsizing_recommendations[0].current_instance.tags[0].values #=> Array
# resp.rightsizing_recommendations[0].current_instance.tags[0].values[0] #=> String
# resp.rightsizing_recommendations[0].current_instance.tags[0].match_options #=> Array
# resp.rightsizing_recommendations[0].current_instance.tags[0].match_options[0] #=> String, one of "EQUALS", "STARTS_WITH", "ENDS_WITH", "CONTAINS", "CASE_SENSITIVE", "CASE_INSENSITIVE"
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.hourly_on_demand_rate #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.instance_type #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.platform #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.region #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.sku #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.memory #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.network_performance #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.storage #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_details.ec2_resource_details.vcpu #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_utilization.ec2_resource_utilization.max_cpu_utilization_percentage #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_utilization.ec2_resource_utilization.max_memory_utilization_percentage #=> String
# resp.rightsizing_recommendations[0].current_instance.resource_utilization.ec2_resource_utilization.max_storage_utilization_percentage #=> String
# resp.rightsizing_recommendations[0].current_instance.reservation_covered_hours_in_lookback_period #=> String
# resp.rightsizing_recommendations[0].current_instance.savings_plans_covered_hours_in_lookback_period #=> String
# resp.rightsizing_recommendations[0].current_instance.on_demand_hours_in_lookback_period #=> String
# resp.rightsizing_recommendations[0].current_instance.total_running_hours_in_lookback_period #=> String
# resp.rightsizing_recommendations[0].current_instance.monthly_cost #=> String
# resp.rightsizing_recommendations[0].current_instance.currency_code #=> String
# resp.rightsizing_recommendations[0].rightsizing_type #=> String, one of "TERMINATE", "MODIFY"
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances #=> Array
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].estimated_monthly_cost #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].estimated_monthly_savings #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].currency_code #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].default_target_instance #=> Boolean
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.hourly_on_demand_rate #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.instance_type #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.platform #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.region #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.sku #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.memory #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.network_performance #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.storage #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].resource_details.ec2_resource_details.vcpu #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].expected_resource_utilization.ec2_resource_utilization.max_cpu_utilization_percentage #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].expected_resource_utilization.ec2_resource_utilization.max_memory_utilization_percentage #=> String
# resp.rightsizing_recommendations[0].modify_recommendation_detail.target_instances[0].expected_resource_utilization.ec2_resource_utilization.max_storage_utilization_percentage #=> String
# resp.rightsizing_recommendations[0].terminate_recommendation_detail.estimated_monthly_savings #=> String
# resp.rightsizing_recommendations[0].terminate_recommendation_detail.currency_code #=> String
# resp.next_page_token #=> String
# resp.configuration.recommendation_target #=> String, one of "SAME_INSTANCE_FAMILY", "CROSS_INSTANCE_FAMILY"
# resp.configuration.benefits_considered #=> Boolean
#
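    # The sketch below is editor-added and illustrative only. It assumes a
    # configured `client` and shows how the `rightsizing_type` field
    # distinguishes the two recommendation shapes in the response.
    #
    # @example Branching on the recommendation type (illustrative sketch)
    #
    #   resp = client.get_rightsizing_recommendation(service: "AmazonEC2")
    #   resp.rightsizing_recommendations.each do |rec|
    #     case rec.rightsizing_type
    #     when "TERMINATE"
    #       puts "Terminate #{rec.current_instance.resource_id}: " \
    #            "saves #{rec.terminate_recommendation_detail.estimated_monthly_savings}"
    #     when "MODIFY"
    #       # Target instances are the suggested replacement sizes.
    #       targets = rec.modify_recommendation_detail.target_instances
    #       puts "Modify #{rec.current_instance.resource_id}: #{targets.size} option(s)"
    #     end
    #   end
    #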
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetRightsizingRecommendation AWS API Documentation
#
# @overload get_rightsizing_recommendation(params = {})
# @param [Hash] params ({})
def get_rightsizing_recommendation(params = {}, options = {})
req = build_request(:get_rightsizing_recommendation, params)
req.send_request(options)
end
# Retrieves the Savings Plans covered for your account. This enables you
# to see how much of your cost is covered by a Savings Plan. An
# organization’s master account can see the coverage of the associated
# member accounts. This supports dimensions, Cost Categories, and nested
# expressions. For any time period, you can filter data for Savings
# Plans usage with the following dimensions:
#
# * `LINKED_ACCOUNT`
#
# * `REGION`
#
# * `SERVICE`
#
# * `INSTANCE_FAMILY`
#
# To determine valid values for a dimension, use the
# `GetDimensionValues` operation.
#
# @option params [required, Types::DateInterval] :time_period
# The time period that you want the usage and costs for. The `Start`
# date must be within 13 months. The `End` date must be after the
# `Start` date, and before the current date. Future dates can't be used
# as an `End` date.
#
# @option params [Array<Types::GroupDefinition>] :group_by
# You can group the data using the attributes `INSTANCE_FAMILY`,
# `REGION`, or `SERVICE`.
#
# @option params [String] :granularity
# The granularity of the Amazon Web Services cost data for your Savings
# Plans. `Granularity` can't be set if `GroupBy` is set.
#
# The `GetSavingsPlansCoverage` operation supports only `DAILY` and
# `MONTHLY` granularities.
#
# @option params [Types::Expression] :filter
# Filters Savings Plans coverage data by dimensions. You can filter data
# for Savings Plans usage with the following dimensions:
#
# * `LINKED_ACCOUNT`
#
# * `REGION`
#
# * `SERVICE`
#
# * `INSTANCE_FAMILY`
#
# `GetSavingsPlansCoverage` uses the same [Expression][1] object as the
# other operations, but only `AND` is supported among each dimension. If
# there are multiple values for a dimension, they are OR'd together.
#
    # Cost Categories are also supported.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @option params [Array<String>] :metrics
# The measurement that you want your Savings Plans coverage reported in.
# The only valid value is `SpendCoveredBySavingsPlans`.
#
# @option params [String] :next_token
# The token to retrieve the next set of results. Amazon Web Services
# provides the token when the response from a previous call has more
# results than the maximum page size.
#
# @option params [Integer] :max_results
# The number of items to be returned in a response. The default is `20`,
# with a minimum value of `1`.
#
# @return [Types::GetSavingsPlansCoverageResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetSavingsPlansCoverageResponse#savings_plans_coverages #savings_plans_coverages} => Array<Types::SavingsPlansCoverage>
# * {Types::GetSavingsPlansCoverageResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.get_savings_plans_coverage({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# group_by: [
# {
# type: "DIMENSION", # accepts DIMENSION, TAG, COST_CATEGORY
# key: "GroupDefinitionKey",
# },
# ],
# granularity: "DAILY", # accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# metrics: ["MetricName"],
# next_token: "NextPageToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.savings_plans_coverages #=> Array
# resp.savings_plans_coverages[0].attributes #=> Hash
# resp.savings_plans_coverages[0].attributes["AttributeType"] #=> <Hash,Array,String,Numeric,Boolean,IO,Set,nil>
# resp.savings_plans_coverages[0].coverage.spend_covered_by_savings_plans #=> String
# resp.savings_plans_coverages[0].coverage.on_demand_cost #=> String
# resp.savings_plans_coverages[0].coverage.total_cost #=> String
# resp.savings_plans_coverages[0].coverage.coverage_percentage #=> String
# resp.savings_plans_coverages[0].time_period.start #=> String
# resp.savings_plans_coverages[0].time_period.end #=> String
# resp.next_token #=> String
#
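    # Because this response is pageable, it can be enumerated page by page.
    # The sketch below is editor-added; it assumes a configured `client` and
    # relies only on the pageable behavior noted above.
    #
    # @example Enumerating all pages (illustrative sketch)
    #
    #   resp = client.get_savings_plans_coverage(
    #     time_period: { start: "2019-09-01", end: "2019-10-01" }
    #   )
    #   resp.each_page do |page|
    #     page.savings_plans_coverages.each do |coverage|
    #       puts coverage.coverage.coverage_percentage
    #     end
    #   end
    #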
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetSavingsPlansCoverage AWS API Documentation
#
# @overload get_savings_plans_coverage(params = {})
# @param [Hash] params ({})
def get_savings_plans_coverage(params = {}, options = {})
req = build_request(:get_savings_plans_coverage, params)
req.send_request(options)
end
# Retrieves your request parameters, Savings Plan Recommendations
# Summary and Details.
#
# @option params [required, String] :savings_plans_type
# The Savings Plans recommendation type requested.
#
# @option params [required, String] :term_in_years
    # The Savings Plans recommendation term used to generate these
# recommendations.
#
# @option params [required, String] :payment_option
# The payment option used to generate these recommendations.
#
# @option params [String] :account_scope
# The account scope that you want your recommendations for. Amazon Web
# Services calculates recommendations including the payer account and
# linked accounts if the value is set to `PAYER`. If the value is
# `LINKED`, recommendations are calculated for individual linked
# accounts only.
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. Amazon Web Services
# provides the token when the response from a previous call has more
# results than the maximum page size.
#
# @option params [Integer] :page_size
# The number of recommendations that you want returned in a single
# response object.
#
# @option params [required, String] :lookback_period_in_days
# The lookback period used to generate the recommendation.
#
# @option params [Types::Expression] :filter
# You can filter your recommendations by Account ID with the
# `LINKED_ACCOUNT` dimension. To filter your recommendations by Account
# ID, specify `Key` as `LINKED_ACCOUNT` and `Value` as the
    # comma-separated Account ID(s) for which you want to see Savings Plans
# purchase recommendations.
#
# For GetSavingsPlansPurchaseRecommendation, the `Filter` does not
# include `CostCategories` or `Tags`. It only includes `Dimensions`.
# With `Dimensions`, `Key` must be `LINKED_ACCOUNT` and `Value` can be a
# single Account ID or multiple comma-separated Account IDs for which
# you want to see Savings Plans Purchase Recommendations. `AND` and `OR`
# operators are not supported.
#
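    # As an editor-added illustration (an assumption about typical usage, not
    # part of the generated reference), such a filter is a single
    # `dimensions` hash keyed by `LINKED_ACCOUNT`:
    #
    #   filter = {
    #     dimensions: {
    #       key: "LINKED_ACCOUNT",
    #       values: ["111122223333", "444455556666"] # hypothetical account IDs
    #     }
    #   }
    #   # Pass the hash as the :filter option of this operation.
    #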
# @return [Types::GetSavingsPlansPurchaseRecommendationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetSavingsPlansPurchaseRecommendationResponse#metadata #metadata} => Types::SavingsPlansPurchaseRecommendationMetadata
# * {Types::GetSavingsPlansPurchaseRecommendationResponse#savings_plans_purchase_recommendation #savings_plans_purchase_recommendation} => Types::SavingsPlansPurchaseRecommendation
# * {Types::GetSavingsPlansPurchaseRecommendationResponse#next_page_token #next_page_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_savings_plans_purchase_recommendation({
# savings_plans_type: "COMPUTE_SP", # required, accepts COMPUTE_SP, EC2_INSTANCE_SP
# term_in_years: "ONE_YEAR", # required, accepts ONE_YEAR, THREE_YEARS
# payment_option: "NO_UPFRONT", # required, accepts NO_UPFRONT, PARTIAL_UPFRONT, ALL_UPFRONT, LIGHT_UTILIZATION, MEDIUM_UTILIZATION, HEAVY_UTILIZATION
# account_scope: "PAYER", # accepts PAYER, LINKED
# next_page_token: "NextPageToken",
# page_size: 1,
# lookback_period_in_days: "SEVEN_DAYS", # required, accepts SEVEN_DAYS, THIRTY_DAYS, SIXTY_DAYS
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# })
#
# @example Response structure
#
# resp.metadata.recommendation_id #=> String
# resp.metadata.generation_timestamp #=> String
# resp.savings_plans_purchase_recommendation.account_scope #=> String, one of "PAYER", "LINKED"
# resp.savings_plans_purchase_recommendation.savings_plans_type #=> String, one of "COMPUTE_SP", "EC2_INSTANCE_SP"
# resp.savings_plans_purchase_recommendation.term_in_years #=> String, one of "ONE_YEAR", "THREE_YEARS"
# resp.savings_plans_purchase_recommendation.payment_option #=> String, one of "NO_UPFRONT", "PARTIAL_UPFRONT", "ALL_UPFRONT", "LIGHT_UTILIZATION", "MEDIUM_UTILIZATION", "HEAVY_UTILIZATION"
# resp.savings_plans_purchase_recommendation.lookback_period_in_days #=> String, one of "SEVEN_DAYS", "THIRTY_DAYS", "SIXTY_DAYS"
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details #=> Array
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].savings_plans_details.region #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].savings_plans_details.instance_family #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].savings_plans_details.offering_id #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].account_id #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].upfront_cost #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_roi #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].currency_code #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_sp_cost #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_on_demand_cost #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_on_demand_cost_with_current_commitment #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_savings_amount #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_savings_percentage #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].hourly_commitment_to_purchase #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_average_utilization #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].estimated_monthly_savings_amount #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].current_minimum_hourly_on_demand_spend #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].current_maximum_hourly_on_demand_spend #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_details[0].current_average_hourly_on_demand_spend #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.estimated_roi #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.currency_code #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.estimated_total_cost #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.current_on_demand_spend #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.estimated_savings_amount #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.total_recommendation_count #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.daily_commitment_to_purchase #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.hourly_commitment_to_purchase #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.estimated_savings_percentage #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.estimated_monthly_savings_amount #=> String
# resp.savings_plans_purchase_recommendation.savings_plans_purchase_recommendation_summary.estimated_on_demand_cost_with_current_commitment #=> String
# resp.next_page_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetSavingsPlansPurchaseRecommendation AWS API Documentation
#
# @overload get_savings_plans_purchase_recommendation(params = {})
# @param [Hash] params ({})
def get_savings_plans_purchase_recommendation(params = {}, options = {})
req = build_request(:get_savings_plans_purchase_recommendation, params)
req.send_request(options)
end
# Retrieves the Savings Plans utilization for your account across date
# ranges with daily or monthly granularity. Master accounts in an
# organization have access to member accounts. You can use
# `GetDimensionValues` in `SAVINGS_PLANS` to determine the possible
# dimension values.
#
# <note markdown="1"> You cannot group by any dimension values for
# `GetSavingsPlansUtilization`.
#
# </note>
#
# @option params [required, Types::DateInterval] :time_period
# The time period that you want the usage and costs for. The `Start`
# date must be within 13 months. The `End` date must be after the
# `Start` date, and before the current date. Future dates can't be used
# as an `End` date.
#
# @option params [String] :granularity
    # The granularity of the Amazon Web Services utilization data for your
# Savings Plans.
#
# The `GetSavingsPlansUtilization` operation supports only `DAILY` and
# `MONTHLY` granularities.
#
# @option params [Types::Expression] :filter
# Filters Savings Plans utilization coverage data for active Savings
# Plans dimensions. You can filter data with the following dimensions:
#
# * `LINKED_ACCOUNT`
#
# * `SAVINGS_PLAN_ARN`
#
# * `SAVINGS_PLANS_TYPE`
#
# * `REGION`
#
# * `PAYMENT_OPTION`
#
# * `INSTANCE_TYPE_FAMILY`
#
# `GetSavingsPlansUtilization` uses the same [Expression][1] object as
# the other operations, but only `AND` is supported among each
# dimension.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @return [Types::GetSavingsPlansUtilizationResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetSavingsPlansUtilizationResponse#savings_plans_utilizations_by_time #savings_plans_utilizations_by_time} => Array<Types::SavingsPlansUtilizationByTime>
# * {Types::GetSavingsPlansUtilizationResponse#total #total} => Types::SavingsPlansUtilizationAggregates
#
# @example Request syntax with placeholder values
#
# resp = client.get_savings_plans_utilization({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# granularity: "DAILY", # accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# })
#
# @example Response structure
#
# resp.savings_plans_utilizations_by_time #=> Array
# resp.savings_plans_utilizations_by_time[0].time_period.start #=> String
# resp.savings_plans_utilizations_by_time[0].time_period.end #=> String
# resp.savings_plans_utilizations_by_time[0].utilization.total_commitment #=> String
# resp.savings_plans_utilizations_by_time[0].utilization.used_commitment #=> String
# resp.savings_plans_utilizations_by_time[0].utilization.unused_commitment #=> String
# resp.savings_plans_utilizations_by_time[0].utilization.utilization_percentage #=> String
# resp.savings_plans_utilizations_by_time[0].savings.net_savings #=> String
# resp.savings_plans_utilizations_by_time[0].savings.on_demand_cost_equivalent #=> String
# resp.savings_plans_utilizations_by_time[0].amortized_commitment.amortized_recurring_commitment #=> String
# resp.savings_plans_utilizations_by_time[0].amortized_commitment.amortized_upfront_commitment #=> String
# resp.savings_plans_utilizations_by_time[0].amortized_commitment.total_amortized_commitment #=> String
# resp.total.utilization.total_commitment #=> String
# resp.total.utilization.used_commitment #=> String
# resp.total.utilization.unused_commitment #=> String
# resp.total.utilization.utilization_percentage #=> String
# resp.total.savings.net_savings #=> String
# resp.total.savings.on_demand_cost_equivalent #=> String
# resp.total.amortized_commitment.amortized_recurring_commitment #=> String
# resp.total.amortized_commitment.amortized_upfront_commitment #=> String
# resp.total.amortized_commitment.total_amortized_commitment #=> String
#
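    # The editor-added sketch below assumes a configured `client` and simply
    # reads the aggregate fields shown above to summarize commitment usage.
    #
    # @example Summarizing total commitment usage (illustrative sketch)
    #
    #   resp = client.get_savings_plans_utilization(
    #     time_period: { start: "2019-09-01", end: "2019-10-01" }
    #   )
    #   total = resp.total.utilization
    #   puts "Used #{total.used_commitment} of #{total.total_commitment} " \
    #        "(#{total.utilization_percentage}%); unused: #{total.unused_commitment}"
    #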
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetSavingsPlansUtilization AWS API Documentation
#
# @overload get_savings_plans_utilization(params = {})
# @param [Hash] params ({})
def get_savings_plans_utilization(params = {}, options = {})
req = build_request(:get_savings_plans_utilization, params)
req.send_request(options)
end
# Retrieves attribute data along with aggregate utilization and savings
    # data for a given time period. This operation doesn't support granular
    # or grouped (daily/monthly) data in the response. Unlike
    # `GetSavingsPlansUtilization`, you can't retrieve data by date ranges in
    # a single response, but you can make multiple calls to
    # `GetSavingsPlansUtilizationDetails` with individual dates. You
# can use `GetDimensionValues` in `SAVINGS_PLANS` to determine the
# possible dimension values.
#
    # <note markdown="1"> `GetSavingsPlansUtilizationDetails` internally groups data by
# `SavingsPlansArn`.
#
# </note>
#
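    # Since the response isn't broken down by date, one editor-added pattern
    # (an illustrative assumption rather than anything prescribed by the API
    # reference) is to issue one call per day, assuming a configured
    # `client`, and collect the per-ARN details yourself:
    #
    #   require "date"
    #   (Date.new(2019, 9, 1)...Date.new(2019, 9, 8)).each do |day|
    #     resp = client.get_savings_plans_utilization_details(
    #       time_period: { start: day.to_s, end: (day + 1).to_s }
    #     )
    #     resp.savings_plans_utilization_details.each do |detail|
    #       puts "#{day} #{detail.savings_plan_arn}: #{detail.utilization.utilization_percentage}%"
    #     end
    #   end
    #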
# @option params [required, Types::DateInterval] :time_period
# The time period that you want the usage and costs for. The `Start`
# date must be within 13 months. The `End` date must be after the
# `Start` date, and before the current date. Future dates can't be used
# as an `End` date.
#
# @option params [Types::Expression] :filter
# Filters Savings Plans utilization coverage data for active Savings
# Plans dimensions. You can filter data with the following dimensions:
#
# * `LINKED_ACCOUNT`
#
# * `SAVINGS_PLAN_ARN`
#
# * `REGION`
#
# * `PAYMENT_OPTION`
#
# * `INSTANCE_TYPE_FAMILY`
#
# `GetSavingsPlansUtilizationDetails` uses the same [Expression][1]
# object as the other operations, but only `AND` is supported among each
# dimension.
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html
#
# @option params [String] :next_token
# The token to retrieve the next set of results. Amazon Web Services
# provides the token when the response from a previous call has more
# results than the maximum page size.
#
# @option params [Integer] :max_results
# The number of items to be returned in a response. The default is `20`,
# with a minimum value of `1`.
#
# @return [Types::GetSavingsPlansUtilizationDetailsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetSavingsPlansUtilizationDetailsResponse#savings_plans_utilization_details #savings_plans_utilization_details} => Array<Types::SavingsPlansUtilizationDetail>
# * {Types::GetSavingsPlansUtilizationDetailsResponse#total #total} => Types::SavingsPlansUtilizationAggregates
# * {Types::GetSavingsPlansUtilizationDetailsResponse#time_period #time_period} => Types::DateInterval
# * {Types::GetSavingsPlansUtilizationDetailsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.get_savings_plans_utilization_details({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# next_token: "NextPageToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.savings_plans_utilization_details #=> Array
# resp.savings_plans_utilization_details[0].savings_plan_arn #=> String
# resp.savings_plans_utilization_details[0].attributes #=> Hash
# resp.savings_plans_utilization_details[0].attributes["AttributeType"] #=> <Hash,Array,String,Numeric,Boolean,IO,Set,nil>
# resp.savings_plans_utilization_details[0].utilization.total_commitment #=> String
# resp.savings_plans_utilization_details[0].utilization.used_commitment #=> String
# resp.savings_plans_utilization_details[0].utilization.unused_commitment #=> String
# resp.savings_plans_utilization_details[0].utilization.utilization_percentage #=> String
# resp.savings_plans_utilization_details[0].savings.net_savings #=> String
# resp.savings_plans_utilization_details[0].savings.on_demand_cost_equivalent #=> String
# resp.savings_plans_utilization_details[0].amortized_commitment.amortized_recurring_commitment #=> String
# resp.savings_plans_utilization_details[0].amortized_commitment.amortized_upfront_commitment #=> String
# resp.savings_plans_utilization_details[0].amortized_commitment.total_amortized_commitment #=> String
# resp.total.utilization.total_commitment #=> String
# resp.total.utilization.used_commitment #=> String
# resp.total.utilization.unused_commitment #=> String
# resp.total.utilization.utilization_percentage #=> String
# resp.total.savings.net_savings #=> String
# resp.total.savings.on_demand_cost_equivalent #=> String
# resp.total.amortized_commitment.amortized_recurring_commitment #=> String
# resp.total.amortized_commitment.amortized_upfront_commitment #=> String
# resp.total.amortized_commitment.total_amortized_commitment #=> String
# resp.time_period.start #=> String
# resp.time_period.end #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetSavingsPlansUtilizationDetails AWS API Documentation
#
# @overload get_savings_plans_utilization_details(params = {})
# @param [Hash] params ({})
def get_savings_plans_utilization_details(params = {}, options = {})
req = build_request(:get_savings_plans_utilization_details, params)
req.send_request(options)
end
# Queries for available tag keys and tag values for a specified period.
# You can search the tag values for an arbitrary string.
#
# @option params [String] :search_string
# The value that you want to search for.
#
# @option params [required, Types::DateInterval] :time_period
# The start and end dates for retrieving the dimension values. The start
# date is inclusive, but the end date is exclusive. For example, if
# `start` is `2017-01-01` and `end` is `2017-05-01`, then the cost and
# usage data is retrieved from `2017-01-01` up to and including
# `2017-04-30` but not including `2017-05-01`.
#
# @option params [String] :tag_key
# The key of the tag that you want to return values for.
#
# @option params [String] :next_page_token
# The token to retrieve the next set of results. AWS provides the token
# when the response from a previous call has more results than the
# maximum page size.
#
# @return [Types::GetTagsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetTagsResponse#next_page_token #next_page_token} => String
# * {Types::GetTagsResponse#tags #tags} => Array<String>
# * {Types::GetTagsResponse#return_size #return_size} => Integer
# * {Types::GetTagsResponse#total_size #total_size} => Integer
#
# @example Request syntax with placeholder values
#
# resp = client.get_tags({
# search_string: "SearchString",
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# tag_key: "TagKey",
# next_page_token: "NextPageToken",
# })
#
# @example Response structure
#
# resp.next_page_token #=> String
# resp.tags #=> Array
# resp.tags[0] #=> String
# resp.return_size #=> Integer
# resp.total_size #=> Integer
#
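    # Responses are paged via `next_page_token`. The loop below is an
    # editor-added sketch that assumes a configured `client` and collects
    # every value of a hypothetical `environment` tag key.
    #
    # @example Collecting all values for a tag key (illustrative sketch)
    #
    #   values = []
    #   params = {
    #     time_period: { start: "2019-01-01", end: "2019-02-01" },
    #     tag_key: "environment" # hypothetical tag key
    #   }
    #   loop do
    #     resp = client.get_tags(params)
    #     values.concat(resp.tags)
    #     break unless resp.next_page_token
    #     params[:next_page_token] = resp.next_page_token
    #   end
    #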
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetTags AWS API Documentation
#
# @overload get_tags(params = {})
# @param [Hash] params ({})
def get_tags(params = {}, options = {})
req = build_request(:get_tags, params)
req.send_request(options)
end
# Retrieves a forecast for how much Amazon Web Services predicts that
# you will use over the forecast time period that you select, based on
# your past usage.
#
# @option params [required, Types::DateInterval] :time_period
# The start and end dates of the period that you want to retrieve usage
# forecast for. The start date is inclusive, but the end date is
# exclusive. For example, if `start` is `2017-01-01` and `end` is
# `2017-05-01`, then the cost and usage data is retrieved from
# `2017-01-01` up to and including `2017-04-30` but not including
# `2017-05-01`.
#
# @option params [required, String] :metric
# Which metric Cost Explorer uses to create your forecast.
#
# Valid values for a `GetUsageForecast` call are the following:
#
# * USAGE\_QUANTITY
#
# * NORMALIZED\_USAGE\_AMOUNT
#
# @option params [required, String] :granularity
# How granular you want the forecast to be. You can get 3 months of
# `DAILY` forecasts or 12 months of `MONTHLY` forecasts.
#
# The `GetUsageForecast` operation supports only `DAILY` and `MONTHLY`
# granularities.
#
# @option params [Types::Expression] :filter
# The filters that you want to use to filter your forecast. Cost
# Explorer API supports all of the Cost Explorer filters.
#
# @option params [Integer] :prediction_interval_level
# Cost Explorer always returns the mean forecast as a single point. You
# can request a prediction interval around the mean by specifying a
# confidence level. The higher the confidence level, the more confident
# Cost Explorer is about the actual value falling in the prediction
# interval. Higher confidence levels result in wider prediction
# intervals.
#
# @return [Types::GetUsageForecastResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetUsageForecastResponse#total #total} => Types::MetricValue
# * {Types::GetUsageForecastResponse#forecast_results_by_time #forecast_results_by_time} => Array<Types::ForecastResult>
#
# @example Request syntax with placeholder values
#
# resp = client.get_usage_forecast({
# time_period: { # required
# start: "YearMonthDay", # required
# end: "YearMonthDay", # required
# },
# metric: "BLENDED_COST", # required, accepts BLENDED_COST, UNBLENDED_COST, AMORTIZED_COST, NET_UNBLENDED_COST, NET_AMORTIZED_COST, USAGE_QUANTITY, NORMALIZED_USAGE_AMOUNT
# granularity: "DAILY", # required, accepts DAILY, MONTHLY, HOURLY
# filter: {
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# prediction_interval_level: 1,
# })
#
# @example Response structure
#
# resp.total.amount #=> String
# resp.total.unit #=> String
# resp.forecast_results_by_time #=> Array
# resp.forecast_results_by_time[0].time_period.start #=> String
# resp.forecast_results_by_time[0].time_period.end #=> String
# resp.forecast_results_by_time[0].mean_value #=> String
# resp.forecast_results_by_time[0].prediction_interval_lower_bound #=> String
# resp.forecast_results_by_time[0].prediction_interval_upper_bound #=> String
#
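    # The editor-added sketch below assumes a configured `client` and prints
    # the mean forecast with its prediction interval, using only the fields
    # shown above; the interval bounds are returned when
    # `prediction_interval_level` is supplied.
    #
    # @example Reading the forecast and its interval (illustrative sketch)
    #
    #   resp = client.get_usage_forecast(
    #     time_period: { start: "2019-10-01", end: "2019-11-01" },
    #     metric: "USAGE_QUANTITY",
    #     granularity: "MONTHLY",
    #     prediction_interval_level: 80
    #   )
    #   puts "Total forecast: #{resp.total.amount} #{resp.total.unit}"
    #   resp.forecast_results_by_time.each do |result|
    #     puts "#{result.time_period.start}: #{result.mean_value} " \
    #          "[#{result.prediction_interval_lower_bound}, #{result.prediction_interval_upper_bound}]"
    #   end
    #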
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetUsageForecast AWS API Documentation
#
# @overload get_usage_forecast(params = {})
# @param [Hash] params ({})
def get_usage_forecast(params = {}, options = {})
req = build_request(:get_usage_forecast, params)
req.send_request(options)
end
# Returns the name, ARN, `NumberOfRules` and effective dates of all Cost
# Categories defined in the account. You have the option to use
# `EffectiveOn` to return a list of Cost Categories that were active on
# a specific date. If there is no `EffectiveOn` specified, you’ll see
    # Cost Categories that are effective on the current date. If a Cost
    # Category is still effective, `EffectiveEnd` is omitted in the
# response. `ListCostCategoryDefinitions` supports pagination. The
# request can have a `MaxResults` range up to 100.
#
# @option params [String] :effective_on
# The date when the Cost Category was effective.
#
# @option params [String] :next_token
# The token to retrieve the next set of results. Amazon Web Services
# provides the token when the response from a previous call has more
# results than the maximum page size.
#
# @option params [Integer] :max_results
# The number of entries a paginated response contains.
#
# @return [Types::ListCostCategoryDefinitionsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListCostCategoryDefinitionsResponse#cost_category_references #cost_category_references} => Array<Types::CostCategoryReference>
# * {Types::ListCostCategoryDefinitionsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_cost_category_definitions({
# effective_on: "ZonedDateTime",
# next_token: "NextPageToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.cost_category_references #=> Array
# resp.cost_category_references[0].cost_category_arn #=> String
# resp.cost_category_references[0].name #=> String
# resp.cost_category_references[0].effective_start #=> String
# resp.cost_category_references[0].effective_end #=> String
# resp.cost_category_references[0].number_of_rules #=> Integer
# resp.next_token #=> String
#
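    # Because this response is pageable, all Cost Category references can be
    # gathered by walking the pages. This is an editor-added sketch that
    # assumes a configured `client`.
    #
    # @example Listing every Cost Category ARN (illustrative sketch)
    #
    #   client.list_cost_category_definitions.each_page do |page|
    #     page.cost_category_references.each do |ref|
    #       puts "#{ref.name}: #{ref.cost_category_arn}"
    #     end
    #   end
    #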
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/ListCostCategoryDefinitions AWS API Documentation
#
# @overload list_cost_category_definitions(params = {})
# @param [Hash] params ({})
def list_cost_category_definitions(params = {}, options = {})
req = build_request(:list_cost_category_definitions, params)
req.send_request(options)
end
# Updates an existing Cost Category. Changes made to the Cost Category
# rules will be used to categorize the current month’s expenses and
# future expenses. This won’t change categorization for the previous
# months.
#
# @option params [required, String] :cost_category_arn
# The unique identifier for your Cost Category.
#
# @option params [required, String] :rule_version
# The rule schema version in this particular Cost Category.
#
# @option params [required, Array<Types::CostCategoryRule>] :rules
# The `Expression` object used to categorize costs. For more
    # information, see [CostCategoryRule][1].
#
#
#
# [1]: https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_CostCategoryRule.html
#
# @return [Types::UpdateCostCategoryDefinitionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateCostCategoryDefinitionResponse#cost_category_arn #cost_category_arn} => String
# * {Types::UpdateCostCategoryDefinitionResponse#effective_start #effective_start} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_cost_category_definition({
# cost_category_arn: "Arn", # required
# rule_version: "CostCategoryExpression.v1", # required, accepts CostCategoryExpression.v1
# rules: [ # required
# {
# value: "CostCategoryValue", # required
# rule: { # required
# or: [
# {
# # recursive Expression
# },
# ],
# and: [
# {
# # recursive Expression
# },
# ],
# not: {
# # recursive Expression
# },
# dimensions: {
# key: "AZ", # accepts AZ, INSTANCE_TYPE, LINKED_ACCOUNT, LINKED_ACCOUNT_NAME, OPERATION, PURCHASE_TYPE, REGION, SERVICE, SERVICE_CODE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# tags: {
# key: "TagKey",
# values: ["Value"],
# match_options: ["EQUALS"], # accepts EQUALS, STARTS_WITH, ENDS_WITH, CONTAINS, CASE_SENSITIVE, CASE_INSENSITIVE
# },
# cost_categories: {
# key: "CostCategoryName",
# values: ["Value"],
# },
# },
# },
# ],
# })
#
# @example Response structure
#
# resp.cost_category_arn #=> String
# resp.effective_start #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/UpdateCostCategoryDefinition AWS API Documentation
#
# @overload update_cost_category_definition(params = {})
# @param [Hash] params ({})
def update_cost_category_definition(params = {}, options = {})
req = build_request(:update_cost_category_definition, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-costexplorer'
context[:gem_version] = '1.43.1'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 50.777535 | 556 | 0.678486 |
1ab512acd1033309ddcac8469797936718575107
| 304 |
post '/interviews/:interview_id/feedbacks' do
feedback = Feedback.new(params[:feed])
@interview = Interview.find(params[:feed][:interview_id])
pass_user
if feedback.save
redirect "/interviews/#{@interview.id}"
else
@error = 'all fields are required'
erb :'interviews/show'
end
end
| 25.333333 | 59 | 0.700658 |
18745b6e043387e343049c766ea7418e2ccfeab1
| 63 |
module Fog
module Brightbox
VERSION = "0.13.0"
end
end
| 10.5 | 22 | 0.650794 |
33dc6bdea9787d57e9e64dd9c58d7bbcbf1783dc
| 3,364 |
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Graphql::Pagination::Keyset::Conditions::NotNullCondition do
describe '#build' do
let(:operators) { ['>', '>'] }
let(:before_or_after) { :after }
let(:condition) { described_class.new(arel_table, order_list, values, operators, before_or_after) }
context 'when there is only one ordering field' do
let(:arel_table) { Issue.arel_table }
let(:order_list) { [double(named_function: nil, attribute_name: 'id')] }
let(:values) { [500] }
let(:operators) { ['>'] }
it 'generates a single condition sql' do
expected_sql = <<~SQL
("issues"."id" > 500)
SQL
expect(condition.build.squish).to eq expected_sql.squish
end
end
context 'when ordering by a column attribute' do
let(:arel_table) { Issue.arel_table }
let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
let(:values) { [1500, 500] }
shared_examples ':after condition' do
it 'generates :after sql' do
expected_sql = <<~SQL
("issues"."relative_position" > 1500)
OR (
"issues"."relative_position" = 1500
AND
"issues"."id" > 500
)
OR ("issues"."relative_position" IS NULL)
SQL
expect(condition.build.squish).to eq expected_sql.squish
end
end
context 'when :after' do
it_behaves_like ':after condition'
end
context 'when :before' do
let(:before_or_after) { :before }
it 'generates :before sql' do
expected_sql = <<~SQL
("issues"."relative_position" > 1500)
OR (
"issues"."relative_position" = 1500
AND
"issues"."id" > 500
)
SQL
expect(condition.build.squish).to eq expected_sql.squish
end
end
context 'when :foo' do
let(:before_or_after) { :foo }
it_behaves_like ':after condition'
end
end
context 'when ordering by LOWER' do
let(:arel_table) { Project.arel_table }
let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) }
let(:values) { ['Test', 500] }
context 'when :after' do
it 'generates :after sql' do
expected_sql = <<~SQL
(LOWER("projects"."name") > 'test')
OR (
LOWER("projects"."name") = 'test'
AND
"projects"."id" > 500
)
OR (LOWER("projects"."name") IS NULL)
SQL
expect(condition.build.squish).to eq expected_sql.squish
end
end
context 'when :before' do
let(:before_or_after) { :before }
it 'generates :before sql' do
expected_sql = <<~SQL
(LOWER("projects"."name") > 'test')
OR (
LOWER("projects"."name") = 'test'
AND
"projects"."id" > 500
)
SQL
expect(condition.build.squish).to eq expected_sql.squish
end
end
end
end
end
| 29 | 144 | 0.54786 |
2876c47c2a3ad72bc1a65b39caacd322148314df
| 96 |
RSpec.describe "first spec" do
pending "is a spec" do
expect(true).to be(false)
end
end
| 16 | 30 | 0.677083 |
03502cea19711e43b85807e032f4ea4078fba59f
| 894 |
class UsersController < ApplicationController
before_action :set_user, only: [:show, :edit, :update, :destroy]
def index
authorize User
@users = User.where.not(id: current_user.id)
end
def show
end
def edit
end
def update
service = UpdatePermissionsService.call(@user, params.dig(:user, :permissions))
respond_to do |format|
if service.success?
format.html { redirect_to edit_user_path(@user), notice: 'User was successfully updated.' }
format.json { render :show, status: :ok, location: @user }
else
format.html { render :edit }
format.json { render json: service.errors, status: :unprocessable_entity }
end
end
end
private
def set_user
@user = User.find(params[:id])
authorize @user
end
def user_params
params.require(:user).permit(:email, :permissions)
end
end
| 23.526316 | 99 | 0.655481 |
d5723f03968c528ddfd55f96bd4eff145ba5a0f4
| 1,660 |
include_recipe "env"
include_recipe "runit"
package "libpcre3"
package "libpcre3-dev"
package "libssl-dev"
remote_file File.join("/tmp", "nginx-#{node[:nginx][:version]}.tar.gz") do
source "http://nginx.org/download/nginx-#{node[:nginx][:version]}.tar.gz"
  not_if { ::File.exist?(File.join("/tmp", "nginx-#{node[:nginx][:version]}.tar.gz")) }
end
remote_file File.join("/tmp", "nginx_upload_module-#{node[:nginx][:upload_module_version]}.tar.gz") do
source "http://www.grid.net.ru/nginx/download/nginx_upload_module-#{node[:nginx][:upload_module_version]}.tar.gz"
  not_if { ::File.exist?(File.join("/tmp", "nginx_upload_module-#{node[:nginx][:upload_module_version]}.tar.gz")) }
end
%w[logs run sites upload].each do |dir|
directory File.join(node[:nginx][:path], dir) do
owner node[:nginx][:runner]
group node[:nginx][:runner]
mode "0755"
recursive true
action :create
end
end
bash "Install Nginx" do
cwd "/tmp"
code <<-EOH
tar xzf nginx_upload_module-#{node[:nginx][:upload_module_version]}.tar.gz
tar xzf nginx-#{node[:nginx][:version]}.tar.gz
cd nginx-#{node[:nginx][:version]}
./configure --prefix=#{node[:nginx][:path]} --add-module=#{File.join("/tmp", "nginx_upload_module-" + node[:nginx][:upload_module_version])}
make
make install
EOH
not_if "#{File.join(node[:nginx][:path], "sbin/nginx")} -V 2>&1 | grep nginx_upload_module"
end
runit_service "nginx"
template "nginx.conf" do
path File.join(node[:nginx][:path], "conf/nginx.conf")
source "nginx.conf.erb"
owner node[:nginx][:runner]
group node[:nginx][:runner]
mode "0644"
notifies :restart, "service[nginx]", :immediately
end
| 32.54902 | 142 | 0.689157 |
f84a94ba5f9f2925081d21e66ec83c3a8388e653
| 77 |
# frozen_string_literal: true
default['node_exporter']['version'] = '1.0.0'
| 19.25 | 45 | 0.714286 |
bb8507a1898f274d781804382efe00af76876194
| 83,584 |
# encoding: utf-8
require 'spec_helper'
module Punchblock
module Translator
class Asterisk
module Component
describe Output do
include HasMockCallbackConnection
let(:renderer) { nil }
let(:ami_client) { double('AMI') }
let(:translator) { Punchblock::Translator::Asterisk.new ami_client, connection }
let(:mock_call) { Punchblock::Translator::Asterisk::Call.new 'foo', translator, ami_client, connection }
let :original_command do
Punchblock::Component::Output.new command_options
end
let :ssml_doc do
RubySpeech::SSML.draw do
say_as(:interpret_as => :cardinal) { 'FOO' }
end
end
let(:command_opts) { {} }
let :command_options do
{ :render_document => {:value => ssml_doc}, renderer: renderer }
end
let(:ast13mode) { false }
subject { Output.new original_command, mock_call }
def expect_answered(value = true)
allow(mock_call).to receive(:answered?).and_return(value)
end
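          # Stubs the MRCPSynth AGI call and asserts that its option string matches the given pattern.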
def expect_mrcpsynth_with_options(options)
expect(mock_call).to receive(:execute_agi_command).once { |*args|
expect(args[0]).to eq('EXEC MRCPSynth')
expect(args[1]).to match options
}.and_return code: 200, result: 1
end
describe '#execute' do
before { original_command.request! }
context 'with an invalid renderer' do
let(:renderer) { 'foobar' }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'The renderer foobar is unsupported.'
expect(original_command.response(0.1)).to eq(error)
end
end
context 'with a renderer of :swift' do
let(:renderer) { 'swift' }
let(:audio_filename) { 'http://foo.com/bar.mp3' }
let :ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename
say_as(:interpret_as => :cardinal) { 'FOO' }
end
end
let :command_options do
{ :render_document => {:value => ssml_doc}, renderer: renderer }.merge(command_opts)
end
def ssml_with_options(prefix = '', postfix = '')
base_doc = ssml_doc.to_s.squish.gsub(/["\\]/) { |m| "\\#{m}" }
prefix + base_doc + postfix
end
before { expect_answered }
it "should execute Swift" do
expect(mock_call).to receive(:execute_agi_command).once.with 'EXEC Swift', ssml_with_options
subject.execute
end
it 'should send a complete event when Swift completes' do
expect(mock_call).to receive(:execute_agi_command).and_return code: 200, result: 1
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when we get a RubyAMI Error" do
it "should send an error complete event" do
error = RubyAMI::Error.new.tap { |e| e.message = 'FooBar' }
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to AMI error 'FooBar'")
end
end
context "when the channel is gone" do
it "should send an error complete event" do
error = ChannelGoneError.new 'FooBar'
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Hangup
end
end
context "when the call is not answered" do
before { expect_answered false }
it "should send progress" do
expect(mock_call).to receive(:send_progress)
expect(mock_call).to receive(:execute_agi_command).and_return code: 200, result: 1
subject.execute
end
end
describe 'interrupt_on' do
context "set to nil" do
let(:command_opts) { { :interrupt_on => nil } }
it "should not add interrupt arguments" do
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Swift', ssml_with_options).and_return code: 200, result: 1
subject.execute
end
end
context "set to :any" do
let(:command_opts) { { :interrupt_on => :any } }
it "should add the interrupt options to the argument" do
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Swift', ssml_with_options('', '|1|1')).and_return code: 200, result: 1
subject.execute
end
end
context "set to :dtmf" do
let(:command_opts) { { :interrupt_on => :dtmf } }
it "should add the interrupt options to the argument" do
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Swift', ssml_with_options('', '|1|1')).and_return code: 200, result: 1
subject.execute
end
end
context "set to :voice" do
let(:command_opts) { { :interrupt_on => :voice } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An interrupt-on value of speech is unsupported.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'voice' do
context "set to nil" do
let(:command_opts) { { :voice => nil } }
it "should not add a voice at the beginning of the argument" do
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Swift', ssml_with_options).and_return code: 200, result: 1
subject.execute
end
end
context "set to Leonard" do
let(:command_opts) { { :voice => "Leonard" } }
it "should add a voice at the beginning of the argument" do
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Swift', ssml_with_options('Leonard^', '')).and_return code: 200, result: 1
subject.execute
end
end
end
describe "with multiple documents" do
let :first_ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename
end
end
let :second_ssml_doc do
RubySpeech::SSML.draw do
say_as(:interpret_as => :cardinal) { 'FOO' }
end
end
let(:command_opts) { { render_documents: [{value: first_ssml_doc}, {value: second_ssml_doc}] } }
it "executes Swift with a concatenated version of the documents" do
expect(mock_call).to receive(:execute_agi_command).once.with 'EXEC Swift', ssml_with_options
subject.execute
end
end
end
context 'with a renderer of :unimrcp' do
let(:renderer) { :unimrcp }
let(:audio_filename) { 'http://foo.com/bar.mp3' }
let :ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename
say_as(:interpret_as => :cardinal) { 'FOO' }
end
end
let(:command_opts) { {} }
let :command_options do
{ :render_document => {:value => ssml_doc}, renderer: renderer }.merge(command_opts)
end
let(:synthstatus) { 'OK' }
before { allow(mock_call).to receive(:channel_var).with('SYNTHSTATUS').and_return synthstatus }
before { expect_answered }
it "should execute MRCPSynth" do
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC MRCPSynth', ["\"#{ssml_doc.to_s.squish.gsub('"', '\"')}\"", ''].join(',')).and_return code: 200, result: 1
subject.execute
end
it 'should send a complete event when MRCPSynth completes' do
expect(mock_call).to receive(:execute_agi_command).and_return code: 200, result: 1
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when we get a RubyAMI Error" do
it "should send an error complete event" do
error = RubyAMI::Error.new.tap { |e| e.message = 'FooBar' }
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to AMI error 'FooBar'")
end
end
context "when the channel is gone" do
it "should send an error complete event" do
error = ChannelGoneError.new 'FooBar'
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Hangup
end
end
context "when the call is not answered" do
before { expect_answered false }
it "should send progress" do
expect(mock_call).to receive(:send_progress)
expect(mock_call).to receive(:execute_agi_command).and_return code: 200, result: 1
subject.execute
end
end
context "when the SYNTHSTATUS variable is set to 'ERROR'" do
let(:synthstatus) { 'ERROR' }
it "should send an error complete event" do
expect(mock_call).to receive(:execute_agi_command).and_return code: 200, result: 1
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to UniMRCP error")
end
end
describe 'document' do
context 'unset' do
let(:ssml_doc) { nil }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An SSML document is required.'
expect(original_command.response(0.1)).to eq(error)
end
end
context 'with multiple documents' do
let(:command_opts) { { :render_documents => [{:value => ssml_doc}, {:value => ssml_doc}] } }
it "should execute MRCPSynth once with each document" do
param = ["\"#{ssml_doc.to_s.squish.gsub('"', '\"')}\"", ''].join(',')
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC MRCPSynth', param).and_return code: 200, result: 1
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC MRCPSynth', param).and_return code: 200, result: 1
subject.execute
end
it 'should not execute further output after a stop command' do
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.5
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.2
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
end
end
describe 'start-offset' do
context 'unset' do
let(:command_opts) { { :start_offset => nil } }
it 'should not pass any options to MRCPSynth' do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context 'set' do
let(:command_opts) { { :start_offset => 10 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A start_offset value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'start-paused' do
context 'false' do
let(:command_opts) { { :start_paused => false } }
it 'should not pass any options to MRCPSynth' do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context 'true' do
let(:command_opts) { { :start_paused => true } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A start_paused value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'repeat-interval' do
context 'unset' do
let(:command_opts) { { :repeat_interval => nil } }
it 'should not pass any options to MRCPSynth' do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context 'set' do
let(:command_opts) { { :repeat_interval => 10 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A repeat_interval value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'repeat-times' do
context 'unset' do
let(:command_opts) { { :repeat_times => nil } }
it 'should not pass any options to MRCPSynth' do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context 'set' do
let(:command_opts) { { :repeat_times => 2 } }
it "should render the specified number of times" do
2.times { expect_mrcpsynth_with_options(//) }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context 'to 0' do
let(:command_opts) { { :repeat_times => 0 } }
it "should render 10,000 the specified number of times" do
expect_answered
1000.times { expect_mrcpsynth_with_options(//) }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
end
it 'should not execute further output after a stop command' do
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.2
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.1
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
end
end
describe 'max-time' do
context 'unset' do
let(:command_opts) { { :max_time => nil } }
it 'should not pass any options to MRCPSynth' do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context 'set' do
let(:command_opts) { { :max_time => 30 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A max_time value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'voice' do
context 'unset' do
let(:command_opts) { { :voice => nil } }
it 'should not pass the v option to MRCPSynth' do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context 'set' do
let(:command_opts) { { :voice => 'alison' } }
it 'should pass the v option to MRCPSynth' do
expect_mrcpsynth_with_options(/v=alison/)
subject.execute
end
end
end
describe 'interrupt_on' do
context "set to nil" do
let(:command_opts) { { :interrupt_on => nil } }
it "should not pass the i option to MRCPSynth" do
expect_mrcpsynth_with_options(//)
subject.execute
end
end
context "set to :any" do
let(:command_opts) { { :interrupt_on => :any } }
it "should pass the i option to MRCPSynth" do
expect_mrcpsynth_with_options(/i=any/)
subject.execute
end
end
context "set to :dtmf" do
let(:command_opts) { { :interrupt_on => :dtmf } }
it "should pass the i option to MRCPSynth" do
expect_mrcpsynth_with_options(/i=any/)
subject.execute
end
end
context "set to :voice" do
let(:command_opts) { { :interrupt_on => :voice } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An interrupt-on value of speech is unsupported.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
end
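            # Native Asterisk playback is exercised both with an explicit :asterisk renderer and with the default (nil) renderer.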
[:asterisk, nil].each do |renderer|
context "with a renderer of #{renderer.inspect}" do
def expect_playback(filename = audio_filename)
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', filename).and_return code: 200
end
def expect_playback_noanswer
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', audio_filename + ',noanswer').and_return code: 200
end
let(:audio_filename) { 'tt-monkeys' }
let :ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename
end
end
let(:command_opts) { {} }
let :command_options do
{ :render_document => {:value => ssml_doc}, renderer: renderer }.merge(command_opts)
end
let :original_command do
Punchblock::Component::Output.new command_options
end
let(:playbackstatus) { 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('PLAYBACKSTATUS').and_return playbackstatus }
describe 'ssml' do
context 'unset' do
let(:ssml_doc) { nil }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An SSML document is required.'
expect(original_command.response(0.1)).to eq(error)
end
end
context 'with a single audio SSML node' do
let(:audio_filename) { 'tt-monkeys' }
let :ssml_doc do
RubySpeech::SSML.draw { audio :src => audio_filename }
end
it 'should playback the audio file using Playback' do
expect_answered
expect_playback
subject.execute
end
it 'should send a complete event when the file finishes playback' do
def mock_call.answered?
true
end
expect_playback
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when the audio filename is prefixed by file://" do
let(:audio_filename) { 'file://tt-monkeys' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback 'tt-monkeys'
subject.execute
end
end
context "when the audio filename has an extension" do
let(:audio_filename) { 'tt-monkeys.wav' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback 'tt-monkeys'
subject.execute
end
context "when there are other dots in the filename" do
let(:audio_filename) { 'blue.tt-monkeys.wav' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback 'blue.tt-monkeys'
subject.execute
end
context "and no file extension" do
let(:audio_filename) { '/var/lib/gems/1.9.1/gems/myapp-1.0.0/prompts/greeting' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback '/var/lib/gems/1.9.1/gems/myapp-1.0.0/prompts/greeting'
subject.execute
end
end
end
end
context "when we get a RubyAMI Error" do
it "should send an error complete event" do
expect_answered
error = RubyAMI::Error.new.tap { |e| e.message = 'FooBar' }
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to AMI error 'FooBar'")
end
end
context "when the channel is gone" do
it "should send an error complete event" do
expect_answered
error = ChannelGoneError.new 'FooBar'
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Hangup
end
end
context "when the PLAYBACKSTATUS variable is set to 'FAILED'" do
let(:playbackstatus) { 'FAILED' }
it "should send an error complete event" do
expect_answered
expect(mock_call).to receive(:execute_agi_command).and_return code: 200, result: 1
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to playback error")
end
end
end
context 'with a single text node without spaces' do
let(:audio_filename) { 'tt-monkeys' }
let :ssml_doc do
RubySpeech::SSML.draw { string audio_filename }
end
it 'should playback the audio file using Playback' do
expect_answered
expect_playback
subject.execute
end
it 'should send a complete event when the file finishes playback' do
expect_answered
expect_playback
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when we get a RubyAMI Error" do
it "should send an error complete event" do
expect_answered
error = RubyAMI::Error.new.tap { |e| e.message = 'FooBar' }
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to AMI error 'FooBar'")
end
end
context "with early media playback" do
it "should play the file with Playback" do
expect_answered false
expect_playback_noanswer
expect(mock_call).to receive(:send_progress)
subject.execute
end
context "with interrupt_on set to something that is not nil" do
let(:audio_filename) { 'tt-monkeys' }
let :command_options do
{
:render_document => {
:value => RubySpeech::SSML.draw { string audio_filename },
},
:interrupt_on => :any
}
end
it "should return an error when the output is interruptible and it is early media" do
expect_answered false
error = ProtocolError.new.setup 'option error', 'Interrupt digits are not allowed with early media.'
subject.execute
expect(original_command.response(0.1)).to eq(error)
end
end
end
end
context 'with multiple audio SSML nodes' do
let(:audio_filename1) { 'foo' }
let(:audio_filename2) { 'bar' }
let :ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename1
audio :src => audio_filename2
end
end
it 'should playback all audio files using Playback' do
latch = CountDownLatch.new 2
expect_playback [audio_filename1, audio_filename2].join('&')
expect_answered
subject.execute
latch.wait 2
sleep 2
end
it 'should send a complete event after the final file has finished playback' do
expect_answered
expect_playback [audio_filename1, audio_filename2].join('&')
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
subject.execute
expect(latch.wait(2)).to be_truthy
end
end
context "with an SSML document containing elements other than <audio/>" do
let :ssml_doc do
RubySpeech::SSML.draw do
string "Foo Bar"
end
end
it "should return an unrenderable document error" do
subject.execute
error = ProtocolError.new.setup 'unrenderable document error', 'The provided document could not be rendered. See http://adhearsion.com/docs/common_problems#unrenderable-document-error for details.'
expect(original_command.response(0.1)).to eq(error)
end
end
context 'with multiple documents' do
let(:command_opts) { { render_documents: [{value: ssml_doc}, {value: ssml_doc}] } }
it "should render each document in turn using a Playback per document" do
expect_answered
2.times { expect_playback }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
it 'should not execute further output after a stop command' do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.2
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.1
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
context "when the PLAYBACKSTATUS variable is set to 'FAILED'" do
let(:playbackstatus) { 'FAILED' }
it "should terminate playback and send an error complete event" do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.and_return code: 200, result: 1
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to playback error")
end
end
end
end
describe 'start-offset' do
context 'unset' do
let(:command_opts) { { :start_offset => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :start_offset => 10 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A start_offset value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'start-paused' do
context 'false' do
let(:command_opts) { { :start_paused => false } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'true' do
let(:command_opts) { { :start_paused => true } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A start_paused value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'repeat-interval' do
context 'unset' do
let(:command_opts) { { :repeat_interval => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :repeat_interval => 10 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A repeat_interval value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'repeat-times' do
context 'unset' do
let(:command_opts) { { :repeat_times => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :repeat_times => 2 } }
it "should render the specified number of times" do
expect_answered
2.times { expect_playback }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context 'to 0' do
let(:command_opts) { { :repeat_times => 0 } }
it "should render 10,000 the specified number of times" do
expect_answered
1000.times { expect_playback }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
end
it 'should not execute further output after a stop command' do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.2
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.1
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
end
end
describe 'max-time' do
context 'unset' do
let(:command_opts) { { :max_time => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :max_time => 30 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A max_time value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'voice' do
context 'unset' do
let(:command_opts) { { :voice => nil } }
it 'should not pass the v option to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :voice => 'alison' } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A voice value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'interrupt_on' do
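                  # Builds a DTMF AMI event in either the legacy single-event format or the Asterisk 13 DTMFBegin/DTMFEnd format.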
def ami_event_for_dtmf(digit, position)
if ast13mode
RubyAMI::Event.new 'DTMF' + (position == :start ? 'Begin' : '') + (position == :end ? 'End' : ''),
'Digit' => digit.to_s
else
RubyAMI::Event.new 'DTMF',
'Digit' => digit.to_s,
'Start' => position == :start ? 'Yes' : 'No',
'End' => position == :end ? 'Yes' : 'No'
end
end
def send_ami_events_for_dtmf(digit)
mock_call.process_ami_event ami_event_for_dtmf(digit, :start)
mock_call.process_ami_event ami_event_for_dtmf(digit, :end)
end
let(:reason) { original_command.complete_event(5).reason }
let(:channel) { "SIP/1234-00000000" }
let :ami_event do
RubyAMI::Event.new 'AsyncAGI',
'SubEvent' => "Start",
'Channel' => channel,
'Env' => "agi_request%3A%20async%0Aagi_channel%3A%20SIP%2F1234-00000000%0Aagi_language%3A%20en%0Aagi_type%3A%20SIP%0Aagi_uniqueid%3A%201320835995.0%0Aagi_version%3A%201.8.4.1%0Aagi_callerid%3A%205678%0Aagi_calleridname%3A%20Jane%20Smith%0Aagi_callingpres%3A%200%0Aagi_callingani2%3A%200%0Aagi_callington%3A%200%0Aagi_callingtns%3A%200%0Aagi_dnid%3A%201000%0Aagi_rdnis%3A%20unknown%0Aagi_context%3A%20default%0Aagi_extension%3A%201000%0Aagi_priority%3A%201%0Aagi_enhanced%3A%200.0%0Aagi_accountcode%3A%20%0Aagi_threadid%3A%204366221312%0A%0A"
end
context "set to nil" do
let(:command_opts) { { :interrupt_on => nil } }
it "does not redirect the call" do
expect_answered
expect_playback
expect(mock_call).to receive(:redirect_back).never
subject.execute
expect(original_command.response(0.1)).to be_a Ref
send_ami_events_for_dtmf 1
end
end
context "set to :any" do
let(:command_opts) { { :interrupt_on => :any } }
before do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', audio_filename)
expect(subject).to receive(:send_finish).and_return nil
end
context "when a DTMF digit is received" do
it "sends the correct complete event" do
expect(mock_call).to receive :redirect_back
subject.execute
expect(original_command.response(0.1)).to be_a Ref
expect(original_command).not_to be_complete
send_ami_events_for_dtmf 1
mock_call.process_ami_event ami_event
sleep 0.2
expect(original_command).to be_complete
expect(reason).to be_a Punchblock::Component::Output::Complete::Finish
end
it "redirects the call back to async AGI" do
expect(mock_call).to receive(:redirect_back).once
subject.execute
expect(original_command.response(0.1)).to be_a Ref
send_ami_events_for_dtmf 1
end
end
end
context "set to :dtmf" do
let(:command_opts) { { :interrupt_on => :dtmf } }
before do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', audio_filename)
expect(subject).to receive(:send_finish).and_return nil
end
context "when a DTMF digit is received" do
it "sends the correct complete event" do
expect(mock_call).to receive :redirect_back
subject.execute
expect(original_command.response(0.1)).to be_a Ref
expect(original_command).not_to be_complete
send_ami_events_for_dtmf 1
mock_call.process_ami_event ami_event
sleep 0.2
expect(original_command).to be_complete
expect(reason).to be_a Punchblock::Component::Output::Complete::Finish
end
it "redirects the call back to async AGI" do
expect(mock_call).to receive(:redirect_back).once
subject.execute
expect(original_command.response(0.1)).to be_a Ref
send_ami_events_for_dtmf 1
end
end
end
context "set to :voice" do
let(:command_opts) { { :interrupt_on => :voice } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An interrupt-on value of speech is unsupported.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
end
end
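            # The :native_or_unimrcp renderer plays audio files natively with Playback and falls back to MRCPSynth for content it cannot render natively.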
context "with a renderer of :native_or_unimrcp" do
def expect_playback(filename = audio_filename)
expect(mock_call).to receive(:execute_agi_command).ordered.once.with('EXEC Playback', filename).and_return code: 200
end
def expect_playback_noanswer
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', audio_filename + ',noanswer').and_return code: 200
end
def expect_mrcpsynth(doc = ssml_doc)
expect(mock_call).to receive(:execute_agi_command).ordered.once.with('EXEC MRCPSynth', ["\"#{doc.to_s.squish.gsub('"', '\"')}\"", ''].join(',')).and_return code: 200, result: 1
end
let(:audio_filename) { 'tt-monkeys' }
let :ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename do
string "Foobar"
end
end
end
let(:command_opts) { {} }
let :command_options do
{ :render_document => {:value => ssml_doc}, renderer: :native_or_unimrcp }.merge(command_opts)
end
let :original_command do
Punchblock::Component::Output.new command_options
end
let(:playbackstatus) { 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('PLAYBACKSTATUS').and_return playbackstatus }
describe 'ssml' do
context 'unset' do
let(:ssml_doc) { nil }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An SSML document is required.'
expect(original_command.response(0.1)).to eq(error)
end
end
context 'with a single audio SSML node' do
let(:audio_filename) { 'tt-monkeys' }
let :ssml_doc do
RubySpeech::SSML.draw language: 'pt-BR' do
audio :src => audio_filename do
voice name: 'frank' do
string "Hello world"
end
end
end
end
it 'should playback the audio file using Playback' do
expect_answered
expect_playback
subject.execute
end
it 'should send a complete event when the file finishes playback' do
def mock_call.answered?
true
end
expect_playback
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when the audio filename is prefixed by file://" do
let(:audio_filename) { 'file://tt-monkeys' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback 'tt-monkeys'
subject.execute
end
end
context "when the audio filename has an extension" do
let(:audio_filename) { 'tt-monkeys.wav' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback 'tt-monkeys'
subject.execute
end
context "when there are other dots in the filename" do
let(:audio_filename) { 'blue.tt-monkeys.wav' }
it 'should playback the audio file using Playback' do
expect_answered
expect_playback 'blue.tt-monkeys'
subject.execute
end
end
end
context "when we get a RubyAMI Error" do
it "should send an error complete event" do
expect_answered
error = RubyAMI::Error.new.tap { |e| e.message = 'FooBar' }
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to AMI error 'FooBar'")
end
end
context "when the channel is gone" do
it "should send an error complete event" do
expect_answered
error = ChannelGoneError.new 'FooBar'
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Hangup
end
end
context "when the PLAYBACKSTATUS variable is set to 'FAILED'" do
let(:playbackstatus) { 'FAILED' }
let(:synthstatus) { 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('SYNTHSTATUS').and_return synthstatus }
let :fallback_doc do
RubySpeech::SSML.draw language: 'pt-BR' do
voice name: 'frank' do
string "Hello world"
end
end
end
it "should attempt to render the children of the audio tag via MRCP and then send a complete event" do
expect_answered
expect_playback
expect_mrcpsynth fallback_doc
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "and the SYNTHSTATUS variable is set to 'ERROR'" do
let(:synthstatus) { 'ERROR' }
it "should send an error complete event" do
expect_answered
expect_playback
expect_mrcpsynth fallback_doc
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to UniMRCP error")
end
end
end
end
context 'with a single text node without spaces' do
let(:audio_filename) { 'tt-monkeys' }
let :ssml_doc do
RubySpeech::SSML.draw { string audio_filename }
end
it 'should playback the audio file using Playback' do
expect_answered
expect_playback
subject.execute
end
it 'should send a complete event when the file finishes playback' do
expect_answered
expect_playback
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when we get a RubyAMI Error" do
it "should send an error complete event" do
expect_answered
error = RubyAMI::Error.new.tap { |e| e.message = 'FooBar' }
expect(mock_call).to receive(:execute_agi_command).and_raise error
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to AMI error 'FooBar'")
end
end
context "with early media playback" do
it "should play the file with Playback" do
expect_answered false
expect_playback_noanswer
expect(mock_call).to receive(:send_progress)
subject.execute
end
context "with interrupt_on set to something that is not nil" do
let(:audio_filename) { 'tt-monkeys' }
let :command_options do
{
:render_document => {
:value => RubySpeech::SSML.draw { string audio_filename },
},
:interrupt_on => :any
}
end
it "should return an error when the output is interruptible and it is early media" do
expect_answered false
error = ProtocolError.new.setup 'option error', 'Interrupt digits are not allowed with early media.'
subject.execute
expect(original_command.response(0.1)).to eq(error)
end
end
end
end
context 'with multiple audio SSML nodes' do
let(:audio_filename1) { 'foo' }
let(:audio_filename2) { 'bar' }
let(:audio_filename3) { 'baz' }
let :ssml_doc do
RubySpeech::SSML.draw do
audio :src => audio_filename1 do
string "Fallback 1"
end
audio :src => audio_filename2 do
string "Fallback 2"
end
audio :src => audio_filename3 do
string "Fallback 3"
end
end
end
it 'should playback all audio files using Playback' do
latch = CountDownLatch.new 2
expect_playback audio_filename1
expect_playback audio_filename2
expect_playback audio_filename3
expect_answered
subject.execute
latch.wait 2
sleep 2
end
it 'should send a complete event after the final file has finished playback' do
expect_answered
expect_playback audio_filename1
expect_playback audio_filename2
expect_playback audio_filename3
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
subject.execute
expect(latch.wait(2)).to be_truthy
end
it 'should not execute further output after a stop command' do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.2
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.1
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
context "when the PLAYBACKSTATUS variable is set to 'FAILED'" do
let(:synthstatus) { 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('PLAYBACKSTATUS').and_return 'SUCCESS', 'FAILED', 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('SYNTHSTATUS').and_return synthstatus }
let :fallback_doc do
RubySpeech::SSML.draw do
string "Fallback 2"
end
end
it "should attempt to render the document via MRCP and then send a complete event" do
expect_answered
expect_playback audio_filename1
expect_playback audio_filename2
expect_mrcpsynth fallback_doc
expect_playback audio_filename3
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "and the SYNTHSTATUS variable is set to 'ERROR'" do
let(:synthstatus) { 'ERROR' }
it "should terminate playback and send an error complete event" do
expect_answered
expect_playback audio_filename1
expect_playback audio_filename2
expect_mrcpsynth fallback_doc
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to UniMRCP error")
end
end
end
end
context "with an SSML document containing top-level elements other than <audio/>" do
let :ssml_doc do
RubySpeech::SSML.draw do
voice name: 'Paul' do
string "Foo Bar"
end
end
end
before { allow(mock_call).to receive(:channel_var).with('SYNTHSTATUS').and_return 'SUCCESS' }
it "should attempt to render the document via MRCP and then send a complete event" do
expect_answered
expect_mrcpsynth ssml_doc
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
end
context "with mixed TTS and audio tags" do
let :ssml_doc do
RubySpeech::SSML.draw do
voice name: 'Paul' do
string "Foo Bar"
end
audio src: 'tt-monkeys'
voice name: 'Frank' do
string "Doo Dah"
end
string 'tt-weasels'
end
end
let :first_doc do
RubySpeech::SSML.draw do
voice name: 'Paul' do
string "Foo Bar"
end
end
end
let :second_doc do
RubySpeech::SSML.draw do
voice name: 'Frank' do
string "Doo Dah"
end
end
end
before { allow(mock_call).to receive(:channel_var).with('SYNTHSTATUS').and_return 'SUCCESS' }
it "should attempt to render the document via MRCP and then send a complete event" do
expect_answered
expect_mrcpsynth first_doc
expect_playback 'tt-monkeys'
expect_mrcpsynth second_doc
expect_playback 'tt-weasels'
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
end
context 'with multiple documents' do
let :second_ssml_doc do
RubySpeech::SSML.draw do
audio :src => 'two.wav' do
string "Bazzz"
end
end
end
let :third_ssml_doc do
RubySpeech::SSML.draw do
audio :src => 'three.wav' do
string "Barrrr"
end
end
end
let(:command_opts) { { render_documents: [{value: ssml_doc}, {value: second_ssml_doc}, {value: third_ssml_doc}] } }
it "should render each document in turn using a Playback per document" do
expect_answered
expect_playback
expect_playback 'two'
expect_playback 'three'
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
it 'should not execute further output after a stop command' do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.2
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.1
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
context "when the PLAYBACKSTATUS variable is set to 'FAILED'" do
let(:synthstatus) { 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('PLAYBACKSTATUS').and_return 'SUCCESS', 'FAILED', 'SUCCESS' }
before { allow(mock_call).to receive(:channel_var).with('SYNTHSTATUS').and_return synthstatus }
let :fallback_doc do
RubySpeech::SSML.draw do
string "Bazzz"
end
end
it "should attempt to render the document via MRCP and then send a complete event" do
expect_answered
expect_playback
expect_playback 'two'
expect_mrcpsynth fallback_doc
expect_playback 'three'
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "and the SYNTHSTATUS variable is set to 'ERROR'" do
let(:synthstatus) { 'ERROR' }
it "should terminate playback and send an error complete event" do
expect_answered
expect_playback
expect_playback 'two'
expect_mrcpsynth fallback_doc
subject.execute
complete_reason = original_command.complete_event(0.1).reason
expect(complete_reason).to be_a Punchblock::Event::Complete::Error
expect(complete_reason.details).to eq("Terminated due to UniMRCP error")
end
end
end
end
end
describe 'start-offset' do
context 'unset' do
let(:command_opts) { { :start_offset => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :start_offset => 10 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A start_offset value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'start-paused' do
context 'false' do
let(:command_opts) { { :start_paused => false } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'true' do
let(:command_opts) { { :start_paused => true } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A start_paused value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'repeat-interval' do
context 'unset' do
let(:command_opts) { { :repeat_interval => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :repeat_interval => 10 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A repeat_interval value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'repeat-times' do
context 'unset' do
let(:command_opts) { { :repeat_times => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :repeat_times => 2 } }
it "should render the specified number of times" do
expect_answered
2.times { expect_playback }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context 'to 0' do
let(:command_opts) { { :repeat_times => 0 } }
it "should render 10,000 the specified number of times" do
expect_answered
1000.times { expect_playback }
subject.execute
expect(original_command.complete_event(0.1).reason).to be_a Punchblock::Component::Output::Complete::Finish
end
end
it 'should not execute further output after a stop command' do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.ordered do
sleep 0.2
end
latch = CountDownLatch.new 1
expect(original_command).to receive(:add_event).once { |e|
expect(e.reason).to be_a Punchblock::Component::Output::Complete::Finish
latch.countdown!
}
Celluloid::Future.new { subject.execute }
sleep 0.1
expect(mock_call).to receive(:redirect_back).ordered
stop_command = Punchblock::Component::Stop.new
stop_command.request!
subject.execute_command stop_command
expect(latch.wait(2)).to be_truthy
end
end
end
describe 'max-time' do
context 'unset' do
let(:command_opts) { { :max_time => nil } }
it 'should not pass any options to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :max_time => 30 } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A max_time value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'voice' do
context 'unset' do
let(:command_opts) { { :voice => nil } }
it 'should not pass the v option to Playback' do
expect_answered
expect_playback
subject.execute
end
end
context 'set' do
let(:command_opts) { { :voice => 'alison' } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'A voice value is unsupported on Asterisk.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
describe 'interrupt_on' do
def ami_event_for_dtmf(digit, position)
if ast13mode
RubyAMI::Event.new 'DTMF' + (position == :start ? 'Begin' : '') + (position == :end ? 'End' : ''),
'Digit' => digit.to_s
else
RubyAMI::Event.new 'DTMF',
'Digit' => digit.to_s,
'Start' => position == :start ? 'Yes' : 'No',
'End' => position == :end ? 'Yes' : 'No'
end
end
def send_ami_events_for_dtmf(digit)
mock_call.process_ami_event ami_event_for_dtmf(digit, :start)
mock_call.process_ami_event ami_event_for_dtmf(digit, :end)
end
let(:reason) { original_command.complete_event(5).reason }
let(:channel) { "SIP/1234-00000000" }
let :ami_event do
RubyAMI::Event.new 'AsyncAGI',
'SubEvent' => "Start",
'Channel' => channel,
'Env' => "agi_request%3A%20async%0Aagi_channel%3A%20SIP%2F1234-00000000%0Aagi_language%3A%20en%0Aagi_type%3A%20SIP%0Aagi_uniqueid%3A%201320835995.0%0Aagi_version%3A%201.8.4.1%0Aagi_callerid%3A%205678%0Aagi_calleridname%3A%20Jane%20Smith%0Aagi_callingpres%3A%200%0Aagi_callingani2%3A%200%0Aagi_callington%3A%200%0Aagi_callingtns%3A%200%0Aagi_dnid%3A%201000%0Aagi_rdnis%3A%20unknown%0Aagi_context%3A%20default%0Aagi_extension%3A%201000%0Aagi_priority%3A%201%0Aagi_enhanced%3A%200.0%0Aagi_accountcode%3A%20%0Aagi_threadid%3A%204366221312%0A%0A"
end
context "set to nil" do
let(:command_opts) { { :interrupt_on => nil } }
it "does not redirect the call" do
expect_answered
expect_playback
expect(mock_call).to receive(:redirect_back).never
subject.execute
expect(original_command.response(0.1)).to be_a Ref
send_ami_events_for_dtmf 1
end
end
context "set to :any" do
let(:command_opts) { { :interrupt_on => :any } }
before do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', audio_filename)
expect(subject).to receive(:send_finish).and_return nil
end
context "when a DTMF digit is received" do
it "sends the correct complete event" do
expect(mock_call).to receive :redirect_back
subject.execute
expect(original_command.response(0.1)).to be_a Ref
expect(original_command).not_to be_complete
send_ami_events_for_dtmf 1
mock_call.process_ami_event ami_event
sleep 0.2
expect(original_command).to be_complete
expect(reason).to be_a Punchblock::Component::Output::Complete::Finish
end
it "redirects the call back to async AGI" do
expect(mock_call).to receive(:redirect_back).once
subject.execute
expect(original_command.response(0.1)).to be_a Ref
send_ami_events_for_dtmf 1
end
end
end
context "set to :dtmf" do
let(:command_opts) { { :interrupt_on => :dtmf } }
before do
expect_answered
expect(mock_call).to receive(:execute_agi_command).once.with('EXEC Playback', audio_filename)
expect(subject).to receive(:send_finish).and_return nil
end
def send_correct_complete_event
expect(mock_call).to receive :redirect_back
subject.execute
expect(original_command.response(0.1)).to be_a Ref
expect(original_command).not_to be_complete
send_ami_events_for_dtmf 1
mock_call.process_ami_event ami_event
sleep 0.2
expect(original_command).to be_complete
expect(reason).to be_a Punchblock::Component::Output::Complete::Finish
end
context "when a DTMF digit is received" do
it "sends the correct complete event" do
send_correct_complete_event
end
it "redirects the call back to async AGI" do
expect(mock_call).to receive(:redirect_back).once
subject.execute
expect(original_command.response(0.1)).to be_a Ref
send_ami_events_for_dtmf 1
end
context 'with an Asterisk 13 DTMFEnd event' do
let(:ast13mode) { true }
it "sends the correct complete event" do
send_correct_complete_event
end
end
end
end
context "set to :voice" do
let(:command_opts) { { :interrupt_on => :voice } }
it "should return an error and not execute any actions" do
subject.execute
error = ProtocolError.new.setup 'option error', 'An interrupt-on value of speech is unsupported.'
expect(original_command.response(0.1)).to eq(error)
end
end
end
end
end
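          # Mid-output commands: anything unrecognised gets a ProtocolError response, while Stop redirects the channel back to async AGI and completes with a Stop reason.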
describe "#execute_command" do
context "with a command it does not understand" do
let(:command) { Punchblock::Component::Output::Pause.new }
before { command.request! }
it "returns a ProtocolError response" do
subject.execute_command command
expect(command.response(0.1)).to be_a ProtocolError
end
end
context "with a Stop command" do
let(:command) { Punchblock::Component::Stop.new }
let(:reason) { original_command.complete_event(5).reason }
let(:channel) { "SIP/1234-00000000" }
let :ami_event do
RubyAMI::Event.new 'AsyncAGI',
'SubEvent' => "Start",
'Channel' => channel,
'Env' => "agi_request%3A%20async%0Aagi_channel%3A%20SIP%2F1234-00000000%0Aagi_language%3A%20en%0Aagi_type%3A%20SIP%0Aagi_uniqueid%3A%201320835995.0%0Aagi_version%3A%201.8.4.1%0Aagi_callerid%3A%205678%0Aagi_calleridname%3A%20Jane%20Smith%0Aagi_callingpres%3A%200%0Aagi_callingani2%3A%200%0Aagi_callington%3A%200%0Aagi_callingtns%3A%200%0Aagi_dnid%3A%201000%0Aagi_rdnis%3A%20unknown%0Aagi_context%3A%20default%0Aagi_extension%3A%201000%0Aagi_priority%3A%201%0Aagi_enhanced%3A%200.0%0Aagi_accountcode%3A%20%0Aagi_threadid%3A%204366221312%0A%0A"
end
before do
command.request!
original_command.request!
original_command.execute!
end
it "sets the command response to true" do
expect(mock_call).to receive(:redirect_back)
subject.execute_command command
expect(command.response(0.1)).to eq(true)
end
it "sends the correct complete event" do
expect(mock_call).to receive(:redirect_back)
subject.execute_command command
expect(original_command).not_to be_complete
mock_call.process_ami_event ami_event
expect(reason).to be_a Punchblock::Event::Complete::Stop
expect(original_command).to be_complete
end
it "redirects the call by unjoining it" do
expect(mock_call).to receive(:redirect_back)
subject.execute_command command
end
end
end
end
end
end
end
end
| 45.156132 | 569 | 0.49208 |
4ad73473609ba60669abd1736ccb9e9ad1b8a641
| 6,722 |
# coding: utf-8
lib = File.expand_path('../fastlane/lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'fastlane/version'
# Copy over the latest .rubocop.yml style guide
require 'yaml'
rubocop_config = File.expand_path('../.rubocop.yml', __FILE__)
config = YAML.safe_load(open(rubocop_config))
config.delete("require")
File.write("#{lib}/fastlane/plugins/template/.rubocop.yml", YAML.dump(config))
Gem::Specification.new do |spec|
spec.name = "fastlane"
spec.version = Fastlane::VERSION
# list of authors is regenerated and resorted on each release
spec.authors = ["Danielle Tomlinson",
"Josh Holtz",
"Joshua Liebowitz",
"Olivier Halligon",
"Iulian Onofrei",
"Jan Piotrowski",
"Luka Mirosevic",
"Matthew Ellis",
"Andrew McBurney",
"Kohki Miki",
"Maksym Grebenets",
"Stefan Natchev",
"Felix Krause",
"Manu Wallner",
"Helmut Januschka",
"Fumiya Nakamura",
"Jimmy Dee",
"Jérôme Lacoste",
"Aaron Brager",
"Jorge Revuelta H"]
spec.email = ["[email protected]"]
spec.summary = Fastlane::DESCRIPTION
spec.description = Fastlane::DESCRIPTION
spec.homepage = "https://fastlane.tools"
spec.license = "MIT"
spec.metadata = {
"docs_url" => "https://docs.fastlane.tools"
}
spec.required_ruby_version = '>= 2.0.0'
spec.files = Dir.glob("*/lib/**/*", File::FNM_DOTMATCH) + Dir["fastlane/swift/**/*"] + Dir["bin/*"] + Dir["*/README.md"] + %w(README.md LICENSE .yardopts) - Dir["fastlane/lib/fastlane/actions/device_grid/assets/*"] - Dir["fastlane/lib/fastlane/actions/docs/assets/*"]
spec.bindir = "bin"
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
# spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = Dir["*/lib"]
spec.add_dependency('slack-notifier', '>= 2.0.0', '< 3.0.0') # Slack notifications
spec.add_dependency('xcodeproj', '>= 1.8.1', '< 2.0.0') # Modify Xcode projects
spec.add_dependency('xcpretty', '~> 0.3.0') # prettify xcodebuild output
spec.add_dependency('terminal-notifier', '>= 2.0.0', '< 3.0.0') # macOS notifications
spec.add_dependency('terminal-table', '>= 1.4.5', '< 2.0.0') # Actions documentation
spec.add_dependency('plist', '>= 3.1.0', '< 4.0.0') # Needed for set_build_number_repository and get_info_plist_value actions
spec.add_dependency('CFPropertyList', '>= 2.3', '< 4.0.0') # Needed to be able to read binary plist format
spec.add_dependency('addressable', '>= 2.3', '< 3.0.0') # Support for URI templates
spec.add_dependency('multipart-post', '~> 2.0.0') # Needed for uploading builds to appetize
spec.add_dependency('word_wrap', '~> 1.0.0') # to add line breaks for tables with long strings
spec.add_dependency('public_suffix', '~> 2.0.0') # https://github.com/fastlane/fastlane/issues/10162
# TTY dependencies
spec.add_dependency('tty-screen', '>= 0.6.3', '< 1.0.0') # detect the terminal width
spec.add_dependency('tty-spinner', '>= 0.8.0', '< 1.0.0') # loading indicators
spec.add_dependency('babosa', '>= 1.0.2', "< 2.0.0")
spec.add_dependency('colored') # colored terminal output
spec.add_dependency('commander-fastlane', '>= 4.4.6', '< 5.0.0') # CLI parser
spec.add_dependency('excon', '>= 0.45.0', '< 1.0.0') # Great HTTP Client
spec.add_dependency('faraday-cookie_jar', '~> 0.0.6')
spec.add_dependency('fastimage', '>= 2.1.0', '< 3.0.0') # fetch the image sizes from the screenshots
spec.add_dependency('gh_inspector', '>= 1.1.2', '< 2.0.0') # search for issues on GitHub when something goes wrong
spec.add_dependency('highline', '>= 1.7.2', '< 2.0.0') # user inputs (e.g. passwords)
spec.add_dependency('json', '< 3.0.0') # Because sometimes it's just not installed
spec.add_dependency('mini_magick', '~> 4.5.1') # To open, edit and export PSD files
spec.add_dependency('multi_xml', '~> 0.5')
spec.add_dependency('rubyzip', '>= 1.2.2', '< 2.0.0') # fix swift/ipa in gym
spec.add_dependency('security', '= 0.1.3') # macOS Keychain manager, a dead project, no updates expected
spec.add_dependency('xcpretty-travis-formatter', '>= 0.0.3')
spec.add_dependency('dotenv', '>= 2.1.1', '< 3.0.0')
spec.add_dependency('bundler', '>= 1.12.0', '< 3.0.0') # Used for fastlane plugins
spec.add_dependency('faraday', '~> 0.9') # Used for deploygate, hockey and testfairy actions
spec.add_dependency('faraday_middleware', '~> 0.9') # same as faraday
spec.add_dependency('simctl', '~> 1.6.3') # Used for querying and interacting with iOS simulators
  # The Google API Client gem is *not* API stable between minor versions - hence the specific
  # version locking here (see the Gem::Requirement sketch after this gemspec). If you upgrade
  # this gem, make sure to upgrade the users of it as well.
spec.add_dependency('google-api-client', '>= 0.21.2', '< 0.24.0') # Google API Client to access Play Publishing API
spec.add_dependency('google-cloud-storage', '>= 1.15.0', '< 2.0.0') # Access Google Cloud Storage for match
spec.add_dependency('emoji_regex', '>= 0.1', '< 2.0') # Used to scan for Emoji in the changelog
# Development only
spec.add_development_dependency('rake', '< 12')
spec.add_development_dependency('rspec', '~> 3.5.0')
spec.add_development_dependency('rspec_junit_formatter', '~> 0.2.3')
spec.add_development_dependency('pry')
spec.add_development_dependency('pry-byebug')
spec.add_development_dependency('pry-rescue')
spec.add_development_dependency('pry-stack_explorer')
spec.add_development_dependency('yard', '~> 0.9.11')
spec.add_development_dependency('webmock', '~> 2.3.2')
spec.add_development_dependency('coveralls', '~> 0.8.13')
spec.add_development_dependency('rubocop', Fastlane::RUBOCOP_REQUIREMENT)
spec.add_development_dependency('rubocop-require_tools', '>= 0.1.2')
spec.add_development_dependency('rb-readline') # https://github.com/deivid-rodriguez/byebug/issues/289#issuecomment-251383465
spec.add_development_dependency('rest-client', '>= 1.8.0')
spec.add_development_dependency('fakefs', '~> 0.8.1')
spec.add_development_dependency('sinatra', '~> 1.4.8')
spec.add_development_dependency('xcov', '~> 1.4.1') # Used for xcov's parameters generation: https://github.com/fastlane/fastlane/pull/12416
spec.add_development_dependency('climate_control', '~> 0.2.0')
end
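# Illustration of the pessimistic pinning discussed above for google-api-client; this is
# plain RubyGems behaviour, verifiable in irb:
#
#   req = Gem::Requirement.new('>= 0.21.2', '< 0.24.0')
#   req.satisfied_by?(Gem::Version.new('0.23.9'))  # => true
#   req.satisfied_by?(Gem::Version.new('0.24.0'))  # => false, the next minor is excluded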
| 55.553719 | 269 | 0.646087 |
d54e56939f5340a0264bfc29ac0004b227742397
| 239 |
class Person < ActiveRecord::Base
validates_email_format_of :email,
:on => :create,
:message => 'fails with custom message',
:allow_nil => true
end
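# Illustrative behaviour sketch, assuming the validates_email_format_of gem is installed
# and a people table with an email column exists:
#
#   person = Person.new(:email => 'not-an-email')
#   person.valid?                         # => false (new records validate in the :create context)
#   person.errors[:email]                 # => ["fails with custom message"]
#   Person.new(:email => nil).valid?      # => true, because of :allow_nil => true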
| 26.555556 | 69 | 0.460251 |
f7a90fa25c3f51fd79aacfddc46248dbd43d4f83
| 1,190 |
# frozen_string_literal: true
module ComfortableMexicanSofa::Seeds::Layout
class Exporter < ComfortableMexicanSofa::Seeds::Exporter
def initialize(from, to = from)
super
self.path = ::File.join(ComfortableMexicanSofa.config.seeds_path, to, "layouts/")
end
def export!
prepare_folder!(path)
site.layouts.each do |layout|
layout_path = File.join(path, layout.ancestors.reverse.collect(&:identifier), layout.identifier)
FileUtils.mkdir_p(layout_path)
path = ::File.join(layout_path, "content.html")
data = []
attrs = {
"label" => layout.label,
"app_layout" => layout.app_layout,
"position" => layout.position
}.to_yaml
data << { header: "attributes", content: attrs }
data << { header: "content", content: layout.content }
data << { header: "js", content: layout.js }
data << { header: "css", content: layout.css }
write_file_content(path, data)
message = "[CMS SEEDS] Exported Layout \t #{layout.identifier}"
ComfortableMexicanSofa.logger.info(message)
end
end
end
end
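# Usage sketch, following the conventions of the other ComfortableMexicanSofa seed
# exporters (`from` is the site identifier, `to` the seed folder name, defaulting to `from`):
#
#   ComfortableMexicanSofa::Seeds::Layout::Exporter.new("default-site").export!
#   # => writes <identifier>/content.html files (attributes, content, js and css sections)
#   #    under "#{ComfortableMexicanSofa.config.seeds_path}/default-site/layouts/"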
| 29.02439 | 104 | 0.607563 |
0399d967ac90277f560f5e0558651b28e0afc99b
| 145 |
class Department < ApplicationRecord
has_many :personnels, class_name: 'Personnel', primary_key: 'id', foreign_key: 'workin_department_id'
end
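# Hypothetical inverse side, shown only for illustration (not part of this file):
#
#   class Personnel < ApplicationRecord
#     belongs_to :workin_department, class_name: 'Department',
#                foreign_key: 'workin_department_id', optional: true
#   end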
| 36.25 | 103 | 0.8 |
1a14fa1c4a4960149ddc755a592d87f02446d63b
| 4,821 |
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'concerns/omniauth_login'
module Redmine::MenuManager::TopMenuHelper
include Redmine::MenuManager::TopMenu::HelpMenu
include Redmine::MenuManager::TopMenu::ProjectsMenu
def render_top_menu_left
content_tag :ul, id: 'account-nav-left', class: 'menu_root account-nav' do
[render_main_top_menu_nodes,
render_projects_top_menu_node].join.html_safe
end
end
def render_top_menu_right
content_tag :ul, id: 'account-nav-right', class: 'menu_root account-nav' do
[render_module_top_menu_node,
render_help_top_menu_node,
render_user_top_menu_node].join.html_safe
end
end
private
def render_user_top_menu_node(items = first_level_menu_items_for(:account_menu))
if User.current.logged?
render_user_drop_down items
elsif omniauth_direct_login?
render_direct_login
else
render_login_drop_down
end
end
def render_login_drop_down
url = { controller: '/account', action: 'login' }
link = link_to url,
class: 'login',
title: l(:label_login) do
concat(t(:label_login))
concat('<i class="button--dropdown-indicator"></i>'.html_safe)
end
render_menu_dropdown(link, menu_item_class: 'drop-down last-child') do
render_login_partial
end
end
def render_direct_login
login = Redmine::MenuManager::MenuItem.new :login,
signin_path,
caption: I18n.t(:label_login),
html: { class: 'login' }
render_menu_node login
end
def render_user_drop_down(items)
avatar = avatar(User.current, fallbackIcon: 'icon2 icon-user')
render_menu_dropdown_with_items(
label: avatar.presence || '',
label_options: {
title: User.current.name,
icon: (avatar.present? ? 'overridden-by-avatar' : 'icon-user')
},
items: items,
options: { drop_down_id: 'user-menu', menu_item_class: 'last-child' }
)
end
def render_login_partial
partial =
if OpenProject::Configuration.disable_password_login?
'account/omniauth_login'
else
'account/login'
end
render partial: partial
end
def render_module_top_menu_node(items = more_top_menu_items)
unless items.empty?
render_menu_dropdown_with_items(
label: '',
label_options: { icon: 'icon-menu', title: I18n.t('label_modules') },
items: items,
options: { drop_down_id: 'more-menu', drop_down_class: 'drop-down--modules ' }
)
end
end
def render_main_top_menu_nodes(items = main_top_menu_items)
items.map { |item|
render_menu_node(item)
}.join(' ')
end
# Menu items for the main top menu
def main_top_menu_items
split_top_menu_into_main_or_more_menus[:base]
end
# Menu items for the modules top menu
def more_top_menu_items
split_top_menu_into_main_or_more_menus[:modules]
end
def project_menu_items
split_top_menu_into_main_or_more_menus[:projects]
end
def help_menu_item
split_top_menu_into_main_or_more_menus[:help]
end
# Split the :top_menu into separate :main and :modules items
def split_top_menu_into_main_or_more_menus
@top_menu_split ||= begin
items = Hash.new { |h, k| h[k] = [] }
first_level_menu_items_for(:top_menu) do |item|
if item.name == :help
items[:help] = item
else
context = item.context || :modules
items[context] << item
end
end
items
end
end
end
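# Shape of the memoized split, for illustration (the menu items below are hypothetical):
#
#   split_top_menu_into_main_or_more_menus
#   # => { base: [work_packages_item, news_item],
#   #      modules: [time_tracking_item],
#   #      projects: [projects_item],
#   #      help: help_item }
#
# Items without an explicit #context are grouped under :modules, and the :help item is
# stored as a single node rather than an array, matching how help_menu_item reads it above.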
| 30.13125 | 91 | 0.681186 |
6a80b08ead2c954967a673f3cbd762ca9d8317c0
| 1,327 |
# Copyright [2020] [University of Aizu]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Attack
class Create
attr_reader :vagrant, :ansible
def initialize(args)
@env_dir = args[:env_dir]
@vagrant = nil
@ansible = nil
end
def exec
@vagrant = prepare_vagrant
vagrant.create
@ansible = prepare_ansible
ansible.create
end
private
def prepare_vagrant
@vagrant = Vagrant::Core.new(
os_name: 'ubuntu',
os_version: '18.04',
vagrant_img_box: 'ubuntu/bionic64',
host: '192.168.77.77',
env_dir: @env_dir
)
end
def prepare_ansible
Ansible::Core.new(
env_dir: @env_dir,
os_name: 'ubuntu',
host: '192.168.77.77',
attack_method: 'msf'
)
end
end
end
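# Invocation sketch (assumes the Vagrant::Core and Ansible::Core classes referenced above
# are loaded and that env_dir points at a writable working directory):
#
#   Attack::Create.new(env_dir: '/tmp/attack_env').exec
#   # => sets up the Ubuntu 18.04 box at 192.168.77.77, then prepares the Ansible side
#   #    with attack_method 'msf'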
| 23.696429 | 74 | 0.649586 |
ed701e8ea6fa5a13a2ce6bdef9bc9c66fb37d1de
| 343 |
# Relative-time formatter with Japanese output: "N時間前" = "N hours ago", "N分前" = "N minutes ago",
# "N秒前" = "N seconds ago"; anything older than a day is shown as month/day, e.g. "4月2日".
Time::DATE_FORMATS[:human] = lambda { |date|
  seconds = (Time.now - date).round
  days = seconds / (60 * 60 * 24)
  return "#{date.month}月#{date.day}日" if days > 0
  hours = seconds / (60 * 60)
  return "#{hours}時間前" if hours > 0
  minutes = seconds / 60
  return "#{minutes}分前" if minutes > 0
  "#{seconds}秒前"
}
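# Usage sketch (assumes ActiveSupport is loaded; on the Rails versions this snippet targets,
# Time#to_s with a format symbol consults Time::DATE_FORMATS and calls the lambda):
#
#   3.minutes.ago.to_s(:human)   # => "3分前"
#   2.days.ago.to_s(:human)      # => e.g. "4月2日"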
| 42.875 | 86 | 0.571429 |
e81d0ba0246cb73112f026a56c045ea156f3e7c4
| 1,020 |
module AnAxe
module AuthenticationStub
    # Stubs that Savage Beast uses to query the current user; override each of these
    # methods in your application's controller.
    def update_last_seen_at
      ActiveSupport::Deprecation.warn('You must setup An Axe in your app by overriding this method :update_last_seen_at')
end
def login_required
ActiveSupport::Deprecation.warn('You must setup An Axe in your app by overriding this method :login_required')
end
def authorized?
ActiveSupport::Deprecation.warn('You must setup An Axe in your app by overriding this method :authorized?')
end
def current_user
ActiveSupport::Deprecation.warn('You must setup An Axe in your app by overriding this method :current_user')
end
def logged_in?
ActiveSupport::Deprecation.warn('You must setup An Axe in your app by overriding this method :logged_in?')
end
def admin?
ActiveSupport::Deprecation.warn('You must setup An Axe in your app by overriding this method :admin?')
end
end
end
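# Illustrative override, not part of the plugin: a host application would typically replace
# these stubs in its own controller, e.g. with a session-based current_user (all names
# below are hypothetical):
#
#   class ApplicationController < ActionController::Base
#     def current_user
#       @current_user ||= User.find_by_id(session[:user_id])
#     end
#
#     def logged_in?
#       !current_user.nil?
#     end
#
#     def admin?
#       logged_in? && current_user.admin?
#     end
#   end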
| 34 | 116 | 0.731373 |
e8be287a2898524c5cee2e1ef7b79eff30293a05
| 40 |
module Maxitest
VERSION = "4.1.0"
end
| 10 | 19 | 0.675 |
edbb9c6d9ac5e3e6610716d74e92db6ad2feaac1
| 6,215 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_06_01
module Models
#
# Virtual Network Tap resource.
#
class VirtualNetworkTap < Resource
include MsRestAzure
# @return [Array<NetworkInterfaceTapConfiguration>] Specifies the list of
# resource IDs for the network interface IP configuration that needs to
# be tapped.
attr_accessor :network_interface_tap_configurations
# @return [String] The resourceGuid property of the virtual network tap.
attr_accessor :resource_guid
# @return [String] The provisioning state of the virtual network tap.
# Possible values are: 'Updating', 'Deleting', and 'Failed'.
attr_accessor :provisioning_state
# @return [NetworkInterfaceIPConfiguration] The reference to the private
# IP Address of the collector nic that will receive the tap.
attr_accessor :destination_network_interface_ipconfiguration
# @return [FrontendIPConfiguration] The reference to the private IP
# address on the internal Load Balancer that will receive the tap.
attr_accessor :destination_load_balancer_front_end_ipconfiguration
# @return [Integer] The VXLAN destination port that will receive the
# tapped traffic.
attr_accessor :destination_port
# @return [String] Gets a unique read-only string that changes whenever
# the resource is updated.
attr_accessor :etag
#
# Mapper for VirtualNetworkTap class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'VirtualNetworkTap',
type: {
name: 'Composite',
class_name: 'VirtualNetworkTap',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: false,
serialized_name: 'location',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
network_interface_tap_configurations: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.networkInterfaceTapConfigurations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'NetworkInterfaceTapConfigurationElementType',
type: {
name: 'Composite',
class_name: 'NetworkInterfaceTapConfiguration'
}
}
}
},
resource_guid: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.resourceGuid',
type: {
name: 'String'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
destination_network_interface_ipconfiguration: {
client_side_validation: true,
required: false,
serialized_name: 'properties.destinationNetworkInterfaceIPConfiguration',
type: {
name: 'Composite',
class_name: 'NetworkInterfaceIPConfiguration'
}
},
destination_load_balancer_front_end_ipconfiguration: {
client_side_validation: true,
required: false,
serialized_name: 'properties.destinationLoadBalancerFrontEndIPConfiguration',
type: {
name: 'Composite',
class_name: 'FrontendIPConfiguration'
}
},
destination_port: {
client_side_validation: true,
required: false,
serialized_name: 'properties.destinationPort',
type: {
name: 'Number'
}
},
etag: {
client_side_validation: true,
required: false,
serialized_name: 'etag',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
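# Construction sketch for this generated model (read-only properties such as resource_guid
# and provisioning_state are populated by the service, not by callers):
#
#   tap = Azure::Network::Mgmt::V2019_06_01::Models::VirtualNetworkTap.new
#   tap.location = 'westus2'       # inherited from Resource
#   tap.destination_port = 4789    # VXLAN port that receives the tapped traffic
#   Azure::Network::Mgmt::V2019_06_01::Models::VirtualNetworkTap.mapper[:type][:class_name]
#   # => 'VirtualNetworkTap'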
| 33.777174 | 93 | 0.494771 |
bfc8bbcf40f37ee8397268a6499a2c8cdb7ff08a
| 1,509 |
class Sonar < Formula
desc "Manage code quality"
homepage "http://www.sonarqube.org/"
url "https://sonarsource.bintray.com/Distribution/sonarqube/sonarqube-5.3.zip"
sha256 "4e61ee3a01d5e447bcd64357df77e0377325c4a2bb5ceb2cdeb5a5d193443ba5"
depends_on :java => "1.7+"
bottle :unneeded
def install
# Delete native bin directories for other systems
rm_rf Dir["bin/{aix,hpux,solaris,windows}-*"]
rm_rf "bin/macosx-universal-32" unless OS.mac? && !MacOS.prefer_64_bit?
rm_rf "bin/macosx-universal-64" unless OS.mac? && MacOS.prefer_64_bit?
rm_rf "bin/linux-x86-32" unless OS.linux? && !MacOS.prefer_64_bit?
rm_rf "bin/linux-x86-64" unless OS.linux? && MacOS.prefer_64_bit?
# Delete Windows files
rm_f Dir["war/*.bat"]
libexec.install Dir["*"]
bin.install_symlink Dir[libexec/"bin/*/sonar.sh"].first => "sonar"
end
plist_options :manual => "sonar console"
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/sonar</string>
<string>start</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
assert_match /SonarQube/, shell_output("#{bin}/sonar status", 1)
end
end
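# Usage note (standard Homebrew conventions, not specific to this formula): after
# `brew install sonar`, run the server in the foreground with `sonar console` as hinted by
# plist_options above, or start it in the background with `brew services start sonar`,
# which loads the plist defined in this formula.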
| 29.019231 | 106 | 0.648111 |
7a0f5d7d0cd126252a8d63efbd4b7819acd21b27
| 260 |
class Api::V1::FishSerializer < Api::V1::BaseSerializer
attributes :id, :species, :weight, :length, :user_id, :date, :longitude, :latitude, :time, :image_file_name, :image_content_type, :image_file_size, :image_updated_at, :created_at, :updated_at
end
| 65 | 199 | 0.742308 |
bb4ab7ae88f71c2ad1c8efc53f4a3c90fc0f83ce
| 140 |
class AddUrlHashToPlaceholderImages < ActiveRecord::Migration
def change
add_column :placeholder_images, :url_hash, :string
end
end
| 23.333333 | 61 | 0.8 |
79abdd80355a4d393c7d1c18300fc3f20f6d6ebe
| 661 |
cask "transcribe" do
version "8.80.0"
sha256 "2c32b81ebe15115550c74f6d758facac05acbd20d1da16237e1fe3fc60b04215"
url "https://www.seventhstring.com/xscribe/transcribe.dmg"
appcast "https://www.seventhstring.com/xscribe/history.html",
must_contain: version.major_minor
name "Transcribe!"
desc "Transcribes recorded music"
homepage "https://www.seventhstring.com/xscribe/overview.html"
depends_on macos: ">= :yosemite"
app "Transcribe!.app"
uninstall quit: "com.seventhstring.transcribe"
zap trash: [
"~/Library/Preferences/Transcribe!7 Preferences",
"~/Library/Preferences/com.seventhstring.transcribe.plist",
]
end
| 28.73913 | 75 | 0.747352 |
6215edfecfbb3d69495af51e72376928e19c4761
| 155 |
class RenameUserIdToAccountIdInEligibilities < ActiveRecord::Migration[4.2]
def change
rename_column :eligibilities, :user_id, :account_id
end
end
| 25.833333 | 75 | 0.8 |
184875ac02c1b1f4741330a37c730afc8ddf996f
| 1,386 |
# Import modules
require './Common.rb'
class UpdateBarcode
def self.Run()
# Getting instance of the API
api = GroupDocsSignatureCloud::SignApi.from_config($config)
$info = GroupDocsSignatureCloud::FileInfo.new()
$info.file_path = "signaturedocs\\signedBarcodeOne_page.docx"
# Search
$opts = GroupDocsSignatureCloud::SearchBarcodeOptions.new()
$opts.signature_type = "Barcode"
$opts.all_pages = true
$settings = GroupDocsSignatureCloud::SearchSettings.new()
$settings.options = [$opts]
$settings.file_info = $info
$request = GroupDocsSignatureCloud::SearchSignaturesRequest.new($settings)
$response = api.search_signatures($request)
# Update
$updateOpts = GroupDocsSignatureCloud::UpdateOptions.new()
$updateOpts.signature_type = "Barcode"
$updateOpts.signature_id = $response.signatures[0].signature_id
$updateOpts.left = 200
$updateOpts.top = 200
$updateOpts.width = 300
$updateOpts.height = 100
$updateOpts.is_signature = true
$updateSettings = GroupDocsSignatureCloud::UpdateSettings.new()
$updateSettings.options = [$updateOpts]
$updateSettings.file_info = $info
$request = GroupDocsSignatureCloud::UpdateSignaturesRequest.new($updateSettings)
$response = api.update_signatures($request)
puts("Succeeded count: " + $response.succeeded.length.to_s)
end
end
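# Invocation sketch (assumes Common.rb sets up $config with valid GroupDocs Signature Cloud
# credentials and that the signed document exists at the storage path used above):
#
#   UpdateBarcode.Run()
#   # prints the number of barcode signatures that were successfully repositioned/resized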
| 31.5 | 84 | 0.72583 |
7a2c92aaa3fc1bb34c3de911ff8ae77be0fedb92
| 380 |
Sequel.migration do
up do
alter_table(:parts) do
drop_constraint :part_number, :type => :unique
add_unique_constraint [:project_id, :part_number], :name => :project_id_and_part_number
end
end
down do
alter_table(:parts) do
drop_constraint :project_id_and_part_number, :type => :unique
add_unique_constraint :part_number
end
end
end
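# Applying it, for illustration: with the Sequel CLI this migration runs as part of
#
#   sequel -m path/to/migrations postgres://localhost/mydb
#
# where the database URL is a placeholder. `-m` applies pending migrations in order;
# rolling back to an earlier version (the `-M` option) runs the down block, restoring the
# plain unique constraint on part_number.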
| 25.333333 | 93 | 0.707895 |
bfd91fcf684181cd5689c3dea5cbb64f054a3a8e
| 220 |
require_relative '../../spec_helper'
ruby_version_is ""..."3.1" do
require_relative 'shared/determinant'
require 'matrix'
describe "Matrix#determinant" do
it_behaves_like :determinant, :determinant
end
end
| 20 | 46 | 0.736364 |
39720659392f0173e501e899348d1f740b8a8af7
| 1,512 |
# frozen_string_literal: true
module Defi
  # This class wraps the object that was returned, or the exception that was raised,
  # during initialization.
#
class Value
# @return [#object_id] The returned or the raised object.
attr_reader :object
# Initialize the value class.
#
# @yieldreturn [#object_id] The challenged code.
# rubocop:disable Lint/RescueException
def initialize
@object = yield
@raised = false
rescue ::Exception => e
@object = e
@raised = true
end
# rubocop:enable Lint/RescueException
# Raise or return the value.
#
# @return [#object_id] Raised exception or returned object.
def call
raise object if raised?
object
end
    # @return [Boolean] Whether the object was raised (true) rather than returned (false).
def raised?
@raised
end
# Properties of the value.
#
# @return [Hash] The properties of the value.
def to_h
{
raised: raised?,
object: object
}
end
# String of the value.
#
# @return [String] The string representation of the value.
def to_s
"#{raise_or_return} #{object}"
end
# A string containing a human-readable representation of the value.
#
# @return [String] The human-readable representation of the value.
def inspect
"Value(object: #{object}, raised: #{raised?})"
end
private
# @return [String] A "raise" or "return" string.
def raise_or_return
raised? ? "raise" : "return"
end
end
end
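# Usage sketch:
#
#   Defi::Value.new { 6 * 7 }.call       # => 42
#   Defi::Value.new { 6 * 7 }.raised?    # => false
#
#   boom = Defi::Value.new { raise ArgumentError, 'nope' }
#   boom.raised?                         # => true
#   boom.to_s                            # => "raise nope"
#   boom.call                            # re-raises the captured ArgumentError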
| 21.913043 | 80 | 0.619048 |
edb6d6aed10b1dddf9a157566bbbb18ed4976f81
| 1,200 |
unless File.exists?(node['bash_it']['dir'])
bash_it_version = version_string_for('bash_it')
git "#{Chef::Config[:file_cache_path]}/bash_it" do
repository node['bash_it']['repository']
revision bash_it_version
destination "#{Chef::Config[:file_cache_path]}/bash_it"
action :sync
end
directory node['bash_it']['dir'] do
owner node['current_user']
mode "0777"
end
execute "Copying bash-it's .git to #{node['bash_it']['dir']}" do
command "rsync -axSH #{Chef::Config[:file_cache_path]}/bash_it/ #{node['bash_it']['dir']}"
user node['current_user']
end
template node['bash_it']['bashrc_path'] do
source "bash_it/bashrc.erb"
cookbook 'sprout-osx-base'
owner node['current_user']
mode "0777"
end
node['bash_it']['enabled_plugins'].each do |feature_type, features|
features.each do |feature_name|
sprout_osx_base_bash_it_enable_feature "#{feature_type}/#{feature_name}"
end
end
node['bash_it']['custom_plugins'].each do |cookbook_name, custom_plugins|
custom_plugins.each do |custom_plugin|
sprout_osx_base_bash_it_custom_plugin custom_plugin do
cookbook cookbook_name
end
end
end
end
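# Attribute shape this recipe expects, for illustration only (values are hypothetical):
#
#   node['bash_it']['repository']       # e.g. 'https://github.com/Bash-it/bash-it.git'
#   node['bash_it']['dir']              # install target directory
#   node['bash_it']['bashrc_path']      # where the templated bashrc is written
#   node['bash_it']['enabled_plugins']  # e.g. { 'plugins' => %w(git), 'aliases' => %w(general) }
#   node['bash_it']['custom_plugins']   # cookbook name => list of custom plugin files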
| 27.272727 | 94 | 0.691667 |
878b20215934d1faf5f57efcd3562b1e55fbc826
| 126 |
require 'active_support'
require 'mongo_mapper'
require File.join(File.dirname(__FILE__), 'mongo_mapper/plugins/draft/draft')
| 31.5 | 77 | 0.81746 |
bf8183a3cf030cfe81dfd56f37ce9d3fb156b4e2
| 42,899 |
# coding: UTF-8
require 'rubygems/test_case'
require 'stringio'
require 'rubygems/specification'
class TestGemSpecification < Gem::TestCase
LEGACY_YAML_SPEC = <<-EOF
--- !ruby/object:Gem::Specification
rubygems_version: "1.0"
name: keyedlist
version: !ruby/object:Gem::Version
version: 0.4.0
date: 2004-03-28 15:37:49.828000 +02:00
platform:
summary: A Hash which automatically computes keys.
require_paths:
- lib
files:
- lib/keyedlist.rb
autorequire: keyedlist
author: Florian Gross
email: [email protected]
has_rdoc: true
EOF
LEGACY_RUBY_SPEC = <<-EOF
Gem::Specification.new do |s|
s.name = %q{keyedlist}
s.version = %q{0.4.0}
s.has_rdoc = true
s.summary = %q{A Hash which automatically computes keys.}
s.files = [%q{lib/keyedlist.rb}]
s.require_paths = [%q{lib}]
s.autorequire = %q{keyedlist}
s.author = %q{Florian Gross}
s.email = %q{[email protected]}
end
EOF
def setup
super
@a1 = quick_spec 'a', '1' do |s|
s.executable = 'exec'
s.extensions << 'ext/a/extconf.rb'
s.test_file = 'test/suite.rb'
s.requirements << 'A working computer'
s.rubyforge_project = 'example'
s.license = 'MIT'
s.add_dependency 'rake', '> 0.4'
s.add_dependency 'jabber4r', '> 0.0.0'
s.add_dependency 'pqa', ['> 0.4', '<= 0.6']
s.mark_version
s.files = %w[lib/code.rb]
end
@a2 = quick_spec 'a', '2' do |s|
s.files = %w[lib/code.rb]
end
@current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
load 'rubygems/syck_hack.rb'
end
def test_self_attribute_names
expected_value = %w[
authors
autorequire
bindir
cert_chain
date
dependencies
description
email
executables
extensions
extra_rdoc_files
files
homepage
licenses
name
platform
post_install_message
rdoc_options
require_paths
required_ruby_version
required_rubygems_version
requirements
rubyforge_project
rubygems_version
signing_key
specification_version
summary
test_files
version
]
actual_value = Gem::Specification.attribute_names.map { |a| a.to_s }.sort
assert_equal expected_value, actual_value
end
def test_self__load_future
spec = Gem::Specification.new
spec.name = 'a'
spec.version = '1'
spec.specification_version = @current_version + 1
new_spec = Marshal.load Marshal.dump(spec)
assert_equal 'a', new_spec.name
assert_equal Gem::Version.new(1), new_spec.version
assert_equal @current_version, new_spec.specification_version
end
def test_self_from_yaml_syck_date_bug
# This is equivalent to (and totally valid) psych 1.0 output and
# causes parse errors on syck.
yaml = @a1.to_yaml
yaml.sub!(/^date:.*/, "date: 2011-04-26 00:00:00.000000000Z")
new_spec = with_syck do
Gem::Specification.from_yaml yaml
end
assert_kind_of Time, @a1.date
assert_kind_of Time, new_spec.date
end
def test_self_from_yaml_syck_default_key_bug
# This is equivalent to (and totally valid) psych 1.0 output and
# causes parse errors on syck.
yaml = <<-YAML
--- !ruby/object:Gem::Specification
name: posix-spawn
version: !ruby/object:Gem::Version
version: 0.3.6
prerelease:
dependencies:
- !ruby/object:Gem::Dependency
name: rake-compiler
requirement: &70243867725240 !ruby/object:Gem::Requirement
none: false
requirements:
- - =
- !ruby/object:Gem::Version
version: 0.7.6
type: :development
prerelease: false
version_requirements: *70243867725240
platform: ruby
files: []
test_files: []
bindir:
YAML
new_spec = with_syck do
Gem::Specification.from_yaml yaml
end
op = new_spec.dependencies.first.requirement.requirements.first.first
refute_kind_of YAML::Syck::DefaultKey, op
refute_match %r%DefaultKey%, new_spec.to_ruby
end
def test_self_from_yaml_cleans_up_defaultkey
yaml = <<-YAML
--- !ruby/object:Gem::Specification
name: posix-spawn
version: !ruby/object:Gem::Version
version: 0.3.6
prerelease:
dependencies:
- !ruby/object:Gem::Dependency
name: rake-compiler
requirement: &70243867725240 !ruby/object:Gem::Requirement
none: false
requirements:
- - !ruby/object:YAML::Syck::DefaultKey {}
- !ruby/object:Gem::Version
version: 0.7.6
type: :development
prerelease: false
version_requirements: *70243867725240
platform: ruby
files: []
test_files: []
bindir:
YAML
new_spec = Gem::Specification.from_yaml yaml
op = new_spec.dependencies.first.requirement.requirements.first.first
refute_kind_of YAML::Syck::DefaultKey, op
refute_match %r%DefaultKey%, new_spec.to_ruby
end
def test_self_from_yaml_cleans_up_defaultkey_from_newer_192
yaml = <<-YAML
--- !ruby/object:Gem::Specification
name: posix-spawn
version: !ruby/object:Gem::Version
version: 0.3.6
prerelease:
dependencies:
- !ruby/object:Gem::Dependency
name: rake-compiler
requirement: &70243867725240 !ruby/object:Gem::Requirement
none: false
requirements:
- - !ruby/object:Syck::DefaultKey {}
- !ruby/object:Gem::Version
version: 0.7.6
type: :development
prerelease: false
version_requirements: *70243867725240
platform: ruby
files: []
test_files: []
bindir:
YAML
new_spec = Gem::Specification.from_yaml yaml
op = new_spec.dependencies.first.requirement.requirements.first.first
refute_kind_of YAML::Syck::DefaultKey, op
refute_match %r%DefaultKey%, new_spec.to_ruby
end
def test_self_from_yaml_cleans_up_Date_objects
yaml = <<-YAML
--- !ruby/object:Gem::Specification
rubygems_version: 0.8.1
specification_version: 1
name: diff-lcs
version: !ruby/object:Gem::Version
version: 1.1.2
date: 2004-10-20
summary: Provides a list of changes that represent the difference between two sequenced collections.
require_paths:
- lib
author: Austin Ziegler
email: [email protected]
homepage: http://rubyforge.org/projects/ruwiki/
rubyforge_project: ruwiki
description: "Test"
bindir: bin
has_rdoc: true
required_ruby_version: !ruby/object:Gem::Version::Requirement
requirements:
-
- ">="
- !ruby/object:Gem::Version
version: 1.8.1
version:
platform: ruby
files:
- tests/00test.rb
rdoc_options:
- "--title"
- "Diff::LCS -- A Diff Algorithm"
- "--main"
- README
- "--line-numbers"
extra_rdoc_files:
- README
- ChangeLog
- Install
executables:
- ldiff
- htmldiff
extensions: []
requirements: []
dependencies: []
YAML
new_spec = Gem::Specification.from_yaml yaml
assert_kind_of Time, new_spec.date
end
def test_self_load
full_path = @a2.spec_file
write_file full_path do |io|
io.write @a2.to_ruby_for_cache
end
spec = Gem::Specification.load full_path
@a2.files.clear
assert_equal @a2, spec
end
def test_self_load_escape_curly
@a2.name = 'a};raise "improper escaping";%q{'
full_path = @a2.spec_file
write_file full_path do |io|
io.write @a2.to_ruby_for_cache
end
spec = Gem::Specification.load full_path
@a2.files.clear
assert_equal @a2, spec
end
def test_self_load_escape_interpolation
@a2.name = 'a#{raise %<improper escaping>}'
full_path = @a2.spec_file
write_file full_path do |io|
io.write @a2.to_ruby_for_cache
end
spec = Gem::Specification.load full_path
@a2.files.clear
assert_equal @a2, spec
end
def test_self_load_escape_quote
@a2.name = 'a";raise "improper escaping";"'
full_path = @a2.spec_file
write_file full_path do |io|
io.write @a2.to_ruby_for_cache
end
spec = Gem::Specification.load full_path
@a2.files.clear
assert_equal @a2, spec
end
if defined?(Encoding)
def test_self_load_utf8_with_ascii_encoding
int_enc = Encoding.default_internal
silence_warnings { Encoding.default_internal = 'US-ASCII' }
spec2 = @a2.dup
bin = "\u5678"
spec2.authors = [bin]
full_path = spec2.spec_file
write_file full_path do |io|
io.write spec2.to_ruby_for_cache.force_encoding('BINARY').sub("\\u{5678}", bin.force_encoding('BINARY'))
end
spec = Gem::Specification.load full_path
spec2.files.clear
assert_equal spec2, spec
ensure
silence_warnings { Encoding.default_internal = int_enc }
end
end
def test_self_load_legacy_ruby
spec = Gem::Deprecate.skip_during do
eval LEGACY_RUBY_SPEC
end
assert_equal 'keyedlist', spec.name
assert_equal '0.4.0', spec.version.to_s
assert_equal Gem::Specification::TODAY, spec.date
assert spec.required_ruby_version.satisfied_by?(Gem::Version.new('1'))
assert_equal false, spec.has_unit_tests?
end
def test_self_normalize_yaml_input_with_183_yaml
input = "!ruby/object:Gem::Specification "
assert_equal "--- #{input}", Gem::Specification.normalize_yaml_input(input)
end
def test_self_normalize_yaml_input_with_non_183_yaml
input = "--- !ruby/object:Gem::Specification "
assert_equal input, Gem::Specification.normalize_yaml_input(input)
end
def test_self_normalize_yaml_input_with_183_io
input = "!ruby/object:Gem::Specification "
assert_equal "--- #{input}",
Gem::Specification.normalize_yaml_input(StringIO.new(input))
end
def test_self_normalize_yaml_input_with_non_183_io
input = "--- !ruby/object:Gem::Specification "
assert_equal input,
Gem::Specification.normalize_yaml_input(StringIO.new(input))
end
def test_self_normalize_yaml_input_with_192_yaml
input = "--- !ruby/object:Gem::Specification \nblah: !!null \n"
expected = "--- !ruby/object:Gem::Specification \nblah: \n"
assert_equal expected, Gem::Specification.normalize_yaml_input(input)
end
DATA_PATH = File.expand_path "../data", __FILE__
def test_handles_private_null_type
path = File.join DATA_PATH, "null-type.gemspec.rz"
data = Marshal.load Gem.inflate(Gem.read_binary(path))
assert_equal nil, data.rubyforge_project
end
def test_emits_zulu_timestamps_properly
skip "bug only on 1.9.2" unless RUBY_VERSION =~ /1\.9\.2/
t = Time.utc(2012, 3, 12)
@a2.date = t
yaml = with_psych { @a2.to_yaml }
assert_match %r!date: 2012-03-12 00:00:00\.000000000 Z!, yaml
end
def test_initialize
spec = Gem::Specification.new do |s|
s.name = "blah"
s.version = "1.3.5"
end
assert_equal "blah", spec.name
assert_equal "1.3.5", spec.version.to_s
assert_equal Gem::Platform::RUBY, spec.platform
assert_equal nil, spec.summary
assert_equal [], spec.files
assert_equal [], spec.test_files
assert_equal [], spec.rdoc_options
assert_equal [], spec.extra_rdoc_files
assert_equal [], spec.executables
assert_equal [], spec.extensions
assert_equal [], spec.requirements
assert_equal [], spec.dependencies
assert_equal 'bin', spec.bindir
assert_equal '>= 0', spec.required_ruby_version.to_s
assert_equal '>= 0', spec.required_rubygems_version.to_s
end
def test_initialize_future
version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + 1
spec = Gem::Specification.new do |s|
s.name = "blah"
s.version = "1.3.5"
s.specification_version = version
s.new_unknown_attribute = "a value"
end
assert_equal "blah", spec.name
assert_equal "1.3.5", spec.version.to_s
end
def test_initialize_copy
spec = Gem::Specification.new do |s|
s.name = "blah"
s.version = "1.3.5"
s.summary = 'summary'
s.description = 'description'
s.authors = 'author a', 'author b'
s.licenses = 'BSD'
s.files = 'lib/file.rb'
s.test_files = 'test/file.rb'
s.rdoc_options = '--foo'
s.extra_rdoc_files = 'README.txt'
s.executables = 'exec'
s.extensions = 'ext/extconf.rb'
s.requirements = 'requirement'
s.add_dependency 'some_gem'
end
new_spec = spec.dup
assert_equal "blah", spec.name
assert_same spec.name, new_spec.name
assert_equal "1.3.5", spec.version.to_s
assert_same spec.version, new_spec.version
assert_equal Gem::Platform::RUBY, spec.platform
assert_same spec.platform, new_spec.platform
assert_equal 'summary', spec.summary
assert_same spec.summary, new_spec.summary
assert_equal %w[lib/file.rb test/file.rb bin/exec README.txt
ext/extconf.rb],
spec.files
refute_same spec.files, new_spec.files, 'files'
assert_equal %w[test/file.rb], spec.test_files
refute_same spec.test_files, new_spec.test_files, 'test_files'
assert_equal %w[--foo], spec.rdoc_options
refute_same spec.rdoc_options, new_spec.rdoc_options, 'rdoc_options'
assert_equal %w[README.txt], spec.extra_rdoc_files
refute_same spec.extra_rdoc_files, new_spec.extra_rdoc_files,
'extra_rdoc_files'
assert_equal %w[exec], spec.executables
refute_same spec.executables, new_spec.executables, 'executables'
assert_equal %w[ext/extconf.rb], spec.extensions
refute_same spec.extensions, new_spec.extensions, 'extensions'
assert_equal %w[requirement], spec.requirements
refute_same spec.requirements, new_spec.requirements, 'requirements'
assert_equal [Gem::Dependency.new('some_gem', Gem::Requirement.default)],
spec.dependencies
refute_same spec.dependencies, new_spec.dependencies, 'dependencies'
assert_equal 'bin', spec.bindir
assert_same spec.bindir, new_spec.bindir
assert_equal '>= 0', spec.required_ruby_version.to_s
assert_same spec.required_ruby_version, new_spec.required_ruby_version
assert_equal '>= 0', spec.required_rubygems_version.to_s
assert_same spec.required_rubygems_version,
new_spec.required_rubygems_version
end
def test_initialize_copy_broken
spec = Gem::Specification.new do |s|
s.name = 'a'
s.version = '1'
end
spec.instance_variable_set :@licenses, :blah
spec.loaded_from = '/path/to/file'
e = assert_raises Gem::FormatException do
spec.dup
end
assert_equal 'a-1 has an invalid value for @licenses', e.message
assert_equal '/path/to/file', e.file_path
end
def test__dump
@a2.platform = Gem::Platform.local
@a2.instance_variable_set :@original_platform, 'old_platform'
data = Marshal.dump @a2
same_spec = Marshal.load data
assert_equal 'old_platform', same_spec.original_platform
end
def test_activate
@a2.activate
assert @a2.activated?
Gem::Deprecate.skip_during do
assert @a2.loaded?
end
end
def test_add_dependency_with_explicit_type
gem = quick_spec "awesome", "1.0" do |awesome|
awesome.add_development_dependency "monkey"
end
monkey = gem.dependencies.detect { |d| d.name == "monkey" }
assert_equal(:development, monkey.type)
end
def test_author
assert_equal 'A User', @a1.author
end
def test_authors
assert_equal ['A User'], @a1.authors
end
def test_bindir_equals
@a1.bindir = 'apps'
assert_equal 'apps', @a1.bindir
end
def test_bindir_equals_nil
@a2.bindir = nil
@a2.executable = 'app'
assert_equal nil, @a2.bindir
assert_equal %w[lib/code.rb app], @a2.files
end
def test_date
assert_equal Gem::Specification::TODAY, @a1.date
end
def test_date_equals_date
@a1.date = Date.new(2003, 9, 17)
assert_equal Time.utc(2003, 9, 17, 0,0,0), @a1.date
end
def test_date_equals_string
@a1.date = '2003-09-17'
assert_equal Time.utc(2003, 9, 17, 0,0,0), @a1.date
end
def test_date_equals_string_bad
assert_raises Gem::InvalidSpecificationException do
@a1.date = '9/11/2003'
end
end
def test_date_equals_time
@a1.date = Time.local(2003, 9, 17, 0,0,0)
assert_equal Time.utc(2003, 9, 17, 0,0,0), @a1.date
end
def test_date_equals_time_local
@a1.date = Time.local(2003, 9, 17, 19,50,0) # may not pass in utc >= +4
assert_equal Time.utc(2003, 9, 17, 0,0,0), @a1.date
end
def test_date_equals_time_utc
@a1.date = Time.utc(2003, 9, 17, 19,50,0)
assert_equal Time.utc(2003, 9, 17, 0,0,0), @a1.date
end
def test_dependencies
util_setup_deps
assert_equal [@bonobo, @monkey], @gem.dependencies
end
def test_runtime_dependencies
util_setup_deps
assert_equal [@bonobo], @gem.runtime_dependencies
end
def test_development_dependencies
util_setup_deps
assert_equal [@monkey], @gem.development_dependencies
end
def test_description
assert_equal 'This is a test description', @a1.description
end
def test_eql_eh
g1 = new_spec 'gem', 1
g2 = new_spec 'gem', 1
assert_equal g1, g2
assert_equal g1.hash, g2.hash
assert_equal true, g1.eql?(g2)
end
def test_eql_eh_extensions
spec = @a1.dup
spec.extensions = 'xx'
refute_operator @a1, :eql?, spec
refute_operator spec, :eql?, @a1
end
def test_executables
@a1.executable = 'app'
assert_equal %w[app], @a1.executables
end
def test_executable_equals
@a2.executable = 'app'
assert_equal 'app', @a2.executable
assert_equal %w[lib/code.rb bin/app], @a2.files
end
def test_extensions
assert_equal ['ext/a/extconf.rb'], @a1.extensions
end
def test_files
@a1.files = %w(files bin/common)
@a1.test_files = %w(test_files bin/common)
@a1.executables = %w(executables common)
@a1.extra_rdoc_files = %w(extra_rdoc_files bin/common)
@a1.extensions = %w(extensions bin/common)
expected = %w[
bin/common
bin/executables
extensions
extra_rdoc_files
files
test_files
]
assert_equal expected, @a1.files.sort
end
def test_files_append
@a1.files = %w(files bin/common)
@a1.test_files = %w(test_files bin/common)
@a1.executables = %w(executables common)
@a1.extra_rdoc_files = %w(extra_rdoc_files bin/common)
@a1.extensions = %w(extensions bin/common)
expected = %w[
bin/common
bin/executables
extensions
extra_rdoc_files
files
test_files
]
assert_equal expected, @a1.files.sort
@a1.files << "generated_file.c"
expected << "generated_file.c"
expected.sort!
assert_equal expected, @a1.files.sort
end
def test_files_duplicate
@a2.files = %w[a b c d b]
@a2.extra_rdoc_files = %w[x y z x]
@a2.normalize
assert_equal %w[a b c d x y z], @a2.files
assert_equal %w[x y z], @a2.extra_rdoc_files
end
def test_files_extra_rdoc_files
@a2.files = %w[a b c d]
@a2.extra_rdoc_files = %w[x y z]
@a2.normalize
assert_equal %w[a b c d x y z], @a2.files
end
def test_files_non_array
@a1.files = "F"
@a1.test_files = "TF"
@a1.executables = "X"
@a1.extra_rdoc_files = "ERF"
@a1.extensions = "E"
assert_equal %w[E ERF F TF bin/X], @a1.files.sort
end
def test_files_non_array_pathological
@a1.instance_variable_set :@files, "F"
@a1.instance_variable_set :@test_files, "TF"
@a1.instance_variable_set :@extra_rdoc_files, "ERF"
@a1.instance_variable_set :@extensions, "E"
@a1.instance_variable_set :@executables, "X"
assert_equal %w[E ERF F TF bin/X], @a1.files.sort
assert_kind_of Integer, @a1.hash
end
def test_for_cache
@a2.add_runtime_dependency 'b', '1'
@a2.dependencies.first.instance_variable_set :@type, nil
@a2.required_rubygems_version = Gem::Requirement.new '> 0'
@a2.test_files = %w[test/test_b.rb]
refute_empty @a2.files
refute_empty @a2.test_files
spec = @a2.for_cache
assert_empty spec.files
assert_empty spec.test_files
refute_empty @a2.files
refute_empty @a2.test_files
end
def test_full_gem_path
assert_equal File.join(@gemhome, 'gems', @a1.full_name), @a1.full_gem_path
@a1.original_platform = 'mswin32'
assert_equal File.join(@gemhome, 'gems', @a1.original_name),
@a1.full_gem_path
end
def test_full_gem_path_double_slash
gemhome = @gemhome.to_s.sub(/\w\//, '\&/')
@a1.loaded_from = File.join gemhome, "specifications", @a1.spec_name
expected = File.join @gemhome, "gems", @a1.full_name
assert_equal expected, @a1.full_gem_path
end
def test_full_name
assert_equal 'a-1', @a1.full_name
@a1.platform = Gem::Platform.new ['universal', 'darwin', nil]
assert_equal 'a-1-universal-darwin', @a1.full_name
@a1.instance_variable_set :@new_platform, 'mswin32'
assert_equal 'a-1-mswin32', @a1.full_name, 'legacy'
return if win_platform?
@a1.platform = 'current'
assert_equal 'a-1-x86-darwin-8', @a1.full_name
end
def test_full_name_windows
test_cases = {
'i386-mswin32' => 'a-1-x86-mswin32-60',
'i386-mswin32_80' => 'a-1-x86-mswin32-80',
'i386-mingw32' => 'a-1-x86-mingw32'
}
test_cases.each do |arch, expected|
util_set_arch arch
@a1.platform = 'current'
assert_equal expected, @a1.full_name
end
end
def test_hash
assert_equal @a1.hash, @a1.hash
assert_equal @a1.hash, @a1.dup.hash
refute_equal @a1.hash, @a2.hash
end
def test_installation_path
Gem::Deprecate.skip_during do
assert_equal @gemhome, @a1.installation_path
@a1.instance_variable_set :@loaded_from, nil
@a1.instance_variable_set :@loaded, false
assert_nil @a1.installation_path
end
end
def test_lib_files
@a1.files = %w[lib/foo.rb Rakefile]
assert_equal %w[lib/foo.rb], @a1.lib_files
end
def test_license
assert_equal 'MIT', @a1.license
end
def test_licenses
assert_equal ['MIT'], @a1.licenses
end
def test_name
assert_equal 'a', @a1.name
end
def test_original_name
assert_equal 'a-1', @a1.full_name
@a1.platform = 'i386-linux'
@a1.instance_variable_set :@original_platform, 'i386-linux'
assert_equal 'a-1-i386-linux', @a1.original_name
end
def test_platform
assert_equal Gem::Platform::RUBY, @a1.platform
end
def test_platform_equals
@a1.platform = nil
assert_equal Gem::Platform::RUBY, @a1.platform
@a1.platform = Gem::Platform::RUBY
assert_equal Gem::Platform::RUBY, @a1.platform
test_cases = {
'i386-mswin32' => ['x86', 'mswin32', '60'],
'i386-mswin32_80' => ['x86', 'mswin32', '80'],
'i386-mingw32' => ['x86', 'mingw32', nil ],
'x86-darwin8' => ['x86', 'darwin', '8' ],
}
test_cases.each do |arch, expected|
util_set_arch arch
@a1.platform = Gem::Platform::CURRENT
assert_equal Gem::Platform.new(expected), @a1.platform
end
end
def test_platform_equals_current
@a1.platform = Gem::Platform::CURRENT
assert_equal Gem::Platform.local, @a1.platform
assert_equal Gem::Platform.local.to_s, @a1.original_platform
end
def test_platform_equals_legacy
@a1.platform = 'mswin32'
assert_equal Gem::Platform.new('x86-mswin32'), @a1.platform
@a1.platform = 'i586-linux'
assert_equal Gem::Platform.new('x86-linux'), @a1.platform
@a1.platform = 'powerpc-darwin'
assert_equal Gem::Platform.new('ppc-darwin'), @a1.platform
end
def test_prerelease_spec_adds_required_rubygems_version
@prerelease = quick_spec('tardis', '2.2.0.a')
refute @prerelease.required_rubygems_version.satisfied_by?(Gem::Version.new('1.3.1'))
assert @prerelease.required_rubygems_version.satisfied_by?(Gem::Version.new('1.4.0'))
end
def test_require_paths
@a1.require_path = 'lib'
assert_equal %w[lib], @a1.require_paths
end
def test_requirements
assert_equal ['A working computer'], @a1.requirements
end
def test_runtime_dependencies_legacy
# legacy gems don't have a type
@a1.runtime_dependencies.each do |dep|
dep.instance_variable_set :@type, nil
end
expected = %w[rake jabber4r pqa]
assert_equal expected, @a1.runtime_dependencies.map { |d| d.name }
end
def test_spaceship_name
s1 = new_spec 'a', '1'
s2 = new_spec 'b', '1'
assert_equal(-1, (s1 <=> s2))
assert_equal( 0, (s1 <=> s1))
assert_equal( 1, (s2 <=> s1))
end
def test_spaceship_platform
s1 = new_spec 'a', '1'
s2 = new_spec 'a', '1' do |s|
s.platform = Gem::Platform.new 'x86-my_platform1'
end
assert_equal( -1, (s1 <=> s2))
assert_equal( 0, (s1 <=> s1))
assert_equal( 1, (s2 <=> s1))
end
def test_spaceship_version
s1 = new_spec 'a', '1'
s2 = new_spec 'a', '2'
assert_equal( -1, (s1 <=> s2))
assert_equal( 0, (s1 <=> s1))
assert_equal( 1, (s2 <=> s1))
end
def test_spec_name
assert_equal 'a-1.gemspec', @a1.spec_name
end
def test_summary
assert_equal 'this is a summary', @a1.summary
end
def test_test_files
@a1.test_file = 'test/suite.rb'
assert_equal ['test/suite.rb'], @a1.test_files
end
def test_to_ruby
@a2.add_runtime_dependency 'b', '1'
@a2.dependencies.first.instance_variable_set :@type, nil
@a2.required_rubygems_version = Gem::Requirement.new '> 0'
ruby_code = @a2.to_ruby
expected = <<-SPEC
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "a"
s.version = "2"
s.required_rubygems_version = Gem::Requirement.new(\"> 0\") if s.respond_to? :required_rubygems_version=
s.authors = ["A User"]
s.date = "#{Gem::Specification::TODAY.strftime "%Y-%m-%d"}"
s.description = "This is a test description"
s.email = "[email protected]"
s.files = ["lib/code.rb"]
s.homepage = "http://example.com"
s.require_paths = ["lib"]
s.rubygems_version = "#{Gem::VERSION}"
s.summary = "this is a summary"
if s.respond_to? :specification_version then
s.specification_version = #{Gem::Specification::CURRENT_SPECIFICATION_VERSION}
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<b>, [\"= 1\"])
else
s.add_dependency(%q<b>, [\"= 1\"])
end
else
s.add_dependency(%q<b>, [\"= 1\"])
end
end
SPEC
assert_equal expected, ruby_code
same_spec = eval ruby_code
assert_equal @a2, same_spec
end
def test_to_ruby_for_cache
@a2.add_runtime_dependency 'b', '1'
@a2.dependencies.first.instance_variable_set :@type, nil
@a2.required_rubygems_version = Gem::Requirement.new '> 0'
# cached specs do not have spec.files populated:
ruby_code = @a2.to_ruby_for_cache
expected = <<-SPEC
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "a"
s.version = "2"
s.required_rubygems_version = Gem::Requirement.new(\"> 0\") if s.respond_to? :required_rubygems_version=
s.authors = ["A User"]
s.date = "#{Gem::Specification::TODAY.strftime "%Y-%m-%d"}"
s.description = "This is a test description"
s.email = "[email protected]"
s.homepage = "http://example.com"
s.require_paths = ["lib"]
s.rubygems_version = "#{Gem::VERSION}"
s.summary = "this is a summary"
if s.respond_to? :specification_version then
s.specification_version = #{Gem::Specification::CURRENT_SPECIFICATION_VERSION}
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<b>, [\"= 1\"])
else
s.add_dependency(%q<b>, [\"= 1\"])
end
else
s.add_dependency(%q<b>, [\"= 1\"])
end
end
SPEC
assert_equal expected, ruby_code
same_spec = eval ruby_code
# cached specs do not have spec.files populated:
@a2.files = []
assert_equal @a2, same_spec
end
def test_to_ruby_fancy
@a1.platform = Gem::Platform.local
ruby_code = @a1.to_ruby
local = Gem::Platform.local
expected_platform = "[#{local.cpu.inspect}, #{local.os.inspect}, #{local.version.inspect}]"
expected = <<-SPEC
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "a"
s.version = "1"
s.platform = Gem::Platform.new(#{expected_platform})
s.required_rubygems_version = Gem::Requirement.new(\">= 0\") if s.respond_to? :required_rubygems_version=
s.authors = ["A User"]
s.date = "#{Gem::Specification::TODAY.strftime "%Y-%m-%d"}"
s.description = "This is a test description"
s.email = "[email protected]"
s.executables = ["exec"]
s.extensions = ["ext/a/extconf.rb"]
s.files = ["lib/code.rb", "test/suite.rb", "bin/exec", "ext/a/extconf.rb"]
s.homepage = "http://example.com"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.requirements = ["A working computer"]
s.rubyforge_project = "example"
s.rubygems_version = "#{Gem::VERSION}"
s.summary = "this is a summary"
s.test_files = ["test/suite.rb"]
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rake>, [\"> 0.4\"])
s.add_runtime_dependency(%q<jabber4r>, [\"> 0.0.0\"])
s.add_runtime_dependency(%q<pqa>, [\"<= 0.6\", \"> 0.4\"])
else
s.add_dependency(%q<rake>, [\"> 0.4\"])
s.add_dependency(%q<jabber4r>, [\"> 0.0.0\"])
s.add_dependency(%q<pqa>, [\"<= 0.6\", \"> 0.4\"])
end
else
s.add_dependency(%q<rake>, [\"> 0.4\"])
s.add_dependency(%q<jabber4r>, [\"> 0.0.0\"])
s.add_dependency(%q<pqa>, [\"<= 0.6\", \"> 0.4\"])
end
end
SPEC
assert_equal expected, ruby_code
same_spec = eval ruby_code
assert_equal @a1, same_spec
end
def test_to_ruby_legacy
gemspec1 = Gem::Deprecate.skip_during do
eval LEGACY_RUBY_SPEC
end
ruby_code = gemspec1.to_ruby
gemspec2 = eval ruby_code
assert_equal gemspec1, gemspec2
end
def test_to_ruby_platform
@a2.platform = Gem::Platform.local
@a2.instance_variable_set :@original_platform, 'old_platform'
ruby_code = @a2.to_ruby
same_spec = eval ruby_code
assert_equal 'old_platform', same_spec.original_platform
end
def test_to_yaml
yaml_str = @a1.to_yaml
refute_match '!!null', yaml_str
same_spec = Gem::Specification.from_yaml(yaml_str)
assert_equal @a1, same_spec
end
def test_to_yaml_encoding
@a1.description = 'π'
yaml_str = @a1.to_yaml
same_spec = Gem::Specification.from_yaml(yaml_str)
assert_equal @a1.description, same_spec.description
end
def test_to_yaml_fancy
@a1.platform = Gem::Platform.local
yaml_str = @a1.to_yaml
same_spec = Gem::Specification.from_yaml(yaml_str)
assert_equal Gem::Platform.local, same_spec.platform
assert_equal @a1, same_spec
end
def test_to_yaml_platform_empty_string
@a1.instance_variable_set :@original_platform, ''
assert_match %r|^platform: ruby$|, @a1.to_yaml
end
def test_to_yaml_platform_legacy
@a1.platform = 'powerpc-darwin7.9.0'
@a1.instance_variable_set :@original_platform, 'powerpc-darwin7.9.0'
yaml_str = @a1.to_yaml
same_spec = YAML.load yaml_str
assert_equal Gem::Platform.new('powerpc-darwin7'), same_spec.platform
assert_equal 'powerpc-darwin7.9.0', same_spec.original_platform
end
def test_to_yaml_platform_nil
@a1.instance_variable_set :@original_platform, nil
assert_match %r|^platform: ruby$|, @a1.to_yaml
end
def test_to_yaml_emits_syck_compat_yaml
if YAML.const_defined?(:ENGINE) && !YAML::ENGINE.syck?
@a1.add_dependency "gx", "1.0.0"
y = @a1.to_yaml
refute_match %r!^\s*- - =!, y
else
skip "Only validates psych yaml"
end
end
def test_validate
util_setup_validate
Dir.chdir @tempdir do
assert @a1.validate
end
end
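  # The helpers below assemble the literal words "WARNING", "TODO" and "FIXME" at runtime
  # so that those words never appear verbatim in this test file.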
def x s; s.gsub(/xxx/, ''); end
def w; x "WARxxxNING"; end
def t; x "TOxxxDO"; end
def f; x "FxxxIXME"; end
def test_validate_authors
util_setup_validate
Dir.chdir @tempdir do
@a1.authors = [""]
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: no author specified\n", @ui.error, 'error'
@a1.authors = [Object.new]
assert_equal [], @a1.authors
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal "authors may not be empty", e.message
@a1.authors = ["#{f} (who is writing this software)"]
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not an author}, e.message
@a1.authors = ["#{t} (who is writing this software)"]
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not an author}, e.message
end
end
def test_validate_autorequire
util_setup_validate
Dir.chdir @tempdir do
@a1.autorequire = 'code'
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: deprecated autorequire specified\n",
@ui.error, 'error'
end
end
def test_validate_description
util_setup_validate
Dir.chdir @tempdir do
@a1.description = ''
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: no description specified\n", @ui.error, "error"
@ui = Gem::MockGemUi.new
@a1.summary = "this is my summary"
@a1.description = @a1.summary
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: description and summary are identical\n",
@ui.error, "error"
@a1.description = "#{f} (describe your package)"
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not a description}, e.message
@a1.description = "#{t} (describe your package)"
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not a description}, e.message
end
end
def test_validate_email
util_setup_validate
Dir.chdir @tempdir do
@a1.email = ""
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: no email specified\n", @ui.error, "error"
@a1.email = "FIxxxXME (your e-mail)".sub(/xxx/, "")
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not an email}, e.message
@a1.email = "#{t} (your e-mail)"
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not an email}, e.message
end
end
def test_validate_empty
e = assert_raises Gem::InvalidSpecificationException do
Gem::Specification.new.validate
end
assert_equal 'missing value for attribute name', e.message
end
def test_validate_executables
util_setup_validate
FileUtils.mkdir_p File.join(@tempdir, 'bin')
File.open File.join(@tempdir, 'bin', 'exec'), 'w' do end
FileUtils.mkdir_p File.join(@tempdir, 'exec')
use_ui @ui do
Dir.chdir @tempdir do
assert @a1.validate
end
end
assert_equal %w[exec], @a1.executables
assert_equal '', @ui.output, 'output'
assert_equal "#{w}: bin/exec is missing #! line\n", @ui.error, 'error'
end
def test_validate_empty_require_paths
if win_platform? then
skip 'test_validate_empty_require_paths skipped on MS Windows (symlink)'
else
util_setup_validate
@a1.require_paths = []
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal 'specification must have at least one require_path',
e.message
end
end
def test_validate_files
skip 'test_validate_files skipped on MS Windows (symlink)' if win_platform?
util_setup_validate
@a1.files += ['lib', 'lib2']
Dir.chdir @tempdir do
FileUtils.ln_s '/root/path', 'lib2' unless vc_windows?
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal '["lib2"] are not files', e.message
end
assert_equal %w[lib/code.rb test/suite.rb bin/exec ext/a/extconf.rb lib2],
@a1.files
end
def test_validate_homepage
util_setup_validate
Dir.chdir @tempdir do
@a1.homepage = nil
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: no homepage specified\n", @ui.error, 'error'
@ui = Gem::MockGemUi.new
@a1.homepage = ''
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: no homepage specified\n", @ui.error, 'error'
@a1.homepage = 'over at my cool site'
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal '"over at my cool site" is not a URI', e.message
end
end
def test_validate_name
util_setup_validate
e = assert_raises Gem::InvalidSpecificationException do
@a1.name = :json
@a1.validate
end
assert_equal 'invalid value for attribute name: ":json"', e.message
end
def test_validate_non_nil
util_setup_validate
Dir.chdir @tempdir do
assert @a1.validate
Gem::Specification.non_nil_attributes.each do |name|
next if name == :files # set by #normalize
spec = @a1.dup
spec.instance_variable_set "@#{name}", nil
e = assert_raises Gem::InvalidSpecificationException do
spec.validate
end
assert_match %r%^#{name}%, e.message
end
end
end
def test_validate_platform_legacy
util_setup_validate
Dir.chdir @tempdir do
@a1.platform = 'mswin32'
assert @a1.validate
@a1.platform = 'i586-linux'
assert @a1.validate
@a1.platform = 'powerpc-darwin'
assert @a1.validate
end
end
def test_validate_rubygems_version
util_setup_validate
@a1.rubygems_version = "3"
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal "expected RubyGems version #{Gem::VERSION}, was 3",
e.message
end
def test_validate_specification_version
util_setup_validate
Dir.chdir @tempdir do
@a1.specification_version = '1.0'
e = assert_raises Gem::InvalidSpecificationException do
use_ui @ui do
@a1.validate
end
end
err = 'specification_version must be a Fixnum (did you mean version?)'
assert_equal err, e.message
end
end
def test_validate_summary
util_setup_validate
Dir.chdir @tempdir do
@a1.summary = ''
use_ui @ui do
@a1.validate
end
assert_equal "#{w}: no summary specified\n", @ui.error, 'error'
@a1.summary = "#{f} (describe your package)"
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not a summary}, e.message
@a1.summary = "#{t} (describe your package)"
e = assert_raises Gem::InvalidSpecificationException do
@a1.validate
end
assert_equal %{"#{f}" or "#{t}" is not a summary}, e.message
end
end
def test_version
assert_equal Gem::Version.new('1'), @a1.version
end
def test__load_fixes_Date_objects
spec = new_spec "a", 1
spec.instance_variable_set :@date, Date.today
spec = Marshal.load Marshal.dump(spec)
assert_kind_of Time, spec.date
end
def test_load_errors_contain_filename
specfile = Tempfile.new(self.class.name.downcase)
specfile.write "raise 'boom'"
specfile.close
begin
capture_io do
Gem::Specification.load(specfile.path)
end
rescue => e
name_rexp = Regexp.new(Regexp.escape(specfile.path))
assert e.backtrace.grep(name_rexp).any?
end
ensure
specfile.delete
end
##
# KEEP p-1-x86-darwin-8
# KEEP p-1
# KEEP c-1.2
# KEEP a_evil-9
# a-1
# a-1-x86-my_platform-1
# KEEP a-2
# a-2-x86-other_platform-1
# KEEP a-2-x86-my_platform-1
# a-3.a
# KEEP a-3-x86-other_platform-1
def test_latest_specs
util_clear_gems
util_setup_fake_fetcher
quick_spec 'p', '1'
p1_curr = quick_spec 'p', '1' do |spec|
spec.platform = Gem::Platform::CURRENT
end
quick_spec @a1.name, @a1.version do |s|
s.platform = Gem::Platform.new 'x86-my_platform1'
end
quick_spec @a1.name, @a1.version do |s|
s.platform = Gem::Platform.new 'x86-third_platform1'
end
quick_spec @a2.name, @a2.version do |s|
s.platform = Gem::Platform.new 'x86-my_platform1'
end
quick_spec @a2.name, @a2.version do |s|
s.platform = Gem::Platform.new 'x86-other_platform1'
end
quick_spec @a2.name, @a2.version.bump do |s|
s.platform = Gem::Platform.new 'x86-other_platform1'
end
Gem::Specification.remove_spec @b2
Gem::Specification.remove_spec @pl1
expected = %W[
a-2
a-2-x86-my_platform-1
a-3-x86-other_platform-1
a_evil-9
c-1.2
p-1
#{p1_curr.full_name}
]
latest_specs = Gem::Specification.latest_specs.map(&:full_name).sort
assert_equal expected, latest_specs
end
def util_setup_deps
@gem = quick_spec "awesome", "1.0" do |awesome|
awesome.add_runtime_dependency "bonobo", []
awesome.add_development_dependency "monkey", []
end
@bonobo = Gem::Dependency.new("bonobo", [])
@monkey = Gem::Dependency.new("monkey", [], :development)
end
def util_setup_validate
Dir.chdir @tempdir do
FileUtils.mkdir_p File.join("ext", "a")
FileUtils.mkdir_p "lib"
FileUtils.mkdir_p "test"
FileUtils.mkdir_p "bin"
FileUtils.touch File.join("ext", "a", "extconf.rb")
FileUtils.touch File.join("lib", "code.rb")
FileUtils.touch File.join("test", "suite.rb")
File.open "bin/exec", "w" do |fp|
fp.puts "#!#{Gem.ruby}"
end
end
end
def with_syck
begin
require "yaml"
old_engine = YAML::ENGINE.yamler
YAML::ENGINE.yamler = 'syck'
load 'rubygems/syck_hack.rb'
rescue NameError
# probably on 1.8, ignore
end
yield
ensure
begin
YAML::ENGINE.yamler = old_engine
load 'rubygems/syck_hack.rb'
rescue NameError
# ignore
end
end
def with_psych
begin
require "yaml"
old_engine = YAML::ENGINE.yamler
YAML::ENGINE.yamler = 'psych'
load 'rubygems/syck_hack.rb'
rescue NameError
# probably on 1.8, ignore
end
yield
ensure
begin
YAML::ENGINE.yamler = old_engine
load 'rubygems/syck_hack.rb'
rescue NameError
# ignore
end
end
def silence_warnings
old_verbose, $VERBOSE = $VERBOSE, false
yield
ensure
$VERBOSE = old_verbose
end
end
| 24.668775 | 110 | 0.660994 |
7ad11571d784555589840c3022b0b8190c34f7c4
| 3,708 |
=begin
The Trust Payments API allows an easy interaction with the Trust Payments web service.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require "uri"
module TrustPayments
class TransactionPaymentPageService
attr_accessor :api_client
def initialize(api_client = ApiClient.default)
@api_client = api_client
end
# Build Payment Page URL
    # This operation creates the URL to which the user should be redirected when the payment page is to be used.
    # @param space_id
    # @param id The id of the transaction for which the payment page URL should be created.
# @param [Hash] opts the optional parameters
# @return [String]
def payment_page_url(space_id, id, opts = {})
data, _status_code, _headers = payment_page_url_with_http_info(space_id, id, opts)
return data
end
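    # Hedged usage sketch (illustrative only – `api_client`, `space_id` and
    # `transaction_id` are placeholder names, not values from this library):
    #
    #   service = TrustPayments::TransactionPaymentPageService.new(api_client)
    #   redirect_url = service.payment_page_url(space_id, transaction_id)
    #   # redirect_url is a plain String; send the buyer's browser there.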
# Build Payment Page URL
    # This operation creates the URL to which the user should be redirected when the payment page is to be used.
    # @param space_id
    # @param id The id of the transaction for which the payment page URL should be created.
# @param [Hash] opts the optional parameters
# @return [Array<(String, Fixnum, Hash)>] String data, response status code and response headers
def payment_page_url_with_http_info(space_id, id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug "Calling API: TransactionPaymentPageService.payment_page_url ..."
end
# verify the required parameter 'space_id' is set
fail ArgumentError, "Missing the required parameter 'space_id' when calling TransactionPaymentPageService.payment_page_url" if space_id.nil?
# verify the required parameter 'id' is set
fail ArgumentError, "Missing the required parameter 'id' when calling TransactionPaymentPageService.payment_page_url" if id.nil?
# resource path
local_var_path = "/transaction-payment-page/payment-page-url".sub('{format}','json')
# query parameters
query_params = {}
query_params[:'spaceId'] = space_id
query_params[:'id'] = id
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
local_header_accept = ['application/json', 'text/plain;charset=utf-8']
local_header_accept_result = @api_client.select_header_accept(local_header_accept) and header_params['Accept'] = local_header_accept_result
# HTTP header 'Content-Type'
local_header_content_type = []
header_params['Content-Type'] = @api_client.select_header_content_type(local_header_content_type)
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = []
data, status_code, headers = @api_client.call_api(:GET, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'String')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionPaymentPageService#payment_page_url\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
end
end
| 40.304348 | 173 | 0.717907 |
287d378baa17be27b9d5b83ee62235dde4aef2a7
| 3,482 |
module Gilt
autoload :Sku, "gilt/sku"
class Product
def self.defer(future)
require 'weary/deferred'
Weary::Deferred.new future, self, lambda {|product, response| product.new(response.parse) }
end
def self.client(apikey, affid=nil)
Gilt::Client::Products.new(apikey, affid)
end
def self.detail(product_id, apikey, affid=nil)
client(apikey, affid).detail(:product_id => product_id)
end
def self.create(product_id, apikey, affid=nil)
response = detail(product_id, apikey, affid).perform
self.new response.parse
end
def self.categories(apikey, affid=nil)
response = client(apikey, affid).categories.perform
response.parse["categories"] || []
end
def initialize(product_body)
@product = product_body
end
def name
@product["name"]
end
def product
URI(@product["product"])
end
def id
@product["id"].to_i
end
def brand
@product["brand"]
end
def url
URI(@product["url"])
end
def categories
@product["categories"] || []
end
def description
fetch_content :description
end
def fit_notes
fetch_content :fit_notes
end
def material
fetch_content :material
end
def care_instructions
fetch_content :care_instructions
end
def origin
fetch_content :origin
end
def content
keys = [:description, :fit_notes, :material, :care_instructions, :origin]
Hash[keys.map {|content| [content, self.send(content) ]}]
end
def skus
@product["skus"].map {|sku| Sku.new(sku) }
end
def min_price
sorted_price.last
end
def max_price
sorted_price.first
end
def price_range
[min_price, max_price]
end
def format_price
range = price_range
return range.first.format if range.first == range.last
price_range.map(&:format).join(" - ")
end
def images
@product["image_urls"]
end
def colors
skus.map {|sku| sku.attributes[:color] }.uniq
end
def sizes
skus.map {|sku| sku.attributes[:size] }.uniq
end
def skus_of_size(size)
skus_with_attribute :size, size
end
def skus_of_color(color)
skus_with_attribute :color, color
end
def skus_with_attribute(attribute, value)
skus.select {|sku| !!sku.attributes[attribute.to_sym].match(value) }
end
def select_sku(attributes)
attribute_map = attributes.map {|k, v| skus_with_attribute(k, v).map(&:id) }
ids = attribute_map.reduce(:&)
skus.find {|sku| sku.id == ids.first } if ids.size > 0
end
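    # Hedged usage sketch: select_sku intersects the sku ids that match each
    # attribute (via skus_with_attribute), so assuming a product that actually
    # carries :color and :size attributes:
    #
    #   product.select_sku(color: "Navy", size: "M")
    #   # => the first Sku matching both attributes, or nil when nothing matches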
def inventory_status
sku_inventory = skus.map(&:inventory_status).uniq
if sku_inventory.include? Gilt::Sku::FOR_SALE
Gilt::Sku::FOR_SALE
elsif sku_inventory.all? {|status| status == Gilt::Sku::RESERVED}
Gilt::Sku::RESERVED
else
Gilt::Sku::SOLD_OUT
end
end
def for_sale?
inventory_status == Gilt::Sku::FOR_SALE
end
def sold_out?
inventory_status == Gilt::Sku::SOLD_OUT
end
def reserved?
inventory_status == Gilt::Sku::RESERVED
end
private
def fetch_content(key)
content = @product["content"]
content[key.to_s] unless content.nil?
end
def sorted_price
set = skus.map(&:sale_price).uniq
return set if set.size == 1
set.sort
end
end
end
| 20.482353 | 97 | 0.621769 |
e8290297e3f7119bc552a78bf64058a0ae4f0354
| 450 |
require "cases/helper"
require 'models/person'
module ActiveRecord
class CustomLockingTest < ActiveRecord::TestCase
fixtures :people
def test_custom_lock
if current_adapter?(:MysqlAdapter, :Mysql2Adapter)
assert_match 'SHARE MODE', Person.lock('LOCK IN SHARE MODE').to_sql
assert_sql(/LOCK IN SHARE MODE/) do
Person.all.merge!(:lock => 'LOCK IN SHARE MODE').find(1)
end
end
end
end
end
| 25 | 75 | 0.675556 |
187f8605137d128425d918c5bb0300524de252f2
| 1,517 |
require "services"
module Republisher
module_function
def republish_all
all_content_ids_and_locales.each do |content_id, locale|
RepublishWorker.perform_async(content_id, locale)
end
end
def republish_document_type(document_type)
content_id_and_locale_pairs_for_document_type(
document_type,
).each do |content_id, locale|
RepublishWorker.perform_async(content_id, locale)
end
end
def republish_one(content_id, locale)
RepublishWorker.new.perform(content_id, locale)
end
def republish_many(content_ids_and_locales)
content_ids_and_locales.each do |content_id, locale|
RepublishWorker.perform_async(content_id, locale)
end
end
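  # Hedged examples – the content id and locales below are placeholders only:
  #
  #   Republisher.republish_one("some-content-id", "en")    # runs the worker inline
  #   Republisher.republish_many([%w[some-content-id en],
  #                               %w[some-content-id cy]])  # enqueues a worker per pair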
def all_content_ids_and_locales
document_types.flat_map do |t|
content_id_and_locale_pairs_for_document_type(t)
end
end
def content_id_and_locale_pairs_for_document_type(document_type)
unless document_types.include?(document_type)
raise ArgumentError, "Unknown document_type: '#{document_type}'"
end
Services.with_timeout(30) do
Services.publishing_api.get_content_items(
document_type: document_type,
fields: %i[content_id locale],
per_page: 999_999,
order: "updated_at",
)["results"].map { |r| [r["content_id"], r["locale"]] }
end
end
def document_types
@document_types ||= all_document_types
end
def all_document_types
Rails.application.eager_load!
Document.subclasses.map(&:document_type)
end
end
| 25.283333 | 70 | 0.738299 |
911e26bb75945e265fafc1f4a4a40ee46bf1b48e
| 1,206 |
# frozen_string_literal: true
module Google
module Apis
module CalendarV3
class Event
MEETING_URL_REGEX = %r{https://.*?\.zoom\.us/(?:j/(\d+)|my/(\S+))}
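        # Illustrative matches for MEETING_URL_REGEX (the URLs are made up):
        #   "https://example.zoom.us/j/123456789"  -> capture 1: "123456789" (numeric meeting id)
        #   "https://example.zoom.us/my/some-room" -> capture 2: "some-room" (vanity id)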
include ActionView::Helpers::DateHelper
def meeting_id
@meeting_id ||= (matches[1] || matches[2])
end
def meeting_url
@meeting_url ||= URI(matches[0]) if matches
end
def zoom_url
"zoommtg://zoom.us/join?confno=#{meeting_id}" if meeting_id && !vanity_url?
end
def already_started?
start.date_time <= DateTime.now
end
def more_than_five_minutes_from_now?
start.date_time.to_time >= (DateTime.now.to_time + 5 * 60)
end
def start_time_in_words
distance = time_ago_in_words(start.date_time)
if already_started?
"#{distance} ago".bold.red
else
"in #{distance}".bold
end
end
private
def matches
@matches ||= "#{location} #{description}".match(MEETING_URL_REGEX)
end
def vanity_url?
meeting_id && meeting_id !~ /\A\d+\z/
end
end
end
end
end
| 23.192308 | 85 | 0.55058 |
6a6e34dfce02a2477c303dabfefd63d875375a0e
| 339 |
class AddConfirmableToUsers < ActiveRecord::Migration
def change
add_column :users, :confirmation_token, :string
add_column :users, :confirmed_at, :datetime
add_column :users, :confirmation_sent_at, :datetime
add_index :users, :confirmation_token, :unique => true
User.update_all(:confirmed_at => Time.now)
end
end
| 33.9 | 58 | 0.749263 |
f8d927f90713751893270a04efdb1f42cd2b7eaa
| 924 |
require 'minitest/reporters'
require 'minitest'
require_relative '../lib/minitest/xs_and_os_plugin'
module Minitest
# copied from minitest
def self.init_plugins(options)
extensions.each do |name|
msg = "plugin_#{name}_init"
send msg, options if respond_to?(msg)
end
fix_reporters
end
def self.fix_reporters
dr = reporter.reporters.find { |r| r.is_a? Minitest::Reporters::DelegateReporter }
# getting rid of default reporters
drr = dr.instance_variable_get(:@reporters)
drr.delete_if { |r| r.is_a?(Minitest::SummaryReporter) || r.is_a?(Minitest::ProgressReporter) }
# getting rid of rails reporters
if defined?(Rails)
reporter.reporters.delete_if { |r| r.is_a?(Minitest::SuppressedSummaryReporter) || r.is_a?(::Rails::TestUnitReporter) }
end
end
end
Minitest::Reporters.use!
Minitest::Reporters.use! Minitest::Reporters::TravisReporter.new
| 29.806452 | 125 | 0.709957 |
0814622171edf074e2c61f90267877d0b14dcd00
| 889 |
require 'resque-serializer/version'
require 'resque-serializer/mutex'
require 'resque-serializer/serializers/both'
require 'resque-serializer/serializers/combined'
require 'resque-serializer/serializers/job'
require 'resque-serializer/serializers/queue'
module Resque
module Plugins
module Serializer
def serialize(resource)
case resource
when :job then extend(Serializers::Job)
when :queue then extend(Serializers::Queue)
when :both then extend(Serializers::Both)
when :combined then extend(Serializers::Combined)
else raise_invalid_resource
end
end
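      # Hedged usage sketch (the job class below is hypothetical, not part of this gem):
      #
      #   class HypotheticalJob
      #     extend Resque::Plugins::Serializer
      #     serialize :job   # or :queue, :both, :combined
      #
      #     @queue = :default
      #     def self.perform(*args); end
      #   end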
private
def raise_invalid_resource
error_msg = begin
'The passed argument must be one of: [:job, :queue, :both, :combined]'
end
raise ArgumentError, error_msg
end
end
end
end
| 26.939394 | 80 | 0.669291 |
6a518b38003071efb8d14c133aacb1e9e93aba83
| 449 |
module ISBNdb
module Api
class Author
attr_reader :client
def initialize(client:)
@client = client
end
def find(name, options = {})
@client.request("/author/#{name}", options)
end
def batch(query, options = {})
@client.request("/authors/#{query}", options)
end
def search(options = {})
@client.request("/search/authors/", options)
end
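      # Hedged usage sketch – `client` is assumed to respond to #request, and the
      # argument values are illustrative only:
      #
      #   authors = ISBNdb::Api::Author.new(client: client)
      #   authors.find("mark_twain")   # hits "/author/mark_twain"
      #   authors.batch("twain")       # hits "/authors/twain"
      #   authors.search(page: 2)      # hits "/search/authors/" with the options passed through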
end
end
end
| 18.708333 | 53 | 0.556793 |
f7599ccd5ed668b9f528568b6133c494382840db
| 708 |
require File.join(File.dirname(__FILE__), 'abstract-php-extension')
class Php54Amqp < AbstractPhp54Extension
init
homepage 'http://pecl.php.net/package/amqp'
url 'http://pecl.php.net/get/amqp-1.0.9.tgz'
sha1 '020ae71214fa2f57aeb70f0ec139ffe43a477ded'
head 'http://svn.php.net/repository/pecl/amqp/trunk/', :using => :svn
depends_on 'rabbitmq-c'
def install
Dir.chdir "amqp-#{version}" unless ARGV.build_head?
ENV.universal_binary if build.universal?
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig
system "make"
prefix.install "modules/amqp.so"
write_config_file unless build.include? "without-config-file"
end
end
| 28.32 | 71 | 0.696328 |
183a35442de53e781852a388df589940a2e20b9a
| 6,850 |
require File.dirname(__FILE__) + '/spec_helper'
require File.dirname(__FILE__) + '/gen-rb/ThriftSpec_types'
class ThriftTypesSpec < Spec::ExampleGroup
include Thrift
before(:each) do
Thrift.type_checking = true
end
after(:each) do
Thrift.type_checking = false
end
describe "Type checking" do
it "should return the proper name for each type" do
Thrift.type_name(Types::I16).should == "Types::I16"
Thrift.type_name(Types::VOID).should == "Types::VOID"
Thrift.type_name(Types::LIST).should == "Types::LIST"
Thrift.type_name(42).should be_nil
end
it "should check types properly" do
# lambda { Thrift.check_type(nil, Types::STOP) }.should raise_error(TypeError)
lambda { Thrift.check_type(3, {:type => Types::STOP}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::VOID}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(3, {:type => Types::VOID}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type(true, {:type => Types::BOOL}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(3, {:type => Types::BOOL}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type(42, {:type => Types::BYTE}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(42, {:type => Types::I16}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(42, {:type => Types::I32}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(42, {:type => Types::I64}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(3.14, {:type => Types::I32}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type(3.14, {:type => Types::DOUBLE}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(3, {:type => Types::DOUBLE}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type("3", {:type => Types::STRING}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(3, {:type => Types::STRING}, :foo) }.should raise_error(TypeError)
hello = SpecNamespace::Hello.new
lambda { Thrift.check_type(hello, {:type => Types::STRUCT}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type("foo", {:type => Types::STRUCT}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type({:foo => 1}, {:type => Types::MAP}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type([1], {:type => Types::MAP}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type([1], {:type => Types::LIST}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type({:foo => 1}, {:type => Types::LIST}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type(Set.new([1,2]), {:type => Types::SET}, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type([1,2], {:type => Types::SET}, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type({:foo => true}, {:type => Types::SET}, :foo) }.should raise_error(TypeError)
end
it "should error out if nil is passed and skip_types is false" do
lambda { Thrift.check_type(nil, {:type => Types::BOOL}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::BYTE}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::I16}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::I32}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::I64}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::DOUBLE}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::STRING}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::STRUCT}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::LIST}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::SET}, :foo, false) }.should raise_error(TypeError)
lambda { Thrift.check_type(nil, {:type => Types::MAP}, :foo, false) }.should raise_error(TypeError)
end
it "should check element types on containers" do
field = {:type => Types::LIST, :element => {:type => Types::I32}}
lambda { Thrift.check_type([1, 2], field, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type([1, nil, 2], field, :foo) }.should raise_error(TypeError)
field = {:type => Types::MAP, :key => {:type => Types::I32}, :value => {:type => Types::STRING}}
lambda { Thrift.check_type({1 => "one", 2 => "two"}, field, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type({1 => "one", nil => "nil"}, field, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type({1 => nil, 2 => "two"}, field, :foo) }.should raise_error(TypeError)
field = {:type => Types::SET, :element => {:type => Types::I32}}
lambda { Thrift.check_type(Set.new([1, 2]), field, :foo) }.should_not raise_error(TypeError)
lambda { Thrift.check_type(Set.new([1, nil, 2]), field, :foo) }.should raise_error(TypeError)
lambda { Thrift.check_type(Set.new([1, 2.3, 2]), field, :foo) }.should raise_error(TypeError)
end
it "should give the TypeError a readable message" do
msg = "Expected Types::STRING, received Fixnum for field foo"
lambda { Thrift.check_type(3, {:type => Types::STRING}, :foo) }.should raise_error(TypeError, msg)
msg = "Expected Types::STRING, received Fixnum for field foo.element"
field = {:type => Types::LIST, :element => {:type => Types::STRING}}
lambda { Thrift.check_type([3], field, :foo) }.should raise_error(TypeError, msg)
msg = "Expected Types::I32, received NilClass for field foo.element.key"
field = {:type => Types::LIST,
:element => {:type => Types::MAP,
:key => {:type => Types::I32},
:value => {:type => Types::I32}}}
lambda { Thrift.check_type([{nil => 3}], field, :foo) }.should raise_error(TypeError, msg)
msg = "Expected Types::I32, received NilClass for field foo.element.value"
lambda { Thrift.check_type([{1 => nil}], field, :foo) }.should raise_error(TypeError, msg)
end
end
end
| 71.354167 | 116 | 0.625839 |
28f33bae9fa0da0de49ff5f56c7b4401f21883cc
| 5,496 |
require 'spec_helper'
RSpec.describe AppArchetype::TemplateManager do
let(:template_dir) { 'path/to/templates' }
subject do
described_class.new(template_dir)
end
describe '#load' do
let(:manifest_file) { 'path/to/dir/manifest.json' }
let(:manifest) { double(AppArchetype::Template::Manifest) }
let(:files) do
[
manifest_file,
manifest_file
]
end
before do
allow(Dir).to receive(:glob).and_return(files)
end
context 'when manifest is valid' do
before do
allow(AppArchetype::Template::Manifest).to receive(:new_from_file)
.and_return(manifest)
subject.load
end
it 'loads manifests' do
expect(subject.manifests.count).to be 2
subject.manifests.each do |manifest|
expect(manifest).to eq manifest
end
end
end
context 'when a manifest is invalid' do
before do
allow(AppArchetype::Template::Manifest).to receive(:new_from_file)
.and_raise('something went wrong parsing manifest')
allow(subject).to receive(:puts)
subject.load
end
it 'prints invalid template warning for each failed template' do
expect(subject)
.to have_received(:puts)
.with('WARN: `path/to/dir/manifest.json` is invalid, skipping')
.twice
end
end
end
describe '#filter' do
let(:query) do
->(manifest) { manifest == target_manifest }
end
let(:manifest) { double(AppArchetype::Template::Manifest) }
let(:target_manifest) { double(AppArchetype::Template::Manifest) }
let(:manifests) do
[
manifest,
manifest,
target_manifest
]
end
before do
subject.instance_variable_set(:@manifests, manifests)
@result = subject.filter(query)
end
it 'finds target manifest in set' do
expect(@result.count).to be 1
expect(@result.first).to eq target_manifest
end
context 'when no query is defined' do
before do
@results = subject.filter
end
it 'returns everything' do
expect(@results.count).to eq 3
end
end
end
describe '#search_by_name' do
let(:search_term) { 'target' }
let(:lang) { '.rb' }
let(:manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'manifest'
)
end
let(:target_manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'target'
)
end
let(:another_target_manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'target and more'
)
end
let(:manifests) do
[
manifest,
manifest,
target_manifest,
another_target_manifest
]
end
before do
subject.instance_variable_set(:@manifests, manifests)
@result = subject.search_by_name(search_term)
end
it 'returns both matching manifests' do
expect(@result).to eq(
[
target_manifest,
another_target_manifest
]
)
end
end
describe '#find_by_name' do
let(:search_term) { 'target' }
let(:lang) { '.rb' }
let(:manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'manifest'
)
end
let(:target_manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'target'
)
end
let(:almost_target_manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'target and more'
)
end
let(:manifests) do
[
manifest,
manifest,
target_manifest,
almost_target_manifest
]
end
before do
subject.instance_variable_set(:@manifests, manifests)
end
it 'returns only matching manifest' do
expect(subject.find_by_name(search_term)).to eq target_manifest
end
context 'when there are 2 manifests with the same name' do
let(:manifests) do
[
manifest,
manifest,
target_manifest,
target_manifest
]
end
it 'raises runtime error' do
expect do
subject.find_by_name(search_term)
end.to raise_error(
'more than one manifest matching the given name were found'
)
end
context 'when ignoring duplicates' do
it 'ignores error' do
expect do
subject.find_by_name(search_term, ignore_dupe: true)
end.not_to raise_error
end
it 'returns first manifest' do
expect(subject.find_by_name(search_term, ignore_dupe: true))
.to eq target_manifest
end
end
end
end
describe '#manifest_names' do
let(:manifest) do
AppArchetype::Template::Manifest.new(
'path/to/manifest.json',
'name' => 'manifest'
)
end
let(:manifests) do
[
manifest,
manifest,
manifest
]
end
before do
subject.instance_variable_set(:@manifests, manifests)
@result = subject.manifest_names
end
it 'returns array of manifest names' do
expect(@result).to eq(
%w[
manifest
manifest
manifest
]
)
end
end
end
| 21.637795 | 74 | 0.586063 |
084665447c6276419b557990b82513635e496e24
| 1,315 |
require 'rails_helper'
RSpec.describe 'Form' do
subject { page }
after { delete_dummy_config_file! }
before do
delete_dummy_config_file!
visit '/settings/dummy_group'
end
it 'has form elements' do
# Title
is_expected.to have_content 'Settings'
is_expected.to have_content 'Dummy Group'
# String field
is_expected.to have_css 'label', text: 'Dummy String'
is_expected.to have_field 'Dummy String', with: 'dummy_string'
# Checkbox
is_expected.to have_css 'label', text: 'Dummy Bool'
is_expected.to have_checked_field 'Dummy Bool'
    # Integer field
is_expected.to have_css 'label', text: 'Dummy Integer'
is_expected.to have_field 'Dummy Integer', with: '777'
# Select field
is_expected.to have_css 'label', text: 'Dummy Select'
is_expected.to have_select 'Dummy Select', selected: 'foo', with_options: %w(foo bar baz)
# Non validation field
is_expected.to have_css 'label', text: 'Dummy Not Validate'
is_expected.to have_field 'Dummy Not Validate'
# Non UI field
is_expected.not_to have_css 'label', text: 'Dummy Not For UI'
is_expected.not_to have_field 'Dummy Not For UI'
# Actions
is_expected.to have_button 'Save Dummy Settings'
is_expected.to have_link 'Cancel Dummy Settings'
end
end
| 27.395833 | 93 | 0.706464 |
e2b366d9604af25afe9bafc015fe3c8a94570e8f
| 72 |
class Category < ActiveRecord::Base
has_many :products
resourcify
end
| 14.4 | 35 | 0.805556 |
abc340486a4c0ba8d86479d9db4d0689a08c460e
| 432 |
module Twfarm
class DB
    def initialize
      $db ||= SQLite3::Database.new(Files::DATABASE)
      # Each file under ../tables is expected to hold a single CREATE TABLE statement.
      Dir.glob("#{__dir__}/../tables/*").each do |file|
        File.open(file, "r") do |f|
          table = f.read
          # The table name is the third whitespace-separated token ("CREATE TABLE <name> ...").
          table_name = table.split(" ")[2]
          current_schema = `sqlite3 #{Files::DATABASE} '.schema #{table_name}'`
          # Only create the table when sqlite3 reports no existing schema for it.
          $db.execute(table) if current_schema.empty?
        end
      end
    end
end
end
| 25.411765 | 79 | 0.560185 |
339433eb62d6a1b812a4975523c2e40b8129d3a1
| 766 |
cask "bettermouse" do
version "1.3.1823"
sha256 "74f9050c23575177bdd7c596bc59556cfe803a6d7a9a61a59f47f0f72ecbfc2c"
url "https://better-mouse.com/wp-content/uploads/BetterMouse.#{version}.zip"
name "BetterMouse"
desc "Utility improving 3rd party mouse performance and functionalities"
homepage "https://better-mouse.com/"
livecheck do
url :homepage
regex(/Version (\d+(?:\.\d+)+)/i)
end
app "BetterMouse.app"
uninstall quit: bundle_id = "com.naotanhaocan.BetterMouse"
zap trash: [
"~/Library/Application Support/BetterMouse",
"~/Library/Caches/#{bundle_id}",
"~/Library/HTTPStorages/#{bundle_id}*",
"~/Library/Preferences/#{bundle_id}.plist",
"~/Library/Saved Application State/#{bundle_id}.savedState",
]
end
| 28.37037 | 78 | 0.710183 |
bfefdfa9510760814a18823a0c4e417f313e265a
| 780 |
require 'from_json'
require 'snake'
require 'food'
require 'wall'
require 'space'
class Board
extend FromJson
attr_reader :width, :height, :food, :snakes
def initialize(width:, height:, food:, snakes:)
@width = width
@height = height
@snakes = snakes.map { |json| Snake.from_json(json) }
@food = food.map { |json| Food.from_json(json) }
end
def [](x, y, offset = 0)
if (0...width).include?(x) && (0...height).include?(y)
snake_at(x, y, offset) ||
food_at(x, y, offset) ||
Space.new
else
Wall.new
end
end
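  # Illustrative lookups (the board contents are assumed for the example):
  #
  #   board[0, 0]     # => the Snake, Food or Space occupying that cell
  #   board[-1, 5]    # => Wall, because the coordinate falls outside the grid
  #   board[3, 3, 1]  # => same lookup with an offset passed through to Snake#includes?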
private
def snake_at(x, y, offset)
snakes.detect { |snake| snake.includes?(x, y, offset) }
end
def food_at(x, y, _offset)
food.detect { |apple| apple.x == x && apple.y == y }
end
end
| 20 | 59 | 0.597436 |
21c5ca321182a440f6811ee462b4008532490945
| 163 |
class UpdateColumnsForUser < ActiveRecord::Migration[5.2]
def change
add_column :users, :country, :string
add_column :users, :locate, :string
end
end
| 23.285714 | 57 | 0.723926 |
4aab86c2d1639fd443fcb8649d869afc531af1f0
| 72 |
Ansuz::PluginManagerInstance.register_plugin(Ansuz::JAdams::FeedReader)
| 36 | 71 | 0.861111 |
91be2db66b0eb76de9d423feafc9a74563081246
| 3,334 |
require 'spec_helper'
module VCAP::CloudController
module Jobs::Runtime
describe AppBitsCopier do
let(:src_app) { VCAP::CloudController::AppFactory.make }
let(:dest_app) { VCAP::CloudController::AppFactory.make }
let(:compressed_path) { File.expand_path('../../../fixtures/good.zip', File.dirname(__FILE__)) }
let(:local_tmp_dir) { Dir.mktmpdir }
let(:blobstore_dir) { Dir.mktmpdir }
let(:app_event_repository) { double(:app_event_repository, record_src_copy_bits: nil, record_dest_copy_bits: nil) }
let(:package_blobstore) do
CloudController::Blobstore::Client.new({ provider: 'Local', local_root: blobstore_dir }, 'package')
end
let(:user) { 'some-user' }
let(:email) { '[email protected]' }
subject(:job) do
AppBitsCopier.new(src_app, dest_app, app_event_repository, user, email)
end
it { is_expected.to be_a_valid_job }
before do
Fog.unmock!
end
after do
Fog.mock!
FileUtils.remove_entry_secure local_tmp_dir
FileUtils.remove_entry_secure blobstore_dir
end
describe '#perform' do
before do
package_blobstore.cp_to_blobstore(compressed_path, src_app.guid)
end
it 'creates blob stores' do
expect(CloudController::DependencyLocator.instance).to receive(:package_blobstore).and_return(package_blobstore)
job.perform
end
it 'copies the source package zip to the package blob store for the destination app' do
allow(CloudController::DependencyLocator.instance).to receive(:package_blobstore).and_return(package_blobstore)
job.perform
expect(package_blobstore.exists?(dest_app.guid)).to be true
end
it 'uploads the package zip to the package blob store' do
allow(CloudController::DependencyLocator.instance).to receive(:package_blobstore).and_return(package_blobstore)
job.perform
package_blobstore.download_from_blobstore(dest_app.guid, File.join(local_tmp_dir, 'package.zip'))
expect(`unzip -l #{local_tmp_dir}/package.zip`).to include('bye')
end
it 'changes the package hash in the destination app' do
allow(CloudController::DependencyLocator.instance).to receive(:package_blobstore).and_return(package_blobstore)
expect {
job.perform
}.to change {
dest_app.refresh.package_hash
}
end
it 'creates a copy_bits audit event for source app' do
allow(CloudController::DependencyLocator.instance).to receive(:package_blobstore).and_return(package_blobstore)
job.perform
expect(app_event_repository).to have_received(:record_src_copy_bits).with(dest_app, src_app, user, email)
end
it 'creates a copy_bits audit event for destination app' do
allow(CloudController::DependencyLocator.instance).to receive(:package_blobstore).and_return(package_blobstore)
job.perform
expect(app_event_repository).to have_received(:record_dest_copy_bits).with(dest_app, src_app, user, email)
end
it 'knows its job name' do
expect(job.job_name_in_configuration).to equal(:app_bits_copier)
end
end
end
end
end
| 39.223529 | 122 | 0.679364 |
bff4d2b5555efa9c58f7ae430856ae8edbe648af
| 286 |
require 'rails_helper'
RSpec.describe BodyType, type: :model do
describe 'instance method' do
let(:body_type) {FactoryGirl.create(:body_type)}
describe '#name' do
it 'returns the name' do
expect(body_type.name).to eq("Dáil Éireann")
end
end
end
end
| 22 | 52 | 0.667832 |
edaced6c2829fbd9a0fccf065ea91cde61c714f4
| 1,248 |
#
# Cookbook Name:: openvpn
# Recipe:: service
#
# Copyright 2009-2013, Chef Software, Inc.
# Copyright 2015, Chef Software, Inc. <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include_recipe 'openvpn::install'
# systemd platforms use an instance service
case node['platform_family']
when 'rhel'
  if node['platform_version'].to_i >= 7
link '/etc/systemd/system/multi-user.target.wants/[email protected]' do
to '/usr/lib/systemd/system/[email protected]'
end
service_name = '[email protected]'
else
service_name = 'openvpn'
end
when 'arch'
service_name = '[email protected]'
else
service_name = 'openvpn'
end
service 'openvpn' do
service_name service_name
action [:enable, :start]
end
| 29.023256 | 80 | 0.737981 |
284fb35c0788caba6bc273a656f9329b5993d955
| 429 |
class Interface
attr_accessor :name,:ip,:mask_ip
def initialize(parameters)
parameters = parameters.with_indifferent_access
@name = parameters[:name]
@ip = parameters[:ip]
@mask_ip = parameters[:mask_ip]
end
def self.build_interfaces(amount)
interfaces = [Interface.new(name: "lo")]
amount.times do |time|
interfaces << Interface.new(name: "eth#{time}")
end
interfaces
end
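  # Illustrative call – device names follow the eth<N> pattern built above:
  #
  #   Interface.build_interfaces(2).map(&:name)  # => ["lo", "eth0", "eth1"]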
end
| 22.578947 | 53 | 0.678322 |
7ab9f4cb50e0cfc354648e2ec0d3c82de18e42fd
| 4,601 |
#
# Author:: Shane Davis (<[email protected]>)
# Cookbook Name:: datashades
# Recipe:: default
#
# Implements base configuration for instances
#
# Copyright 2016, Link Digital
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Add a larger swapfile if we have spare disk space
#
include_recipe "datashades::swapfile"
# Set timezone to default value
#
link "/etc/localtime" do
to "/usr/share/zoneinfo/#{node['datashades']['timezone']}"
link_type :symbolic
end
# Store timezone config so yum updates don't reset the timezone
#
template '/etc/sysconfig/clock' do
source 'clock.erb'
owner 'root'
group 'root'
mode '0755'
end
# Enable RedHat EPEL
#
execute "Enable EPEL" do
command "sed -i 's/enabled=0/enabled=1/g' /etc/yum.repos.d/epel.repo"
end
# Install/remove core packages
#
node['datashades']['core']['packages'].each do |p|
package p
end
execute "Update AWS command-line interface" do
command "pip --cache-dir=/tmp/ install --upgrade awscli"
end
node['datashades']['core']['unwanted-packages'].each do |p|
package p do
action :remove
end
end
# Real basic stuff needs to go in first so jq is available for the new stack params that use jq early on
#
include_recipe "datashades::stackparams"
# Install Icinga2 package
#
include_recipe "datashades::icinga-setup"
# Enable yum-cron so updates are downloaded on running nodes
#
service "yum-cron" do
action [:enable, :start]
end
# Enable nano syntax highlighting
#
cookbook_file '/etc/nanorc' do
source 'nanorc'
owner 'root'
group 'root'
mode '0755'
end
# Add some helpful stuff to bash
#
cookbook_file "/etc/profile.d/datashades.sh" do
source "datashades_bash.sh"
owner 'root'
group 'root'
mode '0755'
end
# Tag the root EBS volume so we can manage it in AWS Backup etc.
#
bash "Tag root EBS volume" do
code <<-EOS
ROOT_DISK_ID=$(aws ec2 describe-volumes --region=#{node['datashades']['region']} --filters Name=attachment.instance-id,Values=#{node['datashades']['instid']} Name=attachment.device,Values=/dev/xvda --query 'Volumes[*].[VolumeId]' --out text | cut -f 1)
aws ec2 create-tags --region #{node['datashades']['region']} --resources $ROOT_DISK_ID --tags Key=Name,Value=#{node['datashades']['hostname']}-root-volume Key=Environment,Value=#{node['datashades']['version']} Key=Service,Value=#{node['datashades']['sitename']} Key=Division,Value="Qld Online" Key=Owner,Value="Development and Delivery" Key=Version,Value="1.0"
EOS
end
# Make sure all instances have an /etc/awszoneid
#
bash "Adding AWS ZoneID" do
user "root"
code <<-EOS
zoneid=$(aws route53 list-hosted-zones-by-name --dns-name "#{node['datashades']['tld']}" | jq '.HostedZones[0].Id' | tr -d '"/hostedzone')
echo "zoneid=${zoneid}" > /etc/awszoneid
EOS
end
# Create DNS helper script
#
cookbook_file "/bin/checkdns" do
source "checkdns"
owner 'root'
group 'root'
mode '0755'
end
# Create ASG helper script
#
cookbook_file "/bin/updateasg" do
source "updateasg"
owner 'root'
group 'root'
mode '0755'
end
# Push stats to enable Cloudwatch monitoring
#
cwmon_artifact = "CloudWatchMonitoringScripts-1.2.2.zip"
remote_file "/opt/aws/#{cwmon_artifact}" do
source "https://aws-cloudwatch.s3.amazonaws.com/downloads/#{cwmon_artifact}"
mode '0644'
end
execute 'Unzip CloudWatch monitoring scripts' do
command "unzip -u -q /opt/aws/#{cwmon_artifact} -d /opt/aws/"
end
file "/etc/cron.d/cwpump" do
content "*/5 * * * * root perl /opt/aws/aws-scripts-mon/mon-put-instance-data.pl --mem-util --mem-used --mem-avail --swap-util --disk-space-util --disk-space-avail --disk-path=/ --auto-scaling --from-cron\n"
mode '0644'
end
# Replace default mail relay with Nuxeo AWS SMTP Relay
directory "/usr/share/aws-smtp-relay" do
action :create
end
cookbook_file "/usr/share/aws-smtp-relay/aws-smtp-relay-1.0.0-jar-with-dependencies.jar" do
source "aws-smtp-relay-1.0.0-jar-with-dependencies.jar"
end
template "/etc/init.d/aws-smtp-relay" do
source "aws-smtp-relay.erb"
mode "0755"
end
| 28.054878 | 368 | 0.704195 |
1ddc33aef43fbfbb11e415c453af4ef60ed7e5fd
| 14,316 |
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/memcache/v1/cloud_memcache_pb"
require "google/cloud/memcache/v1/cloud_memcache_services_pb"
require "google/cloud/memcache/v1/cloud_memcache"
class ::Google::Cloud::Memcache::V1::CloudMemcache::OperationsTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_list_operations
# Create GRPC objects.
grpc_response = ::Google::Longrunning::ListOperationsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
filter = "hello world"
page_size = 42
page_token = "hello world"
list_operations_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_operations, name
assert_kind_of ::Google::Longrunning::ListOperationsRequest, request
assert_equal "hello world", request["name"]
assert_equal "hello world", request["filter"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_operations_client_stub do
# Create client
client = ::Google::Cloud::Memcache::V1::CloudMemcache::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_operations({ name: name, filter: filter, page_size: page_size, page_token: page_token }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_operations name: name, filter: filter, page_size: page_size, page_token: page_token do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_operations ::Google::Longrunning::ListOperationsRequest.new(name: name, filter: filter, page_size: page_size, page_token: page_token) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_operations({ name: name, filter: filter, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_operations(::Google::Longrunning::ListOperationsRequest.new(name: name, filter: filter, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_operations_client_stub.call_rpc_count
end
end
def test_get_operation
# Create GRPC objects.
grpc_response = ::Google::Longrunning::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_operation, name
assert_kind_of ::Google::Longrunning::GetOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_operation_client_stub do
# Create client
client = ::Google::Cloud::Memcache::V1::CloudMemcache::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_operation({ name: name }) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_operation name: name do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_operation ::Google::Longrunning::GetOperationRequest.new(name: name) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_operation({ name: name }, grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_operation(::Google::Longrunning::GetOperationRequest.new(name: name), grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_operation_client_stub.call_rpc_count
end
end
def test_delete_operation
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
delete_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :delete_operation, name
assert_kind_of ::Google::Longrunning::DeleteOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, delete_operation_client_stub do
# Create client
client = ::Google::Cloud::Memcache::V1::CloudMemcache::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.delete_operation({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.delete_operation name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.delete_operation ::Google::Longrunning::DeleteOperationRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.delete_operation({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.delete_operation(::Google::Longrunning::DeleteOperationRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, delete_operation_client_stub.call_rpc_count
end
end
def test_cancel_operation
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
cancel_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :cancel_operation, name
assert_kind_of ::Google::Longrunning::CancelOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, cancel_operation_client_stub do
# Create client
client = ::Google::Cloud::Memcache::V1::CloudMemcache::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.cancel_operation({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.cancel_operation name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.cancel_operation ::Google::Longrunning::CancelOperationRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.cancel_operation({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.cancel_operation(::Google::Longrunning::CancelOperationRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, cancel_operation_client_stub.call_rpc_count
end
end
def test_wait_operation
# Create GRPC objects.
grpc_response = ::Google::Longrunning::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
timeout = {}
wait_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :wait_operation, name
assert_kind_of ::Google::Longrunning::WaitOperationRequest, request
assert_equal "hello world", request["name"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Duration), request["timeout"]
refute_nil options
end
Gapic::ServiceStub.stub :new, wait_operation_client_stub do
# Create client
client = ::Google::Cloud::Memcache::V1::CloudMemcache::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.wait_operation({ name: name, timeout: timeout }) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use named arguments
client.wait_operation name: name, timeout: timeout do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object
client.wait_operation ::Google::Longrunning::WaitOperationRequest.new(name: name, timeout: timeout) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use hash object with options
client.wait_operation({ name: name, timeout: timeout }, grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.wait_operation(::Google::Longrunning::WaitOperationRequest.new(name: name, timeout: timeout), grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, wait_operation_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Memcache::V1::CloudMemcache::Operations.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::Memcache::V1::CloudMemcache::Operations::Configuration, config
end
end
| 37.574803 | 191 | 0.715144 |
38a49818f430fdfba6fe5e00124a12e70a88e642
| 1,456 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: github.com/TheThingsNetwork/api/protocol/lorawan/device_address.proto
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("github.com/TheThingsNetwork/api/protocol/lorawan/device_address.proto", :syntax => :proto3) do
add_message "lorawan.PrefixesRequest" do
end
add_message "lorawan.PrefixesResponse" do
repeated :prefixes, :message, 1, "lorawan.PrefixesResponse.PrefixMapping"
end
add_message "lorawan.PrefixesResponse.PrefixMapping" do
optional :prefix, :string, 1
repeated :usage, :string, 2
end
add_message "lorawan.DevAddrRequest" do
repeated :usage, :string, 1
end
add_message "lorawan.DevAddrResponse" do
optional :dev_addr, :bytes, 1
end
end
end
module Lorawan
PrefixesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("lorawan.PrefixesRequest").msgclass
PrefixesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("lorawan.PrefixesResponse").msgclass
PrefixesResponse::PrefixMapping = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("lorawan.PrefixesResponse.PrefixMapping").msgclass
DevAddrRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("lorawan.DevAddrRequest").msgclass
DevAddrResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("lorawan.DevAddrResponse").msgclass
end
| 44.121212 | 143 | 0.775412 |
089b7b7d20c3fca1f580ef8a597c974b9c96b90e
| 13,351 |
#
# --------------------------------------------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="verify_digital_options.rb">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------------------------------------------
#
require 'date'
module GroupDocsSignatureCloud
# Defines options to verify Digital signature within a document
class VerifyDigitalOptions
# Specifies the signature type of processing
attr_accessor :signature_type
# Gets or sets a document page number for processing. Value is optional
attr_accessor :page
# Process all document pages. Type of processing depends on SignatureType For Images Document Type it can be used only for multi-frames images like .tiff
attr_accessor :all_pages
# Options to specify pages for processing
attr_accessor :pages_setup
# Password of Digital Certificate if required
attr_accessor :password
# File Guid of Digital Certificate
attr_accessor :certificate_file_path
# Comments of Digital Signature to validate Suitable for Spreadsheet and Words Processing document types
attr_accessor :comments
# Date and time range of Digital Signature to validate. Null value will be ignored. Suitable for Spreadsheet and Words Processing document types
attr_accessor :sign_date_time_from
# Date and time range of Digital Signature to validate. Null value will be ignored Suitable for Spreadsheet and Words Processing document types
attr_accessor :sign_date_time_to
# Reason of Digital Signature to validate Suitable for Pdf document type
attr_accessor :reason
# Signature Contact to validate Suitable for Pdf document type
attr_accessor :contact
# Signature Location to validate Suitable for Pdf document type
attr_accessor :location
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'signature_type' => :'SignatureType',
:'page' => :'Page',
:'all_pages' => :'AllPages',
:'pages_setup' => :'PagesSetup',
:'password' => :'Password',
:'certificate_file_path' => :'CertificateFilePath',
:'comments' => :'Comments',
:'sign_date_time_from' => :'SignDateTimeFrom',
:'sign_date_time_to' => :'SignDateTimeTo',
:'reason' => :'Reason',
:'contact' => :'Contact',
:'location' => :'Location'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'signature_type' => :'String',
:'page' => :'Integer',
:'all_pages' => :'BOOLEAN',
:'pages_setup' => :'PagesSetup',
:'password' => :'String',
:'certificate_file_path' => :'String',
:'comments' => :'String',
:'sign_date_time_from' => :'DateTime',
:'sign_date_time_to' => :'DateTime',
:'reason' => :'String',
:'contact' => :'String',
:'location' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.key?(:'SignatureType')
self.signature_type = attributes[:'SignatureType']
end
if attributes.key?(:'Page')
self.page = attributes[:'Page']
end
if attributes.key?(:'AllPages')
self.all_pages = attributes[:'AllPages']
end
if attributes.key?(:'PagesSetup')
self.pages_setup = attributes[:'PagesSetup']
end
if attributes.key?(:'Password')
self.password = attributes[:'Password']
end
if attributes.key?(:'CertificateFilePath')
self.certificate_file_path = attributes[:'CertificateFilePath']
end
if attributes.key?(:'Comments')
self.comments = attributes[:'Comments']
end
if attributes.key?(:'SignDateTimeFrom')
self.sign_date_time_from = attributes[:'SignDateTimeFrom']
end
if attributes.key?(:'SignDateTimeTo')
self.sign_date_time_to = attributes[:'SignDateTimeTo']
end
if attributes.key?(:'Reason')
self.reason = attributes[:'Reason']
end
if attributes.key?(:'Contact')
self.contact = attributes[:'Contact']
end
if attributes.key?(:'Location')
self.location = attributes[:'Location']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
    # @return Array of invalid properties with the reasons
def list_invalid_properties
invalid_properties = []
if @signature_type.nil?
invalid_properties.push("invalid value for 'signature_type', signature_type cannot be nil.")
end
if @all_pages.nil?
invalid_properties.push("invalid value for 'all_pages', all_pages cannot be nil.")
end
return invalid_properties
end
    # Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @signature_type.nil?
signature_type_validator = EnumAttributeValidator.new('String', ["None", "Text", "Image", "Digital", "Barcode", "QRCode", "Stamp", "FormField", "Metadata"])
return false unless signature_type_validator.valid?(@signature_type)
return false if @all_pages.nil?
return true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] signature_type Object to be assigned
def signature_type=(signature_type)
validator = EnumAttributeValidator.new('String', ["None", "Text", "Image", "Digital", "Barcode", "QRCode", "Stamp", "FormField", "Metadata"])
if signature_type.to_i == 0
unless validator.valid?(signature_type)
raise ArgumentError, "invalid value for 'signature_type', must be one of #{validator.allowable_values}."
end
@signature_type = signature_type
else
@signature_type = validator.allowable_values[signature_type.to_i]
end
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(other)
return true if self.equal?(other)
self.class == other.class &&
signature_type == other.signature_type &&
page == other.page &&
all_pages == other.all_pages &&
pages_setup == other.pages_setup &&
password == other.password &&
certificate_file_path == other.certificate_file_path &&
comments == other.comments &&
sign_date_time_from == other.sign_date_time_from &&
sign_date_time_to == other.sign_date_time_to &&
reason == other.reason &&
contact == other.contact &&
location == other.location
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(other)
self == other
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[signature_type, page, all_pages, pages_setup, password, certificate_file_path, comments, sign_date_time_from, sign_date_time_to, reason, contact, location].hash
end
# Downcases first letter.
# @return downcased string
def uncap(str)
str[0, 1].downcase + str[1..-1]
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
pname = uncap(self.class.attribute_map[key]).intern
value = attributes[pname]
if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
if value.is_a?(Array)
self.send("#{key}=", value.map { |v| _deserialize($1, v) })
end
elsif !value.nil?
self.send("#{key}=", _deserialize(type, value))
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
Date.parse value
when :Date
Date.parse value
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else
# model
# Signature type fix
ttype = type
if value.is_a?(Hash) and !value[:signatureType].nil?
ttype = value[:signatureType] + 'Signature'
if value[:signatureType] == 'FormField' and !value[:type].nil?
type = value[:type]
if type == 'Checkbox'
ttype = 'CheckboxFormFieldSignature'
end
if type == 'Text'
ttype = 'TextFormFieldSignature'
end
if type == 'Combobox'
ttype = 'ComboboxFormFieldSignature'
end
if type == 'DigitalSignature'
ttype = 'DigitalFormFieldSignature'
end
if type == 'Radio'
ttype = 'RadioButtonFormFieldSignature'
end
end
end
temp_model = GroupDocsSignatureCloud.const_get(ttype).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 33.8 | 167 | 0.619429 |
79f903133813c8e826fed60cd0f8ab3dbf8e4be2
| 1,233 |
#
# Cookbook Name:: desktop
# Recipe:: default
#
# Copyright 2015, Your Name
#
packages = [
# Basic packages
"gawk",
"aptitude",
"build-essential",
"libgmp-dev",
"curl",
"dkms",
"git",
"htop",
"nfs-common",
"nfs-kernel-server",
"python-pip",
"sl",
"unzip",
"vim",
"zip",
# Media packages
"ubuntu-restricted-extras",
"vlc"
]
packages.each do |pack|
package pack
end
# Set clock at Ubuntu from UTC to Local Time
execute "Set Ubuntu clock to UTC" do
command "timedatectl set-local-rtc 1"
action :run
end
# AWS configuration
directory "#{ENV['HOME']}/.aws" do
owner "#{node["user"]}"
group "#{node["group"]}"
mode '0775'
action :create
end
template "#{ENV['HOME']}/.aws/config" do
source "config.erb"
owner "#{node["user"]}"
group "#{node["group"]}"
mode '0775'
end
template "#{ENV['HOME']}/.aws/credentials" do
source "credentials.erb"
owner "#{node["user"]}"
group "#{node["group"]}"
mode '0775'
end
# Git configuration
template "#{ENV['HOME']}/.gitconfig" do
source "gitconfig.erb"
owner "#{node["user"]}"
group "#{node["group"]}"
mode '0775'
end
execute "Install AWS-Cli" do
user "#{node["user"]}"
command "pip install awscli"
action :run
end
| 16.44 | 45 | 0.621249 |
876270632b6790ac29c9e910597ca965c64212c4
| 4,891 |
module Embulk
module Input
class InputNicoComment < InputPlugin
Plugin.register_input('nicocomment', self)
def self.transaction(config, &control)
threads = 1
id = config.param('id', :string)
password = config.param('password', :string)
term = config.param('term', :string, default: 'hourly')
target = config.param('target', :string, default: 'fav')
category = config.param('category', :string, default: 'all')
task = {'id' => id, 'password' => password, 'term' => term, 'target' => target, 'category' => category}
columns = [
Column.new(0, 'smid', :string),
Column.new(1, 'comment', :string),
Column.new(2, 'date', :timestamp)
]
puts "File information generation started."
commit_reports = yield(task, columns, threads)
puts "File information input finished."
return {}
end
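      # A config sketch for this input plugin (all values below are illustrative):
      #
      #   in:
      #     type: nicocomment
      #     id: [email protected]
      #     password: secret
      #     term: hourly
      #     target: fav
      #     category: all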
def initialize(task, schema, index, page_builder)
super
end
def run
require 'net/https'
require 'rexml/document'
require 'json'
id = @task['id']
password = @task['password']
term = @task['term']
target = @task['target']
category = @task['category']
cookie = login_nicovideo(id, password)
smids = get_ranking(cookie, term, target, category)
thread_info = get_flv_info(cookie, smids)
com_info = get_comments(cookie, thread_info)
com_info.each { |com|
begin
if com['id'] != nil
@page_builder.add([com['id'], com['content'], com['date']])
end
rescue
end
}
@page_builder.finish
commit_report = {
}
return commit_report
end
def login_nicovideo(mail, pass)
host = 'secure.nicovideo.jp'
path = '/secure/login?site=niconico'
body = "mail=#{mail}&password=#{pass}"
https = Net::HTTP.new(host, 443)
https.use_ssl = true
https.verify_mode = OpenSSL::SSL::VERIFY_NONE
response = https.start { |https|
https.post(path, body)
}
cookie = ''
response['set-cookie'].split('; ').each do |st|
if idx = st.index('user_session_')
cookie = "user_session=#{st[idx..-1]}"
break
end
end
return cookie
end
      def get_response(host, path, waittime, cookie)
        sleep(waittime)
response = Net::HTTP.new(host).start { |http|
request = Net::HTTP::Get.new(path)
request['cookie'] = cookie
http.request(request)
}
return response
end
def http_response(host, path, cookie)
        response = get_response(host, path, 3, cookie)
        if response.body.include?("error")
          response = get_response(host, path, 30, cookie)
end
return response
end
def get_ranking(cookie, term, target, category)
host = 'www.nicovideo.jp'
path = "/ranking/#{target}/#{term}/#{category}?rss=2.0"
response = http_response(host, path, cookie)
doc = REXML::Document.new response.body
smids = []
doc.elements.each('/rss/channel/item/link'){|e| smids << e.text.split('/').last}
return smids
end
def get_flv_info(cookie, smids)
host = 'flapi.nicovideo.jp'
ret = []
smids.each { |sm|
path = "/api/getflv/#{sm}"
response = http_response(host, path, cookie)
flv_info = {}
flv_info[:sm] = sm
response.body.split('&').each do |st|
stt = st.split('=')
if stt[0] == 'thread_id'
flv_info[:thread_id] = stt[1]
end
if stt[0] == 'ms'
flv_info[:ms] = stt[1].split('%2F')[3]
end
end
ret << flv_info
}
return ret
end
def get_comments(cookie, thread_info)
host = 'msg.nicovideo.jp'
ret = []
thread_info.each { |ar|
thread_val = ar[:thread_id]
ms = ar[:ms]
next if ms == nil
path = "/#{ms}/api.json/thread?version=20090904&thread=#{thread_val}&res_from=-1000"
begin
response = http_response(host, path, cookie)
JSON.load(response.body).each { |js|
comjs = js['chat']
if comjs != nil
com_info = {}
com_info['id'] = ar[:sm]
com_info['content'] = comjs['content']
com_info['date'] = Time.at(comjs['date'])
ret << com_info
end
}
rescue
end
}
return ret
end
end
end
end
| 26.15508 | 111 | 0.510734 |
ff95d30dde49423df455675da7a254789a07b5a5
| 8,536 |
require 'puppet/node'
require 'puppet/resource/catalog'
require 'puppet/indirector/catalog/compiler'
class Puppet::Resource::Catalog::StaticCompiler < Puppet::Resource::Catalog::Compiler
desc %q{Compiles catalogs on demand using the optional static compiler. This
functions similarly to the normal compiler, but it replaces puppet:/// file
URLs with explicit metadata and file content hashes, expecting puppet agent
to fetch the exact specified content from the filebucket. This guarantees
that a given catalog will always result in the same file states. It also
decreases catalog application time and fileserver load, at the cost of
increased compilation time.
This terminus works today, but cannot be used without additional
configuration. Specifically:
* You must create a special filebucket resource --- with the title `puppet`
and the `path` attribute set to `false` --- in site.pp or somewhere else
where it will be added to every node's catalog. Using `puppet` as the title
is mandatory; the static compiler treats this title as magical.
filebucket { puppet:
path => false,
}
* You must set `catalog_terminus = static_compiler` in the puppet
master's puppet.conf.
* The puppet master's auth.conf must allow authenticated nodes to access the
`file_bucket_file` endpoint. This is enabled by default (see the
`path /file` rule), but if you have made your auth.conf more restrictive,
    you may need to re-enable it.
* If you are using multiple puppet masters, you must configure load balancer
affinity for agent nodes. This is because puppet masters other than the one
that compiled a given catalog may not have stored the required file contents
in their filebuckets.}
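  # A minimal sketch of the puppet.conf change described above (the section
  # name and file location may differ in your installation):
  #
  #   # /etc/puppet/puppet.conf
  #   [master]
  #   catalog_terminus = static_compiler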
def find(request)
return nil unless catalog = super
raise "Did not get catalog back" unless catalog.is_a?(model)
catalog.resources.find_all { |res| res.type == "File" }.each do |resource|
next unless source = resource[:source]
next unless source =~ /^puppet:/
file = resource.to_ral
if file.recurse?
add_children(request.key, catalog, resource, file)
else
find_and_replace_metadata(request.key, resource, file)
end
end
catalog
end
# Take a resource with a fileserver based file source remove the source
# parameter, and insert the file metadata into the resource.
#
# This method acts to do the fileserver metadata retrieval in advance, while
# the file source is local and doesn't require an HTTP request. It retrieves
# the file metadata for a given file resource, removes the source parameter
# from the resource, inserts the metadata into the file resource, and uploads
# the file contents of the source to the file bucket.
#
# @param host [String] The host name of the node requesting this catalog
# @param resource [Puppet::Resource] The resource to replace the metadata in
# @param file [Puppet::Type::File] The file RAL associated with the resource
def find_and_replace_metadata(host, resource, file)
# We remove URL info from it, so it forces a local copy
# rather than routing through the network.
# Weird, but true.
newsource = file[:source][0].sub("puppet:///", "")
file[:source][0] = newsource
raise "Could not get metadata for #{resource[:source]}" unless metadata = file.parameter(:source).metadata
replace_metadata(host, resource, metadata)
end
# Rewrite a given file resource with the metadata from a fileserver based file
#
# This performs the actual metadata rewrite for the given file resource and
# uploads the content of the source file to the filebucket.
#
# @param host [String] The host name of the node requesting this catalog
# @param resource [Puppet::Resource] The resource to add the metadata to
# @param metadata [Puppet::FileServing::Metadata] The metadata of the given fileserver based file
def replace_metadata(host, resource, metadata)
[:mode, :owner, :group].each do |param|
resource[param] ||= metadata.send(param)
end
resource[:ensure] = metadata.ftype
if metadata.ftype == "file"
unless resource[:content]
resource[:content] = metadata.checksum
resource[:checksum] = metadata.checksum_type
end
end
store_content(resource) if resource[:ensure] == "file"
old_source = resource.delete(:source)
Puppet.info "Metadata for #{resource} in catalog for '#{host}' added from '#{old_source}'"
end
# Generate children resources for a recursive file and add them to the catalog.
#
# @param host [String] The host name of the node requesting this catalog
# @param catalog [Puppet::Resource::Catalog]
# @param resource [Puppet::Resource]
# @param file [Puppet::Type::File] The file RAL associated with the resource
def add_children(host, catalog, resource, file)
file = resource.to_ral
children = get_child_resources(host, catalog, resource, file)
remove_existing_resources(children, catalog)
children.each do |name, res|
catalog.add_resource res
catalog.add_edge(resource, res)
end
end
# Given a recursive file resource, recursively generate its children resources
#
# @param host [String] The host name of the node requesting this catalog
# @param catalog [Puppet::Resource::Catalog]
# @param resource [Puppet::Resource]
# @param file [Puppet::Type::File] The file RAL associated with the resource
#
# @return [Array<Puppet::Resource>] The recursively generated File resources for the given resource
def get_child_resources(host, catalog, resource, file)
sourceselect = file[:sourceselect]
children = {}
source = resource[:source]
# This is largely a copy of recurse_remote in File
total = file[:source].collect do |src|
next unless result = file.perform_recursion(src)
return if top = result.find { |r| r.relative_path == "." } and top.ftype != "directory"
result.each { |data| data.source = "#{src}/#{data.relative_path}" }
break result if result and ! result.empty? and sourceselect == :first
result
end.flatten.compact
# This only happens if we have sourceselect == :all
unless sourceselect == :first
found = []
total.reject! do |data|
result = found.include?(data.relative_path)
found << data.relative_path unless found.include?(data.relative_path)
result
end
end
total.each do |meta|
# This is the top-level parent directory
if meta.relative_path == "."
replace_metadata(host, resource, meta)
next
end
children[meta.relative_path] ||= Puppet::Resource.new(:file, File.join(file[:path], meta.relative_path))
# I think this is safe since it's a URL, not an actual file
children[meta.relative_path][:source] = source + "/" + meta.relative_path
resource.each do |param, value|
# These should never be passed to our children.
unless [:parent, :ensure, :recurse, :recurselimit, :target, :alias, :source].include? param
children[meta.relative_path][param] = value
end
end
replace_metadata(host, children[meta.relative_path], meta)
end
children
end
# Remove any file resources in the catalog that will be duplicated by the
# given file resources.
#
# @param children [Array<Puppet::Resource>]
# @param catalog [Puppet::Resource::Catalog]
def remove_existing_resources(children, catalog)
existing_names = catalog.resources.collect { |r| r.to_s }
both = (existing_names & children.keys).inject({}) { |hash, name| hash[name] = true; hash }
both.each { |name| children.delete(name) }
end
# Retrieve the source of a file resource using a fileserver based source and
# upload it to the filebucket.
#
# @param resource [Puppet::Resource]
def store_content(resource)
@summer ||= Puppet::Util::Checksums
type = @summer.sumtype(resource[:content])
sum = @summer.sumdata(resource[:content])
if Puppet::FileBucket::File.indirection.find("#{type}/#{sum}")
Puppet.info "Content for '#{resource[:source]}' already exists"
else
Puppet.info "Storing content for source '#{resource[:source]}'"
content = Puppet::FileServing::Content.indirection.find(resource[:source])
file = Puppet::FileBucket::File.new(content.content)
Puppet::FileBucket::File.indirection.save(file)
end
end
end
| 39.88785 | 110 | 0.698922 |
1dde55a020745ab4ef39e029dc60d31dfe78e4d9
| 2,173 |
require 'listen'
require 'cli/ui'
require 'tagrity/pid_file'
require 'tagrity/helper'
require 'tagrity/provider'
require 'tagrity/tlogger'
module Tagrity
module Command
class Start
class ErrorProcessAlreadyRunning < StandardError; end
class << self
def call(fg, fresh)
dir = Dir.pwd
assert_not_running(dir)
          Process.daemon(true) unless fg # truthy nochdir: stay in the watched directory
logger.fg = fg
tag_generator = Provider.provide(:tag_generator)
PidFile.write(PidFile.new(dir, Process.pid))
logger.info("Watching #{dir} with process pid #{Process.pid}")
if fresh
logger.info("Generating tags fresh for #{dir}")
tag_generator.generate_all
end
listener = Listen.to(
dir,
relative: true,
) do |modified, added, removed|
unless modified.empty?
logger.info("modified absolute path: #{modified}")
tag_generator.generate(modified, true)
end
unless added.empty?
logger.info("added absolute path: #{added}")
tag_generator.generate(added, true)
end
unless removed.empty?
logger.info("removed absolute path: #{removed}")
tag_generator.delete_files_tags(removed)
end
end
listener.start
sleep
rescue ErrorProcessAlreadyRunning => e
puts ::CLI::UI.fmt "{{red:#{e.message}}}"
logger.error(e.message)
rescue Interrupt => e
logger.info("Process interrupted. Killing #{Process.pid}")
PidFile.delete(dir)
end
private
def assert_not_running(dir)
running_processes = PidFile.alive_pid_files(dir: dir)
unless running_processes.empty?
pids = running_processes.map { |pid_file| pid_file.pid }
raise ErrorProcessAlreadyRunning, "Error: tagrity is already watching #{dir} with process #{pids}"
end
end
def logger
@logger ||= Provider.provide(:tlogger)
end
end
end
end
end
| 28.973333 | 110 | 0.579383 |
384cf21af7184ddb9ee9915d9579033b9b13222b
| 932 |
Pod::Spec.new do |s|
s.name = "minikitSwift"
s.version = "1.0.7"
  s.swift_version = "4.0"
  s.summary = "Common Swift utilities and base classes, packaged as a reusable component: miniKit."
  s.description = <<-DESC
  Common Swift utilities and base classes packaged as a reusable, componentized library, so new projects can use them directly instead of copying files around: miniKit.
                   DESC
s.homepage = "https://github.com/dongshangtong/swift-minikit"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "dongshangtong" => "[email protected]" }
s.platform = :ios, "10.0"
s.ios.deployment_target = "10.0"
s.source = { :git => "https://github.com/dongshangtong/swift-minikit.git", :tag => "#{s.version}" }
  s.source_files = "minikit", "minikit/**/*"
# s.subspec 'MNBase' do |sb|
# sb.source_files = 'minikit/MNBase/*.swift'
#end
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
s.requires_arc = true
# s.dependency "JSONKit", "~> 1.4"
end
| 27.411765 | 107 | 0.567597 |
26279daa0ca896202c1cf5a21bbf563285deb687
| 16,101 |
module Sass::Script::Value
# A SassScript object representing a number.
# SassScript numbers can have decimal values,
# and can also have units.
# For example, `12`, `1px`, and `10.45em`
# are all valid values.
#
# Numbers can also have more complex units, such as `1px*em/in`.
# These cannot be inputted directly in Sass code at the moment.
class Number < Base
# The Ruby value of the number.
#
# @return [Numeric]
attr_reader :value
# A list of units in the numerator of the number.
# For example, `1px*em/in*cm` would return `["px", "em"]`
# @return [Array<String>]
attr_reader :numerator_units
# A list of units in the denominator of the number.
# For example, `1px*em/in*cm` would return `["in", "cm"]`
# @return [Array<String>]
attr_reader :denominator_units
# The original representation of this number.
# For example, although the result of `1px/2px` is `0.5`,
# the value of `#original` is `"1px/2px"`.
#
# This is only non-nil when the original value should be used as the CSS value,
# as in `font: 1px/2px`.
#
# @return [Boolean, nil]
attr_accessor :original
def self.precision
@precision ||= 5
end
# Sets the number of digits of precision
# For example, if this is `3`,
# `3.1415926` will be printed as `3.142`.
def self.precision=(digits)
@precision = digits.round
@precision_factor = 10.0**@precision
end
# the precision factor used in numeric output
# it is derived from the `precision` method.
def self.precision_factor
@precision_factor ||= 10.0**precision
end
# Used so we don't allocate two new arrays for each new number.
NO_UNITS = []
# @param value [Numeric] The value of the number
# @param numerator_units [::String, Array<::String>] See \{#numerator\_units}
# @param denominator_units [::String, Array<::String>] See \{#denominator\_units}
def initialize(value, numerator_units = NO_UNITS, denominator_units = NO_UNITS)
numerator_units = [numerator_units] if numerator_units.is_a?(::String)
denominator_units = [denominator_units] if denominator_units.is_a?(::String)
super(value)
@numerator_units = numerator_units
@denominator_units = denominator_units
normalize!
end
# The SassScript `+` operation.
# Its functionality depends on the type of its argument:
#
# {Number}
# : Adds the two numbers together, converting units if possible.
#
# {Color}
# : Adds this number to each of the RGB color channels.
#
# {Value}
# : See {Value::Base#plus}.
#
# @param other [Value] The right-hand side of the operator
# @return [Value] The result of the operation
# @raise [Sass::UnitConversionError] if `other` is a number with incompatible units
def plus(other)
if other.is_a? Number
operate(other, :+)
elsif other.is_a?(Color)
other.plus(self)
else
super
end
end
# The SassScript binary `-` operation (e.g. `$a - $b`).
# Its functionality depends on the type of its argument:
#
# {Number}
# : Subtracts this number from the other, converting units if possible.
#
# {Value}
# : See {Value::Base#minus}.
#
# @param other [Value] The right-hand side of the operator
# @return [Value] The result of the operation
# @raise [Sass::UnitConversionError] if `other` is a number with incompatible units
def minus(other)
if other.is_a? Number
operate(other, :-)
else
super
end
end
# The SassScript unary `+` operation (e.g. `+$a`).
#
# @return [Number] The value of this number
def unary_plus
self
end
# The SassScript unary `-` operation (e.g. `-$a`).
#
# @return [Number] The negative value of this number
def unary_minus
Number.new(-value, @numerator_units, @denominator_units)
end
# The SassScript `*` operation.
# Its functionality depends on the type of its argument:
#
# {Number}
# : Multiplies the two numbers together, converting units appropriately.
#
# {Color}
# : Multiplies each of the RGB color channels by this number.
#
# @param other [Number, Color] The right-hand side of the operator
# @return [Number, Color] The result of the operation
# @raise [NoMethodError] if `other` is an invalid type
def times(other)
if other.is_a? Number
operate(other, :*)
elsif other.is_a? Color
other.times(self)
else
raise NoMethodError.new(nil, :times)
end
end
# The SassScript `/` operation.
# Its functionality depends on the type of its argument:
#
# {Number}
# : Divides this number by the other, converting units appropriately.
#
# {Value}
# : See {Value::Base#div}.
#
# @param other [Value] The right-hand side of the operator
# @return [Value] The result of the operation
def div(other)
if other.is_a? Number
res = operate(other, :/)
if original && other.original
res.original = "#{original}/#{other.original}"
end
res
else
super
end
end
# The SassScript `%` operation.
#
# @param other [Number] The right-hand side of the operator
# @return [Number] This number modulo the other
# @raise [NoMethodError] if `other` is an invalid type
# @raise [Sass::UnitConversionError] if `other` has incompatible units
def mod(other)
if other.is_a?(Number)
operate(other, :%)
else
raise NoMethodError.new(nil, :mod)
end
end
# The SassScript `==` operation.
#
# @param other [Value] The right-hand side of the operator
# @return [Boolean] Whether this number is equal to the other object
def eq(other)
return Bool::FALSE unless other.is_a?(Sass::Script::Value::Number)
this = self
begin
if unitless?
this = this.coerce(other.numerator_units, other.denominator_units)
else
other = other.coerce(@numerator_units, @denominator_units)
end
rescue Sass::UnitConversionError
return Bool::FALSE
end
Bool.new(this.value == other.value)
end
def hash
[value, numerator_units, denominator_units].hash
end
# Hash-equality works differently than `==` equality for numbers.
# Hash-equality must be transitive, so it just compares the exact value,
# numerator units, and denominator units.
def eql?(other)
value == other.value && numerator_units == other.numerator_units &&
denominator_units == other.denominator_units
end
# The SassScript `>` operation.
#
# @param other [Number] The right-hand side of the operator
# @return [Boolean] Whether this number is greater than the other
# @raise [NoMethodError] if `other` is an invalid type
def gt(other)
raise NoMethodError.new(nil, :gt) unless other.is_a?(Number)
operate(other, :>)
end
# The SassScript `>=` operation.
#
# @param other [Number] The right-hand side of the operator
# @return [Boolean] Whether this number is greater than or equal to the other
# @raise [NoMethodError] if `other` is an invalid type
def gte(other)
raise NoMethodError.new(nil, :gte) unless other.is_a?(Number)
operate(other, :>=)
end
# The SassScript `<` operation.
#
# @param other [Number] The right-hand side of the operator
# @return [Boolean] Whether this number is less than the other
# @raise [NoMethodError] if `other` is an invalid type
def lt(other)
raise NoMethodError.new(nil, :lt) unless other.is_a?(Number)
operate(other, :<)
end
# The SassScript `<=` operation.
#
# @param other [Number] The right-hand side of the operator
# @return [Boolean] Whether this number is less than or equal to the other
# @raise [NoMethodError] if `other` is an invalid type
def lte(other)
raise NoMethodError.new(nil, :lte) unless other.is_a?(Number)
operate(other, :<=)
end
# @return [String] The CSS representation of this number
# @raise [Sass::SyntaxError] if this number has units that can't be used in CSS
# (e.g. `px*in`)
def to_s(opts = {})
return original if original
raise Sass::SyntaxError.new("#{inspect} isn't a valid CSS value.") unless legal_units?
inspect
end
# Returns a readable representation of this number.
#
# This representation is valid CSS (and valid SassScript)
# as long as there is only one unit.
#
# @return [String] The representation
def inspect(opts = {})
return original if original
value = self.class.round(self.value)
str = value.to_s
# Ruby will occasionally print in scientific notation if the number is
# small enough. That's technically valid CSS, but it's not well-supported
# and confusing.
str = ("%0.#{self.class.precision}f" % value).gsub(/0*$/, '') if str.include?('e')
unitless? ? str : "#{str}#{unit_str}"
end
alias_method :to_sass, :inspect
# @return [Fixnum] The integer value of the number
# @raise [Sass::SyntaxError] if the number isn't an integer
def to_i
super unless int?
value
end
# @return [Boolean] Whether or not this number is an integer.
def int?
value % 1 == 0.0
end
# @return [Boolean] Whether or not this number has no units.
def unitless?
@numerator_units.empty? && @denominator_units.empty?
end
# Checks whether the number has the numerator unit specified.
#
# @example
# number = Sass::Script::Value::Number.new(10, "px")
# number.is_unit?("px") => true
# number.is_unit?(nil) => false
#
# @param unit [::String, nil] The unit the number should have or nil if the number
# should be unitless.
# @see Number#unitless? The unitless? method may be more readable.
def is_unit?(unit)
if unit
denominator_units.size == 0 && numerator_units.size == 1 && numerator_units.first == unit
else
unitless?
end
end
# @return [Boolean] Whether or not this number has units that can be represented in CSS
# (that is, zero or one \{#numerator\_units}).
def legal_units?
(@numerator_units.empty? || @numerator_units.size == 1) && @denominator_units.empty?
end
# Returns this number converted to other units.
# The conversion takes into account the relationship between e.g. mm and cm,
# as well as between e.g. in and cm.
#
# If this number has no units, it will simply return itself
# with the given units.
#
# An incompatible coercion, e.g. between px and cm, will raise an error.
#
# @param num_units [Array<String>] The numerator units to coerce this number into.
# See {\#numerator\_units}
# @param den_units [Array<String>] The denominator units to coerce this number into.
# See {\#denominator\_units}
# @return [Number] The number with the new units
# @raise [Sass::UnitConversionError] if the given units are incompatible with the number's
# current units
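    # @example An illustrative conversion (result is approximate due to floats)
    #   Sass::Script::Value::Number.new(2.54, ["cm"]).coerce(["in"], []).value
    #   #=> 1.0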
def coerce(num_units, den_units)
Number.new(if unitless?
value
else
value * coercion_factor(@numerator_units, num_units) /
coercion_factor(@denominator_units, den_units)
end, num_units, den_units)
end
# @param other [Number] A number to decide if it can be compared with this number.
# @return [Boolean] Whether or not this number can be compared with the other.
def comparable_to?(other)
operate(other, :+)
true
rescue Sass::UnitConversionError
false
end
# Returns a human readable representation of the units in this number.
# For complex units this takes the form of:
# numerator_unit1 * numerator_unit2 / denominator_unit1 * denominator_unit2
# @return [String] a string that represents the units in this number
def unit_str
rv = @numerator_units.sort.join("*")
if @denominator_units.any?
rv << "/"
rv << @denominator_units.sort.join("*")
end
rv
end
private
# @private
def self.round(num)
if num.is_a?(Float) && (num.infinite? || num.nan?)
num
elsif num % 1 == 0.0
num.to_i
else
((num * precision_factor).round / precision_factor).to_f
end
end
OPERATIONS = [:+, :-, :<=, :<, :>, :>=, :%]
def operate(other, operation)
this = self
if OPERATIONS.include?(operation)
if unitless?
this = this.coerce(other.numerator_units, other.denominator_units)
else
other = other.coerce(@numerator_units, @denominator_units)
end
end
# avoid integer division
value = :/ == operation ? this.value.to_f : this.value
result = value.send(operation, other.value)
if result.is_a?(Numeric)
Number.new(result, *compute_units(this, other, operation))
else # Boolean op
Bool.new(result)
end
end
def coercion_factor(from_units, to_units)
# get a list of unmatched units
from_units, to_units = sans_common_units(from_units, to_units)
if from_units.size != to_units.size || !convertable?(from_units | to_units)
raise Sass::UnitConversionError.new(
"Incompatible units: '#{from_units.join('*')}' and '#{to_units.join('*')}'.")
end
from_units.zip(to_units).inject(1) {|m, p| m * conversion_factor(p[0], p[1])}
end
def compute_units(this, other, operation)
case operation
when :*
[this.numerator_units + other.numerator_units,
this.denominator_units + other.denominator_units]
when :/
[this.numerator_units + other.denominator_units,
this.denominator_units + other.numerator_units]
else
[this.numerator_units, this.denominator_units]
end
end
def normalize!
return if unitless?
@numerator_units, @denominator_units =
sans_common_units(@numerator_units, @denominator_units)
@denominator_units.each_with_index do |d, i|
if convertable?(d) && (u = @numerator_units.find(&method(:convertable?)))
@value /= conversion_factor(d, u)
@denominator_units.delete_at(i)
@numerator_units.delete_at(@numerator_units.index(u))
end
end
end
# A hash of unit names to their index in the conversion table
CONVERTABLE_UNITS = %w(in cm pc mm pt px).inject({}) {|m, v| m[v] = m.size; m}
# in cm pc mm pt px
CONVERSION_TABLE = [[1, 2.54, 6, 25.4, 72 , 96], # in
[nil, 1, 2.36220473, 10, 28.3464567, 37.795275591], # cm
[nil, nil, 1, 4.23333333, 12 , 16], # pc
[nil, nil, nil, 1, 2.83464567, 3.7795275591], # mm
[nil, nil, nil, nil, 1 , 1.3333333333], # pt
[nil, nil, nil, nil, nil , 1]] # px
def conversion_factor(from_unit, to_unit)
res = CONVERSION_TABLE[CONVERTABLE_UNITS[from_unit]][CONVERTABLE_UNITS[to_unit]]
return 1.0 / conversion_factor(to_unit, from_unit) if res.nil?
res
end
def convertable?(units)
Array(units).all? {|u| CONVERTABLE_UNITS.include?(u)}
end
def sans_common_units(units1, units2)
units2 = units2.dup
# Can't just use -, because we want px*px to coerce properly to px*mm
units1 = units1.map do |u|
j = units2.index(u)
next u unless j
units2.delete_at(j)
nil
end
units1.compact!
return units1, units2
end
end
end
| 33.197938 | 97 | 0.618781 |
33e11e5e80c30d5c504845a6fcecd60c853076e7
| 1,152 |
# == Schema Information
#
# Table name: channel_email
#
# id :bigint not null, primary key
# email :string not null
# forward_to_address :string not null
# created_at :datetime not null
# updated_at :datetime not null
# account_id :integer not null
#
# Indexes
#
# index_channel_email_on_email (email) UNIQUE
# index_channel_email_on_forward_to_address (forward_to_address) UNIQUE
#
class Channel::Email < ApplicationRecord
self.table_name = 'channel_email'
validates :account_id, presence: true
belongs_to :account
validates :email, uniqueness: true
validates :forward_to_address, uniqueness: true
has_one :inbox, as: :channel, dependent: :destroy
before_validation :ensure_forward_to_address, on: :create
def name
'Email'
end
def has_24_hour_messaging_window?
false
end
private
def ensure_forward_to_address
email_domain = InstallationConfig.find_by(name: 'MAILER_INBOUND_EMAIL_DOMAIN')&.value
self.forward_to_address ||= "#{SecureRandom.hex}@#{email_domain}"
end
end
| 26.181818 | 89 | 0.677083 |
1c34f5654e832d75aeda1644feaba956e5aebe2a
| 472 |
require 'cg_foundation_client/version'
require 'cg_foundation_client/client'
module CgFoundationClient
class Error < StandardError; end
class << self
attr_accessor :configuration
end
def self.client
@client = CgFoundationClient::Client.new(configuration)
end
def self.configuration
@configuration ||= Configuration.new
end
def self.reset
@configuration = Configuration.new
end
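  # Typical usage, as a sketch only; the attribute names below are
  # illustrative and depend on what Configuration actually exposes:
  #
  #   CgFoundationClient.configure do |config|
  #     config.api_url = "https://foundation.example.com"
  #   end
  #
  #   client = CgFoundationClient.client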
def self.configure
yield(configuration)
end
end
| 18.153846 | 59 | 0.747881 |
1122d41c00b3cf85af9ec778e04fac88d8c8c310
| 1,571 |
class ActivityLogObserver < ActiveRecord::Observer
observe :page, :user, :admin
def after_create(record)
log_activity record, 'created', :created_by
end
def after_update(record)
log_activity record, 'updated', :last_updated_by
end
def after_destroy(record)
log_activity record, 'deleted', :deleted_by
end
private
def infer_actor(record, actor_method)
if record.respond_to?(actor_method)
# actor e.g. page.created_by --> admin
actor_obj = record.send(actor_method)
if actor_obj
# check if the actor has a display_name
if actor_obj.respond_to?(:display_name)
display_name = actor_obj.send(:display_name)
else
display_name = actor_obj.class.name
end
# create a hash for the actor
actor = { type: actor_obj.class.name, id: actor_obj.id, display_name: display_name }
else
actor = {}
end
else
actor = {}
end
end
def log_activity(record, action, actor_method)
actor = infer_actor(record, actor_method)
ActivityFeed.create(item_type: record.class.name, item_id: record.id,
item: record_identifier(record), actor: actor[:display_name],
actor_type: actor[:type], actor_id: actor[:id], activity: action)
end
def record_identifier(record)
if record.respond_to?(:identifier)
record.send(:identifier)
elsif record.respond_to?(:display_name)
record.send(:display_name)
else
      "#{record.class.name}##{record.id}"
end
end
end
| 26.627119 | 92 | 0.655633 |
628b969aa880f8802166c9c34df65b1508586f1f
| 984 |
# encoding: UTF-8
require 'foreman_api'
module ForemanProvision
class Environment < BaseResource
# @param [Hash] params
# @return [Hash]
def create(params)
@_params = {
}
super
end
# @param [Hash] params
# @return [Hash]
def destroy(params)
@_params = {
}
super
end
# @param [Hash] params
# @return [Hash]
def index(params)
@_params = {
}
super
end
# @param [Hash] params
# @return [Hash]
def show(params)
@_params = {
}
super
end
# @param [Hash] params
# @return [Hash]
def update(params)
@_params = {
}
super
end
# Internal stuff
# @param [Object] credentials
# @param [Object] logger
def initialize(credentials, logger)
super
@resource = ForemanApi::Resources::Environment.new(
@credentials,
@credentials[:options],
)
end
end
end
| 13.859155 | 57 | 0.527439 |
269901ea56ece05e6c1c92e5c3ccb26ad9346a48
| 4,392 |
# frozen_string_literal: true
require 'spec_helper'
require_relative '../../../../lib/rubocop/cop/sorbet/sigils/enforce_sigil_order'
RSpec.describe(RuboCop::Cop::Sorbet::EnforceSigilOrder, :config) do
subject(:cop) { described_class.new(config) }
it('makes no offense on empty files') do
expect_no_offenses(<<~RUBY)
RUBY
end
it('makes no offense with no magic comments') do
expect_no_offenses(<<~RUBY)
class Foo; end
RUBY
end
it('makes no offense with random magic comments') do
expect_no_offenses(<<~RUBY)
# foo: 1
# bar: true
# baz: "Hello, World"
class Foo; end
RUBY
end
it('makes no offense with only one magic comment') do
expect_no_offenses(<<~RUBY)
# typed: true
class Foo; end
RUBY
end
it('makes no offense when the magic comments are correctly ordered') do
expect_no_offenses(<<~RUBY)
# encoding: utf-8
# coding: utf-8
# typed: true
# warn_indent: true
# frozen_string_literal: true
class Foo; end
RUBY
end
it('makes no offense when the magic comments are correctly ordered with random comments in the middle') do
expect_no_offenses(<<~RUBY)
# coding: utf-8
# typed: true
# foo: 1
# bar: true
# frozen_string_literal: true
# baz: "Hello, World"
class Foo; end
RUBY
end
it('makes offense when two magic comments are not correctly ordered') do
expect_offense(<<~RUBY)
# frozen_string_literal: true
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
# typed: true
^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
class Foo; end
RUBY
end
it('makes offense when all magic comments are not correctly ordered') do
expect_offense(<<~RUBY)
# encoding: utf-8
^^^^^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
# frozen_string_literal: true
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
# warn_indent: true
^^^^^^^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
# typed: true
^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
# coding: utf-8
^^^^^^^^^^^^^^^ Magic comments should be in the following order: encoding, typed, warn_indent, frozen_string_literal.
class Foo; end
RUBY
end
describe('autocorrect') do
it('autocorrects two magic comments in the correct order') do
source = <<~RUBY
# frozen_string_literal: true
# typed: true
class Foo; end
RUBY
expect(autocorrect_source(source))
.to(eq(<<~RUBY))
# typed: true
# frozen_string_literal: true
class Foo; end
RUBY
end
it('autocorrects all magic comments in the correct order') do
source = <<~RUBY
# encoding: utf-8
# frozen_string_literal: true
# warn_indent: true
# typed: true
# coding: utf-8
class Foo; end
RUBY
expect(autocorrect_source(source))
.to(eq(<<~RUBY))
# encoding: utf-8
# coding: utf-8
# typed: true
# warn_indent: true
# frozen_string_literal: true
class Foo; end
RUBY
end
it('autocorrects all magic comments in the correct order even with random comments in the middle') do
source = <<~RUBY
# encoding: utf-8
# foo
# frozen_string_literal: true
# bar: true
# warn_indent: true
# baz: "Hello"
# typed: true
# coding: utf-8
# another foo
class Foo; end
RUBY
expect(autocorrect_source(source))
.to(eq(<<~RUBY))
# encoding: utf-8
# foo
# coding: utf-8
# bar: true
# typed: true
# baz: "Hello"
# warn_indent: true
# frozen_string_literal: true
# another foo
class Foo; end
RUBY
end
end
end
| 29.28 | 137 | 0.598816 |
33e8ab34a5c2cbda67b91b933c3b9cd8534505a6
| 104 |
class DropSuggestions < ActiveRecord::Migration[5.2]
def change
drop_table :suggestions
end
end
| 17.333333 | 52 | 0.759615 |
264a99087cfc503a3b3413e8e376b2852bdcc229
| 311 |
class Category < ActiveRecord::Base
has_many :forums, :order => 'forums.position', :dependent => :destroy
validates_presence_of :name, :position
validates_uniqueness_of :name, :case_sensitive => false
def to_s
name
end
def to_param
"#{id}-#{name.gsub(/\W/,'-')}"
end
end
| 19.4375 | 71 | 0.639871 |
bf12a64a438fc6fdc66e6e525e16dbe92b35e7a5
| 9,162 |
#!/usr/bin/env ruby
#
# sockhole: a SOCKS5 decrypting proxy
# Copyright (c) 2020 joshua stein <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
require "eventmachine"
require "socket"
require "logger"
require "ipaddr"
require "resolv"
require "openssl"
# a connection to these ports will make a TLS connection and decrypt data
# before handing it back to the client
TLS_PORTS = [
443, # https
993, # imaps
995, # pop3s
]
# by default, listen on the first non-loopback IPv4 address we can find or
# fallback to 127.0.0.1
LISTEN_PORT = 1080
LISTEN_IP = (Socket.ip_address_list.select{|a| a.ipv4? && !a.ipv4_loopback? }
.map{|i| i.ip_unpack[0] }.first || "127.0.0.1")
# and limit connections from IPs on our local /24 network
ALLOWED_IPS = [
"127.0.0.1/32",
"#{LISTEN_IP}/24",
]
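# A client sketch showing the exchange this proxy expects (addresses are
# illustrative): SOCKS5 greeting, CONNECT-by-hostname to port 443, then
# plaintext I/O while sockhole performs the TLS handshake with the remote.
#
#   sock = TCPSocket.new("192.168.1.10", 1080)
#   sock.write [0x05, 0x01, 0x00].pack("C*")             # greeting: no auth
#   sock.read(2)                                         # method selection
#   host = "example.com"
#   sock.write [0x05, 0x01, 0x00, 0x03, host.bytesize].pack("C*") + host + [443].pack("n")
#   sock.read(10)                                        # reply (0x00 = success)
#   sock.write "GET / HTTP/1.0\r\nHost: #{host}\r\n\r\n" # plaintext; proxy wraps it in TLS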
LOGGER = Logger.new(STDOUT)
if ARGV[0] == "-d"
LOGGER.level = Logger::DEBUG
else
LOGGER.level = Logger::INFO
end
LOGGER.datetime_format = "%Y-%m-%d %H:%M:%S"
LOGGER.formatter = proc do |severity, datetime, progname, msg|
"[#{datetime}] [#{severity[0]}] #{msg}\n"
end
VERSION_SOCKS5 = 0x05
METHOD_MIN_LENGTH = 3
METHOD_AUTH_NONE = 0x0
REQUEST_MIN_LENGTH = 9
REQUEST_COMMAND_CONNECT = 0x1
REQUEST_ATYP_IP = 0x1
REQUEST_ATYP_HOSTNAME = 0x3
REQUEST_ATYP_IP6 = 0x4
REPLY_SUCCESS = 0x0
REPLY_FAIL = 0x1
REPLY_EPERM = 0x02
REPLY_NET_UNREACHABLE = 0x03
REPLY_HOST_UNREACHABLE = 0x04
REPLY_CONN_REFUSED = 0x05
REPLY_TTL_EXPIRED = 0x06
REPLY_BAD_COMMAND = 0x07
REPLY_BAD_ADDRESS = 0x08
class NilClass
def empty?
true
end
end
class ClientDead < StandardError; end
module EMProxyConnection
attr_reader :client, :hostname, :connected, :tls, :did_tls_verification
def initialize(client, hostname, tls)
@client = client
@hostname = hostname
@connected = false
@tls = tls
@did_tls_verification = false
end
def post_init
if tls
start_tls(:verify_peer => true, :cert_chain_file => "/etc/ssl/cert.pem")
end
end
def log(prio, str)
client.log(prio, str)
end
def connection_completed
@connected = true
# tls connections will call back once verification completes
if !tls
client.send_reply REPLY_SUCCESS
end
end
def ssl_verify_peer(pem)
if hostname.empty?
return true
end
# we'll get called again for other certs in the chain
if did_tls_verification
return true
end
log :debug, "verifying TLS hostname #{hostname.inspect}"
cert = OpenSSL::X509::Certificate.new(pem)
ret = OpenSSL::SSL.verify_certificate_identity(cert, hostname)
@did_tls_verification = true
# XXX: this always seems to fail, even when no OpenSSL error is reported
if !ret
log :warn, "TLS verification failed for #{hostname.inspect}, aborting"
#close_connection
#return false
end
return ret
rescue => e
log :warn, "error in ssl_verify_peer: #{e.inspect}"
return false
end
def ssl_handshake_completed
log :debug, "TLS handshake completed, sending reply"
client.send_reply REPLY_SUCCESS
end
def receive_data(_data)
client.send_data _data
end
def unbind
if connected
log :info, "closed remote connection"
client.close_connection_after_writing
else
log :info, "failed connecting to remote"
client.send_reply REPLY_FAIL
end
end
end
module EMSOCKS5Connection
attr_reader :state, :ip, :data, :remote_connection, :tls_decrypt
attr_accessor :remote_hostname, :remote_ip, :remote_port
def initialize
@state = :INIT
port, @ip = Socket.unpack_sockaddr_in(get_peername)
if !allow_connection?
# TODO: does eventmachine have a way to prevent the connection from even
# happening in the first place?
log :warn, "connection from #{ip} denied, not in allow list"
close_connection
end
end
def allow_connection?
ALLOWED_IPS.each do |r|
if IPAddr.new(r).to_range.include?(ip)
return true
end
end
false
end
def log(prio, str)
LOGGER.send(prio, "[#{ip}] #{str}")
end
def fail_close(code)
send_data [
VERSION_SOCKS5,
code,
0,
REQUEST_ATYP_IP,
0, 0, 0, 0,
0, 0,
].pack("C*")
close_connection_after_writing
@state = :DEAD
end
def hex(data)
data.unpack("C*").map{|c| sprintf("%02x", c) }.join(" ")
end
def send_reply(code)
resp = [ VERSION_SOCKS5, code, 0, REQUEST_ATYP_IP ]
resp += IPAddr.new(remote_ip).hton.unpack("C*")
    resp += [remote_port].pack("n").unpack("C*") # port as two bytes in network order
send_data resp.pack("C*")
if code == REPLY_SUCCESS
@state = :PROXY
@data = ""
else
close_connection_after_writing
@state = :DEAD
end
end
def receive_data(_data)
log :debug, "<-C #{_data.inspect} #{hex(_data)}"
(@data ||= "") << _data
case state
when :INIT
if data.bytesize < METHOD_MIN_LENGTH
return
end
@state = :METHOD
verify_method
when :REQUEST
if data.bytesize < REQUEST_MIN_LENGTH
return
end
handle_request
when :PROXY
remote_connection.send_data data
@data = ""
end
end
def send_data(_data)
log :debug, "->C #{_data.inspect} #{hex(_data)}"
super
end
def verify_method
if data[0].ord != VERSION_SOCKS5
log :error, "unsupported version: #{data[0].inspect}"
return fail_close(REPLY_FAIL)
end
data[1].ord.times do |i|
case data[2 + i].ord
when METHOD_AUTH_NONE
send_data [ VERSION_SOCKS5, METHOD_AUTH_NONE ].pack("C*")
@state = :REQUEST
@data = ""
return
end
end
log :error, "no supported auth methods"
fail_close(REPLY_FAIL)
end
def handle_request
if data[0].ord != VERSION_SOCKS5
log :error, "unsupported request version: #{data[0].inspect}"
return fail_close(REPLY_FAIL)
end
if (command = data[1].ord) != REQUEST_COMMAND_CONNECT
log :error, "unsupported request command: #{data[1].inspect}"
return fail_close(REPLY_BAD_COMMAND)
end
case atype = data[3].ord
when REQUEST_ATYP_IP
begin
tmp_ip = data[4, 4].unpack("C*").join(".")
self.remote_ip = IPAddr.new(tmp_ip).to_s
rescue IPAddr::InvalidAddressError => e
log :error, "bogus IP: #{tmp_ip.inspect}"
return fail_close(REPLY_BAD_ADDRESS)
end
# network order
self.remote_port = data[8, 2].unpack("n")[0]
when REQUEST_ATYP_HOSTNAME
len = data[4].ord
if data.bytesize - 4 < len
log :error, "hostname len #{len}, but #{data.bytesize - 4} left"
return fail_close(REPLY_BAD_ADDRESS)
end
self.remote_hostname = data[5, len].unpack("a*")[0]
# network order
self.remote_port = data[5 + len, 2].unpack("n")[0]
names = Resolv.getaddresses(remote_hostname).
select{|n| IPAddr.new(n).ipv4? }
if names.length == 0
log :error, "failed to resolve #{remote_hostname.inspect}"
return fail_close(REPLY_BAD_ADDRESS)
end
self.remote_ip = names.shuffle[0]
# e.g., curl --preproxy socks5h://1.2.3.4 ...
if self.remote_ip == self.remote_hostname
@remote_hostname = nil
end
    when REQUEST_ATYP_IP6
log :error, "ipv6 not supported"
return fail_close(REPLY_BAD_ADDRESS)
end
    if self.remote_port < 1 || self.remote_port > 65535
log :error, "bogus port: #{remote_port.inspect}"
return fail_close(REPLY_BAD_ADDRESS)
end
case command
when REQUEST_COMMAND_CONNECT
do_connect
else
log :error, "unsupported command #{command.inspect}"
end
end
def do_connect
if TLS_PORTS.include?(remote_port)
@tls_decrypt = true
end
l = "connecting to " << remote_ip << ":" << remote_port.to_s
if remote_hostname
l << " (#{remote_hostname})"
end
if tls_decrypt
l << " (TLS decrypt)"
end
log :info, l
# this will call back with send_reply(REPLY_SUCCESS) once connected
@remote_connection = EventMachine.connect(remote_ip, remote_port,
EMProxyConnection, self, remote_hostname, tls_decrypt)
end
def unbind
if remote_connection
remote_connection.close_connection
end
log :info, "closed connection"
end
end
EM.kqueue = true
EM.run do
EM.start_server(LISTEN_IP, LISTEN_PORT, EMSOCKS5Connection)
LOGGER.info "[server] listening on #{LISTEN_IP}:#{LISTEN_PORT}"
end
| 23.921671 | 78 | 0.665575 |
91b5e8d32959ef5af42652e4c26adb17f4ac1434
| 496 |
# Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
Dummy::Application.config.secret_token = '7f8287385990122c8ea8f7295d7bebc5f01d7df7fb3349e1ba324e1b72e2143e3eeb964c4559192c1aa7d19dd1daa09b2ffde3bc39ea3180bdddc97e64c46726'
| 62 | 171 | 0.832661 |
0146399cd10f93b79846fc1df8d48338ee637754
| 151 |
require "configure_semian/version"
module ConfigureSemian
require 'configure_semian/semian_configuration'
require 'configure_semian/net_http'
end
| 21.571429 | 49 | 0.847682 |
bfa9e99eea1ff07cc345726cd6d5eb411af167da
| 288 |
require 'minitest'
require 'minitest/autorun'
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'minitest/profile'
describe 'a random smattering of tests' do
50.times do |i|
it "will take #{i/1000.0} s" do
sleep(i/1000.0)
assert true
end
end
end
| 20.571429 | 58 | 0.673611 |
bb1761dee3c7234aa953abcfb08b00dd0e23d438
| 36 |
require 'probability-engine/native'
| 18 | 35 | 0.833333 |
ede8af9254b5bbfe074f81e6c88be39bbd025757
| 1,447 |
require 'csv'
Spree::Sample.load_sample('tax_categories')
Spree::Sample.load_sample('shipping_categories')
Spree::Sample.load_sample('option_types')
Spree::Sample.load_sample('taxons')
default_shipping_category = Spree::ShippingCategory.find_by!(name: 'Default')
clothing_tax_category = Spree::TaxCategory.find_by!(name: 'Clothing')
Spree::Config[:currency] = 'USD'
color = Spree::OptionType.find_by!(name: 'color')
size = Spree::OptionType.find_by!(name: 'size')
PRODUCTS = CSV.read(File.join(__dir__, 'variants.csv')).map do |(parent_name, taxon_name, product_name, _color_name)|
[parent_name, taxon_name, product_name]
end.uniq
PRODUCTS.each do |(parent_name, taxon_name, product_name)|
parent = Spree::Taxon.find_by!(name: parent_name)
taxon = parent.children.find_by!(name: taxon_name)
taxon.products.where(name: product_name.titleize).first_or_create! do |product|
product.price = rand(10...100) + 0.99
product.description = FFaker::Lorem.paragraph
product.available_on = Time.zone.now
product.option_types = [color, size]
product.shipping_category = default_shipping_category
product.tax_category = clothing_tax_category
product.sku = "#{product_name.delete(' ')}_#{product.price}"
parent.products << product
end
end
["Bestsellers", "New", "Trending", "Streetstyle", "Summer Sale"].each do |taxon_name|
Spree::Taxon.find_by!(name: taxon_name).products << Spree::Product.all.sample(30)
end
| 38.078947 | 117 | 0.748445 |
33f479a382bbee7f06da16fa2feeb1d0fd86d72f
| 1,192 |
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-codecommit/types'
require_relative 'aws-sdk-codecommit/client_api'
require_relative 'aws-sdk-codecommit/client'
require_relative 'aws-sdk-codecommit/errors'
require_relative 'aws-sdk-codecommit/resource'
require_relative 'aws-sdk-codecommit/customizations'
# This module provides support for AWS CodeCommit. This module is available in the
# `aws-sdk-codecommit` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS CodeCommit all
# extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::CodeCommit::Errors::ServiceError
# # rescues all service API errors
# end
#
# See {Errors} for more information.
#
# @service
module Aws::CodeCommit
GEM_VERSION = '1.3.0'
end
| 24.833333 | 82 | 0.748322 |
e2d6f73850c1c85038cd106de8e50d3030e69a4c
| 3,019 |
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_07_17_042622) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "game_participants", force: :cascade do |t|
t.bigint "game_id"
t.string "name"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["game_id"], name: "index_game_participants_on_game_id"
end
create_table "games", force: :cascade do |t|
t.string "token"
t.integer "state", default: 0
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "round_boards", force: :cascade do |t|
t.bigint "game_participant_id"
t.bigint "round_id"
t.json "board"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["game_participant_id"], name: "index_round_boards_on_game_participant_id"
t.index ["round_id"], name: "index_round_boards_on_round_id"
end
create_table "round_decks", force: :cascade do |t|
t.bigint "round_id"
t.json "deck"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.json "discard", default: []
t.index ["round_id"], name: "index_round_decks_on_round_id"
end
create_table "round_scores", force: :cascade do |t|
t.bigint "game_participant_id"
t.bigint "round_id"
t.integer "score"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["game_participant_id"], name: "index_round_scores_on_game_participant_id"
t.index ["round_id"], name: "index_round_scores_on_round_id"
end
create_table "rounds", force: :cascade do |t|
t.bigint "game_id"
t.bigint "game_participant_id"
t.integer "round_number"
t.integer "state", default: 0
t.integer "move_state", default: 0
t.integer "drawn_card"
t.integer "current_discard"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["game_id"], name: "index_rounds_on_game_id"
t.index ["game_participant_id"], name: "index_rounds_on_game_participant_id"
end
end
| 39.207792 | 86 | 0.724081 |
1c896ed7ed48b6a3dd48f4af5c79f581a966abd1
| 176 |
require 'test_helper'
class ApplicationHelperTest < ActionView::TestCase
test 'application helper' do
    ApplicationHelper.import_downtimes_from_icinga(1, 2, 3, 4)
end
end
| 22 | 60 | 0.795455 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.