hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
61f94d9754ca709a1f9a4884732af8f20a05b2d9 | 1,486 | module Dom
module Editor
# Domino page object for the editor's "publish entry" panel.
# Wraps the Capybara node matched by the selector below.
class PublishEntryPanel < Domino
  selector 'div.publish_entry'
  attribute :exhausted_message, '.exhausted_message'

  # The panel's save/confirm button.
  def save_button
    node.find('button.save')
  end

  # Persists whatever publication settings are currently selected.
  def save
    save_button.click
    wait_for_save_to_finish
  end

  # Publishes the entry with no depublication date.
  def publish
    activate_publish_forever
    save
  end

  # Publishes the entry so it is depublished at the given moment.
  def publish_until(moment)
    activate_publish_until
    set_depublication_date(moment.strftime('%d.%m.%Y'), moment.strftime('%H:%M'))
    save
  end

  # Fills the depublication date and time inputs with preformatted strings.
  def set_depublication_date(date_string, time_string)
    node.find('input[name=publish_until]').set(date_string)
    node.find('input[name=publish_until_time]').set(time_string)
    # capybara doesn't hide the datepicker overlay properly
    blur_input_fields
  end

  # Enables password protection and types the given password.
  def activate_password_protection(password)
    node.find('input[name=password_protected]').click
    node.find('input[name=password]').set(password)
  end

  def activate_publish_forever
    node.find('#publish_entry_forever').click
  end

  def activate_publish_until
    node.find('#publish_entry_until').click
  end

  private

  # Blocks (via Capybara's waiting matcher) until the panel shows the
  # published state.
  def wait_for_save_to_finish
    node.has_selector?('.publish_entry.published')
  end

  # Click a neutral element so focused date inputs lose focus.
  def blur_input_fields
    node.find('h2').click
    # Wait for date drop down to fade out
    sleep 1
  end
end
end
end
| 23.21875 | 81 | 0.639973 |
4a96d254f8758eda881e09bd74b373b32a642d52 | 2,752 | class Qemu < Formula
  desc "x86 and PowerPC Emulator"
  homepage "https://www.qemu.org/"
  url "https://download.qemu.org/qemu-3.1.0.tar.xz"
  sha256 "6a0508df079a0a33c2487ca936a56c12122f105b8a96a44374704bef6c69abfc"
  # Formula-level rebuild of the same upstream version (3.1.0).
  revision 1
  head "https://git.qemu.org/git/qemu.git"

  bottle do
    rebuild 1
    sha256 "dd7cb5e2b5d7fc3738c72f1e8fe47ee2fd335223b1ec4694749800b6ba87d552" => :mojave
    sha256 "86cad762d521c4170c0af2fa2932e0d123db2838097895e155f49f12525eb90a" => :high_sierra
    sha256 "ff6d0904a871d605aefda6cd0574a0ccfb09b758cca832d347ff843eb52f97fd" => :sierra
  end

  # Old option names mapped to their current equivalents for users with
  # existing install invocations.
  deprecated_option "with-sdl" => "with-sdl2"
  deprecated_option "with-gtk+" => "with-gtk+3"

  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  depends_on "glib"
  depends_on "gnutls"
  depends_on "jpeg"
  depends_on "libpng"
  depends_on "libssh2"
  depends_on "libusb"
  depends_on "ncurses"
  depends_on "pixman"
  depends_on "vde"
  depends_on "gtk+3" => :optional
  depends_on "sdl2" => :optional

  fails_with :gcc do
    cause "qemu requires a compiler with support for the __thread specifier"
  end

  # 820KB floppy disk image file of FreeDOS 1.2, used to test QEMU
  resource "test-image" do
    url "https://dl.bintray.com/homebrew/mirror/FD12FLOPPY.zip"
    sha256 "81237c7b42dc0ffc8b32a2f5734e3480a3f9a470c50c14a9c4576a2561a35807"
  end

  def install
    # Use Homebrew's GNU libtool ("glibtool") rather than Apple's libtool.
    ENV["LIBTOOL"] = "glibtool"

    args = %W[
      --prefix=#{prefix}
      --cc=#{ENV.cc}
      --host-cc=#{ENV.cc}
      --disable-bsd-user
      --disable-guest-agent
      --enable-curses
      --enable-libssh2
      --enable-vde
      --extra-cflags=-DNCURSES_WIDECHAR=1
    ]

    # Cocoa and SDL2/GTK+ UIs cannot both be enabled at once.
    if build.with?("sdl2") || build.with?("gtk+3")
      args << "--disable-cocoa"
    else
      args << "--enable-cocoa"
    end

    # Sharing Samba directories in QEMU requires the samba.org smbd which is
    # incompatible with the macOS-provided version. This will lead to
    # silent runtime failures, so we set it to a Homebrew path in order to
    # obtain sensible runtime errors. This will also be compatible with
    # Samba installations from external taps.
    args << "--smbd=#{HOMEBREW_PREFIX}/sbin/samba-dot-org-smbd"

    args << (build.with?("sdl2") ? "--enable-sdl" : "--disable-sdl")
    args << (build.with?("gtk+3") ? "--enable-gtk" : "--disable-gtk")

    system "./configure", *args
    system "make", "V=1", "install"
  end

  test do
    # Stable builds report the version string; HEAD builds report the project name.
    expected = build.stable? ? version.to_s : "QEMU Project"
    assert_match expected, shell_output("#{bin}/qemu-system-i386 --version")
    resource("test-image").stage testpath
    # NOTE(review): assumes the staged zip contains FLOPPY.img at its root — verify.
    assert_match "file format: raw", shell_output("#{bin}/qemu-img info FLOPPY.img")
  end
end
| 32 | 93 | 0.687863 |
b9e3cb181b49d077171b49cae02cfec4d0583532 | 2,854 | # frozen_string_literal: true
module Files
# Read-only wrapper around a site usage report returned by the API.
# Attribute readers simply index into the attributes hash.
class UsageSnapshot
  attr_reader :options, :attributes

  def initialize(attributes = {}, options = {})
    @attributes = attributes || {}
    @options = options || {}
  end

  # One reader per documented API attribute; each returns the raw value
  # from the attributes hash (nil when absent).
  [
    :id,                               # int64 - Site usage ID
    :start_at,                         # date-time - Site usage report start date/time
    :end_at,                           # date-time - Site usage report end date/time
    :created_at,                       # date-time - Site usage report created at date/time
    :high_water_user_count,            # double - Site usage report highest usage in time period
    :current_storage,                  # double - Current site usage as of report
    :high_water_storage,               # double - Site usage report highest usage in time period
    :total_downloads,                  # int64 - Number of downloads in report time period
    :total_uploads,                    # int64 - Number of uploads in time period
    :updated_at,                       # date-time - The last time this site usage report was updated
    :usage_by_top_level_dir,           # object - A map of root folders to their total usage
    :root_storage,                     # double - Usage for root folder
    :deleted_files_counted_in_minimum, # double - Usage for files that are deleted but uploaded within last 30 days
    :deleted_files_storage,            # double - Usage for files that are deleted but retained as backups
  ].each do |attribute_name|
    define_method(attribute_name) { @attributes[attribute_name] }
  end

  # Parameters:
  #   cursor - string - Used for pagination. Send a cursor value to resume an existing list from the point at which you left off. Get a cursor from an existing list via the X-Files-Cursor-Next header.
  #   per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
  def self.list(params = {}, options = {})
    if params[:cursor] && !params[:cursor].is_a?(String)
      raise InvalidParameterError.new("Bad parameter: cursor must be an String")
    end
    if params[:per_page] && !params[:per_page].is_a?(Integer)
      raise InvalidParameterError.new("Bad parameter: per_page must be an Integer")
    end

    List.new(UsageSnapshot, params) do
      Api.send_request("/usage_snapshots", :get, params, options)
    end
  end

  # Alias for .list.
  def self.all(params = {}, options = {})
    list(params, options)
  end
end
end
| 28.828283 | 204 | 0.677996 |
f8b181ccdcc94ac2f0a40f62c80e9db78b229975 | 2,622 | class CloneTradeTariffAdmin < ActiveRecord::Migration[5.2]
# Minimal ActiveRecord model defined inside the migration so the clone logic
# does not depend on (or break with) the application's own model class.
class SupportedPermission < ApplicationRecord
  belongs_to :application, class_name: "Doorkeeper::Application"
end
# Clones the "Trade Tariff Admin (PaaS)" OAuth application — together with its
# supported permissions and every user's permission grants — into a new
# "New London: …" application, all inside one transaction.
def up
  ActiveRecord::Base.transaction do
    existing_trade_tariff_admin = ::Doorkeeper::Application.find_by(name: "Trade Tariff Admin (PaaS)")

    cloned_trade_tariff_admin = ::Doorkeeper::Application.new
    existing_trade_tariff_admin.attributes.each_pair do |key, value|
      # id/uid must be regenerated; timestamps are managed by ActiveRecord.
      if !%w(created_at updated_at id uid).include?(key)
        cloned_trade_tariff_admin[key] = value
      end
    end
    cloned_trade_tariff_admin.name = "New London: #{cloned_trade_tariff_admin.name}"
    cloned_trade_tariff_admin.save!

    SupportedPermission.where(application_id: existing_trade_tariff_admin.id).each do |existing_supported_permission|
      # Some are created by default and the index will complain if we try to duplicate them
      if !SupportedPermission.exists?(application_id: cloned_trade_tariff_admin.id, name: existing_supported_permission.name)
        cloned_supported_permission = SupportedPermission.new
        existing_supported_permission.attributes.each_pair do |key, value|
          if !%w(created_at updated_at id).include?(key)
            cloned_supported_permission[key] = value
          end
        end
        cloned_supported_permission.application_id = cloned_trade_tariff_admin.id
        cloned_supported_permission.save!
      end
    end

    UserApplicationPermission.where(application_id: existing_trade_tariff_admin.id).each do |existing_user_application_permission|
      cloned_user_application_permission = UserApplicationPermission.new
      existing_user_application_permission.attributes.each_pair do |key, value|
        if !%w(created_at updated_at id).include?(key)
          cloned_user_application_permission[key] = value
        end
      end
      # The new UserApplicationPermission will need a SupportedPermission for
      # the new application. (Hoisted out of the attribute-copy loop above:
      # this lookup is loop-invariant, and the original ran it — including a
      # database query — once per copied attribute.)
      existing_supported_permission = existing_user_application_permission.supported_permission
      cloned_supported_permission = SupportedPermission.where(name: existing_supported_permission.name, application_id: cloned_trade_tariff_admin.id).first
      cloned_user_application_permission.supported_permission_id = cloned_supported_permission.id
      cloned_user_application_permission.application_id = cloned_trade_tariff_admin.id
      cloned_user_application_permission.save!
    end
  end
end
# Intentionally a no-op: the clone cannot be safely undone because the
# migration does not record which rows it created.
def down
  # This change cannot be reversed
end
end
| 49.471698 | 159 | 0.750953 |
035aee2aa66190e0dfc3e4925bb58e0e4fbd58f3 | 14,428 | # frozen_string_literal: true
require "thread"
require "delegate"
module ActionView
# = Action View Template
class Template
  extend ActiveSupport::Autoload

  # Deprecated no-op kept so existing callers do not break.
  def self.finalize_compiled_template_methods
    ActiveSupport::Deprecation.warn "ActionView::Template.finalize_compiled_template_methods is deprecated and has no effect"
  end

  # Deprecated no-op kept so existing callers do not break.
  def self.finalize_compiled_template_methods=(_)
    ActiveSupport::Deprecation.warn "ActionView::Template.finalize_compiled_template_methods= is deprecated and has no effect"
  end

  # === Encodings in ActionView::Template
  #
  # ActionView::Template is one of a few sources of potential
  # encoding issues in Rails. This is because the source for
  # templates are usually read from disk, and Ruby (like most
  # encoding-aware programming languages) assumes that the
  # String retrieved through File IO is encoded in the
  # <tt>default_external</tt> encoding. In Rails, the default
  # <tt>default_external</tt> encoding is UTF-8.
  #
  # As a result, if a user saves their template as ISO-8859-1
  # (for instance, using a non-Unicode-aware text editor),
  # and uses characters outside of the ASCII range, their
  # users will see diamonds with question marks in them in
  # the browser.
  #
  # For the rest of this documentation, when we say "UTF-8",
  # we mean "UTF-8 or whatever the default_internal encoding
  # is set to". By default, it will be UTF-8.
  #
  # To mitigate this problem, we use a few strategies:
  # 1. If the source is not valid UTF-8, we raise an exception
  #    when the template is compiled to alert the user
  #    to the problem.
  # 2. The user can specify the encoding using Ruby-style
  #    encoding comments in any template engine. If such
  #    a comment is supplied, Rails will apply that encoding
  #    to the resulting compiled source returned by the
  #    template handler.
  # 3. In all cases, we transcode the resulting String to
  #    the UTF-8.
  #
  # This means that other parts of Rails can always assume
  # that templates are encoded in UTF-8, even if the original
  # source of the template was not UTF-8.
  #
  # From a user's perspective, the easiest thing to do is
  # to save your templates as UTF-8. If you do this, you
  # do not need to do anything else for things to "just work".
  #
  # === Instructions for template handlers
  #
  # The easiest thing for you to do is to simply ignore
  # encodings. Rails will hand you the template source
  # as the default_internal (generally UTF-8), raising
  # an exception for the user before sending the template
  # to you if it could not determine the original encoding.
  #
  # For the greatest simplicity, you can support only
  # UTF-8 as the <tt>default_internal</tt>. This means
  # that from the perspective of your handler, the
  # entire pipeline is just UTF-8.
  #
  # === Advanced: Handlers with alternate metadata sources
  #
  # If you want to provide an alternate mechanism for
  # specifying encodings (like ERB does via <%# encoding: ... %>),
  # you may indicate that you will handle encodings yourself
  # by implementing <tt>handles_encoding?</tt> on your handler.
  #
  # If you do, Rails will not try to encode the String
  # into the default_internal, passing you the unaltered
  # bytes tagged with the assumed encoding (from
  # default_external).
  #
  # In this case, make sure you return a String from
  # your handler encoded in the default_internal. Since
  # you are handling out-of-band metadata, you are
  # also responsible for alerting the user to any
  # problems with converting the user's data to
  # the <tt>default_internal</tt>.
  #
  # To do so, simply raise +WrongEncodingError+ as follows:
  #
  #   raise WrongEncodingError.new(
  #     problematic_string,
  #     expected_encoding
  #   )

  ##
  # :method: local_assigns
  #
  # Returns a hash with the defined local variables.
  #
  # Given this sub template rendering:
  #
  #   <%= render "shared/header", { headline: "Welcome", person: person } %>
  #
  # You can use +local_assigns+ in the sub templates to access the local variables:
  #
  #   local_assigns[:headline] # => "Welcome"

  eager_autoload do
    autoload :Error
    autoload :RawFile
    autoload :Handlers
    autoload :HTML
    autoload :Inline
    autoload :Sources
    autoload :Text
    autoload :Types
  end

  extend Template::Handlers

  attr_reader :identifier, :handler, :original_encoding, :updated_at
  attr_reader :variable, :format, :variant, :locals, :virtual_path

  def initialize(source, identifier, handler, format: nil, variant: nil, locals: nil, virtual_path: nil, updated_at: nil)
    unless locals
      ActiveSupport::Deprecation.warn "ActionView::Template#initialize requires a locals parameter"
      locals = []
    end

    @source            = source
    @identifier        = identifier
    @handler           = handler
    @compiled          = false
    @locals            = locals
    @virtual_path      = virtual_path

    # Derive the implicit template variable name from the virtual path,
    # stripping a leading partial underscore and any extensions
    # (e.g. "users/_user.html.erb" => :user).
    @variable = if @virtual_path
      base = @virtual_path.end_with?("/") ? "" : ::File.basename(@virtual_path)
      base =~ /\A_?(.*?)(?:\.\w+)*\z/
      $1.to_sym
    end

    if updated_at
      ActiveSupport::Deprecation.warn "ActionView::Template#updated_at is deprecated"
      @updated_at = updated_at
    else
      @updated_at = Time.now
    end
    @format            = format
    @variant           = variant
    @compile_mutex     = Mutex.new
  end

  deprecate :original_encoding
  deprecate :updated_at
  deprecate def virtual_path=(_); end
  deprecate def locals=(_); end
  deprecate def formats=(_); end
  deprecate def formats; Array(format); end
  deprecate def variants=(_); end
  deprecate def variants; [variant]; end
  deprecate def refresh(_); self; end

  # Returns whether the underlying handler supports streaming. If so,
  # a streaming buffer *may* be passed when it starts rendering.
  def supports_streaming?
    handler.respond_to?(:supports_streaming?) && handler.supports_streaming?
  end

  # Render a template. If the template was not compiled yet, it is done
  # exactly before rendering.
  #
  # This method is instrumented as "!render_template.action_view". Notice that
  # we use a bang in this instrumentation because you don't want to
  # consume this in production. This is only slow if it's being listened to.
  def render(view, locals, buffer = ActionView::OutputBuffer.new, add_to_stack: true, &block)
    instrument_render_template do
      compile!(view)
      view._run(method_name, self, locals, buffer, add_to_stack: add_to_stack, &block)
    end
  rescue => e
    handle_render_error(view, e)
  end

  # Memoized template type object looked up from this template's format.
  def type
    @type ||= Types[format]
  end

  # The identifier with the Rails root prefix stripped, when Rails.root
  # is defined; used for display purposes.
  def short_identifier
    @short_identifier ||= defined?(Rails.root) ? identifier.delete_prefix("#{Rails.root}/") : identifier
  end

  def inspect
    "#<#{self.class.name} #{short_identifier} locals=#{@locals.inspect}>"
  end

  # The template source coerced to a String (the stored source object may
  # be a richer Sources wrapper).
  def source
    @source.to_s
  end

  # This method is responsible for properly setting the encoding of the
  # source. Until this point, we assume that the source is BINARY data.
  # If no additional information is supplied, we assume the encoding is
  # the same as <tt>Encoding.default_external</tt>.
  #
  # The user can also specify the encoding via a comment on the first
  # line of the template (# encoding: NAME-OF-ENCODING). This will work
  # with any template engine, as we process out the encoding comment
  # before passing the source on to the template engine, leaving a
  # blank line in its stead.
  def encode!
    source = self.source

    return source unless source.encoding == Encoding::BINARY

    # Look for # encoding: *. If we find one, we'll encode the
    # String in that encoding, otherwise, we'll use the
    # default external encoding.
    if source.sub!(/\A#{ENCODING_FLAG}/, "")
      encoding = magic_encoding = $1
    else
      encoding = Encoding.default_external
    end

    # Tag the source with the default external encoding
    # or the encoding specified in the file
    source.force_encoding(encoding)

    # If the user didn't specify an encoding, and the handler
    # handles encodings, we simply pass the String as is to
    # the handler (with the default_external tag)
    if !magic_encoding && @handler.respond_to?(:handles_encoding?) && @handler.handles_encoding?
      source
    # Otherwise, if the String is valid in the encoding,
    # encode immediately to default_internal. This means
    # that if a handler doesn't handle encodings, it will
    # always get Strings in the default_internal
    elsif source.valid_encoding?
      source.encode!
    # Otherwise, since the String is invalid in the encoding
    # specified, raise an exception
    else
      raise WrongEncodingError.new(source, encoding)
    end
  end

  # Exceptions are marshalled when using the parallel test runner with DRb, so we need
  # to ensure that references to the template object can be marshalled as well. This means forgoing
  # the marshalling of the compiler mutex and instantiating that again on unmarshalling.
  def marshal_dump # :nodoc:
    [ @source, @identifier, @handler, @compiled, @locals, @virtual_path, @updated_at, @format, @variant ]
  end

  def marshal_load(array) # :nodoc:
    @source, @identifier, @handler, @compiled, @locals, @virtual_path, @updated_at, @format, @variant = *array
    @compile_mutex = Mutex.new
  end

  private
    # Compile a template. This method ensures a template is compiled
    # just once and removes the source after it is compiled.
    def compile!(view)
      return if @compiled

      # Templates can be used concurrently in threaded environments
      # so compilation and any instance variable modification must
      # be synchronized
      @compile_mutex.synchronize do
        # Any thread holding this lock will be compiling the template needed
        # by the threads waiting. So re-check the @compiled flag to avoid
        # re-compilation
        return if @compiled

        mod = view.compiled_method_container

        instrument("!compile_template") do
          compile(mod)
        end

        @compiled = true
      end
    end

    # Delegator that pairs a template with an alternate source string.
    class LegacyTemplate < DelegateClass(Template) # :nodoc:
      attr_reader :source

      def initialize(template, source)
        super(template)
        @source = source
      end
    end

    # Among other things, this method is responsible for properly setting
    # the encoding of the compiled template.
    #
    # If the template engine handles encodings, we send the encoded
    # String to the engine without further processing. This allows
    # the template engine to support additional mechanisms for
    # specifying the encoding. For instance, ERB supports <%# encoding: %>
    #
    # Otherwise, after we figure out the correct encoding, we then
    # encode the source into <tt>Encoding.default_internal</tt>.
    # In general, this means that templates will be UTF-8 inside of Rails,
    # regardless of the original source encoding.
    def compile(mod)
      source = encode!
      code = @handler.call(self, source)

      # Make sure that the resulting String to be eval'd is in the
      # encoding of the code
      original_source = source
      source = +<<-end_src
def #{method_name}(local_assigns, output_buffer)
@virtual_path = #{@virtual_path.inspect};#{locals_code};#{code}
end
      end_src

      # Make sure the source is in the encoding of the returned code
      source.force_encoding(code.encoding)

      # In case we get back a String from a handler that is not in
      # BINARY or the default_internal, encode it to the default_internal
      source.encode!

      # Now, validate that the source we got back from the template
      # handler is valid in the default_internal. This is for handlers
      # that handle encoding but screw up
      unless source.valid_encoding?
        raise WrongEncodingError.new(source, Encoding.default_internal)
      end

      begin
        mod.module_eval(source, identifier, 0)
      rescue SyntaxError
        # Account for when code in the template is not syntactically valid; e.g. if we're using
        # ERB and the user writes <%= foo( %>, attempting to call a helper `foo` and interpolate
        # the result into the template, but missing an end parenthesis.
        raise SyntaxErrorInTemplate.new(self, original_source)
      end
    end

    # Re-raises template errors after recording this template in the error's
    # sub-template chain; wraps any other exception in Template::Error.
    def handle_render_error(view, e)
      if e.is_a?(Template::Error)
        e.sub_template_of(self)
        raise e
      else
        raise Template::Error.new(self)
      end
    end

    def locals_code
      # Only locals with valid variable names get set directly. Others will
      # still be available in local_assigns.
      locals = @locals - Module::RUBY_RESERVED_KEYWORDS
      locals = locals.grep(/\A@?(?![A-Z0-9])(?:[[:alnum:]_]|[^\0-\177])+\z/)

      # Assign for the same variable is to suppress unused variable warning
      locals.each_with_object(+"") { |key, code| code << "#{key} = local_assigns[:#{key}]; #{key} = #{key};" }
    end

    # Unique, valid Ruby method name for the compiled template method;
    # uniqueness comes from the identifier hash plus this object's __id__.
    def method_name
      @method_name ||= begin
        m = +"_#{identifier_method_name}__#{@identifier.hash}_#{__id__}"
        m.tr!("-", "_")
        m
      end
    end

    # Identifier sanitized so it can appear inside a Ruby method name.
    def identifier_method_name
      short_identifier.tr("^a-z_", "_")
    end

    def instrument(action, &block) # :doc:
      ActiveSupport::Notifications.instrument("#{action}.action_view", instrument_payload, &block)
    end

    def instrument_render_template(&block)
      ActiveSupport::Notifications.instrument("!render_template.action_view", instrument_payload, &block)
    end

    # Common payload for both instrumentation events above.
    def instrument_payload
      { virtual_path: @virtual_path, identifier: @identifier }
    end
end
end
| 36.994872 | 128 | 0.656986 |
f7e21ff7d2aef494b7f2f265fc51b0f639806cd6 | 1,006 | # -*- encoding: utf-8 -*-
# stub: molinillo 0.8.0 ruby lib
# Generated gemspec stub for molinillo 0.8.0.
Gem::Specification.new do |s|
  s.name    = "molinillo".freeze
  s.version = "0.8.0"

  s.summary  = "Provides support for dependency resolution".freeze
  s.authors  = ["Samuel E. Giddins".freeze]
  s.email    = ["[email protected]".freeze]
  s.homepage = "https://github.com/CocoaPods/Molinillo".freeze
  s.licenses = ["MIT".freeze]
  s.date     = "2021-08-09"

  s.require_paths = ["lib".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze)
  # Guarded setters: these attributes only exist on newer RubyGems versions.
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to?(:required_rubygems_version=)
  s.rubygems_version = "3.2.32".freeze
  s.installed_by_version = "3.2.32" if s.respond_to?(:installed_by_version)

  s.specification_version = 4 if s.respond_to?(:specification_version)

  if s.respond_to?(:add_runtime_dependency)
    s.add_development_dependency(%q<rake>.freeze, [">= 0"])
  else
    s.add_dependency(%q<rake>.freeze, [">= 0"])
  end
end
| 32.451613 | 112 | 0.696819 |
bbc639d402ede2e03625214d22edb3ba28344749 | 3,153 | require 'test_helper'
# Integration tests for profile searching via GeneralInfo / LoginInfo.
#
# Fix: the original defined `test_login_infos_search_3` TWICE; Ruby silently
# replaces the first definition with the second, so the first assertion set
# never ran. The second definition is renamed to `test_login_infos_search_4`.
class SearchProfileControllerTest < ActionDispatch::IntegrationTest
  test "should get search" do
    assert true
  end

  # Searching twice with the same first_name criterion yields identical results.
  def test_general_infos_search
    perl_search = GeneralInfo.search :first_name => general_infos(:perl_cb)
    perl_search1 = GeneralInfo.search :first_name => general_infos(:perl_cb)
    assert_equal perl_search, perl_search1
  end

  # Searching twice with the same last_name criterion yields identical results.
  def test_general_infos_search_2
    perl_search = GeneralInfo.search :last_name => general_infos(:perl_cb)
    perl_search1 = GeneralInfo.search :last_name => general_infos(:perl_cb)
    assert_equal perl_search, perl_search1
  end

  # Substring ("Contains") search on first_name returns a result set.
  def test_general_infos_search_3
    search_args = { "first_name" => "Avinash", "first_name_regex" => "Contains" }
    perl_search = GeneralInfo.search search_args
    assert_not_nil perl_search
  end

  # Test equality of two searched strings: first-name and last-name searches
  # for the same fixture person should return the same profile.
  def test_general_infos_search_4
    search_args = { "first_name" => "Avinash", "first_name_regex" => "Contains" }
    perl_search = GeneralInfo.search search_args
    assert_not_nil perl_search

    search_args = { "last_name" => "Saxena", "last_name_regex" => "Contains" }
    perl_search1 = GeneralInfo.search search_args
    assert_not_nil perl_search1

    assert_equal perl_search, perl_search1
  end

  # Phone number and last name searches resolve to the same profile.
  def test_general_infos_search_5
    perl_search = GeneralInfo.search({ "phone" => "979" })
    assert_not_nil perl_search

    search_args = { "last_name" => "Saxena", "last_name_regex" => "Contains" }
    perl_search1 = GeneralInfo.search search_args
    assert_not_nil perl_search1

    assert_equal perl_search, perl_search1
  end

  # Searches by different phone fragments.
  def test_general_infos_search_6
    search_args = { "phone" => "979", "first_name_regex" => "Contains" }
    perl_search = GeneralInfo.search search_args
    assert_not_nil perl_search

    perl_search1 = GeneralInfo.search({ "phone" => "828" })
    assert_not_nil perl_search1

    assert_equal perl_search, perl_search1
  end

  # Searching for two different users' emails returns different results.
  def test_login_infos_search_1
    perl_search = LoginInfo.search :email => general_infos(:perl_cb)
    perl_search1 = LoginInfo.search :email => general_infos(:java_cb)
    assert_not_equal perl_search, perl_search1
  end

  # Searching twice for the same email yields identical results.
  def test_login_infos_search_2
    perl_search = LoginInfo.search :email => general_infos(:perl_cb)
    perl_search1 = LoginInfo.search :email => general_infos(:perl_cb)
    assert_equal perl_search, perl_search1
  end

  # Email search by fixture object returns a result set.
  def test_login_infos_search_3
    perl_search = LoginInfo.search :email => general_infos(:perl_cb)
    assert_not_nil perl_search
  end

  # Email search by literal address returns a result set.
  # (Renamed from a duplicate test_login_infos_search_3 so both tests run.)
  def test_login_infos_search_4
    perl_search = LoginInfo.search :email => '[email protected]'
    assert_not_nil perl_search
  end
end
| 28.663636 | 74 | 0.7745 |
f7a82521cfb1d3718d29f294c52d6447e17e075a | 5,396 | module Agents
# Huginn agent that queries the Google QPX Express API for minimum airline
# prices between two airports and emits the raw API response as an event.
#
# Fixes: `event_url` used `URI.encode`, which was deprecated and removed in
# Ruby 3.0, and assigned a dead local; `post_params` assigned a redundant
# local shadowing the method name; two validation messages were truncated.
class GoogleFlightsAgent < Agent
  include FormConfigurable

  cannot_receive_events!
  default_schedule "every_12h"

  description <<-MD
The GoogleFlightsAgent will tell you the minimum airline prices between a pair of cities. The api limit is 50 requests/day.
Follow their documentation here (https://developers.google.com/qpx-express/v1/prereqs#get-a-google-account) to retrieve an api key.
After you get to the google developer console, created a project, enabled qpx express api then you can choose `api key` credential to be created.
The `origin` and `destination` options require an [airport code](http://www.expedia.com/daily/airports/AirportCodes.asp).
All the default options must exist. For `infantInSeatCount`, `infantInLapCount`, `seniorCount`, and `childCount`, leave them to the default value of `0` if you do not need them.
Make sure `date` and `return_date` is in this date format: `YYYY-MM-DAY`.
You can choose one way tickets only by setting `roundtrip` to `false`.
You can limit the number of `solutions` returned. The first solution will be the lowest priced ticket.
  MD

  event_description <<-MD
The event payload will have objects that contains valuable data like this
"carrier": [
{
"code": "B6",
"name": "Jetblue Airways Corporation"
}
]
"tripOption": [
"saleTotal": "USD49.10"
"slice": [
...
...
"flight": {
"carrier": "B6",
"number": "833"
}
]
]
  MD

  def default_options
    {
      'qpx_api_key' => '',
      'adultCount' => 1,
      'origin' => 'BOS',
      'destination' => 'SFO',
      'date' => '2016-04-11',
      'childCount' => 0,
      'infantInSeatCount' => 0,
      'infantInLapCount' => 0,
      'seniorCount' => 0,
      'return_date' => '2016-04-18',
      'roundtrip' => true,
      'solutions' => 3
    }
  end

  form_configurable :qpx_api_key, type: :string
  form_configurable :adultCount
  form_configurable :origin, type: :string
  form_configurable :destination, type: :string
  form_configurable :date, type: :string
  form_configurable :childCount
  form_configurable :infantInSeatCount
  form_configurable :infantInLapCount
  form_configurable :seniorCount
  form_configurable :roundtrip, type: :boolean
  form_configurable :return_date, type: :string
  form_configurable :solutions

  # Every option is required; return_date is only required for round trips.
  def validate_options
    errors.add(:base, "You need a qpx api key") unless options['qpx_api_key'].present?
    errors.add(:base, "Adult Count must exist") unless options['adultCount'].present?
    errors.add(:base, "Origin must exist") unless options['origin'].present?
    errors.add(:base, "Destination must exist") unless options['destination'].present?
    errors.add(:base, "Date must exist") unless options['date'].present?
    errors.add(:base, "Child Count must exist") unless options['childCount'].present?
    errors.add(:base, "Infant In Seat Count must exist") unless options['infantInSeatCount'].present?
    errors.add(:base, "Infant In Lap Count must exist") unless options['infantInLapCount'].present?
    errors.add(:base, "Senior Count must exist") unless options['seniorCount'].present?
    errors.add(:base, "Solutions must exist") unless options['solutions'].present?
    errors.add(:base, "Return Date must exist") if options["return_date"].blank? && boolify(options['roundtrip'])
  end

  def working?
    !recent_error_logs?
  end

  def round_trip?
    boolify(interpolated['roundtrip'])
  end

  # Builds the QPX Express request body: passenger counts plus one slice
  # (one-way) or two mirrored slices (round trip).
  def post_params
    passengers = {
      :kind => "qpxexpress#passengerCounts",
      :adultCount => interpolated["adultCount"],
      :childCount => interpolated["childCount"],
      :infantInLapCount => interpolated["infantInLapCount"],
      :infantInSeatCount => interpolated['infantInSeatCount'],
      :seniorCount => interpolated["seniorCount"]
    }
    slices =
      if round_trip?
        # NOTE(review): unlike the one-way slice below, the original omitted
        # :kind => "qpxexpress#sliceInput" here; preserved as-is.
        [
          { :origin => interpolated["origin"].to_s, :destination => interpolated["destination"].to_s, :date => interpolated["date"].to_s },
          { :origin => interpolated["destination"].to_s, :destination => interpolated["origin"].to_s, :date => interpolated["return_date"].to_s }
        ]
      else
        [{ :kind => "qpxexpress#sliceInput", :origin => interpolated["origin"].to_s, :destination => interpolated["destination"].to_s, :date => interpolated["date"].to_s }]
      end
    { :request => { :passengers => passengers, :slice => slices, :solutions => interpolated["solutions"] } }
  end

  # Performs one QPX search and emits the parsed API response as an event.
  def check
    body = JSON.generate(post_params)
    request = HTTParty.post(event_url, :body => body, :headers => { "Content-Type" => "application/json" })
    events = JSON.parse request.body
    create_event :payload => events
  end

  # Search endpoint URL with the api key as an escaped query parameter.
  # URI.encode was removed in Ruby 3.0; encode_www_form_component is the
  # supported stdlib way to escape a query-string value.
  def event_url
    'https://www.googleapis.com/qpxExpress/v1/trips/search?key=' + URI.encode_www_form_component(interpolated[:qpx_api_key].to_s)
  end
end
end
| 44.229508 | 636 | 0.661045 |
f8b734a5b541e0cd4bc0c3da2abbf25a453ce1bf | 2,057 | require "rexml/child"
require "rexml/source"
module REXML
# Represents an XML Instruction; IE, <? ... ?>
# TODO: Add parent arg (3rd arg) to constructor
class Instruction < Child
  # Regexp source fragments for the PI delimiters; the backslash escapes
  # are stripped back out in #write before emitting "<?" / "?>".
  START = '<\?'
  STOP = '\?>'

  # target is the "name" of the Instruction; IE, the "tag" in <?tag ...?>
  # content is everything else.
  attr_accessor :target, :content

  # Constructs a new Instruction
  # @param target can be one of a number of things. If String, then
  # the target of this instruction is set to this. If an Instruction,
  # then the Instruction is shallowly cloned (target and content are
  # copied). If a Source, then the source is scanned and parsed for
  # an Instruction declaration.
  # @param content Must be either a String, or a Parent. Can only
  # be a Parent if the target argument is a Source. Otherwise, this
  # String is set as the content of this instruction.
  def initialize(target, content=nil)
    if target.kind_of? String
      super()
      @target = target
      @content = content
    elsif target.kind_of? Instruction
      # Shallow clone: the second argument (if any) is treated as the parent.
      super(content)
      @target = target.target
      @content = target.content
    end
    # Surrounding whitespace in the content is not significant.
    @content.strip! if @content
  end

  # Produces a shallow copy via the Instruction-cloning constructor branch.
  def clone
    Instruction.new self
  end

  # == DEPRECATED
  # See the rexml/formatters package
  #
  def write writer, indent=-1, transitive=false, ie_hack=false
    Kernel.warn( "#{self.class.name}.write is deprecated" )
    indent(writer, indent)
    writer << START.sub(/\\/u, '')
    writer << @target
    writer << ' '
    writer << @content
    writer << STOP.sub(/\\/u, '')
  end

  # @return true if other is an Instruction, and the content and target
  # of the other matches the target and content of this object.
  def ==( other )
    other.kind_of? Instruction and
    other.target == @target and
    other.content == @content
  end

  def node_type
    :processing_instruction
  end

  # Short debugging representation showing only the target.
  def inspect
    "<?p-i #{target} ...?>"
  end
end
end
| 28.971831 | 75 | 0.625182 |
0835ce82e5e8cb0adccff4f89876d41a0e7def20 | 14,855 | RSpec.describe CardanoWallet::Shelley do
describe CardanoWallet::Shelley::Wallets do
after(:each) do
teardown
end
it "I can list wallets" do
l = SHELLEY.wallets.list
expect(l).to be_correct_and_respond 200
expect(l.size).to eq 0
create_shelley_wallet
l = SHELLEY.wallets.list
expect(l).to be_correct_and_respond 200
expect(l.size).to eq 1
end
it "When wallet does not exist it gives 404" do
wid = create_shelley_wallet
SHELLEY.wallets.delete wid
g = SHELLEY.wallets.get wid
expect(g).to be_correct_and_respond 404
d = SHELLEY.wallets.delete wid
expect(d).to be_correct_and_respond 404
end
describe "Create wallets" do
it "I can create, get and delete wallet from mnemonics" do
w = SHELLEY.wallets
wallet = w.create({ name: "Wallet from mnemonic_sentence",
passphrase: "Secure Passphrase",
mnemonic_sentence: mnemonic_sentence(15),
})
expect(wallet).to be_correct_and_respond 201
wid = wallet['id']
g = w.get(wid)
expect(g).to be_correct_and_respond 200
expect(w.delete(wid)).to be_correct_and_respond 204
end
it "I can create, get and delete wallet from mnemonics / second factor" do
w = SHELLEY.wallets
wallet = w.create({ name: "Wallet from mnemonic_sentence",
passphrase: "Secure Passphrase",
mnemonic_sentence: mnemonic_sentence(15),
mnemonic_second_factor: mnemonic_sentence(12)
})
expect(wallet).to be_correct_and_respond 201
wid = wallet['id']
g = w.get(wid)
expect(g).to be_correct_and_respond 200
expect(w.delete(wid)).to be_correct_and_respond 204
end
it "I can set address pool gap" do
pool_gap = 55
w = SHELLEY.wallets
wallet = w.create({ name: "Wallet from mnemonic_sentence",
passphrase: "Secure Passphrase",
mnemonic_sentence: mnemonic_sentence(15),
address_pool_gap: pool_gap
})
expect(wallet).to be_correct_and_respond 201
addr = SHELLEY.addresses.list(wallet['id'])
expect(addr).to be_correct_and_respond 200
expect(addr.size).to eq pool_gap
end
it "I can create, get and delete wallet from pub key" do
w = SHELLEY.wallets
wallet = w.create({ name: "Wallet from pub key",
account_public_key: "b47546e661b6c1791452d003d375756dde6cac2250093ce4630f16b9b9c0ac87411337bda4d5bc0216462480b809824ffb48f17e08d95ab9f1b91d391e48e66b",
address_pool_gap: 20,
})
expect(wallet).to be_correct_and_respond 201
wid = wallet['id']
g = w.get(wid)
expect(g).to be_correct_and_respond 200
expect(w.delete(wid)).to be_correct_and_respond 204
end
end
describe "Update wallet" do
it "Can update_metadata" do
new_name = "New wallet name"
w = SHELLEY.wallets
id = create_shelley_wallet
u = w.update_metadata(id, { name: new_name })
expect(u).to be_correct_and_respond 200
expect(w.get(id)['name']).to eq new_name
end
it "Can update_passphrase" do
w = SHELLEY.wallets
id = create_shelley_wallet
upd = w.update_passphrase(id, { old_passphrase: "Secure Passphrase",
new_passphrase: "Securer Passphrase" })
expect(upd).to be_correct_and_respond 204
end
end
it "Can see utxo" do
id = create_shelley_wallet
utxo = SHELLEY.wallets.utxo(id)
expect(utxo).to be_correct_and_respond 200
end
it "Can see utxo snapshot" do
id = create_shelley_wallet
utxo = SHELLEY.wallets.utxo_snapshot(id)
expect(utxo).to be_correct_and_respond 200
end
end
describe CardanoWallet::Shelley::Addresses do
after(:each) do
teardown
end
it "Can list addresses" do
id = create_shelley_wallet
shelley_addr = CardanoWallet.new.shelley.addresses
addresses = shelley_addr.list id
expect(addresses).to be_correct_and_respond 200
expect(addresses.size).to eq 20
addresses.each_with_index do |a, i|
expect(a['derivation_path']).to eq ['1852H', '1815H', '0H', '0', i.to_s]
end
addresses_unused = shelley_addr.list id, { state: "used" }
expect(addresses_unused).to be_correct_and_respond 200
expect(addresses_unused.size).to eq 0
addresses_unused = shelley_addr.list id, { state: "unused" }
expect(addresses_unused).to be_correct_and_respond 200
expect(addresses_unused.size).to eq 20
addresses_unused.each_with_index do |a, i|
expect(a['derivation_path']).to eq ['1852H', '1815H', '0H', '0', i.to_s]
end
end
end
describe CardanoWallet::Shelley::CoinSelections do
after(:each) do
teardown
end
it "I could trigger random coin selection - if had money" do
wid = create_shelley_wallet
addresses = SHELLEY.addresses.list(wid)
addr_amount = [
{ addresses[0]['id'] => 123 },
{ addresses[1]['id'] => 456 }
]
rnd = SHELLEY.coin_selections.random wid, addr_amount
expect(rnd).to be_correct_and_respond 403
expect(rnd.to_s).to include "not_enough_money"
end
end
describe CardanoWallet::Shelley::Transactions do
after(:each) do
teardown
end
it "I could get a tx if I had proper id" do
wid = create_shelley_wallet
txs = SHELLEY.transactions
g = txs.get(wid, TXID)
expect(g).to be_correct_and_respond 404
expect(g.to_s).to include "no_such_transaction"
end
it "Can list transactions" do
id = create_shelley_wallet
txs = SHELLEY.transactions
l = txs.list(id)
l_ext = txs.list(id, { start: "2012-09-25T10:15:00Z",
end: "2016-11-21T10:15:00Z",
order: "ascending" })
l_bad = txs.list(id, { order: "bad_order" })
expect(l).to be_correct_and_respond 200
expect(l_ext).to be_correct_and_respond 200
expect(l_bad).to be_correct_and_respond 400
end
it "I could create transaction - if I had money" do
id = create_shelley_wallet
target_id = create_shelley_wallet
address = SHELLEY.addresses.list(target_id)[0]['id']
txs = SHELLEY.transactions
amt = [{ address => 1000000 }]
tx_sent = txs.create(id, PASS, amt)
expect(tx_sent).to be_correct_and_respond 403
expect(tx_sent.to_s).to include "not_enough_money"
end
it "I could create transaction using rewards - if I had money" do
id = create_shelley_wallet
target_id = create_shelley_wallet
address = SHELLEY.addresses.list(target_id)[0]['id']
txs = SHELLEY.transactions
amt = [{ address => 1000000 }]
tx_sent = txs.create(id, PASS, amt, 'self')
expect(tx_sent).to be_correct_and_respond 403
expect(tx_sent.to_s).to include "not_enough_money"
end
it "I could estimate transaction fee - if I had money" do
id = create_shelley_wallet
target_id = create_shelley_wallet
address = SHELLEY.addresses.list(target_id)[0]['id']
amt = [{ address => 1000000 }]
txs = SHELLEY.transactions
fees = txs.payment_fees(id, amt)
expect(fees).to be_correct_and_respond 403
expect(fees.to_s).to include "not_enough_money"
fees = txs.payment_fees(id, amt, 'self')
expect(fees).to be_correct_and_respond 403
expect(fees.to_s).to include "not_enough_money"
metadata = { "0" => { "string" => "cardano" },
"1" => { "int" => 14 },
"2" => { "bytes" => "2512a00e9653fe49a44a5886202e24d77eeb998f" },
"3" => { "list" => [ { "int" => 14 }, { "int" => 42 }, { "string" => "1337" } ] },
"4" => { "map" => [ { "k" => { "string" => "key" }, "v" => { "string" => "value" } },
{ "k" => { "int" => 14 }, "v" => { "int" => 42 } } ] } }
fees = txs.payment_fees(id, amt, 'self', metadata)
expect(fees).to be_correct_and_respond 403
expect(fees.to_s).to include "not_enough_money"
end
it "I could forget transaction" do
id = create_shelley_wallet
txs = SHELLEY.transactions
res = txs.forget(id, TXID)
expect(res).to be_correct_and_respond 404
end
end
describe CardanoWallet::Shelley::StakePools do
after(:each) do
settings = CardanoWallet.new.misc.settings
s = settings.update({ :pool_metadata_source => "none" })
teardown
end
it "ADP-634 - Pool metadata is updated when settings are updated" do
settings = CardanoWallet.new.misc.settings
pools = SHELLEY.stake_pools
s = settings.update({ :pool_metadata_source => "direct" })
expect(s).to be_correct_and_respond 204
eventually "Pools have metadata when 'pool_metadata_source' => 'direct'" do
sps = pools.list({ stake: 1000 })
sps.select { |p| p['metadata'] }.size > 0
end
s = settings.update({ :pool_metadata_source => "none" })
expect(s).to be_correct_and_respond 204
eventually "Pools have no metadata when 'pool_metadata_source' => 'none'" do
sps = pools.list({ stake: 1000 })
sps.select { |p| p['metadata'] }.size == 0
end
s = settings.update({ :pool_metadata_source => ENV['TESTS_E2E_SMASH'] })
expect(s).to be_correct_and_respond 204
eventually "Pools have metadata when 'pool_metadata_source' => '#{ENV['TESTS_E2E_SMASH']}'" do
sps = pools.list({ stake: 1000 })
sps.select { |p| p['metadata'] }.size > 0
end
s = settings.update({ :pool_metadata_source => "none" })
expect(s).to be_correct_and_respond 204
eventually "Pools have no metadata when 'pool_metadata_source' => 'none'" do
sps = pools.list({ stake: 1000 })
sps.select { |p| p['metadata'] }.size == 0
end
end
describe "Stake Pools GC Maintenance" do
matrix = [{ "direct" => "not_applicable" },
{ "none" => "not_applicable" },
{ "https://smash.cardano-testnet.iohkdev.io" => "has_run" }]
matrix.each do |tc|
it "GC metadata maintenance action on metadata source #{tc}" do
settings = CardanoWallet.new.misc.settings
pools = SHELLEY.stake_pools
s = settings.update({ :pool_metadata_source => tc.keys.first })
expect(s).to be_correct_and_respond 204
t = pools.trigger_maintenance_actions({ maintenance_action: "gc_stake_pools" })
expect(t).to be_correct_and_respond 204
eventually "Maintenance action has status = #{tc.values.first}" do
r = pools.view_maintenance_actions
(r.code == 200) && (r.to_s.include? tc.values.first)
end
end
end
end
it "I could quit stake pool - if I was delegating" do
id = create_shelley_wallet
pools = SHELLEY.stake_pools
quit = pools.quit(id, PASS)
expect(quit).to be_correct_and_respond 403
expect(quit.to_s).to include "not_delegating_to"
end
end
describe CardanoWallet::Shelley::Migrations do
after(:each) do
teardown
end
it "I could create migration plan" do
id = create_shelley_wallet
target_id = create_shelley_wallet
addrs = SHELLEY.addresses.list(target_id).map { |a| a['id'] }
plan = SHELLEY.migrations.plan(id, addrs)
expect(plan).to be_correct_and_respond 403
expect(plan.to_s).to include "nothing_to_migrate"
end
it "I could migrate all my funds" do
id = create_shelley_wallet
target_id = create_shelley_wallet
addrs = SHELLEY.addresses.list(target_id).map { |a| a['id'] }
migr = SHELLEY.migrations.migrate(id, PASS, addrs)
expect(migr).to be_correct_and_respond 403
expect(migr.to_s).to include "nothing_to_migrate"
end
end
describe CardanoWallet::Shelley::Keys do
after(:each) do
teardown
end
it "Get signed metadata" do
wid = create_shelley_wallet
["utxo_internal", "utxo_external", "mutable_account"].each do |role|
id = [*0..100000].sample
res = SHELLEY.keys.sign_metadata(wid,
role,
id,
"Secure Passphrase",
{ "0" => { "string" => "cardano" } })
puts "#{wid}/#{role}/#{id}"
expect(res).to respond_with 200
end
end
it "Get public key" do
wid = create_shelley_wallet
["utxo_internal", "utxo_external", "mutable_account"].each do |role|
id = [*0..100000].sample
res = SHELLEY.keys.get_public_key(wid, role, id)
puts "#{wid}/#{role}/#{id}"
expect(res).to be_correct_and_respond 200
end
end
it "Create account public key - extended" do
wid = create_shelley_wallet
["0H", "1H", "2147483647H", "44H"].each do |index|
res = SHELLEY.keys.create_acc_public_key(wid, index, PASS, 'extended')
expect(res).to be_correct_and_respond 202
expect(res.to_s).to include "acc"
end
end
it "Create account public key - non_extended" do
wid = create_shelley_wallet
["0H", "1H", "2147483647H", "44H"].each do |index|
res = SHELLEY.keys.create_acc_public_key(wid, index, PASS, 'non_extended')
expect(res).to be_correct_and_respond 202
expect(res.to_s).to include "acc"
end
end
it "Get account public key - wallet from mnemonics" do
wid = create_shelley_wallet
res = SHELLEY.keys.get_acc_public_key(wid, { format: "extended" })
expect(res).to be_correct_and_respond 200
expect(res.to_s).to include "acct_xvk"
end
it "Get account public key - wallet from acc pub key" do
w = SHELLEY.wallets
wallet = w.create({ name: "Wallet from pub key",
account_public_key: "b47546e661b6c1791452d003d375756dde6cac2250093ce4630f16b9b9c0ac87411337bda4d5bc0216462480b809824ffb48f17e08d95ab9f1b91d391e48e66b",
address_pool_gap: 20,
})
expect(wallet).to be_correct_and_respond 201
res = SHELLEY.keys.get_acc_public_key(wallet['id'], { format: "non_extended" })
expect(res).to be_correct_and_respond 200
expect(res.to_s).to include "acct_vk"
end
end
end
| 34.546512 | 178 | 0.612858 |
5d3cf7b39afd8ef9cd26988a69d67c1b9d5db95b | 1,781 | module CatchNotes
module CRUDMethods
module ClassMethods
private
def build_from_hash(hash)
send :new, hash
end
end
def self.included(klass) #:nodoc:
klass.extend ClassMethods
end
def initialize(attributes)
@attr = self.class.send :stringify_keys, attributes
@id = @attr['id']
@created_at = @attr['created_at']
@modified_at = @attr['modified_at']
@source = @attr['source']
@source_url = @attr['source_url']
@children = @attr['children']
@text = @attr['text']
@summary = @attr['summary']
@tags = @attr['tags']
@reminder_at = @attr['reminder_at']
@location = @attr['location']
unless @attr['user'].nil?
@user_name = @attr['user']['user_name']
@user_id = @attr['user']['id']
end
end
def save!
res = if new_record?
self.class.send(:post, "/notes", :body => post_body)
else
self.class.send(:post, "/notes/#{self.id}", :body => post_body)
end
if self.class.send(:ok?, res)
rebuild res.parsed_response['notes'].first
end
true
end
def save
save!
rescue
false
end
def destroy!
raise CatchNotes::NotFound if new_record? # can't delete an unsaved note
res = self.class.send(:delete, "/notes/#{self.id}")
self.class.send(:ok?, res)
end
def destroy
destroy!
rescue
false
end
def new_record?
i = self.send :id
i.nil?
end
private
def post_body
{
'text' => send(:text)
}.map{|k,v| "#{URI.encode(k)}=#{URI.encode(v)}"}.join("&")
end
def rebuild(attrs)
initialize(attrs)
end
end
end | 21.719512 | 78 | 0.54183 |
f7e20492270fc7a0a2b8c38b95b61091948782ca | 192 | module AhaApi
class Resource
module Users
def user(user_id, options={})
get("api/#{api_version}/users/#{CGI.escape(user_id)}", options)
end
end
end
end
| 16 | 71 | 0.59375 |
ed02666489d31c1fb4ae564dec246a03822d696f | 283 | class CreateLocations < ActiveRecord::Migration[5.1]
def change
create_table :locations do |t|
t.integer :user_id
t.string :category
t.string :description
t.string :name
t.string :city
t.string :country
t.timestamps
end
end
end
| 18.866667 | 52 | 0.636042 |
7949e46c028bfc7840b96f432fbee8bbef680b2b | 24 | require "paxful_engine"
| 12 | 23 | 0.833333 |
2669921aa388522525812138bf22d9145fb13b6e | 368 | lines = File.read('input.txt').split("\n").map {|x| x.split(' ')}
horizontal = 0
depth = 0
aim = 0
lines.each do |command, argument|
case command
when 'forward'
horizontal += argument.to_i
depth += aim * argument.to_i
when 'down'
aim += argument.to_i
when 'up'
aim -= argument.to_i
else
puts 'what ' + command
end
end
puts horizontal * depth
| 18.4 | 65 | 0.641304 |
1dec81e3f341cd2f95f0be988b99f54f67b350ff | 1,595 | #
# Be sure to run `pod lib lint AppBaseKit.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'AppBaseKit'
s.version = '0.1.0'
s.summary = 'AppBaseKit use project init'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
AppBaseKit use project init, help dev quickly starting code.
DESC
s.homepage = 'https://github.com/jessecoding/AppBaseKit'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Jesse Xu' => '[email protected]' }
s.source = { :git => 'https://github.com/jessecoding/AppBaseKit.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '9.0'
s.source_files = 'AppBaseKit/Classes/**/*'
# s.resource_bundles = {
# 'AppBaseKit' => ['AppBaseKit/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
s.frameworks = 'UIKit'
s.dependency 'Alamofire', '~> 4.7'
end
| 37.093023 | 106 | 0.640752 |
79ffd795a1ba9b339ee833e375e64830ede2bd42 | 5,726 | class ForumPost < ApplicationRecord
attr_readonly :topic_id
belongs_to :creator, class_name: "User"
belongs_to_updater
belongs_to :topic, class_name: "ForumTopic", inverse_of: :forum_posts
has_many :dtext_links, as: :model, dependent: :destroy
has_many :moderation_reports, as: :model
has_many :votes, class_name: "ForumPostVote"
has_one :tag_alias
has_one :tag_implication
has_one :bulk_update_request
before_save :update_dtext_links, if: :dtext_links_changed?
before_create :autoreport_spam
after_create :update_topic_updated_at_on_create
after_update :update_topic_updated_at_on_update_for_original_posts
after_destroy :update_topic_updated_at_on_destroy
validates_presence_of :body
after_save :delete_topic_if_original_post
after_update(:if => ->(rec) {rec.updater_id != rec.creator_id}) do |rec|
ModAction.log("#{CurrentUser.user.name} updated forum ##{rec.id}", :forum_post_update)
end
after_destroy(:if => ->(rec) {rec.updater_id != rec.creator_id}) do |rec|
ModAction.log("#{CurrentUser.user.name} deleted forum ##{rec.id}", :forum_post_delete)
end
after_create_commit :async_send_discord_notification
deletable
mentionable(
:message_field => :body,
:title => ->(user_name) {%{#{creator.name} mentioned you in topic ##{topic_id} (#{topic.title})}},
:body => ->(user_name) {%{@#{creator.name} mentioned you in topic ##{topic_id} ("#{topic.title}":[#{Routes.forum_topic_path(topic, page: forum_topic_page)}]):\n\n[quote]\n#{DText.extract_mention(body, "@" + user_name)}\n[/quote]\n}}
)
module SearchMethods
def visible(user)
where(topic_id: ForumTopic.visible(user))
end
def wiki_link_matches(title)
where(id: DtextLink.forum_post.wiki_link.where(link_target: WikiPage.normalize_title(title)).select(:model_id))
end
def search(params)
q = search_attributes(params, :id, :created_at, :updated_at, :is_deleted, :body, :creator, :updater, :topic, :dtext_links, :votes, :tag_alias, :tag_implication, :bulk_update_request)
q = q.text_attribute_matches(:body, params[:body_matches], index_column: :text_index)
if params[:linked_to].present?
q = q.wiki_link_matches(params[:linked_to])
end
q.apply_default_order(params)
end
end
extend SearchMethods
def self.new_reply(params)
if params[:topic_id]
new(:topic_id => params[:topic_id])
elsif params[:post_id]
forum_post = ForumPost.find(params[:post_id])
forum_post.build_response
else
new
end
end
def voted?(user, score)
votes.where(creator_id: user.id, score: score).exists?
end
def autoreport_spam
if SpamDetector.new(self, user_ip: CurrentUser.ip_addr).spam?
moderation_reports << ModerationReport.new(creator: User.system, reason: "Spam.")
end
end
def update_topic_updated_at_on_create
if topic
# need to do this to bypass the topic's original post from getting touched
ForumTopic.where(:id => topic.id).update_all(["updater_id = ?, response_count = response_count + 1, updated_at = ?", creator.id, Time.now])
topic.response_count += 1
end
end
def update_topic_updated_at_on_update_for_original_posts
if is_original_post?
topic.touch
end
end
def delete!
update(is_deleted: true)
update_topic_updated_at_on_delete
end
def undelete!
update(is_deleted: false)
update_topic_updated_at_on_undelete
end
def dtext_links_changed?
body_changed? && DText.dtext_links_differ?(body, body_was)
end
def update_dtext_links
self.dtext_links = DtextLink.new_from_dtext(body)
end
def update_topic_updated_at_on_delete
max = ForumPost.where(:topic_id => topic.id, :is_deleted => false).order("updated_at desc").first
if max
ForumTopic.where(:id => topic.id).update_all(["updated_at = ?, updater_id = ?", max.updated_at, max.updater_id])
end
end
def update_topic_updated_at_on_undelete
if topic
ForumTopic.where(:id => topic.id).update_all(["updater_id = ?, updated_at = ?", CurrentUser.id, Time.now])
end
end
def update_topic_updated_at_on_destroy
max = ForumPost.where(:topic_id => topic.id, :is_deleted => false).order("updated_at desc").first
if max
ForumTopic.where(:id => topic.id).update_all(["response_count = response_count - 1, updated_at = ?, updater_id = ?", max.updated_at, max.updater_id])
else
ForumTopic.where(:id => topic.id).update_all("response_count = response_count - 1")
end
topic.response_count -= 1
end
def quoted_response
DText.quote(body, creator.name)
end
def forum_topic_page
(ForumPost.where("topic_id = ? and created_at <= ?", topic_id, created_at).count / Danbooru.config.posts_per_page.to_f).ceil
end
def is_original_post?(original_post_id = nil)
if original_post_id
return id == original_post_id
else
ForumPost.exists?(["id = ? and id = (select _.id from forum_posts _ where _.topic_id = ? order by _.id asc limit 1)", id, topic_id])
end
end
def delete_topic_if_original_post
if is_deleted? && is_original_post?
topic.update_attribute(:is_deleted, true)
end
end
def async_send_discord_notification
DiscordNotificationJob.perform_later(forum_post: self)
end
def send_discord_notification
return unless policy(User.anonymous).show?
DiscordApiClient.new.post_message(self)
end
def build_response
dup.tap do |x|
x.body = x.quoted_response
end
end
def dtext_shortlink(**options)
"forum ##{id}"
end
def self.available_includes
[:creator, :updater, :topic, :dtext_links, :votes, :tag_alias, :tag_implication, :bulk_update_request]
end
end
| 31.811111 | 236 | 0.716032 |
87cd154190229d8b52309875248643ea43d84f37 | 1,959 | ###
# Do not use this file to override the opsworks_nodejs cookbook's default
# attributes. Instead, please use the customize.rb attributes file,
# which will keep your adjustments separate from the AWS OpsWorks
# codebase and make it easier to upgrade.
#
# However, you should not edit customize.rb directly. Instead, create
# "opsworks_nodejs/attributes/customize.rb" in your cookbook repository and
# put the overrides in YOUR customize.rb file.
#
# Do NOT create an 'opsworks_nodejs/attributes/opsworks_nodejs.rb' in your cookbooks. Doing so
# would completely override this file and might cause upgrade issues.
#
# See also: http://docs.aws.amazon.com/opsworks/latest/userguide/customizing.html
###
include_attribute 'deploy'
include_attribute 'opsworks_commons::default'
# This value is only to give some sane initialization. This value:
# - May not point to the latest available version
# - May not be valid for the used OS
# - Will be overwritten by the value set on the OpsWorks NodeJS layer of your Stack
# - May be overwritten by a value set in your Stack's custom JSON
default[:opsworks_nodejs][:version] = '0.12.10'
default[:opsworks_nodejs][:pkgrelease] = '1'
arch = RUBY_PLATFORM.match(/64/) ? 'amd64' : 'i386'
default[:opsworks_nodejs][:deb] = "opsworks-nodejs-#{node[:opsworks_nodejs][:version]}-#{node[:opsworks_nodejs][:pkgrelease]}_#{arch}.deb"
default[:opsworks_nodejs][:deb_url] = "#{node[:opsworks_commons][:assets_url]}/packages/#{node[:platform]}/#{node[:platform_version]}/#{node[:opsworks_nodejs][:deb]}"
rhel_arch = RUBY_PLATFORM.match(/64/) ? 'x86_64' : 'i686'
default[:opsworks_nodejs][:rpm] = "opsworks-nodejs-#{node[:opsworks_nodejs][:version]}-#{node[:opsworks_nodejs][:pkgrelease]}.#{rhel_arch}.rpm"
default[:opsworks_nodejs][:rpm_url] = "#{node[:opsworks_commons][:assets_url]}/packages/#{node[:platform]}/#{node[:platform_version]}/#{node[:opsworks_nodejs][:rpm]}"
include_attribute "opsworks_nodejs::customize"
| 51.552632 | 166 | 0.752935 |
08c59c60e71457bf1835a0c1234f4630cde3e165 | 3,637 | # frozen_string_literal: true
module ShopifyTransporter
module Exporters
module Magento
class ProductOptions
def initialize(database_table_exporter, database_cache)
@database_table_exporter = database_table_exporter
@database_cache = database_cache
export_required_tables
end
def shopify_option_names(parent_product_id)
option_names(parent_product_id).each_with_index.with_object({}) do |(option_name, index), obj|
obj["option#{index + 1}_name".to_sym] = option_name
end
end
def lowercase_option_names(parent_product_id)
option_names(parent_product_id).map(&:downcase)
end
def shopify_variant_options(simple_product)
return {} unless simple_product_has_required_option_keys(simple_product)
parent_product_options = lowercase_option_names(simple_product[:parent_id])
variant_attributes = simple_product[:additional_attributes][:item]
variant_attributes = [variant_attributes] unless variant_attributes.is_a?(Array)
parent_product_options.each_with_index.with_object({}) do |(option_name, index), obj|
option_value_id = fetch_option_value_id(option_name, variant_attributes)
obj["option#{index + 1}_name".to_sym] = option_name.capitalize
obj["option#{index + 1}_value".to_sym] = option_value(option_value_id)
end
end
private
def option_names(product_id)
option_lookup[product_id] || []
end
def option_value(soap_value_id)
option_value_lookup[soap_value_id] || nil
end
def fetch_option_value_id(option_name, variant_attributes)
option_attribute_hash = variant_attributes.select do |attribute|
attribute[:key] == option_name
end.first
return nil if option_attribute_hash.nil?
option_attribute_hash[:value]
end
def simple_product_has_required_option_keys(simple_product)
simple_product.key?(:parent_id) && simple_product.key?(:additional_attributes)
end
def export_required_tables
@database_table_exporter.export_table('catalog_product_super_attribute', 'product_super_attribute_id')
@database_table_exporter.export_table('catalog_product_super_attribute_label', 'value_id')
@database_table_exporter.export_table('eav_attribute_option_value', 'value_id')
end
def option_lookup
@option_lookup ||= @database_cache
.table('catalog_product_super_attribute')
.each_with_object({}) do |attribute, option_hash|
option_hash[attribute['product_id']] ||= []
option_hash[attribute['product_id']] << option_label_lookup[attribute['product_super_attribute_id']]
end
end
def option_label_lookup
@option_label_lookup ||= @database_cache
.table('catalog_product_super_attribute_label')
.each_with_object({}) do |label, label_lookup|
label_lookup[label['product_super_attribute_id']] = label['value']
end
end
def option_value_lookup
@option_value_lookup ||= begin
soap_value_id_column_key = 'option_id'
@database_cache
.table('eav_attribute_option_value')
.each_with_object({}) do |option_value, value_lookup|
value_lookup[option_value[soap_value_id_column_key]] = option_value['value']
end
end
end
end
end
end
end
| 36.737374 | 112 | 0.667583 |
e9b2a4c5bfd0d407e58904dfa73956a434c64027 | 525 | require 'rails_helper'
describe "asset_events/_maintenance_update_event_form.html.haml", :type => :view do
it 'fields' do
assign(:asset, Asset.get_typed_asset(create(:buslike_asset)))
assign(:asset_event, MaintenanceUpdateEvent.new)
render
expect(rendered).to have_field('asset_event_maintenance_type_id')
expect(rendered).to have_field('asset_event_current_mileage')
expect(rendered).to have_field('asset_event_event_date')
expect(rendered).to have_field('asset_event_comments')
end
end
| 32.8125 | 83 | 0.771429 |
33f4aece1d47d6b8d1a66832a1e0a658634f62b7 | 126 | require 'test_helper'
class LeagueSeasonTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.75 | 48 | 0.714286 |
6abf0e719be43afc1d0c6e8621113c5504f635db | 2,960 | module DockerCookbook
module DockerHelpers
module Base
require 'shellwords'
##########
# coersion
##########
def coerce_labels(v)
case v
when Hash, nil
v
else
Array(v).each_with_object({}) do |label, h|
parts = label.split(':')
h[parts[0]] = parts[1]
end
end
end
def coerce_shell_command(v)
return nil if v.nil?
return DockerBase::ShellCommandString.new(
::Shellwords.join(v)) if v.is_a?(Array
)
DockerBase::ShellCommandString.new(v)
end
################
# Helper methods
################
def api_version
@api_version ||= Docker.version(connection)['ApiVersion']
end
def connection
@connection ||= begin
opts = {}
opts[:read_timeout] = read_timeout if read_timeout
opts[:write_timeout] = write_timeout if write_timeout
if host =~ /^tcp:/
opts[:scheme] = 'https' if tls || !tls_verify.nil?
opts[:ssl_ca_file] = tls_ca_cert if tls_ca_cert
opts[:client_cert] = tls_client_cert if tls_client_cert
opts[:client_key] = tls_client_key if tls_client_key
end
Docker::Connection.new(host || Docker.url, opts)
end
end
def with_retries(&block)
tries = api_retries
begin
block.call
# Only catch errors that can be fixed with retries.
rescue Docker::Error::ServerError, # 404
Docker::Error::UnexpectedResponseError, # 400
Docker::Error::TimeoutError,
Docker::Error::IOError
tries -= 1
retry if tries > 0
raise
end
end
def call_action(_action)
new_resource.run_action
end
def default_host
return nil unless ENV['DOCKER_HOST']
ENV['DOCKER_HOST']
end
def default_tls
return nil unless ENV['DOCKER_TLS']
ENV['DOCKER_TLS']
end
def default_tls_verify
return nil unless ENV['DOCKER_TLS_VERIFY']
ENV['DOCKER_TLS_VERIFY']
end
def default_tls_cert_path(v)
return false unless ENV['DOCKER_CERT_PATH']
case v
when 'ca'
"#{ENV['DOCKER_CERT_PATH']}/ca.pem"
when 'cert'
"#{ENV['DOCKER_CERT_PATH']}/cert.pem"
when 'key'
"#{ENV['DOCKER_CERT_PATH']}/key.pem"
end
end
# recursively remove nil values from a hash
def compact!(v)
v.reject! do |_, value|
compact!(value) if value.is_a?(Hash)
value.nil?
end
end
end
end
end
| 26.909091 | 83 | 0.499662 |
e8c775d8cd0acfadaa323a3179ac99539b9e29a7 | 1,226 | require 'test_helper'
class PeopleControllerTest < ActionDispatch::IntegrationTest
setup do
@person = people(:one)
end
test "should get index" do
get people_url
assert_response :success
end
test "should get new" do
get new_person_url
assert_response :success
end
test "should create person" do
assert_difference('Person.count') do
post people_url, params: { person: { aadhar_number: @person.aadhar_number, address: @person.address, mobile_number: @person.mobile_number, name: @person.name } }
end
assert_redirected_to person_url(Person.last)
end
test "should show person" do
get person_url(@person)
assert_response :success
end
test "should get edit" do
get edit_person_url(@person)
assert_response :success
end
test "should update person" do
patch person_url(@person), params: { person: { aadhar_number: @person.aadhar_number, address: @person.address, mobile_number: @person.mobile_number, name: @person.name } }
assert_redirected_to person_url(@person)
end
test "should destroy person" do
assert_difference('Person.count', -1) do
delete person_url(@person)
end
assert_redirected_to people_url
end
end
| 25.020408 | 175 | 0.721044 |
11288c123016e4fe20075347337a02a0e02aeb0c | 1,817 | # -*- encoding : utf-8 -*-
# Rebuilds the `statistics` database view after the move to a state machine:
# project totals are derived from projects.state instead of the legacy
# successful/finished/expires_at/visible columns.
class StatisticsBackToStateMachine < ActiveRecord::Migration
  def up
    execute <<-SQL
      CREATE OR REPLACE VIEW statistics AS
      SELECT
        (SELECT count(*) FROM users WHERE primary_user_id IS NULL) AS total_users,
        total_backs,
        total_backers,
        total_backed,
        total_projects,
        total_projects_success,
        total_projects_online
      FROM
        (
          SELECT count(*) AS total_backs,
                 count(DISTINCT user_id) AS total_backers,
                 sum(value) AS total_backed
          FROM backers WHERE confirmed
        ) AS backers_totals,
        (
          SELECT
            count(*) AS total_projects,
            count(CASE WHEN state = 'successful' THEN 1 ELSE NULL END) AS total_projects_success,
            count(CASE WHEN state = 'online' THEN 1 ELSE NULL END) AS total_projects_online
          FROM projects WHERE state NOT IN ('draft', 'rejected')
        ) AS projects_totals
    SQL
  end

  # Restores the previous view definition, which computed project totals from
  # the pre-state-machine boolean/timestamp columns.
  def down
    # Fix: use the indentable heredoc (<<-SQL) as `up` does; the original
    # plain <<SQL requires its terminator at column 0 and was inconsistent.
    execute <<-SQL
      CREATE OR REPLACE VIEW statistics AS
      SELECT
        (SELECT count(*) FROM users WHERE primary_user_id IS NULL) AS total_users,
        total_backs,
        total_backers,
        total_backed,
        total_projects,
        total_projects_success,
        total_projects_online
      FROM
        (SELECT count(*) AS total_backs, count(DISTINCT user_id) AS total_backers, sum(value) AS total_backed FROM backers WHERE confirmed) AS backers_totals,
        (SELECT count(*) AS total_projects, count(CASE WHEN successful THEN 1 ELSE NULL END) AS total_projects_success, count(CASE WHEN finished = false AND expires_at >= current_timestamp THEN 1 ELSE NULL END) AS total_projects_online FROM projects WHERE visible) AS projects_totals
    SQL
  end
end
| 37.081633 | 282 | 0.651624 |
accc430a1d9162ba6e7cf667ac99caf9013b230a | 986 | #
# Cookbook Name:: dynect
# Recipe:: a_record
#
# Copyright:: 2010, Opscode, Inc <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Load the dynect cookbook first so the dynect_rr resource/provider exists.
include_recipe 'dynect'

# Create-or-update an A record named after this node, pointing
# "<hostname>.<dynect domain>" at the node's primary IP address.
# All DynECT credentials and the zone come from node['dynect'] attributes.
dynect_rr node['hostname'] do
  record_type 'A'
  rdata('address' => node['ipaddress'])
  fqdn "#{node['hostname']}.#{node['dynect']['domain']}"
  customer node['dynect']['customer']
  username node['dynect']['username']
  password node['dynect']['password']
  zone node['dynect']['zone']
  action :update
end
| 30.8125 | 74 | 0.719067 |
bbdf8555b1fa408c4fb052babfe2227d0033609a | 1,817 | # This migration comes from solidus_i18n (originally 20150609154031)
class RemoveTranslationsFromSpreeTables < ActiveRecord::Migration
  # Copies the default-locale translation rows back into each original Spree
  # table and then drops the *_translations tables.
  def up
    # Don't migrate if we still use Globalize, i.e. through spree_globalize Gem
    return if defined?(Globalize)
    %w(
      OptionType
      OptionValue
      ProductProperty
      Product
      Promotion
      Property
      Store
      Taxon
      Taxonomy
    ).each do |class_name|
      migrate_translation_data!(class_name)
    end
  end
  def down
    return if defined?(Globalize)
    # The translation tables are dropped in +up+, so there is nothing to
    # restore without Globalize.
    raise ActiveRecord::IrreversibleMigration
  end
  private
  # Locale whose translations are treated as canonical when copying back.
  def current_locale
    I18n.default_locale || 'en'
  end
  # Moves the current-locale translation of every record of Spree::<class_name>
  # back into the model's own table, then drops its *_translations table.
  # No-op when either table is missing.
  def migrate_translation_data!(class_name)
    klass = "Spree::#{class_name}".constantize
    table_name = klass.table_name
    singular_table_name = table_name.singularize
    return if !table_exists?(table_name) || !table_exists?("#{singular_table_name}_translations")
    # We can't rely on Globalize drop_translation_table! here,
    # because the Gem has been already removed, so we need to run custom SQL
    # NOTE(review): current_locale is interpolated into the SQL; it comes from
    # I18n configuration, not user input, so injection is not a concern here.
    records = exec_query("SELECT * FROM #{singular_table_name}_translations WHERE locale = '#{current_locale}';")
    records.each do |record|
      id = record["#{singular_table_name}_id"]
      # Strip bookkeeping columns so only translated attributes are copied.
      attributes = record.except(
        'id',
        "#{singular_table_name}_id",
        'locale',
        'deleted_at',
        'created_at',
        'updated_at'
      )
      # Include soft-deleted records when the model supports paranoia.
      object = if klass.respond_to?(:with_deleted)
        klass.with_deleted.find(id)
      else
        klass.find(id)
      end
      # update_columns skips validations/callbacks — intentional for raw data restore.
      object.update_columns(attributes)
    end
    say "Migrated #{current_locale} translation for #{class_name} back into original table."
    drop_table "#{singular_table_name}_translations"
  end
end
| 27.119403 | 113 | 0.666483 |
e270b25920362f727cad90f883a1030d462eb882 | 1,239 | require 'aws-sdk-s3'
module ContentCaching
  module Adapter
    # S3-backed storage adapter: uploads, presigns and deletes cached
    # documents. Expects an options hash with :aws_access_key_id,
    # :aws_secret_access_key, :aws_region and :directory (the bucket name).
    class Aws
      # Default lifetime, in seconds, of presigned GET URLs (one day).
      # (Integers are immutable; the previous `.freeze` was a no-op.)
      T_1_DAY = 86_400

      attr_reader :options

      # @param options [Hash] adapter configuration (see class docs)
      def initialize options
        @options = options
      end

      # Stores +content+ under +document_path+ in the bucket, retrying
      # transient failures up to 3 times. HTML content is written inline;
      # any other type is treated as a local file and uploaded via the
      # managed uploader.
      def store document_path, content, type
        ::Retryable.retryable(tries: 3) do
          # `==` instead of `===` — same result for symbols, clearer intent.
          if type == :html
            bucket.object(document_path).put(body: content)
          else
            bucket.object(document_path).upload_file(content)
          end
        end
      end

      # Returns a presigned GET URL for the stored document.
      # @param expires_in [Integer, nil] lifetime in seconds (default 1 day)
      def url document_path, expires_in: nil
        bucket.object(document_path).presigned_url :get, expires_in: expires_in || T_1_DAY
      end

      # Removes the stored document, retrying transient failures up to 3
      # times. Fully qualified ::Retryable for consistency with #store and to
      # avoid accidental constant lookup inside this namespace.
      def delete document_path
        ::Retryable.retryable(tries: 3) do
          bucket.object(document_path).delete
        end
      end

      private

      # Lazily built S3 resource handle.
      def service
        @service ||= ::Aws::S3::Resource.new(
          credentials: aws_credentials,
          region: options[:aws_region]
        )
      end

      # Target bucket, named by options[:directory].
      def bucket
        @bucket ||= service.bucket(options[:directory])
      end

      def aws_credentials
        ::Aws::Credentials.new(options[:aws_access_key_id],
                               options[:aws_secret_access_key])
      end
    end
  end
end
| 22.944444 | 90 | 0.592413 |
# Prefixes the product-assembly join table with "spree_" to follow the
# table-naming convention introduced with Spree 1.x.
class NamespaceProductAssemblyForSpreeOne < ActiveRecord::Migration
  OLD_NAME = :assemblies_parts
  NEW_NAME = :spree_assemblies_parts

  def up
    rename_table OLD_NAME, NEW_NAME
  end

  def down
    rename_table NEW_NAME, OLD_NAME
  end
end
| 22.5 | 67 | 0.808889 |
61a94b7edcb9b0fd64ea5b234a2fb02a11467704 | 874 | # frozen_string_literal: true
# A direct message between two users. Each participant can independently mark
# their copy as deleted (status 'd'); once both have done so, the row is
# physically destroyed by the after_save callback.
class Message < ApplicationRecord
  validates :text, presence: true

  belongs_to :sender, class_name: 'User', inverse_of: :sent_messages
  belongs_to :recipient, class_name: 'User', inverse_of: :received_messages

  # Messages still visible in the given user's inbox / outbox.
  scope :received_by, lambda { |user| where(recipient: user).where.not(recipient_status: 'd') }
  scope :sent_by, lambda { |user| where(sender: user).where.not(sender_status: 'd') }

  after_save :destroy_deleted_message

  # Flags the message as deleted from +user+'s point of view; if the user is
  # both sender and recipient, both flags are set. Does not persist by itself.
  def mark_as_deleted(user)
    self.sender_status = 'd' if user == sender
    self.recipient_status = 'd' if user == recipient
  end

  private

  # Physically removes the row once neither participant can still see it.
  def destroy_deleted_message
    destroy if deleted_by_both?
  end

  def deleted_by_sender?
    sender_status == 'd'
  end

  def deleted_by_recipient?
    recipient_status == 'd'
  end

  def deleted_by_both?
    deleted_by_sender? && deleted_by_recipient?
  end
end
| 23.621622 | 90 | 0.724256 |
1a8a2bbf8eae5e6be618625828ba76a6f74bbe88 | 2,585 | require 'rubygems'
class Debunker
  # Utility methods for querying and installing RubyGems. All methods live on
  # the module itself (class << self) and support both the modern
  # Gem::Specification API and the legacy Gem.source_index API.
  module Rubygem
    class << self
      # Whether a gem of the given name is installed.
      #
      # @param [String] name
      # @return [Boolean] (a truthy spec under the legacy API)
      def installed?(name)
        if Gem::Specification.respond_to?(:find_all_by_name)
          Gem::Specification.find_all_by_name(name).any?
        else
          Gem.source_index.find_name(name).first
        end
      end

      # Get the gem spec object for the given gem name.
      #
      # @param [String] name
      # @return [Gem::Specification] the newest installed version
      # @raise [CommandError] if the gem is not installed
      def spec(name)
        specs = if Gem::Specification.respond_to?(:each)
                  Gem::Specification.find_all_by_name(name)
                else
                  Gem.source_index.find_name(name)
                end

        # Pick the highest installed version.
        newest_spec = specs.sort_by { |spec| Gem::Version.new(spec.version) }.last
        newest_spec or raise CommandError, "Gem `#{name}` not found"
      end

      # List gems matching a pattern.
      #
      # @param [Regexp] pattern
      # @return [Array<Gem::Specification>]
      def list(pattern = /.*/)
        if Gem::Specification.respond_to?(:each)
          Gem::Specification.select { |spec| spec.name =~ pattern }
        else
          Gem.source_index.gems.values.select { |spec| spec.name =~ pattern }
        end
      end

      # Completion function for gem-cd and gem-open.
      #
      # @param [String] so_far what the user's typed so far
      # @return [Array<String>] completions
      def complete(so_far)
        if so_far =~ / ([^ ]*)\z/
          # Bug fix: the regexp above has a single capture group, so the typed
          # prefix is capture 1. The previous code interpolated $2 (always
          # nil), which turned the filter into /\A/ and returned every gem.
          self.list(%r{\A#{Regexp.last_match(1)}}).map(&:name)
        else
          self.list.map(&:name)
        end
      end

      # Installs a gem with all its dependencies.
      #
      # Installs into Gem.dir when it is writable and the user's gemrc does
      # not request --user-install; otherwise falls back to Gem.user_dir.
      #
      # @param [String] name
      # @return [void]
      # @raise [CommandError] on permission errors or when the gem is unknown
      def install(name)
        require 'rubygems/dependency_installer'
        gem_config = Gem.configuration['gem']
        gemrc_opts = (gem_config.nil? ? "" : gem_config.split(' '))
        destination = if gemrc_opts.include?('--user-install')
                        Gem.user_dir
                      elsif File.writable?(Gem.dir)
                        Gem.dir
                      else
                        Gem.user_dir
                      end
        installer = Gem::DependencyInstaller.new(:install_dir => destination)
        installer.install(name)
      rescue Errno::EACCES
        raise CommandError,
              "Insufficient permissions to install #{ Debunker::Helpers::Text.green(name) }."
      rescue Gem::GemNotFoundException
        raise CommandError,
              "Gem #{ Debunker::Helpers::Text.green(name) } not found. Aborting installation."
      else
        # Make the freshly installed gem visible to this process.
        Gem.refresh
      end
    end
  end
end
| 30.411765 | 90 | 0.560542 |
4a1158c7a618026b12b25fa6f8bd613ff17d2aa8 | 2,007 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Apps
    module Script
      module Type
        # Manifest message types for Drive add-ons.
        # Auto-generated by gapic-generator-ruby (see file header) — do not
        # hand-edit the structure; only the protobuf source should change it.
        module Drive
          # Drive add-on manifest.
          # @!attribute [rw] homepage_trigger
          #   @return [::Google::Apps::Script::Type::HomepageExtensionPoint]
          #     If present, this overrides the configuration from
          #     `addOns.common.homepageTrigger`.
          # @!attribute [rw] on_items_selected_trigger
          #   @return [::Google::Apps::Script::Type::Drive::DriveExtensionPoint]
          #     Corresponds to behavior that should execute when items are selected
          #     in relevant Drive view (e.g. the My Drive Doclist).
          class DriveAddOnManifest
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # A generic extension point with common features, e.g. something that simply
          # needs a corresponding run function to work.
          # @!attribute [rw] run_function
          #   @return [::String]
          #     Required. The endpoint to execute when this extension point is
          #     activated.
          class DriveExtensionPoint
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end
| 37.166667 | 86 | 0.655207 |
ff35794a3fb9dd5ad884652908937d21e3aeffcb | 5,099 | require 'rails_helper'
# Specs for PublifyTime.delta / .delta_like, which turn year/month/day
# arguments (or a dash-separated date string) into a Time range in the
# application zone.
describe PublifyTime do
  describe 'delta' do
    it 'returns nil when nil year, nil month and nil day' do
      expect(PublifyTime.delta).to be_nil
    end

    it 'returns year when year given' do
      from = Time.zone.local(2009, 1, 1, 0, 0, 0)
      expect(PublifyTime.delta(2009)).to eq(from..from.end_of_year)
    end

    it 'returns year and month when year and month given' do
      from = Time.zone.local(2009, 10, 1, 0, 0, 0)
      expect(PublifyTime.delta(2009, 10)).to eq(from..from.end_of_month)
    end

    it 'returns year, month and day when year, month and day given' do
      from = Time.zone.local(2009, 10, 23, 0, 0, 0)
      expect(PublifyTime.delta(2009, 10, 23)).to eq(from..from.end_of_day)
    end

    it 'returns year, when year given type string' do
      from = Time.zone.local(2009, 1, 1, 0, 0, 0)
      expect(PublifyTime.delta('2009')).to eq(from..from.end_of_year)
    end

    it 'returns year and month when year and month given type string' do
      from = Time.zone.local(2009, 9, 1, 0, 0, 0)
      expect(PublifyTime.delta('2009', '09')).to eq(from..from.end_of_month)
    end

    it 'returns year, month and day when year, month and day given type string' do
      from = Time.zone.local(2009, 1, 1, 0, 0, 0)
      expect(PublifyTime.delta('2009', '01', '01')).to eq(from..from.end_of_day)
    end

    it 'returns nil when year, month and day are not numeric' do
      expect(PublifyTime.delta('foo', 'bar', 'baz')).to be_nil
    end
  end

  describe 'delta_like' do
    it 'given year' do
      from = Time.zone.local(2013, 1, 1, 0, 0, 0)
      expect(PublifyTime.delta_like('2013')).to eq(from..from.end_of_year)
    end

    it 'given year month' do
      from = Time.zone.local(2013, 9, 1, 0, 0, 0)
      expect(PublifyTime.delta_like('2013-09')).to eq(from..from.end_of_month)
    end

    it 'given year month day' do
      from = Time.zone.local(2013, 8, 1, 0, 0, 0)
      expect(PublifyTime.delta_like('2013-08-01')).to eq(from..from.end_of_day)
    end
  end
end
# End-to-end check that ranges produced by PublifyTime.delta / .delta_like
# from an article's permalink segments find that article again, regardless
# of the configured application time zone.
describe 'find Article date range ' do
  let!(:blog) { create(:blog) }
  # Save and restore the global Time.zone so these examples don't leak the
  # zone they set into other specs.
  before do
    @timezone = Time.zone
  end
  after do
    Time.zone = @timezone
  end
  describe 'UTC' do
    before do
      Time.zone = 'UTC'
      @a = FactoryGirl.build(:article)
      @a.published_at = '1 Jan 2013 01:00 UTC'
      @a.save!
      # Derive year/month/day strings from the permalink URL
      # (e.g. http://myblog.net/2013/01/01/slug — TODO confirm format).
      params = @a.permalink_url.gsub('http://myblog.net/', '').split('/')
      @year, @month, @day = params[0], params[1], params[2]
    end
    it 'delta given year' do
      range = PublifyTime.delta(@year)
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta given year month' do
      range = PublifyTime.delta(@year, @month)
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta given year month day' do
      range = PublifyTime.delta(@year, @month, @day)
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta_like given year' do
      range = PublifyTime.delta_like("#{@year}")
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta_like given year month' do
      range = PublifyTime.delta_like("#{@year}-#{@month}")
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta_like given year month day' do
      range = PublifyTime.delta_like("#{@year}-#{@month}-#{@day}")
      expect(Article.where(published_at: range)).to eq([@a])
    end
  end
  # Same scenario with a non-UTC zone to catch offset handling regressions.
  describe 'JST(+0900) ' do
    before do
      Time.zone = 'Tokyo'
      @a = FactoryGirl.build(:article)
      @a.published_at = '1 Jan 2013 01:00 +0900'
      @a.save!
      params = @a.permalink_url.gsub('http://myblog.net/', '').split('/')
      @year, @month, @day = params[0], params[1], params[2]
    end
    it 'delta given year' do
      range = PublifyTime.delta(@year)
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta given year month' do
      range = PublifyTime.delta(@year, @month)
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta given year month day' do
      range = PublifyTime.delta(@year, @month, @day)
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta_like given year' do
      range = PublifyTime.delta_like("#{@year}")
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta_like given year month' do
      range = PublifyTime.delta_like("#{@year}-#{@month}")
      expect(Article.where(published_at: range)).to eq([@a])
    end
    it 'delta_like given year month day' do
      range = PublifyTime.delta_like("#{@year}-#{@month}-#{@day}")
      expect(Article.where(published_at: range)).to eq([@a])
    end
  end
end
| 30.35119 | 82 | 0.622279 |
f761340d7c5faf943ebf1cda532ff5de0539893f | 1,129 | class Clojurescript < Formula
  desc "Clojure to JS compiler"
  homepage "https://github.com/clojure/clojurescript"
  # Upstream publishes a prebuilt standalone jar; we install it as-is.
  url "https://github.com/clojure/clojurescript/releases/download/r1.10.896/cljs.jar"
  sha256 "72f1470c97eac06cf1fb5c3da966527643ffb440f2900441e79351cd46752443"
  license "EPL-1.0"
  head "https://github.com/clojure/clojurescript.git", branch: "master"
  # Release tags look like "r1.10.896", hence the optional leading "r".
  livecheck do
    url :stable
    strategy :github_latest
    regex(%r{href=.*?/tag/r?(\d+(?:\.\d+)+)["' >]}i)
  end
  bottle do
    sha256 cellar: :any_skip_relocation, all: "810375f973928608536e314655b82d6f6f1e50c61654ebbb36b61f9081078b5e"
  end
  depends_on "openjdk"
  # Install the jar under libexec and expose it via a `cljsc` wrapper script.
  def install
    libexec.install "cljs.jar"
    bin.write_jar_script libexec/"cljs.jar", "cljsc"
  end
  def caveats
    <<~EOS
      This formula is useful if you need to use the ClojureScript compiler directly.
      For a more integrated workflow use Leiningen, Boot, or Maven.
    EOS
  end
  # Smoke test: compile a trivial ClojureScript namespace.
  test do
    (testpath/"t.cljs").write <<~EOS
      (ns hello)
      (defn ^:export greet [n]
        (str "Hello " n))
    EOS
    system "#{bin}/cljsc", testpath/"t.cljs"
  end
end
| 26.255814 | 112 | 0.689991 |
0300c3dd48f717d8ae71f0eda7a8c9f35bfa8e7a | 92,444 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/query.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:cloudwatch)
module Aws::CloudWatch
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :cloudwatch
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::Query)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentails` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is search for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test endpoints. This should be avalid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available. Defaults to `false`.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function. Some predefined functions can be referenced by name - :none, :equal, :full, otherwise a Proc that takes and returns a number.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors and auth
# errors from expired credentials.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit) used by the default backoff function.
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before rasing a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set
# per-request on the session yeidled by {#session_for}.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idble before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session yeidled by {#session_for}.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
# `:ssl_ca_bundle` or `:ssl_ca_directory` the the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
# not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the the
# system default will be used if available.
#
def initialize(*args)
super
end
# @!group API Operations
# Deletes the specified alarms. In the event of an error, no alarms are
# deleted.
#
# @option params [required, Array<String>] :alarm_names
# The alarms to be deleted.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_alarms({
# alarm_names: ["AlarmName"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/DeleteAlarms AWS API Documentation
#
# @overload delete_alarms(params = {})
# @param [Hash] params ({})
def delete_alarms(params = {}, options = {})
req = build_request(:delete_alarms, params)
req.send_request(options)
end
# Deletes all dashboards that you specify. You may specify up to 100
# dashboards to delete. If there is an error during this call, no
# dashboards are deleted.
#
# @option params [required, Array<String>] :dashboard_names
# The dashboards to be deleted. This parameter is required.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_dashboards({
# dashboard_names: ["DashboardName"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/DeleteDashboards AWS API Documentation
#
# @overload delete_dashboards(params = {})
# @param [Hash] params ({})
def delete_dashboards(params = {}, options = {})
req = build_request(:delete_dashboards, params)
req.send_request(options)
end
# Retrieves the history for the specified alarm. You can filter the
# results by date range or item type. If an alarm name is not specified,
# the histories for all alarms are returned.
#
# CloudWatch retains the history of an alarm even if you delete the
# alarm.
#
# @option params [String] :alarm_name
# The name of the alarm.
#
# @option params [String] :history_item_type
# The type of alarm histories to retrieve.
#
# @option params [Time,DateTime,Date,Integer,String] :start_date
# The starting date to retrieve alarm history.
#
# @option params [Time,DateTime,Date,Integer,String] :end_date
# The ending date to retrieve alarm history.
#
# @option params [Integer] :max_records
# The maximum number of alarm history records to retrieve.
#
# @option params [String] :next_token
# The token returned by a previous call to indicate that there is more
# data available.
#
# @return [Types::DescribeAlarmHistoryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeAlarmHistoryOutput#alarm_history_items #alarm_history_items} => Array<Types::AlarmHistoryItem>
# * {Types::DescribeAlarmHistoryOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_alarm_history({
# alarm_name: "AlarmName",
# history_item_type: "ConfigurationUpdate", # accepts ConfigurationUpdate, StateUpdate, Action
# start_date: Time.now,
# end_date: Time.now,
# max_records: 1,
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.alarm_history_items #=> Array
# resp.alarm_history_items[0].alarm_name #=> String
# resp.alarm_history_items[0].timestamp #=> Time
# resp.alarm_history_items[0].history_item_type #=> String, one of "ConfigurationUpdate", "StateUpdate", "Action"
# resp.alarm_history_items[0].history_summary #=> String
# resp.alarm_history_items[0].history_data #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/DescribeAlarmHistory AWS API Documentation
#
# @overload describe_alarm_history(params = {})
# @param [Hash] params ({})
def describe_alarm_history(params = {}, options = {})
req = build_request(:describe_alarm_history, params)
req.send_request(options)
end
# Retrieves the specified alarms. If no alarms are specified, all alarms
# are returned. Alarms can be retrieved by using only a prefix for the
# alarm name, the alarm state, or a prefix for any action.
#
# @option params [Array<String>] :alarm_names
# The names of the alarms.
#
# @option params [String] :alarm_name_prefix
# The alarm name prefix. If this parameter is specified, you cannot
# specify `AlarmNames`.
#
# @option params [String] :state_value
# The state value to be used in matching alarms.
#
# @option params [String] :action_prefix
# The action name prefix.
#
# @option params [Integer] :max_records
# The maximum number of alarm descriptions to retrieve.
#
# @option params [String] :next_token
# The token returned by a previous call to indicate that there is more
# data available.
#
# @return [Types::DescribeAlarmsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeAlarmsOutput#metric_alarms #metric_alarms} => Array<Types::MetricAlarm>
# * {Types::DescribeAlarmsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_alarms({
# alarm_names: ["AlarmName"],
# alarm_name_prefix: "AlarmNamePrefix",
# state_value: "OK", # accepts OK, ALARM, INSUFFICIENT_DATA
# action_prefix: "ActionPrefix",
# max_records: 1,
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.metric_alarms #=> Array
# resp.metric_alarms[0].alarm_name #=> String
# resp.metric_alarms[0].alarm_arn #=> String
# resp.metric_alarms[0].alarm_description #=> String
# resp.metric_alarms[0].alarm_configuration_updated_timestamp #=> Time
# resp.metric_alarms[0].actions_enabled #=> Boolean
# resp.metric_alarms[0].ok_actions #=> Array
# resp.metric_alarms[0].ok_actions[0] #=> String
# resp.metric_alarms[0].alarm_actions #=> Array
# resp.metric_alarms[0].alarm_actions[0] #=> String
# resp.metric_alarms[0].insufficient_data_actions #=> Array
# resp.metric_alarms[0].insufficient_data_actions[0] #=> String
# resp.metric_alarms[0].state_value #=> String, one of "OK", "ALARM", "INSUFFICIENT_DATA"
# resp.metric_alarms[0].state_reason #=> String
# resp.metric_alarms[0].state_reason_data #=> String
# resp.metric_alarms[0].state_updated_timestamp #=> Time
# resp.metric_alarms[0].metric_name #=> String
# resp.metric_alarms[0].namespace #=> String
# resp.metric_alarms[0].statistic #=> String, one of "SampleCount", "Average", "Sum", "Minimum", "Maximum"
# resp.metric_alarms[0].extended_statistic #=> String
# resp.metric_alarms[0].dimensions #=> Array
# resp.metric_alarms[0].dimensions[0].name #=> String
# resp.metric_alarms[0].dimensions[0].value #=> String
# resp.metric_alarms[0].period #=> Integer
# resp.metric_alarms[0].unit #=> String, one of "Seconds", "Microseconds", "Milliseconds", "Bytes", "Kilobytes", "Megabytes", "Gigabytes", "Terabytes", "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits", "Percent", "Count", "Bytes/Second", "Kilobytes/Second", "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second", "Bits/Second", "Kilobits/Second", "Megabits/Second", "Gigabits/Second", "Terabits/Second", "Count/Second", "None"
# resp.metric_alarms[0].evaluation_periods #=> Integer
# resp.metric_alarms[0].datapoints_to_alarm #=> Integer
# resp.metric_alarms[0].threshold #=> Float
# resp.metric_alarms[0].comparison_operator #=> String, one of "GreaterThanOrEqualToThreshold", "GreaterThanThreshold", "LessThanThreshold", "LessThanOrEqualToThreshold"
# resp.metric_alarms[0].treat_missing_data #=> String
# resp.metric_alarms[0].evaluate_low_sample_count_percentile #=> String
# resp.metric_alarms[0].metrics #=> Array
# resp.metric_alarms[0].metrics[0].id #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.namespace #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.metric_name #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.dimensions #=> Array
# resp.metric_alarms[0].metrics[0].metric_stat.metric.dimensions[0].name #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.dimensions[0].value #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.period #=> Integer
# resp.metric_alarms[0].metrics[0].metric_stat.stat #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.unit #=> String, one of "Seconds", "Microseconds", "Milliseconds", "Bytes", "Kilobytes", "Megabytes", "Gigabytes", "Terabytes", "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits", "Percent", "Count", "Bytes/Second", "Kilobytes/Second", "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second", "Bits/Second", "Kilobits/Second", "Megabits/Second", "Gigabits/Second", "Terabits/Second", "Count/Second", "None"
# resp.metric_alarms[0].metrics[0].expression #=> String
# resp.metric_alarms[0].metrics[0].label #=> String
# resp.metric_alarms[0].metrics[0].return_data #=> Boolean
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/DescribeAlarms AWS API Documentation
#
# @overload describe_alarms(params = {})
# @param [Hash] params ({})
def describe_alarms(params = {}, options = {})
req = build_request(:describe_alarms, params)
req.send_request(options)
end
# Retrieves the alarms for the specified metric. To filter the results,
# specify a statistic, period, or unit.
#
# @option params [required, String] :metric_name
# The name of the metric.
#
# @option params [required, String] :namespace
# The namespace of the metric.
#
# @option params [String] :statistic
# The statistic for the metric, other than percentiles. For percentile
# statistics, use `ExtendedStatistics`.
#
# @option params [String] :extended_statistic
# The percentile statistic for the metric. Specify a value between p0.0
# and p100.
#
# @option params [Array<Types::Dimension>] :dimensions
# The dimensions associated with the metric. If the metric has any
# associated dimensions, you must specify them in order for the call to
# succeed.
#
# @option params [Integer] :period
# The period, in seconds, over which the statistic is applied.
#
# @option params [String] :unit
# The unit for the metric.
#
# @return [Types::DescribeAlarmsForMetricOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeAlarmsForMetricOutput#metric_alarms #metric_alarms} => Array<Types::MetricAlarm>
#
# @example Request syntax with placeholder values
#
# resp = client.describe_alarms_for_metric({
# metric_name: "MetricName", # required
# namespace: "Namespace", # required
# statistic: "SampleCount", # accepts SampleCount, Average, Sum, Minimum, Maximum
# extended_statistic: "ExtendedStatistic",
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue", # required
# },
# ],
# period: 1,
# unit: "Seconds", # accepts Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Count, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second, None
# })
#
# @example Response structure
#
# resp.metric_alarms #=> Array
# resp.metric_alarms[0].alarm_name #=> String
# resp.metric_alarms[0].alarm_arn #=> String
# resp.metric_alarms[0].alarm_description #=> String
# resp.metric_alarms[0].alarm_configuration_updated_timestamp #=> Time
# resp.metric_alarms[0].actions_enabled #=> Boolean
# resp.metric_alarms[0].ok_actions #=> Array
# resp.metric_alarms[0].ok_actions[0] #=> String
# resp.metric_alarms[0].alarm_actions #=> Array
# resp.metric_alarms[0].alarm_actions[0] #=> String
# resp.metric_alarms[0].insufficient_data_actions #=> Array
# resp.metric_alarms[0].insufficient_data_actions[0] #=> String
# resp.metric_alarms[0].state_value #=> String, one of "OK", "ALARM", "INSUFFICIENT_DATA"
# resp.metric_alarms[0].state_reason #=> String
# resp.metric_alarms[0].state_reason_data #=> String
# resp.metric_alarms[0].state_updated_timestamp #=> Time
# resp.metric_alarms[0].metric_name #=> String
# resp.metric_alarms[0].namespace #=> String
# resp.metric_alarms[0].statistic #=> String, one of "SampleCount", "Average", "Sum", "Minimum", "Maximum"
# resp.metric_alarms[0].extended_statistic #=> String
# resp.metric_alarms[0].dimensions #=> Array
# resp.metric_alarms[0].dimensions[0].name #=> String
# resp.metric_alarms[0].dimensions[0].value #=> String
# resp.metric_alarms[0].period #=> Integer
# resp.metric_alarms[0].unit #=> String, one of "Seconds", "Microseconds", "Milliseconds", "Bytes", "Kilobytes", "Megabytes", "Gigabytes", "Terabytes", "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits", "Percent", "Count", "Bytes/Second", "Kilobytes/Second", "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second", "Bits/Second", "Kilobits/Second", "Megabits/Second", "Gigabits/Second", "Terabits/Second", "Count/Second", "None"
# resp.metric_alarms[0].evaluation_periods #=> Integer
# resp.metric_alarms[0].datapoints_to_alarm #=> Integer
# resp.metric_alarms[0].threshold #=> Float
# resp.metric_alarms[0].comparison_operator #=> String, one of "GreaterThanOrEqualToThreshold", "GreaterThanThreshold", "LessThanThreshold", "LessThanOrEqualToThreshold"
# resp.metric_alarms[0].treat_missing_data #=> String
# resp.metric_alarms[0].evaluate_low_sample_count_percentile #=> String
# resp.metric_alarms[0].metrics #=> Array
# resp.metric_alarms[0].metrics[0].id #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.namespace #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.metric_name #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.dimensions #=> Array
# resp.metric_alarms[0].metrics[0].metric_stat.metric.dimensions[0].name #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.metric.dimensions[0].value #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.period #=> Integer
# resp.metric_alarms[0].metrics[0].metric_stat.stat #=> String
# resp.metric_alarms[0].metrics[0].metric_stat.unit #=> String, one of "Seconds", "Microseconds", "Milliseconds", "Bytes", "Kilobytes", "Megabytes", "Gigabytes", "Terabytes", "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits", "Percent", "Count", "Bytes/Second", "Kilobytes/Second", "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second", "Bits/Second", "Kilobits/Second", "Megabits/Second", "Gigabits/Second", "Terabits/Second", "Count/Second", "None"
# resp.metric_alarms[0].metrics[0].expression #=> String
# resp.metric_alarms[0].metrics[0].label #=> String
# resp.metric_alarms[0].metrics[0].return_data #=> Boolean
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/DescribeAlarmsForMetric AWS API Documentation
#
# @overload describe_alarms_for_metric(params = {})
# @param [Hash] params ({})
def describe_alarms_for_metric(params = {}, options = {})
req = build_request(:describe_alarms_for_metric, params)
req.send_request(options)
end
# Disables the actions for the specified alarms. When an alarm's
# actions are disabled, the alarm actions do not execute when the alarm
# state changes.
#
# @option params [required, Array<String>] :alarm_names
# The names of the alarms.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.disable_alarm_actions({
# alarm_names: ["AlarmName"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/DisableAlarmActions AWS API Documentation
#
# @overload disable_alarm_actions(params = {})
# @param [Hash] params ({})
def disable_alarm_actions(params = {}, options = {})
req = build_request(:disable_alarm_actions, params)
req.send_request(options)
end
# Enables the actions for the specified alarms.
#
# @option params [required, Array<String>] :alarm_names
# The names of the alarms.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.enable_alarm_actions({
# alarm_names: ["AlarmName"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/EnableAlarmActions AWS API Documentation
#
# @overload enable_alarm_actions(params = {})
# @param [Hash] params ({})
def enable_alarm_actions(params = {}, options = {})
req = build_request(:enable_alarm_actions, params)
req.send_request(options)
end
# Displays the details of the dashboard that you specify.
#
# To copy an existing dashboard, use `GetDashboard`, and then use the
# data returned within `DashboardBody` as the template for the new
# dashboard when you call `PutDashboard` to create the copy.
#
# @option params [required, String] :dashboard_name
# The name of the dashboard to be described.
#
# @return [Types::GetDashboardOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetDashboardOutput#dashboard_arn #dashboard_arn} => String
# * {Types::GetDashboardOutput#dashboard_body #dashboard_body} => String
# * {Types::GetDashboardOutput#dashboard_name #dashboard_name} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_dashboard({
# dashboard_name: "DashboardName", # required
# })
#
# @example Response structure
#
# resp.dashboard_arn #=> String
# resp.dashboard_body #=> String
# resp.dashboard_name #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/GetDashboard AWS API Documentation
#
# @overload get_dashboard(params = {})
# @param [Hash] params ({})
def get_dashboard(params = {}, options = {})
req = build_request(:get_dashboard, params)
req.send_request(options)
end
# You can use the `GetMetricData` API to retrieve as many as 100
# different metrics in a single request, with a total of as many as
# 100,800 datapoints. You can also optionally perform math expressions
# on the values of the returned statistics, to create new time series
# that represent new insights into your data. For example, using Lambda
# metrics, you could divide the Errors metric by the Invocations metric
# to get an error rate time series. For more information about metric
# math expressions, see [Metric Math Syntax and Functions][1] in the
# *Amazon CloudWatch User Guide*.
#
# Calls to the `GetMetricData` API have a different pricing structure
# than calls to `GetMetricStatistics`. For more information about
# pricing, see [Amazon CloudWatch Pricing][2].
#
# Amazon CloudWatch retains metric data as follows:
#
# * Data points with a period of less than 60 seconds are available for
# 3 hours. These data points are high-resolution metrics and are
# available only for custom metrics that have been defined with a
# `StorageResolution` of 1.
#
# * Data points with a period of 60 seconds (1-minute) are available for
# 15 days.
#
# * Data points with a period of 300 seconds (5-minute) are available
# for 63 days.
#
# * Data points with a period of 3600 seconds (1 hour) are available for
# 455 days (15 months).
#
# Data points that are initially published with a shorter period are
# aggregated together for long-term storage. For example, if you collect
# data using a period of 1 minute, the data remains available for 15
# days with 1-minute resolution. After 15 days, this data is still
# available, but is aggregated and retrievable only with a resolution of
# 5 minutes. After 63 days, the data is further aggregated and is
# available with a resolution of 1 hour.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/using-metric-math.html#metric-math-syntax
# [2]: https://aws.amazon.com/cloudwatch/pricing/
#
# @option params [required, Array<Types::MetricDataQuery>] :metric_data_queries
# The metric queries to be returned. A single `GetMetricData` call can
# include as many as 100 `MetricDataQuery` structures. Each of these
# structures can specify either a metric to retrieve, or a math
# expression to perform on retrieved data.
#
# @option params [required, Time,DateTime,Date,Integer,String] :start_time
# The time stamp indicating the earliest data to be returned.
#
# For better performance, specify `StartTime` and `EndTime` values that
# align with the value of the metric's `Period` and sync up with the
# beginning and end of an hour. For example, if the `Period` of a metric
# is 5 minutes, specifying 12:05 or 12:30 as `StartTime` can get a
# faster response from CloudWatch than setting 12:07 or 12:29 as the
# `StartTime`.
#
# @option params [required, Time,DateTime,Date,Integer,String] :end_time
# The time stamp indicating the latest data to be returned.
#
# For better performance, specify `StartTime` and `EndTime` values that
# align with the value of the metric's `Period` and sync up with the
# beginning and end of an hour. For example, if the `Period` of a metric
# is 5 minutes, specifying 12:05 or 12:30 as `EndTime` can get a faster
# response from CloudWatch than setting 12:07 or 12:29 as the `EndTime`.
#
# @option params [String] :next_token
# Include this value, if it was returned by the previous call, to get
# the next set of data points.
#
# @option params [String] :scan_by
# The order in which data points should be returned.
# `TimestampDescending` returns the newest data first and paginates when
# the `MaxDatapoints` limit is reached. `TimestampAscending` returns the
# oldest data first and paginates when the `MaxDatapoints` limit is
# reached.
#
# @option params [Integer] :max_datapoints
# The maximum number of data points the request should return before
# paginating. If you omit this, the default of 100,800 is used.
#
# @return [Types::GetMetricDataOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetMetricDataOutput#metric_data_results #metric_data_results} => Array<Types::MetricDataResult>
# * {Types::GetMetricDataOutput#next_token #next_token} => String
# * {Types::GetMetricDataOutput#messages #messages} => Array<Types::MessageData>
#
# @example Request syntax with placeholder values
#
# resp = client.get_metric_data({
# metric_data_queries: [ # required
# {
# id: "MetricId", # required
# metric_stat: {
# metric: { # required
# namespace: "Namespace",
# metric_name: "MetricName",
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue", # required
# },
# ],
# },
# period: 1, # required
# stat: "Stat", # required
# unit: "Seconds", # accepts Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Count, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second, None
# },
# expression: "MetricExpression",
# label: "MetricLabel",
# return_data: false,
# },
# ],
# start_time: Time.now, # required
# end_time: Time.now, # required
# next_token: "NextToken",
# scan_by: "TimestampDescending", # accepts TimestampDescending, TimestampAscending
# max_datapoints: 1,
# })
#
# @example Response structure
#
# resp.metric_data_results #=> Array
# resp.metric_data_results[0].id #=> String
# resp.metric_data_results[0].label #=> String
# resp.metric_data_results[0].timestamps #=> Array
# resp.metric_data_results[0].timestamps[0] #=> Time
# resp.metric_data_results[0].values #=> Array
# resp.metric_data_results[0].values[0] #=> Float
# resp.metric_data_results[0].status_code #=> String, one of "Complete", "InternalError", "PartialData"
# resp.metric_data_results[0].messages #=> Array
# resp.metric_data_results[0].messages[0].code #=> String
# resp.metric_data_results[0].messages[0].value #=> String
# resp.next_token #=> String
# resp.messages #=> Array
# resp.messages[0].code #=> String
# resp.messages[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/GetMetricData AWS API Documentation
#
# @overload get_metric_data(params = {})
# @param [Hash] params ({})
def get_metric_data(params = {}, options = {})
req = build_request(:get_metric_data, params)
req.send_request(options)
end
# Gets statistics for the specified metric.
#
# The maximum number of data points returned from a single call is
# 1,440. If you request more than 1,440 data points, CloudWatch returns
# an error. To reduce the number of data points, you can narrow the
# specified time range and make multiple requests across adjacent time
# ranges, or you can increase the specified period. Data points are not
# returned in chronological order.
#
# CloudWatch aggregates data points based on the length of the period
# that you specify. For example, if you request statistics with a
# one-hour period, CloudWatch aggregates all data points with time
# stamps that fall within each one-hour period. Therefore, the number of
# values aggregated by CloudWatch is larger than the number of data
# points returned.
#
# CloudWatch needs raw data points to calculate percentile statistics.
# If you publish data using a statistic set instead, you can only
# retrieve percentile statistics for this data if one of the following
# conditions is true:
#
# * The SampleCount value of the statistic set is 1.
#
# * The Min and the Max values of the statistic set are equal.
#
# Percentile statistics are not available for metrics when any of the
# metric values are negative numbers.
#
# Amazon CloudWatch retains metric data as follows:
#
# * Data points with a period of less than 60 seconds are available for
# 3 hours. These data points are high-resolution metrics and are
# available only for custom metrics that have been defined with a
# `StorageResolution` of 1.
#
# * Data points with a period of 60 seconds (1-minute) are available for
# 15 days.
#
# * Data points with a period of 300 seconds (5-minute) are available
# for 63 days.
#
# * Data points with a period of 3600 seconds (1 hour) are available for
# 455 days (15 months).
#
# Data points that are initially published with a shorter period are
# aggregated together for long-term storage. For example, if you collect
# data using a period of 1 minute, the data remains available for 15
# days with 1-minute resolution. After 15 days, this data is still
# available, but is aggregated and retrievable only with a resolution of
# 5 minutes. After 63 days, the data is further aggregated and is
# available with a resolution of 1 hour.
#
# CloudWatch started retaining 5-minute and 1-hour metric data as of
# July 9, 2016.
#
# For information about metrics and dimensions supported by AWS
# services, see the [Amazon CloudWatch Metrics and Dimensions
# Reference][1] in the *Amazon CloudWatch User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CW_Support_For_AWS.html
#
# @option params [required, String] :namespace
# The namespace of the metric, with or without spaces.
#
# @option params [required, String] :metric_name
# The name of the metric, with or without spaces.
#
# @option params [Array<Types::Dimension>] :dimensions
# The dimensions. If the metric contains multiple dimensions, you must
# include a value for each dimension. CloudWatch treats each unique
# combination of dimensions as a separate metric. If a specific
# combination of dimensions was not published, you can't retrieve
# statistics for it. You must specify the same dimensions that were used
# when the metrics were created. For an example, see [Dimension
# Combinations][1] in the *Amazon CloudWatch User Guide*. For more
# information about specifying dimensions, see [Publishing Metrics][2]
# in the *Amazon CloudWatch User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#dimension-combinations
# [2]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/publishingMetrics.html
#
# @option params [required, Time,DateTime,Date,Integer,String] :start_time
# The time stamp that determines the first data point to return. Start
# times are evaluated relative to the time that CloudWatch receives the
# request.
#
# The value specified is inclusive; results include data points with the
# specified time stamp. The time stamp must be in ISO 8601 UTC format
# (for example, 2016-10-03T23:00:00Z).
#
# CloudWatch rounds the specified time stamp as follows:
#
# * Start time less than 15 days ago - Round down to the nearest whole
# minute. For example, 12:32:34 is rounded down to 12:32:00.
#
# * Start time between 15 and 63 days ago - Round down to the nearest
# 5-minute clock interval. For example, 12:32:34 is rounded down to
# 12:30:00.
#
# * Start time greater than 63 days ago - Round down to the nearest
# 1-hour clock interval. For example, 12:32:34 is rounded down to
# 12:00:00.
#
# If you set `Period` to 5, 10, or 30, the start time of your request is
# rounded down to the nearest time that corresponds to even 5-, 10-, or
# 30-second divisions of a minute. For example, if you make a query at
# (HH:mm:ss) 01:05:23 for the previous 10-second period, the start time
# of your request is rounded down and you receive data from 01:05:10 to
# 01:05:20. If you make a query at 15:07:17 for the previous 5 minutes
# of data, using a period of 5 seconds, you receive data timestamped
# between 15:02:15 and 15:07:15.
#
# @option params [required, Time,DateTime,Date,Integer,String] :end_time
# The time stamp that determines the last data point to return.
#
# The value specified is exclusive; results include data points up to
# the specified time stamp. The time stamp must be in ISO 8601 UTC
# format (for example, 2016-10-10T23:00:00Z).
#
# @option params [required, Integer] :period
# The granularity, in seconds, of the returned data points. For metrics
# with regular resolution, a period can be as short as one minute (60
# seconds) and must be a multiple of 60. For high-resolution metrics
# that are collected at intervals of less than one minute, the period
# can be 1, 5, 10, 30, 60, or any multiple of 60. High-resolution
# metrics are those metrics stored by a `PutMetricData` call that
# includes a `StorageResolution` of 1 second.
#
# If the `StartTime` parameter specifies a time stamp that is greater
    #   than 3 hours ago, you must specify the period as follows or no data
    #   points in that time range are returned:
#
# * Start time between 3 hours and 15 days ago - Use a multiple of 60
# seconds (1 minute).
#
# * Start time between 15 and 63 days ago - Use a multiple of 300
# seconds (5 minutes).
#
# * Start time greater than 63 days ago - Use a multiple of 3600 seconds
# (1 hour).
#
# @option params [Array<String>] :statistics
# The metric statistics, other than percentile. For percentile
# statistics, use `ExtendedStatistics`. When calling
# `GetMetricStatistics`, you must specify either `Statistics` or
# `ExtendedStatistics`, but not both.
#
# @option params [Array<String>] :extended_statistics
# The percentile statistics. Specify values between p0.0 and p100. When
# calling `GetMetricStatistics`, you must specify either `Statistics` or
# `ExtendedStatistics`, but not both. Percentile statistics are not
# available for metrics when any of the metric values are negative
# numbers.
#
# @option params [String] :unit
# The unit for a given metric. Metrics may be reported in multiple
# units. Not supplying a unit results in all units being returned. If
# you specify only a unit that the metric does not report, the results
# of the call are null.
#
# @return [Types::GetMetricStatisticsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetMetricStatisticsOutput#label #label} => String
# * {Types::GetMetricStatisticsOutput#datapoints #datapoints} => Array<Types::Datapoint>
#
# @example Request syntax with placeholder values
#
# resp = client.get_metric_statistics({
# namespace: "Namespace", # required
# metric_name: "MetricName", # required
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue", # required
# },
# ],
# start_time: Time.now, # required
# end_time: Time.now, # required
# period: 1, # required
# statistics: ["SampleCount"], # accepts SampleCount, Average, Sum, Minimum, Maximum
# extended_statistics: ["ExtendedStatistic"],
# unit: "Seconds", # accepts Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Count, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second, None
# })
#
# @example Response structure
#
# resp.label #=> String
# resp.datapoints #=> Array
# resp.datapoints[0].timestamp #=> Time
# resp.datapoints[0].sample_count #=> Float
# resp.datapoints[0].average #=> Float
# resp.datapoints[0].sum #=> Float
# resp.datapoints[0].minimum #=> Float
# resp.datapoints[0].maximum #=> Float
# resp.datapoints[0].unit #=> String, one of "Seconds", "Microseconds", "Milliseconds", "Bytes", "Kilobytes", "Megabytes", "Gigabytes", "Terabytes", "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits", "Percent", "Count", "Bytes/Second", "Kilobytes/Second", "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second", "Bits/Second", "Kilobits/Second", "Megabits/Second", "Gigabits/Second", "Terabits/Second", "Count/Second", "None"
# resp.datapoints[0].extended_statistics #=> Hash
# resp.datapoints[0].extended_statistics["ExtendedStatistic"] #=> Float
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/GetMetricStatistics AWS API Documentation
#
# @overload get_metric_statistics(params = {})
# @param [Hash] params ({})
def get_metric_statistics(params = {}, options = {})
req = build_request(:get_metric_statistics, params)
req.send_request(options)
end
# You can use the `GetMetricWidgetImage` API to retrieve a snapshot
# graph of one or more Amazon CloudWatch metrics as a bitmap image. You
# can then embed this image into your services and products, such as
# wiki pages, reports, and documents. You could also retrieve images
# regularly, such as every minute, and create your own custom live
# dashboard.
#
# The graph you retrieve can include all CloudWatch metric graph
# features, including metric math and horizontal and vertical
# annotations.
#
# There is a limit of 20 transactions per second for this API. Each
# `GetMetricWidgetImage` action has the following limits:
#
# * As many as 100 metrics in the graph.
#
# * Up to 100 KB uncompressed payload.
#
# @option params [required, String] :metric_widget
# A JSON string that defines the bitmap graph to be retrieved. The
# string includes the metrics to include in the graph, statistics,
# annotations, title, axis limits, and so on. You can include only one
# `MetricWidget` parameter in each `GetMetricWidgetImage` call.
#
# For more information about the syntax of `MetricWidget` see
# CloudWatch-Metric-Widget-Structure.
#
# If any metric on the graph could not load all the requested data
# points, an orange triangle with an exclamation point appears next to
# the graph legend.
#
# @option params [String] :output_format
# The format of the resulting image. Only PNG images are supported.
#
# The default is `png`. If you specify `png`, the API returns an HTTP
# response with the content-type set to `text/xml`. The image data is in
# a `MetricWidgetImage` field. For example:
#
# ` <GetMetricWidgetImageResponse xmlns=<URLstring>>`
#
# ` <GetMetricWidgetImageResult>`
#
# ` <MetricWidgetImage>`
#
# ` iVBORw0KGgoAAAANSUhEUgAAAlgAAAGQEAYAAAAip...`
#
# ` </MetricWidgetImage>`
#
# ` </GetMetricWidgetImageResult>`
#
# ` <ResponseMetadata>`
#
# ` <RequestId>6f0d4192-4d42-11e8-82c1-f539a07e0e3b</RequestId>`
#
# ` </ResponseMetadata>`
#
# `</GetMetricWidgetImageResponse>`
#
# The `image/png` setting is intended only for custom HTTP requests. For
# most use cases, and all actions using an AWS SDK, you should use
# `png`. If you specify `image/png`, the HTTP response has a
# content-type set to `image/png`, and the body of the response is a PNG
# image.
#
# @return [Types::GetMetricWidgetImageOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetMetricWidgetImageOutput#metric_widget_image #metric_widget_image} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_metric_widget_image({
# metric_widget: "MetricWidget", # required
# output_format: "OutputFormat",
# })
#
# @example Response structure
#
# resp.metric_widget_image #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/GetMetricWidgetImage AWS API Documentation
#
# @overload get_metric_widget_image(params = {})
# @param [Hash] params ({})
def get_metric_widget_image(params = {}, options = {})
req = build_request(:get_metric_widget_image, params)
req.send_request(options)
end
# Returns a list of the dashboards for your account. If you include
# `DashboardNamePrefix`, only those dashboards with names starting with
# the prefix are listed. Otherwise, all dashboards in your account are
# listed.
#
# `ListDashboards` returns up to 1000 results on one page. If there are
# more than 1000 dashboards, you can call `ListDashboards` again and
# include the value you received for `NextToken` in the first call, to
# receive the next 1000 results.
#
# @option params [String] :dashboard_name_prefix
# If you specify this parameter, only the dashboards with names starting
# with the specified string are listed. The maximum length is 255, and
# valid characters are A-Z, a-z, 0-9, ".", "-", and "\_".
#
# @option params [String] :next_token
# The token returned by a previous call to indicate that there is more
# data available.
#
# @return [Types::ListDashboardsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListDashboardsOutput#dashboard_entries #dashboard_entries} => Array<Types::DashboardEntry>
# * {Types::ListDashboardsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_dashboards({
# dashboard_name_prefix: "DashboardNamePrefix",
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.dashboard_entries #=> Array
# resp.dashboard_entries[0].dashboard_name #=> String
# resp.dashboard_entries[0].dashboard_arn #=> String
# resp.dashboard_entries[0].last_modified #=> Time
# resp.dashboard_entries[0].size #=> Integer
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/ListDashboards AWS API Documentation
#
# @overload list_dashboards(params = {})
# @param [Hash] params ({})
def list_dashboards(params = {}, options = {})
req = build_request(:list_dashboards, params)
req.send_request(options)
end
# List the specified metrics. You can use the returned metrics with
# GetMetricData or GetMetricStatistics to obtain statistical data.
#
# Up to 500 results are returned for any one call. To retrieve
# additional results, use the returned token with subsequent calls.
#
# After you create a metric, allow up to fifteen minutes before the
# metric appears. Statistics about the metric, however, are available
# sooner using GetMetricData or GetMetricStatistics.
#
# @option params [String] :namespace
# The namespace to filter against.
#
# @option params [String] :metric_name
# The name of the metric to filter against.
#
# @option params [Array<Types::DimensionFilter>] :dimensions
# The dimensions to filter against.
#
# @option params [String] :next_token
# The token returned by a previous call to indicate that there is more
# data available.
#
# @return [Types::ListMetricsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListMetricsOutput#metrics #metrics} => Array<Types::Metric>
# * {Types::ListMetricsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_metrics({
# namespace: "Namespace",
# metric_name: "MetricName",
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue",
# },
# ],
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.metrics #=> Array
# resp.metrics[0].namespace #=> String
# resp.metrics[0].metric_name #=> String
# resp.metrics[0].dimensions #=> Array
# resp.metrics[0].dimensions[0].name #=> String
# resp.metrics[0].dimensions[0].value #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/ListMetrics AWS API Documentation
#
# @overload list_metrics(params = {})
# @param [Hash] params ({})
def list_metrics(params = {}, options = {})
req = build_request(:list_metrics, params)
req.send_request(options)
end
# Displays the tags associated with a CloudWatch resource. Alarms
# support tagging.
#
# @option params [required, String] :resource_arn
# The ARN of the CloudWatch resource that you want to view tags for. For
# more information on ARN format, see [Example ARNs][1] in the *Amazon
# Web Services General Reference*.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#arn-syntax-cloudwatch
#
# @return [Types::ListTagsForResourceOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTagsForResourceOutput#tags #tags} => Array<Types::Tag>
#
# @example Request syntax with placeholder values
#
# resp = client.list_tags_for_resource({
# resource_arn: "AmazonResourceName", # required
# })
#
# @example Response structure
#
# resp.tags #=> Array
# resp.tags[0].key #=> String
# resp.tags[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/ListTagsForResource AWS API Documentation
#
# @overload list_tags_for_resource(params = {})
# @param [Hash] params ({})
def list_tags_for_resource(params = {}, options = {})
req = build_request(:list_tags_for_resource, params)
req.send_request(options)
end
# Creates a dashboard if it does not already exist, or updates an
# existing dashboard. If you update a dashboard, the entire contents are
# replaced with what you specify here.
#
# There is no limit to the number of dashboards in your account. All
# dashboards in your account are global, not region-specific.
#
# A simple way to create a dashboard using `PutDashboard` is to copy an
# existing dashboard. To copy an existing dashboard using the console,
# you can load the dashboard and then use the View/edit source command
# in the Actions menu to display the JSON block for that dashboard.
# Another way to copy a dashboard is to use `GetDashboard`, and then use
# the data returned within `DashboardBody` as the template for the new
# dashboard when you call `PutDashboard`.
#
# When you create a dashboard with `PutDashboard`, a good practice is to
# add a text widget at the top of the dashboard with a message that the
# dashboard was created by script and should not be changed in the
# console. This message could also point console users to the location
# of the `DashboardBody` script or the CloudFormation template used to
# create the dashboard.
#
# @option params [required, String] :dashboard_name
# The name of the dashboard. If a dashboard with this name already
# exists, this call modifies that dashboard, replacing its current
# contents. Otherwise, a new dashboard is created. The maximum length is
# 255, and valid characters are A-Z, a-z, 0-9, "-", and "\_". This
# parameter is required.
#
# @option params [required, String] :dashboard_body
# The detailed information about the dashboard in JSON format, including
# the widgets to include and their location on the dashboard. This
# parameter is required.
#
# For more information about the syntax, see
# CloudWatch-Dashboard-Body-Structure.
#
# @return [Types::PutDashboardOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PutDashboardOutput#dashboard_validation_messages #dashboard_validation_messages} => Array<Types::DashboardValidationMessage>
#
# @example Request syntax with placeholder values
#
# resp = client.put_dashboard({
# dashboard_name: "DashboardName", # required
# dashboard_body: "DashboardBody", # required
# })
#
# @example Response structure
#
# resp.dashboard_validation_messages #=> Array
# resp.dashboard_validation_messages[0].data_path #=> String
# resp.dashboard_validation_messages[0].message #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/PutDashboard AWS API Documentation
#
# @overload put_dashboard(params = {})
# @param [Hash] params ({})
def put_dashboard(params = {}, options = {})
req = build_request(:put_dashboard, params)
req.send_request(options)
end
# Creates or updates an alarm and associates it with the specified
# metric or metric math expression.
#
# When this operation creates an alarm, the alarm state is immediately
# set to `INSUFFICIENT_DATA`. The alarm is then evaluated and its state
# is set appropriately. Any actions associated with the new state are
# then executed.
#
# When you update an existing alarm, its state is left unchanged, but
# the update completely overwrites the previous configuration of the
# alarm.
#
# If you are an IAM user, you must have Amazon EC2 permissions for some
# alarm operations:
#
# * `iam:CreateServiceLinkedRole` for all alarms with EC2 actions
#
# * `ec2:DescribeInstanceStatus` and `ec2:DescribeInstances` for all
# alarms on EC2 instance status metrics
#
# * `ec2:StopInstances` for alarms with stop actions
#
# * `ec2:TerminateInstances` for alarms with terminate actions
#
# * No specific permissions are needed for alarms with recover actions
#
# If you have read/write permissions for Amazon CloudWatch but not for
# Amazon EC2, you can still create an alarm, but the stop or terminate
# actions are not performed. However, if you are later granted the
# required permissions, the alarm actions that you created earlier are
# performed.
#
# If you are using an IAM role (for example, an EC2 instance profile),
# you cannot stop or terminate the instance using alarm actions.
# However, you can still see the alarm state and perform any other
# actions such as Amazon SNS notifications or Auto Scaling policies.
#
# If you are using temporary security credentials granted using AWS STS,
# you cannot stop or terminate an EC2 instance using alarm actions.
#
# The first time you create an alarm in the AWS Management Console, the
# CLI, or by using the PutMetricAlarm API, CloudWatch creates the
# necessary service-linked role for you. The service-linked role is
# called `AWSServiceRoleForCloudWatchEvents`. For more information, see
# [AWS service-linked role][1].
#
#
#
# [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-service-linked-role
#
# @option params [required, String] :alarm_name
# The name for the alarm. This name must be unique within your AWS
# account.
#
# @option params [String] :alarm_description
# The description for the alarm.
#
# @option params [Boolean] :actions_enabled
# Indicates whether actions should be executed during any changes to the
# alarm state. The default is TRUE.
#
# @option params [Array<String>] :ok_actions
# The actions to execute when this alarm transitions to an `OK` state
# from any other state. Each action is specified as an Amazon Resource
# Name (ARN).
#
# Valid Values: `arn:aws:automate:region:ec2:stop` \|
# `arn:aws:automate:region:ec2:terminate` \|
# `arn:aws:automate:region:ec2:recover` \|
# `arn:aws:automate:region:ec2:reboot` \|
# `arn:aws:sns:region:account-id:sns-topic-name ` \|
# `arn:aws:autoscaling:region:account-id:scalingPolicy:policy-idautoScalingGroupName/group-friendly-name:policyName/policy-friendly-name
# `
#
# Valid Values (for use with IAM roles):
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0`
# \|
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0`
# \|
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0`
#
# @option params [Array<String>] :alarm_actions
# The actions to execute when this alarm transitions to the `ALARM`
# state from any other state. Each action is specified as an Amazon
# Resource Name (ARN).
#
# Valid Values: `arn:aws:automate:region:ec2:stop` \|
# `arn:aws:automate:region:ec2:terminate` \|
# `arn:aws:automate:region:ec2:recover` \|
# `arn:aws:automate:region:ec2:reboot` \|
# `arn:aws:sns:region:account-id:sns-topic-name ` \|
# `arn:aws:autoscaling:region:account-id:scalingPolicy:policy-idautoScalingGroupName/group-friendly-name:policyName/policy-friendly-name
# `
#
# Valid Values (for use with IAM roles):
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0`
# \|
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0`
# \|
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0`
#
# @option params [Array<String>] :insufficient_data_actions
# The actions to execute when this alarm transitions to the
# `INSUFFICIENT_DATA` state from any other state. Each action is
# specified as an Amazon Resource Name (ARN).
#
# Valid Values: `arn:aws:automate:region:ec2:stop` \|
# `arn:aws:automate:region:ec2:terminate` \|
# `arn:aws:automate:region:ec2:recover` \|
# `arn:aws:automate:region:ec2:reboot` \|
# `arn:aws:sns:region:account-id:sns-topic-name ` \|
# `arn:aws:autoscaling:region:account-id:scalingPolicy:policy-idautoScalingGroupName/group-friendly-name:policyName/policy-friendly-name
# `
#
# Valid Values (for use with IAM roles):
    # `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0`
# \|
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0`
# \|
# `arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0`
#
# @option params [String] :metric_name
# The name for the metric associated with the alarm.
#
# If you are creating an alarm based on a math expression, you cannot
# specify this parameter, or any of the `Dimensions`, `Period`,
# `Namespace`, `Statistic`, or `ExtendedStatistic` parameters. Instead,
# you specify all this information in the `Metrics` array.
#
# @option params [String] :namespace
# The namespace for the metric associated specified in `MetricName`.
#
# @option params [String] :statistic
# The statistic for the metric specified in `MetricName`, other than
# percentile. For percentile statistics, use `ExtendedStatistic`. When
# you call `PutMetricAlarm` and specify a `MetricName`, you must specify
# either `Statistic` or `ExtendedStatistic,` but not both.
#
# @option params [String] :extended_statistic
# The percentile statistic for the metric specified in `MetricName`.
# Specify a value between p0.0 and p100. When you call `PutMetricAlarm`
# and specify a `MetricName`, you must specify either `Statistic` or
# `ExtendedStatistic,` but not both.
#
# @option params [Array<Types::Dimension>] :dimensions
# The dimensions for the metric specified in `MetricName`.
#
# @option params [Integer] :period
# The length, in seconds, used each time the metric specified in
# `MetricName` is evaluated. Valid values are 10, 30, and any multiple
# of 60.
#
# Be sure to specify 10 or 30 only for metrics that are stored by a
# `PutMetricData` call with a `StorageResolution` of 1. If you specify a
# period of 10 or 30 for a metric that does not have sub-minute
# resolution, the alarm still attempts to gather data at the period rate
# that you specify. In this case, it does not receive data for the
# attempts that do not correspond to a one-minute data resolution, and
    # the alarm may often lapse into INSUFFICIENT\_DATA status. Specifying 10
# or 30 also sets this alarm as a high-resolution alarm, which has a
# higher charge than other alarms. For more information about pricing,
# see [Amazon CloudWatch Pricing][1].
#
# An alarm's total current evaluation period can be no longer than one
# day, so `Period` multiplied by `EvaluationPeriods` cannot be more than
# 86,400 seconds.
#
#
#
# [1]: https://aws.amazon.com/cloudwatch/pricing/
#
# @option params [String] :unit
# The unit of measure for the statistic. For example, the units for the
# Amazon EC2 NetworkIn metric are Bytes because NetworkIn tracks the
# number of bytes that an instance receives on all network interfaces.
# You can also specify a unit when you create a custom metric. Units
# help provide conceptual meaning to your data. Metric data points that
# specify a unit of measure, such as Percent, are aggregated separately.
#
# If you specify a unit, you must use a unit that is appropriate for the
# metric. Otherwise, the CloudWatch alarm can get stuck in the
# `INSUFFICIENT DATA` state.
#
# @option params [required, Integer] :evaluation_periods
# The number of periods over which data is compared to the specified
# threshold. If you are setting an alarm that requires that a number of
# consecutive data points be breaching to trigger the alarm, this value
# specifies that number. If you are setting an "M out of N" alarm,
# this value is the N.
#
# An alarm's total current evaluation period can be no longer than one
# day, so this number multiplied by `Period` cannot be more than 86,400
# seconds.
#
# @option params [Integer] :datapoints_to_alarm
# The number of datapoints that must be breaching to trigger the alarm.
# This is used only if you are setting an "M out of N" alarm. In that
# case, this value is the M. For more information, see [Evaluating an
# Alarm][1] in the *Amazon CloudWatch User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/AlarmThatSendsEmail.html#alarm-evaluation
#
# @option params [required, Float] :threshold
# The value against which the specified statistic is compared.
#
# @option params [required, String] :comparison_operator
# The arithmetic operation to use when comparing the specified statistic
# and threshold. The specified statistic value is used as the first
# operand.
#
# @option params [String] :treat_missing_data
# Sets how this alarm is to handle missing data points. If
# `TreatMissingData` is omitted, the default behavior of `missing` is
# used. For more information, see [Configuring How CloudWatch Alarms
# Treats Missing Data][1].
#
# Valid Values: `breaching | notBreaching | ignore | missing`
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/AlarmThatSendsEmail.html#alarms-and-missing-data
#
# @option params [String] :evaluate_low_sample_count_percentile
# Used only for alarms based on percentiles. If you specify `ignore`,
# the alarm state does not change during periods with too few data
# points to be statistically significant. If you specify `evaluate` or
# omit this parameter, the alarm is always evaluated and possibly
# changes state no matter how many data points are available. For more
# information, see [Percentile-Based CloudWatch Alarms and Low Data
# Samples][1].
#
# Valid Values: `evaluate | ignore`
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/AlarmThatSendsEmail.html#percentiles-with-low-samples
#
# @option params [Array<Types::MetricDataQuery>] :metrics
# An array of `MetricDataQuery` structures that enable you to create an
# alarm based on the result of a metric math expression. Each item in
# the `Metrics` array either retrieves a metric or performs a math
# expression.
#
# One item in the `Metrics` array is the expression that the alarm
# watches. You designate this expression by setting `ReturnValue` to
# true for this object in the array. For more information, see
# MetricDataQuery.
#
# If you use the `Metrics` parameter, you cannot include the
# `MetricName`, `Dimensions`, `Period`, `Namespace`, `Statistic`, or
# `ExtendedStatistic` parameters of `PutMetricAlarm` in the same
# operation. Instead, you retrieve the metrics you are using in your
# math expression as part of the `Metrics` array.
#
# @option params [Array<Types::Tag>] :tags
# A list of key-value pairs to associate with the alarm or dashboard.
# You can associate as many as 50 tags with an alarm.
#
# Tags can help you organize and categorize your resources. You can also
# use them to scope user permissions, by granting a user permission to
# access or change only resources with certain tag values.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.put_metric_alarm({
# alarm_name: "AlarmName", # required
# alarm_description: "AlarmDescription",
# actions_enabled: false,
# ok_actions: ["ResourceName"],
# alarm_actions: ["ResourceName"],
# insufficient_data_actions: ["ResourceName"],
# metric_name: "MetricName",
# namespace: "Namespace",
# statistic: "SampleCount", # accepts SampleCount, Average, Sum, Minimum, Maximum
# extended_statistic: "ExtendedStatistic",
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue", # required
# },
# ],
# period: 1,
# unit: "Seconds", # accepts Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Count, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second, None
# evaluation_periods: 1, # required
# datapoints_to_alarm: 1,
# threshold: 1.0, # required
# comparison_operator: "GreaterThanOrEqualToThreshold", # required, accepts GreaterThanOrEqualToThreshold, GreaterThanThreshold, LessThanThreshold, LessThanOrEqualToThreshold
# treat_missing_data: "TreatMissingData",
# evaluate_low_sample_count_percentile: "EvaluateLowSampleCountPercentile",
# metrics: [
# {
# id: "MetricId", # required
# metric_stat: {
# metric: { # required
# namespace: "Namespace",
# metric_name: "MetricName",
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue", # required
# },
# ],
# },
# period: 1, # required
# stat: "Stat", # required
# unit: "Seconds", # accepts Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Count, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second, None
# },
# expression: "MetricExpression",
# label: "MetricLabel",
# return_data: false,
# },
# ],
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/PutMetricAlarm AWS API Documentation
#
# @overload put_metric_alarm(params = {})
# @param [Hash] params ({})
def put_metric_alarm(params = {}, options = {})
req = build_request(:put_metric_alarm, params)
req.send_request(options)
end
# Publishes metric data points to Amazon CloudWatch. CloudWatch
# associates the data points with the specified metric. If the specified
# metric does not exist, CloudWatch creates the metric. When CloudWatch
# creates a metric, it can take up to fifteen minutes for the metric to
# appear in calls to ListMetrics.
#
# You can publish either individual data points in the `Value` field, or
# arrays of values and the number of times each value occurred during
# the period by using the `Values` and `Counts` fields in the
# `MetricDatum` structure. Using the `Values` and `Counts` method
# enables you to publish up to 150 values per metric with one
# `PutMetricData` request, and supports retrieving percentile statistics
# on this data.
#
# Each `PutMetricData` request is limited to 40 KB in size for HTTP POST
# requests. You can send a payload compressed by gzip. Each request is
# also limited to no more than 20 different metrics.
#
# Although the `Value` parameter accepts numbers of type `Double`,
# CloudWatch rejects values that are either too small or too large.
# Values must be in the range of 8.515920e-109 to 1.174271e+108 (Base
# 10) or 2e-360 to 2e360 (Base 2). In addition, special values (for
# example, NaN, +Infinity, -Infinity) are not supported.
#
# You can use up to 10 dimensions per metric to further clarify what
# data the metric collects. Each dimension consists of a Name and Value
# pair. For more information about specifying dimensions, see
# [Publishing Metrics][1] in the *Amazon CloudWatch User Guide*.
#
# Data points with time stamps from 24 hours ago or longer can take at
# least 48 hours to become available for GetMetricData or
# GetMetricStatistics from the time they are submitted.
#
# CloudWatch needs raw data points to calculate percentile statistics.
# If you publish data using a statistic set instead, you can only
# retrieve percentile statistics for this data if one of the following
# conditions is true:
#
# * The `SampleCount` value of the statistic set is 1 and `Min`, `Max`,
# and `Sum` are all equal.
#
# * The `Min` and `Max` are equal, and `Sum` is equal to `Min`
# multiplied by `SampleCount`.
#
#
#
# [1]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/publishingMetrics.html
#
# @option params [required, String] :namespace
# The namespace for the metric data.
#
# You cannot specify a namespace that begins with "AWS/". Namespaces
# that begin with "AWS/" are reserved for use by Amazon Web Services
# products.
#
# @option params [required, Array<Types::MetricDatum>] :metric_data
# The data for the metric. The array can include no more than 20 metrics
# per call.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.put_metric_data({
# namespace: "Namespace", # required
# metric_data: [ # required
# {
# metric_name: "MetricName", # required
# dimensions: [
# {
# name: "DimensionName", # required
# value: "DimensionValue", # required
# },
# ],
# timestamp: Time.now,
# value: 1.0,
# statistic_values: {
# sample_count: 1.0, # required
# sum: 1.0, # required
# minimum: 1.0, # required
# maximum: 1.0, # required
# },
# values: [1.0],
# counts: [1.0],
# unit: "Seconds", # accepts Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Count, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second, None
# storage_resolution: 1,
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/PutMetricData AWS API Documentation
#
# @overload put_metric_data(params = {})
# @param [Hash] params ({})
def put_metric_data(params = {}, options = {})
req = build_request(:put_metric_data, params)
req.send_request(options)
end
# Temporarily sets the state of an alarm for testing purposes. When the
# updated state differs from the previous value, the action configured
# for the appropriate state is invoked. For example, if your alarm is
# configured to send an Amazon SNS message when an alarm is triggered,
# temporarily changing the alarm state to `ALARM` sends an SNS message.
# The alarm returns to its actual state (often within seconds). Because
# the alarm state change happens quickly, it is typically only visible
# in the alarm's **History** tab in the Amazon CloudWatch console or
# through DescribeAlarmHistory.
#
# @option params [required, String] :alarm_name
# The name for the alarm. This name must be unique within the AWS
# account. The maximum length is 255 characters.
#
# @option params [required, String] :state_value
# The value of the state.
#
# @option params [required, String] :state_reason
# The reason that this alarm is set to this specific state, in text
# format.
#
# @option params [String] :state_reason_data
# The reason that this alarm is set to this specific state, in JSON
# format.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.set_alarm_state({
# alarm_name: "AlarmName", # required
# state_value: "OK", # required, accepts OK, ALARM, INSUFFICIENT_DATA
# state_reason: "StateReason", # required
# state_reason_data: "StateReasonData",
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/SetAlarmState AWS API Documentation
#
# @overload set_alarm_state(params = {})
# @param [Hash] params ({})
def set_alarm_state(params = {}, options = {})
req = build_request(:set_alarm_state, params)
req.send_request(options)
end
# Assigns one or more tags (key-value pairs) to the specified CloudWatch
# resource. Tags can help you organize and categorize your resources.
# You can also use them to scope user permissions, by granting a user
# permission to access or change only resources with certain tag values.
# In CloudWatch, alarms can be tagged.
#
# Tags don't have any semantic meaning to AWS and are interpreted
# strictly as strings of characters.
#
# You can use the `TagResource` action with a resource that already has
# tags. If you specify a new tag key for the resource, this tag is
# appended to the list of tags associated with the resource. If you
# specify a tag key that is already associated with the resource, the
# new tag value that you specify replaces the previous value for that
# tag.
#
# You can associate as many as 50 tags with a resource.
#
# @option params [required, String] :resource_arn
# The ARN of the CloudWatch resource that you're adding tags to. For
# more information on ARN format, see [Example ARNs][1] in the *Amazon
# Web Services General Reference*.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#arn-syntax-cloudwatch
#
# @option params [required, Array<Types::Tag>] :tags
# The list of key-value pairs to associate with the resource.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.tag_resource({
# resource_arn: "AmazonResourceName", # required
# tags: [ # required
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/TagResource AWS API Documentation
#
# @overload tag_resource(params = {})
# @param [Hash] params ({})
def tag_resource(params = {}, options = {})
req = build_request(:tag_resource, params)
req.send_request(options)
end
# Removes one or more tags from the specified resource.
#
# @option params [required, String] :resource_arn
# The ARN of the CloudWatch resource that you're removing tags from.
# For more information on ARN format, see [Example ARNs][1] in the
# *Amazon Web Services General Reference*.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#arn-syntax-cloudwatch
#
# @option params [required, Array<String>] :tag_keys
# The list of tag keys to remove from the resource.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.untag_resource({
# resource_arn: "AmazonResourceName", # required
# tag_keys: ["TagKey"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/monitoring-2010-08-01/UntagResource AWS API Documentation
#
# @overload untag_resource(params = {})
# @param [Hash] params ({})
def untag_resource(params = {}, options = {})
req = build_request(:untag_resource, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-cloudwatch'
context[:gem_version] = '1.19.0'
Seahorse::Client::Request.new(handlers, context)
end
# Polls an API operation until a resource enters a desired state.
#
# ## Basic Usage
#
# A waiter will call an API operation until:
#
# * It is successful
# * It enters a terminal state
# * It makes the maximum number of attempts
#
# In between attempts, the waiter will sleep.
#
# # polls in a loop, sleeping between attempts
# client.wait_until(waiter_name, params)
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. You can pass
# configuration as the final arguments hash.
#
# # poll for ~25 seconds
# client.wait_until(waiter_name, params, {
# max_attempts: 5,
# delay: 5,
# })
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# client.wait_until(waiter_name, params, {
#
# # disable max attempts
# max_attempts: nil,
#
# # poll for 1 hour, instead of a number of attempts
# before_wait: -> (attempts, response) do
# throw :failure if Time.now - started_at > 3600
# end
# })
#
# ## Handling Errors
#
# When a waiter is unsuccessful, it will raise an error.
# All of the failure errors extend from
# {Aws::Waiters::Errors::WaiterFailed}.
#
# begin
# client.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
# ## Valid Waiters
#
# The following table lists the valid waiter names, the operations they call,
# and the default `:delay` and `:max_attempts` values.
#
# | waiter_name | params | :delay | :max_attempts |
# | ------------ | ------------------ | -------- | ------------- |
# | alarm_exists | {#describe_alarms} | 5 | 40 |
#
# @raise [Errors::FailureStateError] Raised when the waiter terminates
# because the waiter has entered a state that it will not transition
# out of, preventing success.
#
# @raise [Errors::TooManyAttemptsError] Raised when the configured
# maximum number of attempts have been made, and the waiter is not
# yet successful.
#
    # @raise [Errors::UnexpectedError] Raised when an error is encountered
# while polling for a resource that is not expected.
#
# @raise [Errors::NoSuchWaiterError] Raised when you request to wait
# for an unknown state.
#
# @return [Boolean] Returns `true` if the waiter was successful.
# @param [Symbol] waiter_name
# @param [Hash] params ({})
# @param [Hash] options ({})
# @option options [Integer] :max_attempts
# @option options [Integer] :delay
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def wait_until(waiter_name, params = {}, options = {})
w = waiter(waiter_name, options)
yield(w.waiter) if block_given? # deprecated
w.wait(params)
end
# @api private
# @deprecated
def waiter_names
waiters.keys
end
private
# @param [Symbol] waiter_name
# @param [Hash] options ({})
def waiter(waiter_name, options = {})
waiter_class = waiters[waiter_name]
if waiter_class
waiter_class.new(options.merge(client: self))
else
raise Aws::Waiters::Errors::NoSuchWaiterError.new(waiter_name, waiters.keys)
end
end
def waiters
{
alarm_exists: Waiters::AlarmExists
}
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 45.946322 | 466 | 0.662325 |
b97c471625fb43956d24eccb409f0aa437ffabfd | 168 | class CreatePosts < ActiveRecord::Migration
  # Schema change for the posts table: body text plus references to the
  # group the post belongs to and the authoring user.
  #
  # Bug fix: `create_table` was invoked directly in the class body, which
  # executes at load time instead of when the migration runs; migration
  # DSL calls must live inside #change (or #up/#down).
  def change
    create_table :posts do |p|
      p.text :post_content
      p.belongs_to :group
      p.belongs_to :user
      p.timestamps
    end
  end
end
| 18.666667 | 43 | 0.72619 |
014f2c432ed228c0400a61beee9c5b75384be5e6 | 472 | require_relative "../lib/enigma/decrypt_file"
module Enigma
describe "Decrypt file" do
it "should Decrypt a file" do
file_decrypt = Enigma::File_Decryptor.new
encrypted_text = file_decrypt.decrypt_file("encrypt_test", "sample.txt", "93633", "090915")
expect(File.exist?("sample.txt")).to be true
secret_message = File.open("encrypt_test", "r").read
expect(secret_message).to eq("3k. u40nl9gx ,xaxx cx fat y0 3u")
end
end
end | 27.764706 | 97 | 0.686441 |
335ce745efa544e46364830730e27fc40498dc32 | 401 | class Dish < ApplicationRecord
  # A menu item. Dishes reach menus through the menu_dishes join table
  # and restaurants through those menus; users connect via orders.
  has_many :menu_dishes
  has_many :menus, through: :menu_dishes
  has_many :restaurants, through: :menus
  has_many :orders
  has_many :users, through: :orders
  has_many :favorites
  # Legacy Sequelize attribute definitions kept for reference only:
  # name: {type: Sequelize.STRING,
  # allowNull: false},
  # description: Sequelize.TEXT,
  # price: Sequelize.FLOAT,
  # category: Sequelize.ARRAY(Sequelize.STRING)
end
| 28.642857 | 47 | 0.708229 |
18f9c412ffacab786fab51a6615e7104d5e4a495 | 6,503 | require 'test_helper'
require 'ostruct'
class MockController < ApplicationController
attr_accessor :env
def request
self
end
def path
''
end
def index
end
def host_with_port
"test.host:3000"
end
def protocol
"http"
end
def script_name
""
end
def symbolized_path_parameters
{}
end
end
class ControllerAuthenticableTest < ActionController::TestCase
tests MockController
def setup
@mock_warden = OpenStruct.new
@controller.env = { 'warden' => @mock_warden }
end
test 'setup warden' do
assert_not_nil @controller.warden
end
test 'provide access to warden instance' do
assert_equal @controller.warden, @controller.env['warden']
end
test 'proxy signed_in? to authenticated' do
@mock_warden.expects(:authenticate?).with(:scope => :my_scope)
@controller.signed_in?(:my_scope)
end
test 'proxy anybody_signed_in? to signed_in?' do
Devise.mappings.keys.each { |scope| # :user, :admin, :manager
@controller.expects(:signed_in?).with(scope)
}
@controller.anybody_signed_in?
end
test 'proxy current_admin to authenticate with admin scope' do
@mock_warden.expects(:authenticate).with(:scope => :admin)
@controller.current_admin
end
test 'proxy current_user to authenticate with user scope' do
@mock_warden.expects(:authenticate).with(:scope => :user)
@controller.current_user
end
test 'proxy user_authenticate! to authenticate with user scope' do
@mock_warden.expects(:authenticate!).with(:scope => :user)
@controller.authenticate_user!
end
test 'proxy admin_authenticate! to authenticate with admin scope' do
@mock_warden.expects(:authenticate!).with(:scope => :admin)
@controller.authenticate_admin!
end
test 'proxy user_signed_in? to authenticate? with user scope' do
@mock_warden.expects(:authenticate?).with(:scope => :user)
@controller.user_signed_in?
end
test 'proxy admin_signed_in? to authenticate? with admin scope' do
@mock_warden.expects(:authenticate?).with(:scope => :admin)
@controller.admin_signed_in?
end
test 'proxy user_session to session scope in warden' do
@mock_warden.expects(:authenticate).with(:scope => :user).returns(true)
@mock_warden.expects(:session).with(:user).returns({})
@controller.user_session
end
test 'proxy admin_session to session scope in warden' do
@mock_warden.expects(:authenticate).with(:scope => :admin).returns(true)
@mock_warden.expects(:session).with(:admin).returns({})
@controller.admin_session
end
test 'sign in proxy to set_user on warden' do
user = User.new
@mock_warden.expects(:set_user).with(user, :scope => :user).returns(true)
@controller.sign_in(:user, user)
end
test 'sign in accepts a resource as argument' do
user = User.new
@mock_warden.expects(:set_user).with(user, :scope => :user).returns(true)
@controller.sign_in(user)
end
test 'sign out proxy to logout on warden' do
@mock_warden.expects(:user).with(:user).returns(true)
@mock_warden.expects(:logout).with(:user).returns(true)
@controller.sign_out(:user)
end
test 'sign out accepts a resource as argument' do
@mock_warden.expects(:user).with(:user).returns(true)
@mock_warden.expects(:logout).with(:user).returns(true)
@controller.sign_out(User.new)
end
test 'stored location for returns the location for a given scope' do
assert_nil @controller.stored_location_for(:user)
@controller.session[:"user_return_to"] = "/foo.bar"
assert_equal "/foo.bar", @controller.stored_location_for(:user)
end
test 'stored location for accepts a resource as argument' do
assert_nil @controller.stored_location_for(:user)
@controller.session[:"user_return_to"] = "/foo.bar"
assert_equal "/foo.bar", @controller.stored_location_for(User.new)
end
test 'stored location cleans information after reading' do
@controller.session[:"user_return_to"] = "/foo.bar"
assert_equal "/foo.bar", @controller.stored_location_for(:user)
assert_nil @controller.session[:"user_return_to"]
end
test 'after sign in path defaults to root path if none by was specified for the given scope' do
assert_equal root_path, @controller.after_sign_in_path_for(:user)
end
test 'after sign in path defaults to the scoped root path' do
assert_equal admin_root_path, @controller.after_sign_in_path_for(:admin)
end
test 'after update path defaults to root path if none by was specified for the given scope' do
assert_equal root_path, @controller.after_update_path_for(:user)
end
test 'after update path defaults to the scoped root path' do
assert_equal admin_root_path, @controller.after_update_path_for(:admin)
end
test 'after sign out path defaults to the root path' do
assert_equal root_path, @controller.after_sign_out_path_for(:admin)
assert_equal root_path, @controller.after_sign_out_path_for(:user)
end
test 'sign in and redirect uses the stored location' do
user = User.new
@controller.session[:"user_return_to"] = "/foo.bar"
@mock_warden.expects(:user).with(:user).returns(nil)
@mock_warden.expects(:set_user).with(user, :scope => :user).returns(true)
@controller.expects(:redirect_to).with("/foo.bar")
@controller.sign_in_and_redirect(user)
end
test 'sign in and redirect uses the configured after sign in path' do
admin = Admin.new
@mock_warden.expects(:user).with(:admin).returns(nil)
@mock_warden.expects(:set_user).with(admin, :scope => :admin).returns(true)
@controller.expects(:redirect_to).with(admin_root_path)
@controller.sign_in_and_redirect(admin)
end
test 'sign in and redirect does not sign in again if user is already signed' do
admin = Admin.new
@mock_warden.expects(:user).with(:admin).returns(admin)
@mock_warden.expects(:set_user).never
@controller.expects(:redirect_to).with(admin_root_path)
@controller.sign_in_and_redirect(admin)
end
test 'sign out and redirect uses the configured after sign out path' do
@mock_warden.expects(:user).with(:admin).returns(true)
@mock_warden.expects(:logout).with(:admin).returns(true)
@controller.expects(:redirect_to).with(admin_root_path)
@controller.instance_eval "def after_sign_out_path_for(resource); admin_root_path; end"
@controller.sign_out_and_redirect(:admin)
end
test 'is not a devise controller' do
assert_not @controller.devise_controller?
end
end
| 31.721951 | 97 | 0.727049 |
62d2b1acee555d721b81f61657bc7ac5f4b522d7 | 545 |
# Setup integration system for the integration suite
Dir.chdir "#{File.dirname(__FILE__)}/integration/app/" do
`ps awx`.split("\n").grep(/4304[1-3]/).map do |process|
system("kill -9 #{process.to_i}")
end
LOG = "/tmp/memcached.log"
system "memcached -vv -p 43042 >> #{LOG} 2>&1 &"
system "memcached -vv -p 43043 >> #{LOG} 2>&1 &"
Dir.chdir "vendor/plugins" do
system "rm interlock; ln -s ../../../../../ interlock"
end
system "rake db:create"
system "rake db:migrate"
system "rake db:fixtures:load"
end
| 23.695652 | 58 | 0.620183 |
e9759d6abed714e5f7e515f519f31d8b332e28e8 | 136 | class AddEntryCompanyDateToUser < ActiveRecord::Migration[6.1]
def change
add_column :users, :entry_company_date, :date
end
end
| 22.666667 | 62 | 0.772059 |
083456b40b3b67633209ff4a281333dc89130ad4 | 654 | class User < ApplicationRecord
has_many :created_products, foreign_key: "user_id", class_name: "Product"
has_many :products
has_many :buildings, through: :products
has_secure_password #created from user table / column password_digest. Password_digest avoids passwords from being stolen by encrypting before stored in the database.
validates :username, :email, presence: true
validates :username, uniqueness: true #validates that only one user
# def self.create_with_omniauth(auth)
# create! do |user|
# user.provider = auth["provider"]
# user.uid = auth["uid"]
# user.name = auth["info"]["username"]
#end
#end
end
| 36.333333 | 168 | 0.730887 |
398b7ae9de1823f66db294870e49088830b4b3d6 | 3,227 | require 'optparse'
require 'ostruct'
require 'pp'
require 'extension'
require 'logger'
require 'helper'
include Helper
def sec2hm(secs)
time = secs.round
time /= 60
mins = time % 60
time /= 60
hrs = time
[ hrs, mins ]
end
def say_in_telegram(username, match, url, timeleft)
hour, min = sec2hm(timeleft)
"Bro *#{username.escape_telegram_markdown}* tinggal punya sisa waktu #{hour} jam #{min} menit " +
"[#{match.escape_telegram_markdown}](#{url})\n"
end
######################
# main
logger = Logger.new(STDERR, Logger::DEBUG)
options = OpenStruct.new
OptionParser.new do |opts|
opts.separator ""
opts.separator "Options are ..."
opts.on_tail("-h", "--help", "-H", "Display this help message.") do
puts opts
exit
end
opts.on('-u', '--username USERNAME', 'Username on chess.com') {|val|
options.usernames ||= []
options.usernames.push(val)
}
opts.on('-m', '--match MATCH_ID', 'Match ID') {|val|
options.match_ids ||= []
options.match_ids.push(val)
}
opts.on('-w', '--warning WARNING', 'Warning threshold, in number of hours left. Default to 3 hours.') {|val|
options.warn_threshold = val.to_i
}
opts.on('-v', '--verbose', 'Verbose output') do
options.verbose = true
end
end.parse!
options.warn_threshold ||= 3 # default 3 hours left
unless options.match_ids
$stderr.puts "Match ID is not specified."
exit 1
end
unless options.usernames
$stderr.puts "Username is not specified."
exit 1
end
VERBOSE = options.verbose
# only check 'basic' users
# usernames = options.usernames.select {|e| retrieve(player_url(e))["status"] == "basic"}
# skip closed accounts
usernames = options.usernames.reject {|e| retrieve(player_url(e))["status"] =~ /^closed/}
monitored_players = options.match_ids.inject({}) do |m,match_id|
match = retrieve(team_match_api_url(match_id))
next m unless match["status"] == "in_progress"
%w[team1 team2].each do |team|
players = match["teams"][team]["players"].
reject {|e| e["played_as_white"] and e["played_as_black"]}.
select {|e| usernames.include?(e["username"])}.
reject {|e| m.has_key?(e["username"]) and m[e["username"]][match["name"]]}
if VERBOSE
unless players.empty?
logger.info "Match *#{match["name"]}* - players on #{team}: #{players.map {|e| e["username"]}.sort.join(', ')}"
end
end
players.each do |player|
games_to_move(player).each do |game|
next if game["move_by"] == 0
now = Time.now.to_i
delta_in_seconds = game["move_by"] - now
next if delta_in_seconds < 0
unless delta_in_seconds > options.warn_threshold * 3600
m[player["username"]] ||= {}
m[player["username"]][match["name"]] = []
m[player["username"]][match["name"]].push([ game["url"], delta_in_seconds ])
end
end
end
end
m
end
message = monitored_players.inject('') do |m,o|
username, match = o
match.each do |match_name, games|
unless games.empty?
game = games.sort_by(&:last).first # pick the shortest time left
m += say_in_telegram(username, match_name, game[0], game[1])
end
end
m
end
print 'TIMEOUT WARNING!! ' + message unless message.empty?
| 27.818966 | 119 | 0.639913 |
79d29fa50dfaa9d6d45b11d5364306d68ecded1c | 132 | module LambdaMoo
class NullStatement
# Does nothing
def execute(runtime = nil)
StatementResult.ok
end
end
end
| 14.666667 | 30 | 0.681818 |
1dd25e4e83968bad94d05e37a554ff071f6ff0a0 | 207 | class Ccc::SpPageElement < ActiveRecord::Base
belongs_to :sp_pages, class_name: 'Ccc::SpPage', foreign_key: :page_id
belongs_to :sp_elements, class_name: 'Ccc::SpElement', foreign_key: :element_id
end
| 29.571429 | 81 | 0.768116 |
08ab30b74a7001d84ba869a0eab450b1d935fd63 | 245 | PAIR_DELIMITER = ";"
KEY_VALUE_DELIMITER = "|"
def parse_sites
sites_str = ENV.fetch('SITES')
pairs = sites_str.split(PAIR_DELIMITER)
pairs.map do |pair|
from, to = pair.split KEY_VALUE_DELIMITER
{ from: from, to: to }
end
end
| 18.846154 | 45 | 0.685714 |
1d710cc3d81708ffe4b44b2af86a85da5e2b5bb3 | 120 | class RemoveIdFromUnreadEntries < ActiveRecord::Migration
def change
remove_column :unread_entries, :id
end
end
| 20 | 57 | 0.791667 |
181736904ac52b8ed83468bcb70f2df9306ee840 | 1,589 | # This class handels SQLite3-specific behaviour.
class Baza::Driver::Sqlite3Java < Baza::JdbcDriver
AutoAutoloader.autoload_sub_classes(self, __FILE__)
attr_reader :mutex_statement_reader
# Helper to enable automatic registering of database using Baza::Db.from_object
def self.from_object(args)
if args[:object].class.name == "Java::OrgSqlite::SQLiteConnection"
return {
type: :success,
args: {
type: :sqlite3_java,
conn: args[:object]
}
}
end
end
def self.args
[{
label: "Path",
name: "path"
}]
end
# Constructor. This should not be called manually.
def initialize(db)
super
@path = @db.opts[:path] if @db.opts[:path]
@preload_results = true
if @db.opts[:conn]
@conn = @db.opts[:conn]
else
org.sqlite.JDBC
reconnect
end
end
def reconnect
raise "No path was given." unless @path
@stmt = nil
@conn = java.sql.DriverManager.getConnection("jdbc:sqlite:#{@path}")
end
# Escapes a string to be safe to used in a query.
def escape(string)
# This code is taken directly from the documentation so we dont have to rely on the SQLite3::Database class. This way it can also be used with JRuby and IronRuby...
# http://sqlite-ruby.rubyforge.org/classes/SQLite/Database.html
string.to_s.gsub(/'/, "''")
end
def transaction
query_no_result_set("BEGIN TRANSACTION")
begin
yield @db
query_no_result_set("COMMIT")
rescue
query_no_result_set("ROLLBACK")
raise
end
end
end
| 23.367647 | 168 | 0.648206 |
1a70bc930524c62fda39e98bd5a3028914ff9b74 | 2,003 | ######################################################################
# Copyright (c) 2008-2016, Alliance for Sustainable Energy.
# All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
######################################################################
# call like: ruby IddDiff.rb /path/to/old_idd /path/to/new_idd
require 'openstudio'
old_idd = OpenStudio::IddFile::load(OpenStudio::Path.new(ARGV[0])).get
new_idd = OpenStudio::IddFile::load(OpenStudio::Path.new(ARGV[1])).get
old_object_hash = Hash.new
new_object_hash = Hash.new
old_idd.objects.each do |object|
old_object_hash[object.name] = object
end
new_idd.objects.each do |object|
new_object_hash[object.name] = object
end
puts
# things in old that are not in new
old_object_hash.each_key do |name|
if new_object_hash[name].nil?
puts "Object '#{name}' was in old Idd but not in new Idd"
end
end
puts
# things in new that are not in old
new_object_hash.each_key do |name|
if old_object_hash[name].nil?
puts "Object '#{name}' is in new Idd but not in old Idd"
end
end
puts
# things in both
old_object_hash.each_pair do |name, old_object|
new_object = new_object_hash[name]
next if not new_object
if new_object != old_object
puts "Object '#{name}' has changed"
end
end | 29.028986 | 81 | 0.683974 |
accffe6756d94a1994e95a198db367b8811c0e6b | 3,101 | #!/usr/bin/env ruby
#--
# Portions copyright 2004 by Jim Weirich ([email protected]).
# Portions copyright 2005 by Sam Ruby ([email protected]).
# All rights reserved.
# Permission is granted for use, copying, modification, distribution,
# and distribution of modified versions of this work as long as the
# above copyright notice is included.
#++
require 'test/unit'
require 'test/preload'
require 'builder'
require 'builder/css'
class TestCSS < Test::Unit::TestCase
def setup
@css = Builder::CSS.new
end
def test_create
assert_not_nil @css
end
def test_no_block
@css.body
assert_equal 'body', @css.target!
end
def test_block
@css.body {
color 'green'
}
assert_equal "body {\n color: green;\n}\n\n", @css.target!
end
def test_id
@css.id!('nav') { color 'green' }
assert_equal "#nav {\n color: green;\n}\n\n", @css.target!
end
def test_class
@css.class!('nav') { color 'green' }
assert_equal ".nav {\n color: green;\n}\n\n", @css.target!
end
def test_elem_with_id
@css.div(:id => 'nav') { color 'green' }
assert_equal "div#nav {\n color: green;\n}\n\n", @css.target!
end
def test_elem_with_class
@css.div(:class => 'nav') { color 'green' }
assert_equal "div.nav {\n color: green;\n}\n\n", @css.target!
end
def test_comment
@css.comment!('foo')
assert_equal "/* foo */\n", @css.target!
end
def test_selector
@css.a(:hover) { color 'green' }
assert_equal "a:hover {\n color: green;\n}\n\n", @css.target!
end
def test_plus
@css.h1 + @css.span
assert_equal "h1 + span", @css.target!
end
def test_plus_with_block
@css.h1 + @css.span { color 'green' }
assert_equal "h1 + span {\n color: green;\n}\n\n", @css.target!
end
def test_contextual
@css.h1 >> @css.span
assert_equal "h1 span", @css.target!
end
def test_contextual_with_block
@css.h1 >> @css.span { color 'green' }
assert_equal "h1 span {\n color: green;\n}\n\n", @css.target!
end
def test_child
@css.h1 > @css.span
assert_equal "h1 > span", @css.target!
end
def test_child_with_block
@css.h1 > @css.span { color 'green' }
assert_equal "h1 > span {\n color: green;\n}\n\n", @css.target!
end
def test_multiple_op
@css.h1 + @css.span + @css.span
assert_equal "h1 + span + span", @css.target!
end
def test_all
@css.h1 | @css.h2 { color 'green' }
assert_equal "h1 , h2 {\n color: green;\n}\n\n", @css.target!
end
def test_all_with_atts
@css.h1(:class => 'foo') | @css.h2(:class => 'bar') { color 'green' }
assert_equal "h1.foo , h2.bar {\n color: green;\n}\n\n", @css.target!
end
def test_multiple_basic
@css.body { color 'green' }
@css.h1 { color 'green' }
assert_equal "body {\n color: green;\n}\n\nh1 {\n color: green;\n}\n\n", @css.target!
end
def test_multiple_ops
@css.body { color 'green' }
@css.body > @css.h1 { color 'green' }
assert_equal "body {\n color: green;\n}\n\nbody > h1 {\n color: green;\n}\n\n", @css.target!
end
end
| 24.611111 | 98 | 0.622702 |
e8012dcd8fa2956e5954ed68115e0bffb9a043f3 | 1,658 | # encoding: utf-8
require 'spec_helper'
describe Equatable, '#==' do
let(:name) { 'Value' }
let(:value) { 11 }
let(:super_klass) {
::Class.new do
include Equatable
attr_reader :value
def initialize(value)
@value = value
end
end
}
let(:klass) { Class.new(super_klass) }
let(:object) { klass.new(value) }
subject { object == other }
context 'with the same object' do
let(:other) { object }
it { is_expected.to eql(true) }
it 'is symmetric' do
is_expected.to eql(other == object)
end
end
context 'with an equivalent object' do
let(:other) { object.dup }
it { is_expected.to eql(true) }
it 'is symmetric' do
is_expected.to eql(other == object)
end
end
context 'with an equivalent object of a subclass' do
let(:other) { ::Class.new(klass).new(value) }
it { is_expected.to eql(true) }
it 'is not symmetric' do
# LSP, any equality for type should work for subtype but
# not the other way
is_expected.not_to eql(other == object)
end
end
context 'with an equivalent object of a superclass' do
let(:other) { super_klass.new(value) }
it { is_expected.to eql(false) }
it 'is not symmetric' do
is_expected.not_to eql(other == object)
end
end
context 'with an object with a different interface' do
let(:other) { Object.new }
it { is_expected.to eql(false) }
end
context 'with an object of another class' do
let(:other) { Class.new.new }
it { is_expected.to eql(false) }
it 'is symmetric' do
is_expected.to eql(other == object)
end
end
end
| 19.505882 | 62 | 0.618818 |
ac591cb2c35a6c8560f360065c70cfd603555e16 | 669 | # frozen_string_literal: true
require_dependency "renalware"
module Renalware
# Helper methods to mix in to a Presenter class when displaying objects which include
# the Accountable module - ie they have updated_by/at properties.
module AccountablePresentation
extend ActiveSupport::Concern
def effective_updated_by
return unless been_updated?
updated_by&.full_name
end
def effective_updated_at_date
return unless been_updated?
updated_at&.to_date
end
def effective_updated_at
return unless been_updated?
updated_at
end
def been_updated?
updated_at > created_at
end
end
end
| 19.676471 | 87 | 0.730942 |
ab403502b7726010e488f8e9acfd5bceacf730c9 | 272 | root = "#{Dir.getwd}"
bind "unix://#{root}/tmp/puma/socket"
pidfile "#{root}/tmp/puma/pid"
state_path "#{root}/tmp/puma/state"
rackup "#{root}/config/config.ru"
threads 4, 8
#stdout_redirect "#{root}/log/stdout.log", "#{root}/log/stderr.log", true
activate_control_app | 22.666667 | 73 | 0.691176 |
7974e424d5edd3c7795955469449a30541ef38c3 | 148 | require 'spec_helper'
describe 'filebeat' do
context 'with defaults for all parameters' do
it { should contain_class('filebeat') }
end
end
| 18.5 | 47 | 0.72973 |
ffc27dfb8826b31f3b1a7aef9a9f7aa743bc730a | 877 | # frozen_string_literal: true
module DevOops
module Commands
class EditScriptSh < Thor::Group
include Thor::Actions
def self.source_root
"#{File.dirname(__FILE__)}/../../../"
end
argument :script_name
class_option :global,
desc: 'force the script to be global',
aliases: ['g'],
type: :boolean
def self.banner
"#{$PROGRAM_NAME} edit_sh SCRIPT_NAME"
end
def edit
script_dir =
if options[:global]
ScriptsLoader::GLOBAL_DIR
else
ScriptsLoader.script_dir(script_name)
end
path = "#{script_dir}/#{script_name}.sh"
create_file(path) unless File.exist?(path)
FileUtils.chmod(0o750, path)
system("#{ENV['EDITOR'] || 'vim'} #{path}")
end
end
end
end
| 24.361111 | 57 | 0.54732 |
1a838d6c7c3c20942e1ab489661ba57426737560 | 7,076 | # -*- encoding: binary -*-
require_relative '../../../spec_helper'
require_relative '../fixtures/classes'
require_relative 'shared/basic'
require_relative 'shared/taint'
describe "String#unpack with format 'B'" do
it_behaves_like :string_unpack_basic, 'B'
it_behaves_like :string_unpack_no_platform, 'B'
it_behaves_like :string_unpack_taint, 'B'
it "decodes one bit from each byte for each format character starting with the most significant bit" do
[ ["\x00", "B", ["0"]],
["\x80", "B", ["1"]],
["\x0f", "B", ["0"]],
["\x8f", "B", ["1"]],
["\x7f", "B", ["0"]],
["\xff", "B", ["1"]],
["\x80\x00", "BB", ["1", "0"]],
["\x8f\x00", "BB", ["1", "0"]],
["\x80\x0f", "BB", ["1", "0"]],
["\x80\x8f", "BB", ["1", "1"]],
["\x80\x80", "BB", ["1", "1"]],
["\x0f\x80", "BB", ["0", "1"]]
].should be_computed_by(:unpack)
end
it "decodes only the number of bits in the string when passed a count" do
"\x83".unpack("B25").should == ["10000011"]
end
it "decodes multiple differing bit counts from a single string" do
str = "\xaa\xaa\xaa\xaa\x55\xaa\xd4\xc3\x6b\xd7\xaa\xd7\xc3\xd4\xaa\x6b\xd7\xaa"
array = str.unpack("B5B6B7B8B9B10B13B14B16B17")
array.should == ["10101", "101010", "1010101", "10101010", "010101011",
"1101010011", "0110101111010", "10101010110101",
"1100001111010100", "10101010011010111"]
end
it "decodes a directive with a '*' modifier after a directive with a count modifier" do
"\xd4\xc3\x6b\xd7".unpack("B5B*").should == ["11010", "110000110110101111010111"]
end
it "decodes a directive with a count modifier after a directive with a '*' modifier" do
"\xd4\xc3\x6b\xd7".unpack("B*B5").should == ["11010100110000110110101111010111", ""]
end
it "decodes the number of bits specified by the count modifier" do
[ ["\x00", "B0", [""]],
["\x80", "B1", ["1"]],
["\x7f", "B2", ["01"]],
["\x8f", "B3", ["100"]],
["\x7f", "B4", ["0111"]],
["\xff", "B5", ["11111"]],
["\xf8", "B6", ["111110"]],
["\x9c", "B7", ["1001110"]],
["\xbd", "B8", ["10111101"]],
["\x80\x80", "B9", ["100000001"]],
["\x80\x70", "B10", ["1000000001"]],
["\x80\x20", "B11", ["10000000001"]],
["\x8f\x10", "B12", ["100011110001"]],
["\x8f\x0f", "B13", ["1000111100001"]],
["\x80\x0f", "B14", ["10000000000011"]],
["\x80\x8f", "B15", ["100000001000111"]],
["\x0f\x81", "B16", ["0000111110000001"]]
].should be_computed_by(:unpack)
end
it "decodes all the bits when passed the '*' modifier" do
[ ["", [""]],
["\x00", ["00000000"]],
["\x80", ["10000000"]],
["\x7f", ["01111111"]],
["\x81", ["10000001"]],
["\x0f", ["00001111"]],
["\x80\x80", ["1000000010000000"]],
["\x8f\x10", ["1000111100010000"]],
["\x00\x10", ["0000000000010000"]]
].should be_computed_by(:unpack, "B*")
end
it "adds an empty string for each element requested beyond the end of the String" do
[ ["", ["", "", ""]],
["\x80", ["1", "", ""]],
["\x80\x08", ["1", "0", ""]]
].should be_computed_by(:unpack, "BBB")
end
it "ignores NULL bytes between directives" do
"\x80\x00".unpack("B\x00B").should == ["1", "0"]
end
it "ignores spaces between directives" do
"\x80\x00".unpack("B B").should == ["1", "0"]
end
end
describe "String#unpack with format 'b'" do
it_behaves_like :string_unpack_basic, 'b'
it_behaves_like :string_unpack_no_platform, 'b'
it_behaves_like :string_unpack_taint, 'b'
it "decodes one bit from each byte for each format character starting with the least significant bit" do
[ ["\x00", "b", ["0"]],
["\x01", "b", ["1"]],
["\xf0", "b", ["0"]],
["\xf1", "b", ["1"]],
["\xfe", "b", ["0"]],
["\xff", "b", ["1"]],
["\x01\x00", "bb", ["1", "0"]],
["\xf1\x00", "bb", ["1", "0"]],
["\x01\xf0", "bb", ["1", "0"]],
["\x01\xf1", "bb", ["1", "1"]],
["\x01\x01", "bb", ["1", "1"]],
["\xf0\x01", "bb", ["0", "1"]]
].should be_computed_by(:unpack)
end
it "decodes only the number of bits in the string when passed a count" do
"\x83".unpack("b25").should == ["11000001"]
end
it "decodes multiple differing bit counts from a single string" do
str = "\xaa\xaa\xaa\xaa\x55\xaa\xd4\xc3\x6b\xd7\xaa\xd7\xc3\xd4\xaa\x6b\xd7\xaa"
array = str.unpack("b5b6b7b8b9b10b13b14b16b17")
array.should == ["01010", "010101", "0101010", "01010101", "101010100",
"0010101111", "1101011011101", "01010101111010",
"1100001100101011", "01010101110101101"]
end
it "decodes a directive with a '*' modifier after a directive with a count modifier" do
"\xd4\xc3\x6b\xd7".unpack("b5b*").should == ["00101", "110000111101011011101011"]
end
it "decodes a directive with a count modifier after a directive with a '*' modifier" do
"\xd4\xc3\x6b\xd7".unpack("b*b5").should == ["00101011110000111101011011101011", ""]
end
it "decodes the number of bits specified by the count modifier" do
[ ["\x00", "b0", [""]],
["\x01", "b1", ["1"]],
["\xfe", "b2", ["01"]],
["\xfc", "b3", ["001"]],
["\xf7", "b4", ["1110"]],
["\xff", "b5", ["11111"]],
["\xfe", "b6", ["011111"]],
["\xce", "b7", ["0111001"]],
["\xbd", "b8", ["10111101"]],
["\x01\xff", "b9", ["100000001"]],
["\x01\xfe", "b10", ["1000000001"]],
["\x01\xfc", "b11", ["10000000001"]],
["\xf1\xf8", "b12", ["100011110001"]],
["\xe1\xf1", "b13", ["1000011110001"]],
["\x03\xe0", "b14", ["11000000000001"]],
["\x47\xc0", "b15", ["111000100000001"]],
["\x81\x0f", "b16", ["1000000111110000"]]
].should be_computed_by(:unpack)
end
it "decodes all the bits when passed the '*' modifier" do
[ ["", [""]],
["\x00", ["00000000"]],
["\x80", ["00000001"]],
["\x7f", ["11111110"]],
["\x81", ["10000001"]],
["\x0f", ["11110000"]],
["\x80\x80", ["0000000100000001"]],
["\x8f\x10", ["1111000100001000"]],
["\x00\x10", ["0000000000001000"]]
].should be_computed_by(:unpack, "b*")
end
it "adds an empty string for each element requested beyond the end of the String" do
[ ["", ["", "", ""]],
["\x01", ["1", "", ""]],
["\x01\x80", ["1", "0", ""]]
].should be_computed_by(:unpack, "bbb")
end
it "ignores NULL bytes between directives" do
"\x01\x00".unpack("b\x00b").should == ["1", "0"]
end
it "ignores spaces between directives" do
"\x01\x00".unpack("b b").should == ["1", "0"]
end
it "decodes into US-ASCII string values" do
str = "s".force_encoding('UTF-8').unpack("b*")[0]
str.encoding.name.should == 'US-ASCII'
end
end
| 36.474227 | 106 | 0.518089 |
1da82ef8418b354200b46e0ac794db4fc5216ed9 | 178 | require 'test_helper'
class VotesControllerTest < ActionDispatch::IntegrationTest
test "should get create" do
get votes_create_url
assert_response :success
end
end
| 17.8 | 59 | 0.780899 |
3938da03daeba8ca7581ea8ac98fbdfaf7514ced | 12,924 | require_relative "helper"
class TestConnectionPool < Minitest::Test
class NetworkConnection
SLEEP_TIME = 0.1
def initialize
@x = 0
end
def do_something(*_args, increment: 1)
@x += increment
sleep SLEEP_TIME
@x
end
def do_something_with_positional_hash(options)
@x += options[:increment] || 1
sleep SLEEP_TIME
@x
end
def fast
@x += 1
end
def do_something_with_block
@x += yield
sleep SLEEP_TIME
@x
end
def respond_to?(method_id, *args)
method_id == :do_magic || super(method_id, *args)
end
end
class Recorder
def initialize
@calls = []
end
attr_reader :calls
def do_work(label)
@calls << label
end
end
def use_pool(pool, size)
Array.new(size) {
Thread.new do
pool.with { sleep }
end
}.each do |thread|
Thread.pass until thread.status == "sleep"
end
end
def kill_threads(threads)
threads.each do |thread|
thread.kill
thread.join
end
end
def test_basic_multithreaded_usage
pool_size = 5
pool = ConnectionPool.new(size: pool_size) { NetworkConnection.new }
start = Time.new
generations = 3
result = Array.new(pool_size * generations) {
Thread.new do
pool.with do |net|
net.do_something
end
end
}.map(&:value)
finish = Time.new
assert_equal((1..generations).cycle(pool_size).sort, result.sort)
assert_operator(finish - start, :>, generations * NetworkConnection::SLEEP_TIME)
end
def test_timeout
pool = ConnectionPool.new(timeout: 0, size: 1) { NetworkConnection.new }
thread = Thread.new {
pool.with do |net|
net.do_something
sleep 0.01
end
}
Thread.pass while thread.status == "run"
assert_raises Timeout::Error do
pool.with { |net| net.do_something }
end
thread.join
pool.with do |conn|
refute_nil conn
end
end
def test_with
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
pool.with do
Thread.new {
assert_raises Timeout::Error do
pool.checkout
end
}.join
end
assert Thread.new { pool.checkout }.join
end
def test_then
pool = ConnectionPool.new { Object.new }
assert_equal pool.method(:then), pool.method(:with)
end
def test_with_timeout
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
assert_raises Timeout::Error do
Timeout.timeout(0.01) do
pool.with do |obj|
assert_equal 0, pool.available
sleep 0.015
end
end
end
assert_equal 1, pool.available
end
def test_invalid_size
assert_raises ArgumentError, TypeError do
ConnectionPool.new(timeout: 0, size: nil) { Object.new }
end
assert_raises ArgumentError, TypeError do
ConnectionPool.new(timeout: 0, size: "") { Object.new }
end
end
def test_handle_interrupt_ensures_checkin
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
def pool.checkout(options)
sleep 0.015
super
end
did_something = false
action = lambda do
Timeout.timeout(0.01) do
pool.with do |obj|
did_something = true
# Timeout::Error will be triggered by any non-trivial Ruby code
# executed here since it couldn't be raised during checkout.
# It looks like setting the local variable above does not trigger
# the Timeout check in MRI 2.2.1.
obj.tap { obj.hash }
end
end
end
if RUBY_ENGINE == "ruby"
# These asserts rely on the Ruby implementation reaching `did_something =
# true` before the interrupt is detected by the thread. Interrupt
# detection timing is implementation-specific in practice, with JRuby,
# Rubinius, and TruffleRuby all having different interrupt timings to MRI.
# In fact they generally detect interrupts more quickly than MRI, so they
# may not reach `did_something = true` before detecting the interrupt.
assert_raises Timeout::Error, &action
assert did_something
else
action.call
end
assert_equal 1, pool.available
end
def test_explicit_return
pool = ConnectionPool.new(timeout: 0, size: 1) {
mock = Minitest::Mock.new
def mock.disconnect!
raise "should not disconnect upon explicit return"
end
mock
}
pool.with do |conn|
return true
end
end
def test_with_timeout_override
pool = ConnectionPool.new(timeout: 0, size: 1) { NetworkConnection.new }
t = Thread.new {
pool.with do |net|
net.do_something
sleep 0.01
end
}
Thread.pass while t.status == "run"
assert_raises Timeout::Error do
pool.with { |net| net.do_something }
end
pool.with(timeout: 2 * NetworkConnection::SLEEP_TIME) do |conn|
refute_nil conn
end
end
def test_checkin
pool = ConnectionPool.new(timeout: 0, size: 1) { NetworkConnection.new }
conn = pool.checkout
Thread.new {
assert_raises Timeout::Error do
pool.checkout
end
}.join
pool.checkin
assert_same conn, Thread.new { pool.checkout }.value
end
def test_returns_value
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
assert_equal 1, pool.with { |o| 1 }
end
def test_checkin_never_checkout
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
e = assert_raises(ConnectionPool::Error) { pool.checkin }
assert_equal "no connections are checked out", e.message
end
def test_checkin_no_current_checkout
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
pool.checkout
pool.checkin
assert_raises ConnectionPool::Error do
pool.checkin
end
end
def test_checkin_twice
pool = ConnectionPool.new(timeout: 0, size: 1) { Object.new }
pool.checkout
pool.checkout
pool.checkin
Thread.new {
assert_raises Timeout::Error do
pool.checkout
end
}.join
pool.checkin
assert Thread.new { pool.checkout }.join
end
def test_checkout
pool = ConnectionPool.new(size: 1) { NetworkConnection.new }
conn = pool.checkout
assert_kind_of NetworkConnection, conn
assert_same conn, pool.checkout
end
def test_checkout_multithread
pool = ConnectionPool.new(size: 2) { NetworkConnection.new }
conn = pool.checkout
t = Thread.new {
pool.checkout
}
refute_same conn, t.value
end
def test_checkout_timeout
pool = ConnectionPool.new(timeout: 0, size: 0) { Object.new }
assert_raises Timeout::Error do
pool.checkout
end
end
def test_checkout_timeout_override
pool = ConnectionPool.new(timeout: 0, size: 1) { NetworkConnection.new }
thread = Thread.new {
pool.with do |net|
net.do_something
sleep 0.01
end
}
Thread.pass while thread.status == "run"
assert_raises Timeout::Error do
pool.checkout
end
assert pool.checkout timeout: 2 * NetworkConnection::SLEEP_TIME
end
def test_passthru
pool = ConnectionPool.wrap(timeout: 2 * NetworkConnection::SLEEP_TIME, size: 1) { NetworkConnection.new }
assert_equal 1, pool.do_something
assert_equal 2, pool.do_something
assert_equal 5, pool.do_something_with_block { 3 }
assert_equal 6, pool.with { |net| net.fast }
assert_equal 8, pool.do_something(increment: 2)
assert_equal 10, pool.do_something_with_positional_hash({ increment: 2, symbol_key: 3, "string_key" => 4 })
end
def test_passthru_respond_to
pool = ConnectionPool.wrap(timeout: 2 * NetworkConnection::SLEEP_TIME, size: 1) { NetworkConnection.new }
assert pool.respond_to?(:with)
assert pool.respond_to?(:do_something)
assert pool.respond_to?(:do_magic)
refute pool.respond_to?(:do_lots_of_magic)
end
def test_return_value
pool = ConnectionPool.new(timeout: 2 * NetworkConnection::SLEEP_TIME, size: 1) { NetworkConnection.new }
result = pool.with { |net|
net.fast
}
assert_equal 1, result
end
def test_heavy_threading
pool = ConnectionPool.new(timeout: 0.5, size: 3) { NetworkConnection.new }
threads = Array.new(20) {
Thread.new do
pool.with do |net|
sleep 0.01
end
end
}
threads.map { |thread| thread.join }
end
def test_reuses_objects_when_pool_not_saturated
pool = ConnectionPool.new(size: 5) { NetworkConnection.new }
ids = 10.times.map {
pool.with { |c| c.object_id }
}
assert_equal 1, ids.uniq.size
end
def test_nested_checkout
recorder = Recorder.new
pool = ConnectionPool.new(size: 1) { recorder }
pool.with do |r_outer|
@other = Thread.new { |t|
pool.with do |r_other|
r_other.do_work("other")
end
}
pool.with do |r_inner|
r_inner.do_work("inner")
end
Thread.pass
r_outer.do_work("outer")
end
@other.join
assert_equal ["inner", "outer", "other"], recorder.calls
end
def test_shutdown_is_executed_for_all_connections
recorders = []
pool = ConnectionPool.new(size: 3) {
Recorder.new.tap { |r| recorders << r }
}
threads = use_pool pool, 3
pool.shutdown do |recorder|
recorder.do_work("shutdown")
end
kill_threads(threads)
assert_equal [["shutdown"]] * 3, recorders.map { |r| r.calls }
end
def test_raises_error_after_shutting_down
pool = ConnectionPool.new(size: 1) { true }
pool.shutdown {}
assert_raises ConnectionPool::PoolShuttingDownError do
pool.checkout
end
end
def test_runs_shutdown_block_asynchronously_if_connection_was_in_use
recorders = []
pool = ConnectionPool.new(size: 3) {
Recorder.new.tap { |r| recorders << r }
}
threads = use_pool pool, 2
pool.checkout
pool.shutdown do |recorder|
recorder.do_work("shutdown")
end
kill_threads(threads)
assert_equal [["shutdown"], ["shutdown"], []], recorders.map { |r| r.calls }
pool.checkin
assert_equal [["shutdown"], ["shutdown"], ["shutdown"]], recorders.map { |r| r.calls }
end
def test_raises_an_error_if_shutdown_is_called_without_a_block
pool = ConnectionPool.new(size: 1) {}
assert_raises ArgumentError do
pool.shutdown
end
end
def test_shutdown_is_executed_for_all_connections_in_wrapped_pool
recorders = []
wrapper = ConnectionPool::Wrapper.new(size: 3) {
Recorder.new.tap { |r| recorders << r }
}
threads = use_pool wrapper, 3
wrapper.pool_shutdown do |recorder|
recorder.do_work("shutdown")
end
kill_threads(threads)
assert_equal [["shutdown"]] * 3, recorders.map { |r| r.calls }
end
def test_wrapper_wrapped_pool
wrapper = ConnectionPool::Wrapper.new { NetworkConnection.new }
assert_equal ConnectionPool, wrapper.wrapped_pool.class
end
def test_wrapper_method_missing
wrapper = ConnectionPool::Wrapper.new { NetworkConnection.new }
assert_equal 1, wrapper.fast
end
def test_wrapper_respond_to_eh
wrapper = ConnectionPool::Wrapper.new { NetworkConnection.new }
assert_respond_to wrapper, :with
assert_respond_to wrapper, :fast
refute_respond_to wrapper, :"nonexistent method"
end
def test_wrapper_with
wrapper = ConnectionPool::Wrapper.new(timeout: 0, size: 1) { Object.new }
wrapper.with do
Thread.new {
assert_raises Timeout::Error do
wrapper.with { flunk "connection checked out :(" }
end
}.join
end
assert Thread.new { wrapper.with {} }.join
end
class ConnWithEval
def eval(arg)
"eval'ed #{arg}"
end
end
def test_wrapper_kernel_methods
wrapper = ConnectionPool::Wrapper.new(timeout: 0, size: 1) { ConnWithEval.new }
assert_equal "eval'ed 1", wrapper.eval(1)
end
def test_wrapper_with_connection_pool
recorder = Recorder.new
pool = ConnectionPool.new(size: 1) { recorder }
wrapper = ConnectionPool::Wrapper.new(pool: pool)
pool.with { |r| r.do_work("with") }
wrapper.do_work("wrapped")
assert_equal ["with", "wrapped"], recorder.calls
end
def test_stats_without_active_connection
pool = ConnectionPool.new(size: 2) { NetworkConnection.new }
assert_equal(2, pool.size)
assert_equal(2, pool.available)
end
def test_stats_with_active_connection
pool = ConnectionPool.new(size: 2) { NetworkConnection.new }
pool.with do
assert_equal(1, pool.available)
end
end
def test_stats_with_string_size
pool = ConnectionPool.new(size: "2") { NetworkConnection.new }
pool.with do
assert_equal(2, pool.size)
assert_equal(1, pool.available)
end
end
end
| 22.753521 | 111 | 0.655215 |
036943eda8249a8d2123123f67a10fcc36bac14e | 40 | module SmartSMS
VERSION = '0.1.1'
end
| 10 | 19 | 0.675 |
b9e0ebfe30d34428ca9c63b733bf7223cc11bc3f | 1,343 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'piculet/version'
# Gem packaging metadata for Piculet, a DSL-driven EC2 Security Group manager.
Gem::Specification.new do |spec|
  spec.name = "piculet"
  spec.version = Piculet::VERSION
  spec.authors = ["winebarrel"]
  spec.email = ["[email protected]"]
  spec.description = "Piculet is a tool to manage EC2 Security Group. It defines the state of EC2 Security Group using DSL, and updates EC2 Security Group according to DSL."
  spec.summary = "Piculet is a tool to manage EC2 Security Group."
  spec.homepage = "http://piculet.codenize.tools/"
  spec.license = "MIT"
  # Files are gathered via explicit globs rather than `git ls-files`
  # (see the commented-out lines below) — presumably so the gem can be
  # built outside a git checkout; confirm before changing.
  spec.files = %w(README.md) + Dir.glob('bin/**/*') + Dir.glob('lib/**/*')
  spec.add_dependency "aws-sdk-v1", ">= 1.48.0"
  spec.add_dependency "term-ansicolor", ">= 1.2.2"
  spec.add_dependency "diffy"
  spec.add_dependency "hashie"
  #spec.files = `git ls-files`.split($/)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  #spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec", "~> 2.14.1"
  spec.add_development_dependency "rspec-instafail"
end
| 41.96875 | 175 | 0.664929 |
626867d20e7ca95b05aa5c2251363dc7b8f0d637 | 437 | # frozen_string_literal: true
module JekyllAssetPipeline
  # Allows classes that extend this module to enumerate their subclasses.
  #
  # Usage: `class Base; extend SubclassTracking; end` — every subsequently
  # defined subclass of Base is recorded in `Base.subclasses`.
  module SubclassTracking
    # Record subclasses of this class (Ruby invokes this hook automatically
    # whenever a subclass is defined). `super` is forwarded so that any other
    # `inherited` hooks in the ancestor chain still fire — the original
    # implementation silently swallowed them.
    def inherited(base)
      super
      subclasses << base
    end

    # Return the (memoized) array of classes that subclass this object.
    def subclasses
      @subclasses ||= []
    end
  end
end
| 24.277778 | 77 | 0.71167 |
0352fefc6035e8309c58a3b9ab1365d979a0a2aa | 342 | cask "hostsx" do
version "2.8.1"
sha256 "acf2f6b909215ec2b822a2c88bc7a19b4f0f231b86d0184883f481de8b535bd6"
url "https://github.com/ZzzM/HostsX/releases/download/#{version}/HostsX.dmg"
name "HostsX"
desc "Local hosts update tool"
homepage "https://github.com/ZzzM/HostsX"
depends_on macos: ">= :sierra"
app "HostsX.app"
end
| 24.428571 | 78 | 0.739766 |
f70b656aa697c70c3874bd55ca79d783a1ef26ce | 1,251 | #
# Cookbook:: nexus_repository_manager
# Copyright:: Copyright (c) 2017-present Sonatype, Inc. Apache License, Version 2.0.
# Java runtime required by Nexus Repository Manager.
default['java']['jdk_version'] = '8'
default['java']['install_flavor'] = 'openjdk'
default['java']['accept_license_agreement'] = true
# nexus configuration
default['nexus_repository_manager']['version'] = '3.18.1-01'
# NOTE(review): the download URL derives from the version attribute above,
# but the sha256 below is pinned — both must be updated together on a bump.
default['nexus_repository_manager']['nexus_download_url'] = "https://download.sonatype.com/nexus/3/nexus-#{node['nexus_repository_manager']['version']}-unix.tar.gz"
default['nexus_repository_manager']['nexus_download_sha256'] = '6d3e2c32b220a7eee0e1ff81a8dd15a48e5712fc3f379b6102717ba06058707c'
# Install root and persistent data directory.
default['nexus_repository_manager']['sonatype']['path'] = '/opt/sonatype'
default['nexus_repository_manager']['nexus_data']['path'] = '/nexus-data'
# Optional S3 location of a license file (nil means no license is fetched).
default['nexus_repository_manager']['license_s3_bucket'] = nil
default['nexus_repository_manager']['license_s3_path'] = nil
# nexus.properties configuration
default['nexus_repository_manager']['properties']['application_port'] = '8081'
default['nexus_repository_manager']['properties']['application_host'] = '0.0.0.0'
default['nexus_repository_manager']['properties']['context_path'] = '/'
default['nexus_repository_manager']['properties']['license_file'] = nil
f71aacb819c5a72b701537d69984520d9ece5678 | 1,058 | cask "netnewswire" do
version "5.1.3"
sha256 "5f08f9ee986621d146c57f3ab3e98297944af368ce7ff1382a8e3a6c31fab5ea"
url "https://github.com/brentsimmons/NetNewsWire/releases/download/mac-#{version}/NetNewsWire#{version}.zip",
verified: "github.com/brentsimmons/NetNewsWire/"
name "NetNewsWire"
desc "Free and open-source RSS reader"
homepage "https://ranchero.com/netnewswire/"
auto_updates true
conflicts_with cask: "homebrew/cask-versions/netnewswire-beta"
depends_on macos: ">= :catalina"
app "NetNewsWire.app"
zap trash: [
"~/Library/Application Scripts/com.ranchero.NetNewsWire-Evergreen.Subscribe-to-Feed",
"~/Library/Application Support/NetNewsWire",
"~/Library/Caches/com.ranchero.NetNewsWire-Evergreen",
"~/Library/Containers/com.ranchero.NetNewsWire-Evergreen.Subscribe-to-Feed",
"~/Library/Preferences/com.ranchero.NetNewsWire-Evergreen.plist",
"~/Library/Saved Application State/com.ranchero.NetNewsWire-Evergreen.savedState",
"~/Library/WebKit/com.ranchero.NetNewsWire-Evergreen",
]
end
| 39.185185 | 111 | 0.76087 |
b98ba386d2afe3affbb61a39b174a877f2eb0663 | 848 | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/dm-migrations/version', __FILE__)
Gem::Specification.new do |gem|
gem.name = 'ardm-migrations'
gem.version = DataMapper::Migrations::VERSION
gem.authors = ["Martin Emde", "Paul Sadauskas"]
gem.email = ['[email protected]', 'psadauskas [a] gmail [d] com']
gem.description = "Ardm fork of dm-migrations"
gem.summary = gem.description
gem.license = "MIT"
gem.files = `git ls-files`.split("\n")
gem.test_files = `git ls-files -- {spec}/*`.split("\n")
gem.extra_rdoc_files = %w[LICENSE README.md]
gem.homepage = "http://github.com/martinemde/ardm-migrations"
gem.require_paths = ["lib"]
gem.add_runtime_dependency 'ardm-core', '~> 1.2'
gem.add_development_dependency 'rake', '~> 10.0'
gem.add_development_dependency 'rspec', '~> 2.0'
end
| 30.285714 | 67 | 0.665094 |
e9c40c5eaf57665cfd3042a0685e9e3cd6baf758 | 14,946 | # encoding: utf-8
include ActionView::Helpers::SanitizeHelper
include ApplicationHelper
RSpec.describe Note do
before(:example) do
ENV['versions'] = 'true'
ENV['version_gap_distance'] = '10'
ENV['version_gap_minutes'] = '60'
end
let(:note) { FactoryGirl.create(:note, external_updated_at: 200.minutes.ago, external_created_at: 200.minutes.ago) }
subject { note }
it { is_expected.to be_valid }
it { is_expected.to respond_to(:active) }
it { is_expected.to respond_to(:altitude) }
it { is_expected.to respond_to(:author) }
it { is_expected.to respond_to(:body) }
it { is_expected.to respond_to(:distance) }
it { is_expected.to respond_to(:external_updated_at) }
it { is_expected.to respond_to(:feature) }
it { is_expected.to respond_to(:fx) }
it { is_expected.to respond_to(:hide) }
it { is_expected.to respond_to(:introduction) }
it { is_expected.to respond_to(:is_embeddable_source_url) }
it { is_expected.to respond_to(:last_edited_by) }
it { is_expected.to respond_to(:latitude) }
it { is_expected.to respond_to(:longitude) }
it { is_expected.to respond_to(:place) }
it { is_expected.to respond_to(:feature_id) }
it { is_expected.to respond_to(:source) }
it { is_expected.to respond_to(:source_application) }
it { is_expected.to respond_to(:source_url) }
it { is_expected.to respond_to(:title) }
it { is_expected.to respond_to(:word_count) }
it { is_expected.to respond_to(:url) }
it { is_expected.to respond_to(:url_title) }
it { is_expected.to respond_to(:url_author) }
it { is_expected.to respond_to(:url_accessed_at) }
it { is_expected.to respond_to(:url_updated_at) }
it { is_expected.to respond_to(:url_html) }
it { is_expected.to respond_to(:url_lang) }
it { is_expected.to have_many(:evernote_notes) }
it { is_expected.to have_many(:instructions).through(:instruction_taggings) }
it { is_expected.to have_many(:resources) }
it { is_expected.to have_many(:tags).through(:tag_taggings) }
it { is_expected.to have_many(:versions) }
it { is_expected.to validate_presence_of(:external_updated_at) }
it { is_expected.to validate_presence_of(:title) }
describe 'rejects update when body, embeddable url and resources are all nil' do
before do
note.body = nil
notesource_url = nil
note.save
end
it { is_expected.not_to be_valid }
it 'has 1 error_on' do
expect(subject.error_on(:note).size).to eq(1)
end
end
describe 'saves the correct content type' do
it 'note by default' do
expect(note.content_type).to eq('note')
end
context 'when note has __QUOTE tag' do
before do
note.instruction_list = %w(__QUOTE)
note.save
end
it 'content_type is Citation' do
expect(note.content_type).to eq('citation')
end
end
context 'when note has __LINK tag' do
before do
note.instruction_list = %w(__LINK)
note.save
end
it 'content_type is Link' do
expect(note.content_type).to eq('link')
end
end
end
# Not yet implemented
# RSpec.describe "refuses update when external_updated_at is unchanged" do
# before do
# note.update_attributes(
# title: "New Title",
# external_updated_at: note.external_updated_at
# )
# end
# it { should_not be_valid }
# it { should have(1).error_on(:external_updated_at) }
# end
# Not yet implemented
# RSpec.describe "refuses update when external_updated_at is older" do
# before {
# note.update_attributes(
# title: "New Title",
# external_updated_at: note.external_updated_at - 1
# )
# }
# it { should_not be_valid }
# it { should have(1).error_on(:external_updated_at) }
# end
# TODO: Test scopes
describe 'versioning', versioning: true do
context 'when title is changed' do
before do
note.title = 'New Title'
note.external_updated_at = 1.minute.ago
note.save
end
it 'saves a version' do
expect(note.versions).not_to be_empty
end
end
context 'when versions are turned off' do
before do
ENV['versions'] = 'false'
note.title = 'New Title'
note.external_updated_at = 1.minute.ago
note.save
end
it 'does not save a version' do
expect(note.versions).to be_empty
end
end
context 'when body is changed' do
before do
note.body = 'New Body'
note.external_updated_at = 1.minute.ago
note.save
end
it 'saves a version' do
expect(note.versions).not_to be_empty
end
end
context 'when other attributes (e.g. altitude) is changed' do
before do
note.altitude = 1
note.external_updated_at = 1.minute.ago
note.save
end
it 'does not save a version' do
expect(note.versions).to be_empty
end
end
context 'when note is tagged to __RESET' do
before do
note.instruction_list = %w(__RESET)
note.body = 'New Body'
note.external_updated_at = 1.minute.ago
note.save
end
it 'does not save a version' do
expect(note.versions).to be_empty
end
end
context 'when a note is not much older or different than the last version' do
before do
note.body = note.body + 'a'
note.external_updated_at = 199.minutes.ago
note.save!
end
it 'does not save a version' do
expect(note.versions).to be_empty
end
end
context 'when a note is not much older but is longer from the last version' do
before do
note.body = note.body + ' More than ten words, enough to go over threshold in constants.'
note.external_updated_at = 199.minutes.ago
note.save!
end
it 'saves a version' do
expect(note.versions).not_to be_empty
end
end
context 'when a note is not much older, is the same length, but is different from the last version' do
before do
note.body = note.body.reverse
note.external_updated_at = 199.minutes.ago
note.save!
end
it 'saves a version' do
expect(note.versions).not_to be_empty
end
end
context 'when a version is saved' do
before do
note.body = 'First Body'
note.tag_list = %w(first_tag)
note.instruction_list = %w(__FIRST_INSTRUCTION)
note.external_updated_at = 100.minutes.ago
note.save
note.body = 'Second Body with more words'
note.tag_list = %w(second_tag)
note.instruction_list = %w(__SECOND_INSTRUCTION)
note.external_updated_at = 1.minute.ago
note.save
end
it 'saves metadata' do
expect(note.versions.last.external_updated_at.to_i).to eq(note.versions.last.reify.external_updated_at.to_i)
note.versions.last.instruction_list = %w(__FIRST_INSTRUCTION)
expect(note.versions.last.sequence).to eq(note.versions.size)
note.versions.last.tag_list = %w(first_tag)
expect(note.versions.last.word_count).to eq(2)
expect(note.versions.last.distance).to eq(26)
end
end
end
describe '#has_instruction?' do
before do
ENV['instructions_hide'] = '__HIDESYNONYM'
ENV['instructions_default'] = '__DEFAULT_INSTRUCTION'
note.instruction_list = %w(__NOTEINSTRUCTION __HIDESYNONYM)
end
context 'when an instruction has synonyms in Settings' do
it 'returns true' do
expect(note.has_instruction?('hide')).to be_truthy
end
end
context 'when an instruction is set as a synonym' do
it 'returns true' do
expect(note.has_instruction?('hidesynonym')).to be_truthy
end
end
context 'when an instruction is set in default for all' do
it 'returns true' do
expect(note.has_instruction?('default_instruction')).to be_truthy
end
end
context 'when a note is tagged with an instruction' do
it 'returns true' do
expect(note.has_instruction?('noteinstruction')).to be_truthy
end
end
context 'when an instruction is not present' do
it 'returns false' do
expect(note.has_instruction?('notpresent')).to be_falsey
end
end
end
describe '#headline' do
context 'when title is present' do
it 'returns title' do
expect(note.headline).to eq(note.title)
end
end
context 'when title is missing' do
before do
note.title = I18n.t('notes.untitled_synonyms').first
end
it 'returns preformatted title (e.g. Note 1)' do
expect(note.headline).to eq(I18n.t('notes.show.title', id: note.id))
end
end
context 'when title is missing (but in a different case from untitled synonyms)' do
before do
note.title = I18n.t('notes.untitled_synonyms').first.upcase
end
it 'returns preformatted title (e.g. Note 1)' do
expect(note.headline).to eq(I18n.t('notes.show.title', id: note.id))
end
end
context 'when note is a citation' do
before do
note.citation!
end
it 'returns preformatted title (e.g. Citation 1)' do
expect(note.headline).to eq(I18n.t('citations.show.title', id: note.id))
end
end
end
describe '#inferred_url' do
context 'when source url exists' do
before do
note.source_url = 'http://example.com'
note.save
end
it 'returns source url' do
expect(note.inferred_url).to eq('http://example.com')
end
end
context 'when source urldoes not exist' do
before do
note.source_url = nil
note.body = 'Normal body. http://example2.com'
note.save
end
it 'returns the first url from the body' do
expect(note.inferred_url).to eq('http://example2.com')
end
end
end
describe 'is taggable' do
before { note.update_attributes(tag_list: %w(tag1 tag2 tag3)) }
its(:tag_list) { is_expected.to eq(%w(tag1 tag2 tag3)) }
end
describe 'is findable by tag' do
# before { note.update_attributes(tag_list: 'tag4') }
# Note.tagged_with('tag4').last.should == note
end
describe 'accepts special characters in tags' do
before do
note.tag_list = %w(Žižek Café 井戸端)
note.save
end
its(:tag_list) { is_expected.to eq(['Žižek', 'Café', '井戸端']) }
end
describe '#clean_body_with_instructions' do
# pending 'TODO'
end
describe '#clean_body' do
# pending 'TODO'
end
describe '#is_embeddable_source_url' do
context 'when source_url is not known to be embeddable' do
before { note.source_url = 'http://www.example.com' }
its(:is_embeddable_source_url) { is_expected.to be_falsey }
end
context 'when source_url is a youtube link' do
before { note.source_url = 'http://youtube.com?v=ABCDEF' }
its(:is_embeddable_source_url) { is_expected.to be_truthy }
end
context 'when source_url is a vimeo link' do
before { note.source_url = 'http://vimeo.com/video/ABCDEF' }
its(:is_embeddable_source_url) { is_expected.to be_truthy }
end
context 'when source_url is a soundcloud link' do
before { note.source_url = 'http://soundcloud.com?v=ABCDEF' }
its (:is_embeddable_source_url) { is_expected.to be_truthy }
end
end
describe '#feature_id' do
context 'when title has no feature_id' do
before { note.title = 'Title' }
its (:feature_id) { is_expected.to be_nil }
end
context 'when title has a numerical feature_id' do
before { note.update_attributes(title: '1. Title') }
its (:feature_id) { is_expected.to eq('1') }
end
context 'when title has an alphabetic feature_id' do
before { note.update_attributes(title: 'a. Title') }
its (:feature_id) { is_expected.to eq('a') }
end
context 'when title has a word as feature_id' do
before { note.update_attributes(title: 'First. Title') }
its (:feature_id) { is_expected.to eq('first') }
end
context 'when title has a subtitle' do
before { note.update_attributes(title: 'Main Title: Subtitle') }
its (:feature_id) { is_expected.to eq('subtitle') }
end
context 'when title has more than one word before a period' do
before { note.update_attributes(title: 'Two words. Title') }
its (:feature_id) { is_expected.to be_nil }
end
end
describe '#feature' do
ENV['instructions_feature_first'] = '__FEATURE_FIRST'
ENV['instructions_feature_last'] = '__FEATURE_LAST'
before { note.update_attributes(title: 'Title Has Three Words') }
context 'when note has no instruction' do
its (:feature) { is_expected.to be_nil }
end
context 'when note has feature instruction' do
before { note.update_attributes(instruction_list: %w(__FEATURE)) }
its (:feature) { is_expected.to eq('title-has-three-words') }
end
context 'when note has an instruction to use the first word' do
before { note.update_attributes(instruction_list: %w(__FEATURE __FEATURE_FIRST)) }
its (:feature) { is_expected.to eq('title') }
end
context 'when note has an instruction to use the last word' do
before { note.update_attributes(instruction_list: %w(__FEATURE __FEATURE_LAST)) }
its (:feature) { is_expected.to eq('words') }
end
end
describe '#fx should return fx for images' do
before { note.instruction_list = %w(__FX_ABC __FX_DEF) }
its (:fx) { is_expected.to eq(['abc', 'def']) }
end
describe 'lang_from_cloud' do
ENV['detect_language_sample_length'] = '100'
context 'when text is in Enlish' do
before do
note.update_attributes(title: 'The Anatomy of Melancholy', body: "Burton's book consists mostly of a.", instruction_list: [])
end
it 'returns en' do
expect(note.lang).to eq('en')
end
end
context 'when language is given via an instruction' do
before do
note.update_attributes(title: 'The Anatomy of Melancholy', body: "Burton's book consists mostly of a.", lang: nil, instruction_list: ['__LANG_MT'])
end
it 'does not overwrite it' do
expect(note.lang).to eq('mt')
end
end
context 'when text is in Russian' do
before do
note.update_attributes(title: 'Анатомия меланхолии', body: 'Автор книги — оксфордский прелат Роберт Бёртон — продолжал дополнять и дописывать книгу до самой смерти в 1640 году.', instruction_list: [])
note.save!
end
it 'returns ru' do
expect(note.lang).to eq('ru')
end
end
context 'when text is in Malaysian' do
before do
note.update_attributes(title: 'അനാട്ടമി ഓഫ് മെലൻകൊളീ', body: "'അനാട്ടമി'-യുടെ കർത്താവായ", instruction_list: [])
end
it 'returns ml' do
expect(note.lang).to eq('ml')
end
end
end
end
| 32.993377 | 207 | 0.651278 |
185b4731706cbcebb59c67a02c56feb62c5d410b | 1,213 | #
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Shell
  module Commands
    # HBase shell `flush` command: flushes the memstore of every region of a
    # table, or of a single region when given a region name.
    class Flush < Command
      # Usage text shown by `help 'flush'` in the shell.
      def help
        return <<-EOF
Flush all regions in passed table or pass a region row to
flush an individual region. For example:
hbase> flush 'TABLENAME'
hbase> flush 'REGIONNAME'
EOF
      end
      # Delegate to the admin API; `format_simple_command` (from Command)
      # wraps the call with the shell's standard timing/output formatting.
      def command(table_or_region_name)
        format_simple_command do
          admin.flush(table_or_region_name)
        end
      end
    end
  end
end
| 29.585366 | 74 | 0.731245 |
7a8bfbbe7a06ef00f2d3c7f216e460e010f2d17b | 151 | # frozen_string_literal: true
String.class_eval do
  # Sum the comma-separated integers in the receiver.
  #
  #   "1,2,3".calc_sum #=> 6
  #   "42".calc_sum    #=> 42
  #
  # Uses Enumerable#sum instead of `reduce :+` so a degenerate value such as
  # "," (whose split yields no tokens) returns 0 rather than nil, keeping the
  # return type a consistent Integer.
  def calc_sum
    return to_i unless include? ','
    split(',').sum(&:to_i)
  end
end
| 15.1 | 36 | 0.668874 |
5dc4659efe30fd3459facfa07e914be0f065d41a | 352 | # == Schema Information
#
# Table name: student_course_registrations
#
# id :bigint(8) not null, primary key
# course_id :bigint(8) not null
# student_id :bigint(8) not null
# team :text
#
# Factory for StudentCourseRegistration (columns documented in the schema
# comment at the top of this file).
FactoryBot.define do
  factory :student_course_registration do
    # Both foreign keys are NOT NULL, so build the associations by default.
    association :student
    association :course
  end
end
| 20.705882 | 53 | 0.653409 |
7a4f5694baa3c4b840f52c8e7efefa714ceb2cec | 824 | require 'chefspec'
describe 'locking_resource::default' do
  # Solo runner pinned to Ubuntu 14.04; cookbooks resolved from the vendored
  # berks-cookbooks directory under the current working directory.
  let :chef_run do
    ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '14.04',
                             cookbook_path: File.join(File.expand_path(Dir.pwd),
                             'berks-cookbooks'))
  end
  it 'should serialize locking resource' do
    chef_run.converge(described_recipe)
    # Build prerequisites for compiling the zookeeper gem's native extension.
    %w(make patch gcc).each do |pkg|
      expect(chef_run).to install_package(pkg)
    end
    expect(chef_run).to install_gem_package('zookeeper')
    expect(chef_run).to run_execute('correct-gem-permissions')
    # NOTE(review): the stubs below are installed after every expectation has
    # already run, and each second stub supersedes the first — this looks like
    # leftover/dead code; confirm whether it can be removed.
    allow(Gem).to receive(:clear_paths)
    allow(Gem).to receive(:clear_paths).and_return(true)
    allow(Kernel).to receive(:require)
    allow(Kernel).to receive(:require).with('zookeeper').and_return(true)
  end
end
| 35.826087 | 80 | 0.65534 |
38ac83b3432c275a921eb33c09b6e8f66dcc1fda | 876 | # frozen_string_literal: true
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
require "csv"
require_relative "seeds/lib/utils"
env = Rails.env
# Default to every .rb seed file; ENV['ONLY'] narrows the glob to a
# comma-separated list of seed name fragments, e.g. ONLY=users,roles.
glob = "*.rb"
only = ENV['ONLY'].presence
# Bug fix: was `if glob` — `glob` is always truthy, so the filtered pattern
# "*{}.*rb" was built even when ONLY was unset. Guard on `only` instead.
glob = "*{#{only}}.*rb" if only
filenames = Dir.glob("#{Rails.root.join('db', 'seeds')}/#{glob}").sort
# Environment-specific seeds (db/seeds/<env>/) run after the shared ones.
envdirname = Rails.root.join("db", "seeds", env.to_s)
filenames += Dir.glob("#{envdirname}/#{glob}").sort if Dir.exist?(envdirname)
puts "Seeding #{env} environment"
filenames.each do |filename|
  puts "Seeding #{File.basename(filename)}"
  require filename
end
| 32.444444 | 111 | 0.699772 |
3931f74171e21e0df45312f88894109335936256 | 686 | # frozen_string_literal: true
require 'rails_helper'
require_relative '../shared/json_serialization'
# Exercises the shared JSON serialization contract for Authority records.
RSpec.describe Curator::ControlledTerms::AuthoritySerializer, type: :serializers do
  let!(:authority_count) { 3 }
  let!(:record) { create(:curator_controlled_terms_authority) }
  let!(:record_collection) { create_list(:curator_controlled_terms_authority, authority_count) }
  describe 'Serialization' do
    # Shared examples defined in ../shared/json_serialization.
    it_behaves_like 'json_serialization' do
      let(:json_record) { record }
      let(:json_array) { record_collection }
      # Expected as_json output: rooted hash restricted to these attributes.
      let(:expected_as_json_options) do
        {
          root: true,
          only: [:name, :code, :base_url]
        }
      end
    end
  end
end
| 28.583333 | 96 | 0.699708 |
0140557a3da25bf4bdf9e1b1a6a20be2b1d66ca9 | 1,101 | class OpenidConnectJavaSpringServer < Formula
desc "OpenID Connect server implementation"
homepage "https://github.com/mitreid-connect/"
url "https://github.com/mitreid-connect/OpenID-Connect-Java-Spring-Server/archive/mitreid-connect-1.2.5.tar.gz"
version "1.2.5"
sha256 "ad8315921a563d838cfada3dab9848f0ff73d0b5cecd9054094208d84d2de0c5"
depends_on "maven32"
def install
prefix.install Dir['*']
cd prefix
system "mvn", "clean", "install"
end
test do
# `test do` will create, run in and delete a temporary directory.
#
# This test will fail and we won't accept that! It's enough to just replace
# "false" with the main program this formula installs, but it'd be nice if you
# were more thorough. Run the test with `brew test OpenID-Connect-Java-Spring-Server`. Options passed
# to `brew install` such as `--HEAD` also need to be provided to `brew test`.
#
# The installed folder is not in the path, so use the entire path to any
# executables being tested: `system "#{bin}/program", "do", "something"`.
system "false"
end
end
| 37.965517 | 113 | 0.711172 |
62b5b84cff0fead0be82384f0c973c058c59f6bf | 1,382 | class TweetsController < ApplicationController
get '/tweets' do
if logged_in?
@user = current_user
@tweets = Tweet.all
erb :'/tweets/tweets'
else
redirect to "/login"
end
end
get '/tweets/new' do
if logged_in?
erb :'/tweets/new'
else
redirect to "/login"
end
end
get '/tweets/:id' do
if logged_in?
@tweet = Tweet.find_by_id(params[:id])
erb :'/tweets/show_tweet'
else
redirect to "/login"
end
end
post '/tweets' do
if params[:content] != ""
@tweet = Tweet.create(content: params[:content])
@tweet.user = current_user
@tweet.save
redirect to "/tweets/#{@tweet.id}"
else
redirect to "/tweets/new"
end
end
get '/tweets/:id/edit' do
if logged_in?
@tweet = Tweet.find_by_id(params[:id])
erb :'/tweets/edit_tweet'
else
redirect to "/login"
end
end
patch '/tweets/:id' do
@tweet = Tweet.find_by_id(params[:id])
if params[:content] != ""
@tweet.update(content: params[:content])
redirect to "/tweets/#{@tweet.id}"
else
redirect to "/tweets/#{@tweet.id}/edit"
end
end
delete '/tweets/:id/delete' do
if logged_in?
@tweet = Tweet.find_by_id(params[:id])
@user = User.find_by_id(@tweet.user_id)
if @user == current_user
@tweet.destroy
end
redirect to "/tweets"
end
end
end
| 19.464789 | 52 | 0.604197 |
acc6dc2f8ce6038d8e848a64e8b2434add3364a2 | 994 | =begin
#Tatum API
## Authentication <!-- ReDoc-Inject: <security-definitions> -->
OpenAPI spec version: 3.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.31
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Tatum::InlineResponse20076
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'InlineResponse20076' do
  before do
    # run before each test
    @instance = Tatum::InlineResponse20076.new
  end
  after do
    # run after each test
  end
  describe 'test an instance of InlineResponse20076' do
    it 'should create an instance of InlineResponse20076' do
      expect(@instance).to be_instance_of(Tatum::InlineResponse20076)
    end
  end
  describe 'test attribute "key"' do
    # NOTE(review): generated placeholder — this example passes vacuously
    # because it contains no assertion; add a real check for "key".
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 24.243902 | 102 | 0.736419 |
6acca7f38cf021bbb778a030e8888ebbb7cad621 | 5,107 | #
# Be sure to run `pod spec lint SLHCategories.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.name         = "SLHCategories"
  s.version      = "0.0.1"
  s.summary      = "category, uitableviewcell, uicollectionviewcell"
  s.description  = "quickly set content for uitableviewcell or uicollectionviewcell"
  s.homepage     = "https://github.com/shilihuan/SLHCategories"

  # ――― License & author ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.license      = "MIT"
  s.author       = { "stone" => "[email protected]" }

  # ――― Platform & source ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.platform     = :ios, "7.0"
  s.source       = { :git => "https://github.com/shilihuan/SLHCategories.git", :tag => "#{s.version}" }

  # ――― Files & build settings ――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.source_files = "SLHCategories", "SLHCategories/**/*.{h,m}"
  s.requires_arc = true
end
| 37.551471 | 103 | 0.598786 |
abbffcd48f216897504b2407ccda48aa33ebf03c | 75 | # frozen_string_literal: true
# Single source of truth for the gem's semantic version string.
module Dependabot
  VERSION = '0.117.0'
end
| 12.5 | 29 | 0.746667 |
e2527228d7d6b1c7ec15bd76681b3c2d0af35375 | 28 | module GraveyardsHelper
end
| 9.333333 | 23 | 0.892857 |
91b92adff45451e044c9d4d30e2869a8db8ef22d | 2,449 | if defined?(Rails) and Rails.version.match(/^2.+$/)
APIRUNNER_ROOT = File.dirname(__FILE__).to_s + "/../../"
TEST_EXAMPLES_PATH = APIRUNNER_ROOT + "examples/test/api_runner"
CONFIG_EXAMPLE_PATH = APIRUNNER_ROOT + "examples/config"
end
# Load the apirunner configuration from the host Rails app. Best effort:
# when the file is missing or unreadable, no per-environment tasks are
# defined below.
begin
  config = YAML.load_file("#{Rails.root}/config/api_runner.yml")
rescue StandardError
  config = nil
end

# Every top-level key except "general" names a test environment.
# BUG FIX: uses non-destructive `reject` instead of `delete_if`, which
# mutated the loaded config hash as a side effect of defining tasks.
environments = config.nil? ? [] : config.reject { |key, _| key == "general" }.keys

namespace :api do
  namespace :run do
    environments.each do |env|
      desc "runs a series of necessary api calls and parses their response in environment #{env}"
      task env.to_sym => :environment do
        puts "Running API tests in environment #{env}"
        api_runner = ApiRunner.new(env)
        api_runner.run
        puts "\nTestrun finished\n\n"
      end
    end
  end

  namespace :performance do
    environments.each do |env|
      desc "runs a series of necessary api calls for performance measuring and parses their response in environment #{env}"
      task env.to_sym => :environment do
        puts "Running API performance tests in environment #{env}"
        # Second positional argument switches ApiRunner into performance mode.
        api_runner = ApiRunner.new(env, true)
        api_runner.run
        puts "\nPerformance testrun finished\n\n"
      end
    end
  end

  desc "generates configuration and a skeleton for apirunner tests as well as excludes"
  task :scaffold do
    # BUG FIX: use locals instead of reassigning the TEST_EXAMPLES_PATH /
    # CONFIG_EXAMPLE_PATH constants (constant reassignment emits warnings
    # and clobbers the values defined at load time).
    test_examples_path = "test/api_runner"
    config_example_path = "config"
    FileUtils.mkdir_p(test_examples_path)
    FileUtils.mkdir_p(config_example_path)

    # Copies every example .yml from the gem into the target dir unless a
    # file of the same name already exists. File.exist? replaces the
    # deprecated File.exists? (alias removed in Ruby 3.2); the redundant
    # inner existence re-check was dropped.
    copy_examples = lambda do |target_dir|
      Dir.glob("#{APIRUNNER_ROOT}/examples/#{target_dir}/*.yml").each do |file|
        target = "#{target_dir}/#{File.basename(file)}"
        if File.exist?(target)
          puts "%-50s .... already exists" % target
        else
          FileUtils.cp(file, target_dir)
          puts "%-50s .... created" % target
        end
      end
    end

    copy_examples.call(test_examples_path)
    copy_examples.call(config_example_path)
  end
end
| 40.816667 | 123 | 0.672111 |
799ef671a254712f3f8f64f698f3d63ad078522b | 5,919 | require 'rails_helper'
# Specs for SimilarityMachine::QuestionsCalculator: similarity between two
# users over a set of questions, derived from previously-stored
# answer-connection similarities and scoped to a single team.
describe SimilarityMachine::QuestionsCalculator do
  # NOTE(review): @question and @team are never referenced by the examples
  # below (each example creates its own records) — confirm before removing.
  before do
    @question = create(:question)
    @team = create(:team)
  end

  # NOTE(review): the method exercised is #calculate_similarity; this
  # describe label looks stale.
  describe '#questions_similarity' do
    context "when questions aren't informed" do
      subject {
        described_class.new(
          user_1: create(:user),
          user_2: create(:user),
          team: create(:team)
        ).calculate_similarity([])
      }

      # An empty questions list is treated as a caller error.
      it {
        expect { subject }.to raise_error(ArgumentError)
      }
    end

    context "when questions are informed" do
      context "when none answers of the users have its similarity calculated already" do
        it "returns 0% of similarity" do
          users = create_list(:user, 3)
          teams = create_pair(:team)
          questions = create_list(:question, 3)
          # answers_hash is a spec-support helper building one answer per
          # user/team/question combination, keyed like :user_0 / :team_0.
          answers = answers_hash(
            users: users,
            teams: teams,
            questions: questions
          )
          create_connections_who_must_be_avoided(answers)

          similarity = described_class.new(
            user_1: users[0],
            user_2: users[1],
            team: teams[0]
          ).calculate_similarity(questions.first(2))
          expect(similarity).to eq 0
        end
      end

      context "when some users answers have its similarity calculated already" do
        context "when they're completely similar" do
          it "returns 100% of similarity" do
            users = create_list(:user, 3)
            teams = create_pair(:team)
            questions = create_list(:question, 3)
            answers = answers_hash(
              users: users,
              teams: teams,
              questions: questions
            )
            # Both shared questions match perfectly (100 and 100).
            create(
              :answer_connection,
              answer_1: answers[:user_0][:team_0][:question_0],
              answer_2: answers[:user_1][:team_0][:question_0],
              similarity: 100
            )
            create(
              :answer_connection,
              answer_1: answers[:user_0][:team_0][:question_1],
              answer_2: answers[:user_1][:team_0][:question_1],
              similarity: 100
            )
            create_connections_who_must_be_avoided(answers)

            similarity = described_class.new(
              user_1: users[0],
              user_2: users[1],
              team: teams[0]
            ).calculate_similarity(questions.first(2))
            expect(similarity).to eq 100
          end
        end

        context "when they're similar in some level" do
          it "returns something >= 0% and <= 100% of similarity" do
            users = create_list(:user, 3)
            teams = create_pair(:team)
            questions = create_list(:question, 3)
            answers = answers_hash(
              users: users,
              teams: teams,
              questions: questions
            )
            # Average of 100 and 50 => 75.
            create(
              :answer_connection,
              answer_1: answers[:user_0][:team_0][:question_0],
              answer_2: answers[:user_1][:team_0][:question_0],
              similarity: 100
            )
            create(
              :answer_connection,
              answer_1: answers[:user_0][:team_0][:question_1],
              answer_2: answers[:user_1][:team_0][:question_1],
              similarity: 50
            )
            create_connections_who_must_be_avoided(answers)

            similarity = described_class.new(
              user_1: users[0],
              user_2: users[1],
              team: teams[0]
            ).calculate_similarity(questions.first(2))
            expect(similarity).to eq 75
          end
        end

        context "when they're completely different" do
          it "returns 0% of similarity" do
            users = create_list(:user, 3)
            teams = create_pair(:team)
            questions = create_list(:question, 3)
            answers = answers_hash(
              users: users,
              teams: teams,
              questions: questions
            )
            create(
              :answer_connection,
              answer_1: answers[:user_0][:team_0][:question_0],
              answer_2: answers[:user_1][:team_0][:question_0],
              similarity: 0
            )
            create(
              :answer_connection,
              answer_1: answers[:user_0][:team_0][:question_1],
              answer_2: answers[:user_1][:team_0][:question_1],
              similarity: 0
            )
            create_connections_who_must_be_avoided(answers)

            similarity = described_class.new(
              user_1: users[0],
              user_2: users[1],
              team: teams[0]
            ).calculate_similarity(questions.first(2))
            expect(similarity).to eq 0
          end
        end
      end
    end
  end

  # Seeds answer connections that the calculator must IGNORE because they do
  # not match the users/team/questions passed to #calculate_similarity
  # (guards the specs above against false positives). The examples are set
  # up with:
  # - 3 users, where users[2] must be avoided;
  # - 2 teams, where teams[1] must be avoided;
  # - 3 questions, where questions[2] must be avoided.
  def create_connections_who_must_be_avoided(answers)
    create(
      :answer_connection,
      answer_1: answers[:user_0][:team_1][:question_0],
      answer_2: answers[:user_2][:team_1][:question_0],
      similarity: 94
    )
    create(
      :answer_connection,
      answer_1: answers[:user_0][:team_1][:question_0],
      answer_2: answers[:user_1][:team_1][:question_0],
      similarity: 86
    )
    create(
      :answer_connection,
      answer_1: answers[:user_0][:team_0][:question_2],
      answer_2: answers[:user_1][:team_0][:question_2],
      similarity: 32
    )
  end
end
| 29.893939 | 88 | 0.554655 |
ff46bcf8b09e37dec1481c5462bc75915eea99ca | 386 | cask 'font-noto-sans-cypriot' do
version :latest
sha256 :no_check
# noto-website-2.storage.googleapis.com/ was verified as official when first introduced to the cask
url 'https://noto-website-2.storage.googleapis.com/pkgs/NotoSansCypriot-unhinted.zip'
name 'Noto Sans Cypriot'
homepage 'https://www.google.com/get/noto/#sans-cprt'
font 'NotoSansCypriot-Regular.ttf'
end
| 32.166667 | 101 | 0.761658 |
edcbcdb39f7cb610c493cc78946fea68bc440510 | 638 | require "metanorma"
require "metanorma-generic"
require "metanorma/iho/processor"
module Metanorma
  module IHO
    # Flavour configuration. Inherits all behaviour (including CONFIG_ATTRS)
    # from the generic Metanorma configuration.
    # BUG FIX (idiom): removed the redundant
    # `def initialize(*args); super; end` override — it added nothing over
    # the inherited constructor.
    class Configuration < Metanorma::Generic::Configuration
    end

    class << self
      extend Forwardable

      # Lazily-created singleton Configuration instance.
      attr_accessor :configuration

      # Delegate each configuration attribute reader straight to the
      # singleton instance stored in @configuration.
      Configuration::CONFIG_ATTRS.each do |attr_name|
        def_delegator :@configuration, attr_name
      end

      # Yields the singleton configuration, creating it on first use.
      def configure
        self.configuration ||= Configuration.new
        yield(configuration)
      end
    end

    # Eagerly initialise the configuration with its defaults.
    configure {}
  end
end
Metanorma::Registry.instance.register(Metanorma::IHO::Processor)
| 19.333333 | 64 | 0.680251 |
0301ff7769e0296c40a42fa473f08e721dc67165 | 399 | class CreateCollections < ActiveRecord::Migration[5.1]
def change
create_table :collections do |t|
t.uuid :uuid, null: false, default: 'uuid_generate_v4()'
t.string :name
t.jsonb :metadata, null: false, default: '{}'
t.index :metadata, using: :gin
t.integer :import_type, default: 0, index: true
t.string :import_folder
t.timestamps
end
end
end
| 28.5 | 62 | 0.651629 |
03d0512bbb529ffd77bb192f9c67f5a1636f9d95 | 342 | # frozen_string_literal: true
module Quails
  module Generators
    # Generates an empty `<name>_helper.rb` under app/helpers and hooks in
    # the configured test framework's matching generator.
    class HelperGenerator < NamedBase # :nodoc:
      # Abort if a `<Name>Helper` constant already exists.
      check_class_collision suffix: "Helper"

      # Renders the helper template into app/helpers/<path>/<name>_helper.rb.
      def create_helper_files
        template "helper.rb", File.join("app/helpers", class_path, "#{file_name}_helper.rb")
      end

      # Invokes the test framework's helper-test generator, if any.
      hook_for :test_framework
    end
  end
end
| 21.375 | 92 | 0.695906 |
397d77fce6f1409df2207241b9581ebce01e7766 | 1,117 | # Be sure to restart your server when you modify this file.
# Define an application-wide content security policy
# For further information see the following documentation
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
# Rails.application.config.content_security_policy do |policy|
# policy.default_src :self, :https
# policy.font_src :self, :https, :data
# policy.img_src :self, :https, :data
# policy.object_src :none
# policy.script_src :self, :https
# policy.style_src :self, :https
# # Specify URI for violation reports
# # policy.report_uri "/csp-violation-report-endpoint"
# end
# If you are using UJS then enable automatic nonce generation
# Rails.application.config.content_security_policy_nonce_generator = -> request { SecureRandom.base64(16) }
# Report CSP violations to a specified URI
# For further information see the following documentation:
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy-Report-Only
# Rails.application.config.content_security_policy_report_only = true
| 42.961538 | 108 | 0.752014 |
5dd088a4623f5236352e34d575fda911199a853e | 2,158 | require 'presenters/v3/base_presenter'
require 'presenters/mixins/metadata_presentation_helpers'
module VCAP::CloudController::Presenters::V3
  # Renders a deployment resource as the v3 API JSON structure.
  class DeploymentPresenter < BasePresenter
    include VCAP::CloudController::Presenters::Mixins::MetadataPresentationHelpers

    # Complete response body for one deployment.
    def to_hash
      {
        guid: deployment.guid,
        created_at: deployment.created_at,
        updated_at: deployment.updated_at,
        status: {
          value: deployment.status_value,
          reason: deployment.status_reason,
          details: {
            last_successful_healthcheck: deployment.last_healthy_at
          }
        },
        strategy: deployment.strategy,
        droplet: {
          guid: deployment.droplet_guid
        },
        previous_droplet: {
          guid: deployment.previous_droplet_guid
        },
        new_processes: new_processes,
        revision: revision,
        relationships: {
          app: {
            data: {
              guid: deployment.app.guid
            }
          }
        },
        metadata: {
          labels: hashified_labels(deployment.labels),
          annotations: hashified_annotations(deployment.annotations),
        },
        links: build_links,
      }
    end

    private

    # The presented model (BasePresenter stores it as @resource).
    def deployment
      @resource
    end

    # Revision info, or nil when the app has revisions disabled or the
    # deployment carries no revision guid.
    def revision
      (deployment.app.revisions_enabled && deployment.revision_guid) ? { guid: deployment.revision_guid, version: deployment.revision_version } : nil
    end

    # Guid/type pairs for every process associated with this deployment.
    def new_processes
      deployment.historical_related_processes.map do |drp|
        {
          guid: drp.process_guid,
          type: drp.process_type
        }
      end
    end

    # self/app links, plus a cancel action link while cancellation is possible.
    def build_links
      {
        self: {
          href: url_builder.build_url(path: "/v3/deployments/#{deployment.guid}")
        },
        app: {
          href: url_builder.build_url(path: "/v3/apps/#{deployment.app.guid}")
        },
      }.tap do |links|
        if deployment.cancelable?
          links[:cancel] = {
            href: url_builder.build_url(path: "/v3/deployments/#{deployment.guid}/actions/cancel"),
            method: 'POST'
          }
        end
      end
    end
  end
end
| 26.317073 | 149 | 0.592215 |
bfdf315143197d3eb77db2767148d23378112bbd | 526 | require 'formula'
# Homebrew formula for the SDF2 bundle (StrategoXT syntax definition tools).
class Sdf < Formula
  homepage 'http://strategoxt.org/Sdf/WebHome'
  url 'ftp://ftp.stratego-language.org/pub/stratego/StrategoXT/strategoxt-0.17/sdf2-bundle-2.4.tar.gz'
  # NOTE(review): sha1 checksums are deprecated in modern Homebrew (sha256).
  sha1 'b9be75d56503e7f06fcc9cc543303bae123c0845'

  depends_on 'aterm'

  def install
    system "./configure", "--prefix=#{prefix}"
    # Man pages are distributed as 0B files.
    # Delete them so they'll get recreated properly
    rm %w(pgen/src/sdf2table.1 sglr/doc/sglr.1 sglr/doc/sglr-api.3)
    system "make install"
  end
end
| 26.3 | 102 | 0.718631 |
626140699ae58f01e8ebd9805284bd9086715ff5 | 388 | cask "kube-forwarder" do
version "1.5.1"
sha256 "683bcd380885787d653bfb21b39fda05ed786b09abe311c856ae4032859f3d5f"
url "https://github.com/pixel-point/kube-forwarder/releases/download/v#{version}/kube-forwarder.dmg",
verified: "github.com/pixel-point/kube-forwarder/"
name "Kube Forwarder"
homepage "https://kube-forwarder.pixelpoint.io/"
app "Kube Forwarder.app"
end
| 32.333333 | 103 | 0.762887 |
62fb7c382b18ef87493377a2d658e1539ab2b711 | 1,782 | # frozen_string_literal: true
require 'begin/output'
require 'begin/path'
require 'begin/template'
require 'git'
require 'uri'
module Begin
  # Provides centralised access to the local repository of templates
  # on the machine (~/.begin/templates by default).
  class Repository
    # name:       directory name of the repository (default '.begin')
    # parent_dir: directory the repository lives under (default '~')
    def initialize(name = '.begin', parent_dir = '~')
      @parent_dir = Path.new(parent_dir, '.', 'Repository Parent')
      @parent_dir.ensure_dir_exists
      @repo_dir = Path.new(name, @parent_dir, 'Repository directory')
      @template_dir = Path.new('templates', @repo_dir, 'Templates directory')
    end

    # Installs the template at source_uri under the given name. Tries a git
    # clone first; when that fails and the source does not look like a URL
    # (no '://'), falls back to symlinking a local directory.
    def install(source_uri, name)
      path = install_prerequisites(name)
      begin
        GitTemplate.install source_uri, path
      rescue StandardError
        return SymlinkTemplate.install source_uri, path unless source_uri.include? '://'
        raise
      end
    end

    # Ensures the repository directories exist and the target slot is free;
    # returns the Path the template will be installed to.
    def install_prerequisites(name)
      @repo_dir.make_dir
      @template_dir.make_dir
      path = template_path name
      raise "A template is already installed at: #{path}" if path.exists?
      Output.info "Installing to '#{path}'"
      path
    end

    # Yields the name of every installed template.
    def each
      templates = @template_dir.dir_contents
      templates.each { |x| yield template_name x }
    end

    # Returns the installed template object registered under name.
    def template(name)
      path = template_path name
      template_from_path path
    end

    # Derives a template name from a URI: last path segment with any
    # 'begin-'/'begin_' prefix and '.git' suffix stripped.
    # NOTE(review): assumes the URI has a non-empty path (path_bits.last
    # would be nil otherwise) — confirm callers guarantee that.
    def template_name(uri)
      uri = URI(uri)
      path_bits = uri.path.split '/'
      name = path_bits.last
      name.slice! 'begin-'
      name.slice! 'begin_'
      name.chomp! '.git'
      name
    end

    # Path where the template with the given name is (or would be) stored.
    def template_path(template_name)
      Path.new template_name, @template_dir, 'Template directory'
    end

    # Wraps a filesystem path in the matching template type: symlinks were
    # installed from local directories, everything else is a git clone.
    def template_from_path(path)
      return SymlinkTemplate.new(path) if File.symlink? path
      GitTemplate.new path
    end
  end
end
| 24.75 | 88 | 0.6633 |
3928a195f189cb3d1dec8fbf1700dbb33f59e6ab | 1,011 | require 'support/parser_helpers'
# Parser specs for `if`: each example asserts the s-expression produced by
# the `parsed` spec helper. FIXES: typos in example descriptions
# ("fasly" -> "falsy", "is treats" -> "treats"); assertions unchanged.
describe "The if keyword" do
  it "should return an s(:if) with given truthy and falsy bodies" do
    parsed("if 1; 2; else; 3; end").should == [:if, [:int, 1], [:int, 2], [:int, 3]]
  end

  it "uses nil as falsy body if not given else-then" do
    parsed("if 1; 2; end").should == [:if, [:int, 1], [:int, 2], nil]
  end

  it "treats elsif parts as sub if expressions for else body" do
    parsed("if 1; 2; elsif 3; 4; else; 5; end").should == [:if, [:int, 1], [:int, 2], [:if, [:int, 3], [:int, 4], [:int, 5]]]
    parsed("if 1; 2; elsif 3; 4; end").should == [:if, [:int, 1], [:int, 2], [:if, [:int, 3], [:int, 4], nil]]
  end

  it "returns a simple s(:if) with nil else body for prefix if statement" do
    parsed("1 if 2").should == [:if, [:int, 2], [:int, 1], nil]
  end
end
# The ternary operator lowers to the same s(:if) node as a regular if.
describe "The ternary operator" do
  it "gets converted into an if statement with true and false parts" do
    parsed("1 ? 2 : 3").should == [:if, [:int, 1], [:int, 2], [:int, 3]]
  end
end
| 37.444444 | 125 | 0.576657 |
1a2ade12b35574f128520d60ae6814e940e2ad52 | 464 | require 'json'
# Mixin that adds lightweight JSON serialization to any class.
#
# Including classes declare their serializable fields with the regular
# `attr_accessor` macro; the override below records the declared names so
# `#as_json` / `#to_json` know which readers to call.
module JSONable
  module ClassMethods
    attr_accessor :attributes

    # Records every declared accessor name before delegating to the real
    # `attr_accessor`. BUG FIX: the original overwrote the list on each
    # call, so only the attributes from the *last* `attr_accessor` call
    # were serialized; we now accumulate across calls.
    def attr_accessor(*attrs)
      self.attributes = Array(attributes) + attrs
      super
    end
  end

  def self.included(base)
    base.extend(ClassMethods)
  end

  # Builds a Hash of attribute name (Symbol) => current value. Returns {}
  # when no attributes were declared (the original crashed on nil here).
  def as_json
    Array(self.class.attributes).each_with_object({}) do |attribute, serialized|
      serialized[attribute] = public_send(attribute)
    end
  end

  # Serializes `#as_json` with the standard JSON generator; extra args are
  # forwarded so nested `#to_json` calls keep working.
  def to_json(*a)
    as_json.to_json(*a)
  end
end
e246e700e44aa470d27ae0a841af57d77483c466 | 84 | copy_file "lib/tasks/auto_annotate_models.rake"
copy_file "lib/tasks/coverage.rake"
| 28 | 47 | 0.833333 |
03f2ac2edbd1abb3b4d4ca02e0149331e4c3a843 | 671 | class Subject < ActiveRecord::Base
has_and_belongs_to_many :articles, :conditions => {:state => 'approved'}, :order => "approved_at DESC"
has_and_belongs_to_many :feeds
has_and_belongs_to_many :keywords, :uniq => true do
# normalize on downcase and ensure uniqueness when setting
def <<(kw)
kw.downcase!
self.concat Keyword.find_or_create_by_name(kw) unless proxy_target.map(&:name).include?(kw)
end
# double quote delimited list of keywords
def to_s
map { |term| %Q("#{term.name}") }.join(' ')
end
end
cattr_reader :per_page
@@per_page = 10
def to_param
self.name.gsub(/\W/, '-').squeeze('-')
end
end | 27.958333 | 104 | 0.66617 |
1c7727993c79f0c222ac023b9ea0fbf9cf63b742 | 1,963 | require_relative 'scraper'
class Articlecli::Cli
def initialize
@sc = Scraper.new
end
def call
@sc.create_articles
@newspaper = @sc.newspaper
@articles = @newspaper.articles
start
end
def start
puts 'Welcome!'
input = ''
while input != 'quit'
print_commands
input = gets.strip
case input
when "all"
list_articles
when "cat"
list_category
when "random"
show_random_article
when "article"
show_article
end
end
end
def print_commands
puts "To see all article titles type 'all'"
puts "To see the category type 'cat'"
puts "To see a random article type 'random'"
puts "To see a specific article type 'article'"
puts "To exit type 'quit'"
puts '************************************'
end
def list_articles
@articles.each_with_index do |x,i|
puts '---------'
puts "#{i+1}. #{x.title}"
puts '---------'
end
end
def list_category
@articles.each do |x|
puts '---------'
puts x.category
puts '---------'
end
end
def show_random_article
article = @articles.sample
puts '---------'
puts article.title
puts '---------'
puts article.content
puts '---------'
end
def show_article
list_articles
puts 'Pick an article number!'
input = gets.strip
input = input.to_i - 1
if input >= 0 && input < @articles.size
puts '---------'
puts @articles[input].title
puts '---------'
puts @articles[input].content
puts '---------'
end
end
end | 21.811111 | 55 | 0.449822 |
f83b73431e364babdf3223753a830edc6a01fd95 | 1,497 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2015 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
class RemoveContentFromTimelinesTimelines < ActiveRecord::Migration[4.2]
def self.up
change_table(:timelines_timelines) do |t|
t.remove :content
end
end
def self.down
change_table(:timelines_timelines) do |t|
t.text :content
end
end
end
| 34.813953 | 91 | 0.748831 |
d55c59ec4fc536c9d1c09209b946d81c0643f5c5 | 244 | class Github::FakeAdapter
def initialize(_post); end
def create_pr
Rails.logger.debug("Create a PR")
end
def update_pr
Rails.logger.debug("Update the PR")
end
def close_pr
Rails.logger.debug("Close the PR")
end
end
| 15.25 | 39 | 0.688525 |
33589886ea2e1e3b903ebb4966a4862b8d115960 | 1,356 | # encoding: UTF-8
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'spree_address_auto_complete'
s.version = '3.7.0'
s.summary = 'This is to use google places api to complete addresses in spree.'
s.description = 'Using this extension, we can integrate google places to complete the billing and shipping address of any user.'
s.required_ruby_version = '>= 2.2.7'
s.author = 'Gaurav Mahajan'
s.email = '[email protected]'
s.homepage = 'http://vinsol.com'
s.license = 'BSD-3'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.require_path = 'lib'
s.requirements << 'none'
s.add_dependency 'spree', '>= 3.2.0', '< 4.0'
s.add_development_dependency 'appraisal'
s.add_development_dependency 'capybara', '~> 2.6'
s.add_development_dependency 'coffee-rails'
s.add_development_dependency 'database_cleaner'
s.add_development_dependency 'factory_girl', '~> 4.5'
s.add_development_dependency 'ffaker'
s.add_development_dependency 'rspec-rails', '~> 3.4'
s.add_development_dependency 'sass-rails', '~> 5.0.0'
s.add_development_dependency 'selenium-webdriver'
s.add_development_dependency 'simplecov'
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'rails-controller-testing'
end
| 38.742857 | 130 | 0.705752 |
9170ecbea1aba4be976f59162ca0a1ab480cbc6e | 632 |
# frozen_string_literal: true
module RailsWorkflow
  module OperationTemplates
    # Mixin that links an operation template to an optional polymorphic
    # assignee and adds a scope selecting templates assignable to a user.
    module Assignments
      extend ActiveSupport::Concern

      included do
        belongs_to :assignment, polymorphic: true, required: false

        # Templates whose assignment attributes match the given user.
        # Only the configured assignment attributes that the user actually
        # responds to participate in the match; the matching clauses are
        # OR-ed together.
        scope :for_user, lambda { |user|
          attrs = RailsWorkflow.config.assignment_by.select { |attr| user.respond_to? attr }
          condition = attrs
                      .map { |attr| "rails_workflow_operation_templates.#{attr} = ?" }
                      .join(' or ')
          where(condition, *attrs.map { |attr| user.send(attr) })
        }
      end
    end
  end
end
| 23.407407 | 85 | 0.594937 |
# Chef node attributes for the rwci deployment (a Kibana-based app served
# under RVM; see the git URL below).

# System account that owns and runs the application.
default["rwci"]["user"] = "rwci"
default["rwci"]["group"] = "rwci"

# RVM ruby and gemset the application is installed into.
default["rwci"]["rvm_ruby"] = "ruby-1.9.3-p194"
default["rwci"]["rvm_gemset"] = "rwci"

# Source repository and the ref to deploy (pinned to a release tag).
default["rwci"]["git"]["url"] = "git://github.com/rashidkpc/Kibana.git"
default["rwci"]["git"]["reference"] = "v0.1.0"
| 45.142857 | 77 | 0.531646 |
# CocoaPods manifest for the Seafile iOS client library.
Pod::Spec.new do |spec|
  spec.name     = "Seafile"
  spec.version  = "2.6.2"
  spec.summary  = "iOS client for seafile."
  spec.homepage = "https://github.com/haiwen/seafile-iOS"
  spec.license  = 'MIT'
  spec.author   = { "wei.wang" => "[email protected]" }
  # The git tag tracks the podspec version in lockstep.
  spec.source   = { :git => "https://github.com/haiwen/seafile-iOS.git", :tag => spec.version.to_s }
  spec.social_media_url = 'https://twitter.com/Seafile'

  spec.source_files     = 'Pod/Classes/*.{h,m}'
  spec.resource_bundles = { 'Seafile' => 'Pod/Resources/*' }
  spec.platform         = :ios, '8.0'
  spec.requires_arc     = true
  spec.frameworks       = 'AssetsLibrary'

  spec.dependency 'AFNetworking', '~> 2.6.1'
  spec.dependency 'OpenSSL-Universal', '~> 1.0.1.p'

  # Link against the OpenSSL static libraries shipped by OpenSSL-Universal.
  spec.pod_target_xcconfig = {
    'LIBRARY_SEARCH_PATHS' => '$(inherited) $(PODS_ROOT)/OpenSSL-Universal/lib-ios/',
    'OTHER_LDFLAGS' => '$(inherited) -lssl -lcrypto'
  }
end
| 41.590909 | 102 | 0.578142 |
03afb2cdedd78eed28d5256cf845703833effb83 | 10,705 | #
# Author:: Joshua Timberman <[email protected]>
# Author:: Seth Chisamore <[email protected]>
#
# Copyright:: Copyright (c) 2012, Opscode, Inc. <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
$:.unshift(File.join(File.dirname(__FILE__), '..'))
require 'spec_helper'
# Unit tests for the runit_service resource: constructor defaults,
# node-attribute overrides, and each tunable attribute's getter/setter.
# NOTE(review): written in legacy RSpec `should` syntax (pre-RSpec 3).
describe Chef::Resource::RunitService do
  subject(:resource) { Chef::Resource::RunitService.new(service_name, run_context) }
  let(:service_name) { 'getty.service' }
  let(:node) { Chef::Node.new }
  let(:events) { Chef::EventDispatch::Dispatcher.new }
  let(:run_context) { Chef::RunContext.new(node, {}, events) }

  # Built-in defaults when the node provides no overrides.
  its(:class) { should be Chef::Resource::RunitService }
  its(:resource_name) { should eq(:runit_service)}
  its(:provider) { should eq(Chef::Provider::Service::Runit) }
  its(:service_name) { should eq('getty.service') }
  its(:sv_dir) { should eq('/etc/sv') }
  its(:sv_bin) { should eq("/usr/bin/sv") }
  its(:lsb_init_dir) { should eq("/etc/init.d") }

  # Attributes under node['runit'] override the built-in defaults.
  describe "setting supported default values from node attributes" do
    let(:sv_bin) { "/fake/bin/sv_bin" }
    let(:sv_dir) { "/fake/sv_dir/path" }
    let(:service_dir) { "/fake/service_dir" }
    let(:lsb_init_dir) { "/fake/lsb_init_dir" }
    let(:node) do
      node = Chef::Node.new
      node.set['runit']['sv_bin'] = sv_bin
      node.set['runit']['sv_dir'] = sv_dir
      node.set['runit']['service_dir'] = service_dir
      node.set['runit']['lsb_init_dir'] = lsb_init_dir
      node
    end
    its(:sv_bin) { should eq sv_bin }
    its(:sv_dir) { should eq sv_dir }
    its(:service_dir) { should eq service_dir }
    its(:lsb_init_dir) { should eq lsb_init_dir }
  end

  # Creating a runit_service also registers a plain service[...] resource
  # so notifications aimed at the plain name still resolve.
  describe "backward compatiblility hack" do
    let(:simple_service_name) { "service[#{service_name}]" }
    it "creates a simple service with the same name" do
      resource_collection = resource.run_context.resource_collection
      simple_service = resource_collection.find(simple_service_name)
      simple_service.to_s.should eq(simple_service_name)
      simple_service.class.should be Chef::Resource::Service
      simple_service.provider.should be Chef::Provider::Service::Simple
    end
  end

  # Getter/setter behavior of the individual attributes.
  it 'has an sv_dir parameter that can be set' do
    resource.sv_dir('/var/lib/sv')
    resource.sv_dir.should eq('/var/lib/sv')
  end
  it 'allows sv_dir parameter to be set false (so users can use an existing sv dir)' do
    resource.sv_dir(false)
    resource.sv_dir.should be_false
  end
  it 'has a service_dir parameter set to /etc/service by default' do
    resource.service_dir.should eq('/etc/service')
  end
  it 'has a service_dir parameter that can be set' do
    resource.service_dir('/var/service')
    resource.service_dir.should eq('/var/service')
  end
  it 'has a lsb_init_dir parameter set to /etc/init.d by default' do
    resource.lsb_init_dir.should eq('/etc/init.d')
  end
  it 'has a lsb_init_dir parameter that can be set' do
    resource.lsb_init_dir('/other/lsb_init_dir')
    resource.lsb_init_dir.should eq('/other/lsb_init_dir')
  end
  it 'has a control parameter that can be set as an array of service control characters' do
    resource.control(['s', 'u'])
    resource.control.should eq(['s', 'u'])
  end
  it 'has an options parameter that can be set as a hash of arbitrary options' do
    resource.options({:binary => '/usr/bin/noodles'})
    resource.options.should have_key(:binary)
    resource.options[:binary].should eq('/usr/bin/noodles')
  end
  it 'has an env parameter that can be set as a hash of environment variables' do
    resource.env({'PATH' => '$PATH:/usr/local/bin'})
    resource.env.should have_key('PATH')
    resource.env['PATH'].should include('/usr/local/bin')
  end
  # Setting env has a side effect: the env dir path is injected into options.
  it 'adds :env_dir to options if env is set' do
    resource.env({'PATH' => '/bin'})
    resource.options.should have_key(:env_dir)
    resource.options[:env_dir].should eq(::File.join(resource.sv_dir, resource.service_name, 'env'))
  end
  it 'has a log parameter to control whether a log service is setup' do
    resource.log.should be_true
  end
  it 'has a log parameter that can be set to false' do
    resource.log(false)
    resource.log.should be_false
  end
it 'raises an exception if the log parameter is set to nil' do
resource.log(nil)
resource.log.should raise_exception
end
  it 'has a cookbook parameter that can be set' do
    resource.cookbook('noodles')
    resource.cookbook.should eq('noodles')
  end
  # NOTE(review): description typo below — "hash a" should read "has a".
  it 'has a check parameter that is false by default' do
    resource.check.should be_false
  end
  it 'hash a check parameter that controls whether a check script is created' do
    resource.check(true)
    resource.check.should be_true
  end
  it 'has a finish parameter that is false by default' do
    resource.finish.should be_false
  end
  it 'hash a finish parameter that controls whether a finish script is created' do
    resource.finish(true)
    resource.finish.should be_true
  end
  it 'has an owner parameter that can be set' do
    resource.owner('monkey')
    resource.owner.should eq('monkey')
  end
  it 'has a group parameter that can be set' do
    resource.group('primates')
    resource.group.should eq('primates')
  end
  it 'has an enabled parameter to determine if the current resource is enabled' do
    resource.enabled.should be_false
  end
  it 'has a running parameter to determine if the current resource is running' do
    resource.running.should be_false
  end
  it 'has a default_logger parameter that is false by default' do
    resource.default_logger.should be_false
  end
  it 'has a default_logger parameter that controls whether a default log template should be created' do
    resource.default_logger(true)
    resource.default_logger.should be_true
  end
  it 'has a restart_on_update parameter that is true by default' do
    resource.restart_on_update.should be_true
  end
  it 'has a restart_on_update parameter that controls whether a the service is restarted when the run script is updated' do
    resource.restart_on_update(false)
    resource.restart_on_update.should be_false
  end

  # Template-name attributes all default to the service name.
  it 'sets the run_template_name to the service_name by default' do
    resource.run_template_name.should eq(resource.service_name)
  end
  it 'sets the log_template_name to the service_name by default' do
    resource.log_template_name.should eq(resource.service_name)
  end
  it 'has a run_template_name parameter to allow a custom template name for the run run script' do
    resource.run_template_name('foo_bar')
    resource.run_template_name.should eq('foo_bar')
  end
  # template_name is a legacy alias that writes through to run_template_name.
  it 'has a template_name parameter to allow a custom template name for the run run script for backwards compatiblility' do
    resource.template_name('foo_baz')
    resource.run_template_name.should eq('foo_baz')
  end
  it 'has a log_template_name parameter to allow a custom template name for the log run script' do
    resource.log_template_name('write_noodles')
    resource.log_template_name.should eq('write_noodles')
  end
  it 'sets the control_template_names for each control character to the service_name by default' do
    resource.control(['s', 'u'])
    resource.control_template_names.should have_key('s')
    resource.control_template_names.should have_key('u')
    resource.control_template_names['s'].should eq(resource.service_name)
    resource.control_template_names['u'].should eq(resource.service_name)
  end
  it 'has a control_template_names parameter to allow custom template names for the control scripts' do
    resource.control_template_names({
      's' => 'banana_start',
      'u' => 'noodle_up'
    })
    resource.control_template_names.should have_key('s')
    resource.control_template_names.should have_key('u')
    resource.control_template_names['s'].should eq('banana_start')
    resource.control_template_names['u'].should eq('noodle_up')
  end
  it 'sets the check_script_template_name to the service_name by default' do
    resource.check_script_template_name.should eq(resource.service_name)
  end
  it 'has a check_script_template_name parameter to allow a custom template name for the check script' do
    resource.check_script_template_name('eat_bananas')
    resource.check_script_template_name.should eq('eat_bananas')
  end
  it 'sets the finish_script_template_name to the service_name by default' do
    resource.finish_script_template_name.should eq(resource.service_name)
  end
  it 'has a finish_script_template_name parameter to allow a custom template name for the finish script' do
    resource.finish_script_template_name('eat_bananas')
    resource.finish_script_template_name.should eq('eat_bananas')
  end
  it 'has a sv_templates parameter to control whether the sv_dir templates are created' do
    resource.sv_templates(false)
    resource.sv_templates.should be_false
  end

  # svlogd-style log configuration knobs.
  it "has a log_size parameter to control the maximum log size" do
    resource.log_size(1000000)
    resource.log_size.should eq(1000000)
  end
  it "has a log_num parameter to control the maximum number of logs" do
    resource.log_num(10)
    resource.log_num.should eq(10)
  end
  it "has a log_min parameter to control the minimum number of logs" do
    resource.log_min(5)
    resource.log_min.should eq(5)
  end
  it "has a log_timeout parameter to control the maximum age of a log file" do
    resource.log_timeout(60 * 60)
    resource.log_timeout.should eq(60 * 60)
  end
  it "has a log_processor parameter to allow logs to be fed through it after rotation" do
    resource.log_processor("/usr/local/bin/process")
    resource.log_processor.should eq("/usr/local/bin/process")
  end
  it "has a log_socket parameter to allow log lines to be sent to a UDP socket" do
    resource.log_socket("127.0.0.1:1514")
    resource.log_socket.should eq("127.0.0.1:1514")
  end
  it "has a log_prefix parameter to allow log lines to be prefixed with a fixed string" do
    resource.log_prefix("myservice:")
    resource.log_prefix.should eq("myservice:")
  end
  it "has a log_config_append parameter to allow arbitrary configuration entries to be added to the configuration" do
    resource.log_config_append("-bogus")
    resource.log_config_append.should eq("-bogus")
  end
end
| 35.330033 | 123 | 0.73134 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.